From 6aafb6912c2d9f68ed63e8c0557c0236b37edce0 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Tue, 6 Jun 2017 11:33:00 +0200 Subject: [PATCH 01/49] sbt 1.0.0-M6 --- build.sbt | 1 - project/Build.scala | 24 ++---------------------- project/build.properties | 2 +- project/scripted.sbt | 2 +- 4 files changed, 4 insertions(+), 25 deletions(-) diff --git a/build.sbt b/build.sbt index 93e194a97d37..9fbaee50a7f4 100644 --- a/build.sbt +++ b/build.sbt @@ -15,7 +15,6 @@ val `dotty-library-optimised` = Build.`dotty-library-optimised` val `dotty-sbt-bridge` = Build.`dotty-sbt-bridge` val `dotty-sbt-bridge-bootstrapped` = Build.`dotty-sbt-bridge-bootstrapped` val `dotty-language-server` = Build.`dotty-language-server` -val sjsSandbox = Build.sjsSandbox val `dotty-bench` = Build.`dotty-bench` val `dotty-bench-bootstrapped` = Build.`dotty-bench-bootstrapped` val `dotty-bench-optimised` = Build.`dotty-bench-optimised` diff --git a/project/Build.scala b/project/Build.scala index 0e28abc47f51..2abe1fe2e992 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -7,18 +7,12 @@ import java.nio.file.{ Files, FileSystemException } import java.util.Calendar import scala.reflect.io.Path -import sbtassembly.AssemblyKeys.assembly -import xerial.sbt.Pack._ import sbt.Package.ManifestAttributes -import com.typesafe.sbteclipse.plugin.EclipsePlugin._ - import dotty.tools.sbtplugin.DottyPlugin.autoImport._ import dotty.tools.sbtplugin.DottyIDEPlugin.{ prepareCommand, runProcess } import dotty.tools.sbtplugin.DottyIDEPlugin.autoImport._ -import org.scalajs.sbtplugin.ScalaJSPlugin -import org.scalajs.sbtplugin.ScalaJSPlugin.autoImport._ import pl.project13.scala.sbt.JmhPlugin import JmhPlugin.JmhKeys.Jmh @@ -151,7 +145,6 @@ object Build { // Settings used when compiling dotty with a non-bootstrapped dotty lazy val commonBootstrappedSettings = commonSettings ++ Seq( - EclipseKeys.skipProject := true, version := dottyVersion, scalaVersion := dottyNonBootstrappedVersion, @@ -285,8 +278,6 @@ object Build { crossPaths := false, // Do not depend on the Scala library autoScalaLibrary := false, - // Let the sbt eclipse plugin know that this is a Java-only project - EclipseKeys.projectFlavor := EclipseProjectFlavor.Java, //Remove javac invalid options in Compile doc javacOptions in (Compile, doc) --= Seq("-Xlint:unchecked", "-Xlint:deprecation") ) @@ -370,10 +361,6 @@ object Build { settings( resourceDirectory in Test := baseDirectory.value / "test" / "resources", - // specify main and ignore tests when assembling - mainClass in assembly := Some("dotty.tools.bot.Main"), - test in assembly := {}, - libraryDependencies ++= { val circeVersion = "0.7.0" val http4sVersion = "0.15.3" @@ -462,19 +449,12 @@ object Build { Seq(file) }.taskValue, - // include sources in eclipse (downloads source code for all dependencies) - //http://stackoverflow.com/questions/10472840/how-to-attach-sources-to-sbt-managed-dependencies-in-scala-ide#answer-11683728 - com.typesafe.sbteclipse.plugin.EclipsePlugin.EclipseKeys.withSource := true, - // get libraries onboard - libraryDependencies ++= Seq("com.typesafe.sbt" % "sbt-interface" % sbtVersion.value, + libraryDependencies ++= Seq("org.scala-sbt" % "compiler-interface" % sbtVersion.value, ("org.scala-lang.modules" %% "scala-xml" % "1.0.6").withDottyCompat(), "com.novocode" % "junit-interface" % "0.11" % "test", "org.scala-lang" % "scala-library" % scalacVersion % "test"), - // enable improved incremental compilation algorithm - incOptions := incOptions.value.withNameHashing(true), - // For 
convenience, change the baseDirectory when running the compiler baseDirectory in (Compile, run) := baseDirectory.value / "..", // .. but not when running test @@ -721,7 +701,7 @@ object Build { description := "sbt compiler bridge for Dotty", resolvers += Resolver.typesafeIvyRepo("releases"), // For org.scala-sbt:api libraryDependencies ++= Seq( - "com.typesafe.sbt" % "sbt-interface" % sbtVersion.value, + "org.scala-sbt" % "compiler-interface" % "1.0.0-X16", "org.scala-sbt" % "api" % sbtVersion.value % "test", ("org.specs2" %% "specs2-core" % "3.9.1" % "test").withDottyCompat(), ("org.specs2" %% "specs2-junit" % "3.9.1" % "test").withDottyCompat() diff --git a/project/build.properties b/project/build.properties index 64317fdae59f..cd66fd542cf2 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=0.13.15 +sbt.version=1.0.0-M6 diff --git a/project/scripted.sbt b/project/scripted.sbt index 76fdf267ff0d..a7d7ecccf2a9 100644 --- a/project/scripted.sbt +++ b/project/scripted.sbt @@ -1,2 +1,2 @@ // Used by the subproject dotty-bridge -libraryDependencies += "org.scala-sbt" % "scripted-plugin" % sbtVersion.value +libraryDependencies += "org.scala-sbt" %% "scripted-plugin" % sbtVersion.value From 4ce83b8d41cc898e6f6d100e7f89645e1d7a0e5b Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Tue, 6 Jun 2017 14:28:30 +0200 Subject: [PATCH 02/49] Upgrade to sbt 1.0.0-M6 --- build.sbt | 6 +- project/Build.scala | 65 +++++---- project/inject-sbt-dotty.sbt | 5 +- project/scripted.sbt | 4 +- .../dotty/tools/sbtplugin/DottyPlugin.scala | 127 +++++++++++------- 5 files changed, 126 insertions(+), 81 deletions(-) diff --git a/build.sbt b/build.sbt index 9fbaee50a7f4..b547156c2d76 100644 --- a/build.sbt +++ b/build.sbt @@ -22,9 +22,9 @@ val `scala-library` = Build.`scala-library` val `scala-compiler` = Build.`scala-compiler` val `scala-reflect` = Build.`scala-reflect` val scalap = Build.scalap -val dist = Build.dist -val `dist-bootstrapped` = Build.`dist-bootstrapped` -val `dist-optimised` = Build.`dist-optimised` +// val dist = Build.dist +// val `dist-bootstrapped` = Build.`dist-bootstrapped` +// val `dist-optimised` = Build.`dist-optimised` val `sbt-dotty` = Build.`sbt-dotty` val `vscode-dotty` = Build.`vscode-dotty` diff --git a/project/Build.scala b/project/Build.scala index 2abe1fe2e992..2210837300ee 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -10,6 +10,7 @@ import scala.reflect.io.Path import sbt.Package.ManifestAttributes +// import sbt.ScriptedPlugin.autoImport._ import dotty.tools.sbtplugin.DottyPlugin.autoImport._ import dotty.tools.sbtplugin.DottyIDEPlugin.{ prepareCommand, runProcess } import dotty.tools.sbtplugin.DottyIDEPlugin.autoImport._ @@ -128,7 +129,7 @@ object Build { resourceDirectory in Test := baseDirectory.value / "test-resources", // Prevent sbt from rewriting our dependencies - ivyScala ~= (_ map (_ copy (overrideScalaVersion = false))) + ivyScala ~= (_.map(_.withOverrideScalaVersion(false))) ) // Settings used for projects compiled only with Scala 2 @@ -150,8 +151,8 @@ object Build { // Avoid having to run `dotty-sbt-bridge/publishLocal` before compiling a bootstrapped project scalaCompilerBridgeSource := - (dottyOrganization %% "dotty-sbt-bridge" % dottyVersion % Configurations.Component.name) - .artifacts(Artifact.sources("dotty-sbt-bridge").copy(url = + (dottyOrganization %% "dotty-sbt-bridge" % "NOT_PUBLISHED" % Configurations.Component.name) + .artifacts(Artifact.sources("dotty-sbt-bridge").withUrl( // We cannot use the 
`packageSrc` task because a setting cannot depend // on a task. Instead, we make `compile` below depend on the bridge `packageSrc` Some((artifactPath in (`dotty-sbt-bridge`, Compile, packageSrc)).value.toURI.toURL))), @@ -165,7 +166,7 @@ object Build { // contain `scalaInstance.value.libraryJar` which in our case is the // non-bootstrapped dotty-library that will then take priority over // the bootstrapped dotty-library on the classpath or sourcepath. - classpathOptions ~= (_.copy(autoBoot = false)), + classpathOptions ~= (_.withAutoBoot(false)), // We still need a Scala bootclasspath equal to the JVM bootclasspath, // otherwise sbt 0.13 incremental compilation breaks (https://github.com/sbt/sbt/issues/3142) scalacOptions ++= Seq("-bootclasspath", sys.props("sun.boot.class.path")), @@ -187,9 +188,9 @@ object Build { libraryDependencies ++= { if (bootstrapFromPublishedJars.value) Seq( - dottyOrganization %% "dotty-library" % dottyNonBootstrappedVersion % Configurations.ScalaTool.name, - dottyOrganization %% "dotty-compiler" % dottyNonBootstrappedVersion % Configurations.ScalaTool.name - ).map(_.withDottyCompat()) + dottyOrganization % "dotty-library_2.11" % dottyNonBootstrappedVersion % Configurations.ScalaTool.name, + dottyOrganization % "dotty-compiler_2.11" % dottyNonBootstrappedVersion % Configurations.ScalaTool.name + )//.map(_.withDottyCompat()) else Seq() }, @@ -197,25 +198,29 @@ object Build { // Compile using the non-bootstrapped and non-published dotty managedScalaInstance := false, scalaInstance := { + val updateResult = update.value val (libraryJar, compilerJar) = if (bootstrapFromPublishedJars.value) { - val jars = update.value.select( + val jars = updateResult.select( configuration = configurationFilter(Configurations.ScalaTool.name), + module = moduleFilter(), artifact = artifactFilter(extension = "jar") ) (jars.find(_.getName.startsWith("dotty-library_2.12")).get, jars.find(_.getName.startsWith("dotty-compiler_2.12")).get) } else - ((packageBin in (`dotty-library`, Compile)).value, - (packageBin in (`dotty-compiler`, Compile)).value) + ((packageBin in (`dotty-library`, Compile)).value: @sbtUnchecked, + (packageBin in (`dotty-compiler`, Compile)).value: @sbtUnchecked) // All compiler dependencies except the library val otherDependencies = (dependencyClasspath in (`dotty-compiler`, Compile)).value .filterNot(_.get(artifact.key).exists(_.name == "dotty-library")) .map(_.data) - val loader = state.value.classLoaderCache(libraryJar :: compilerJar :: otherDependencies.toList) - new ScalaInstance(scalaVersion.value, loader, libraryJar, compilerJar, otherDependencies, None) + val allJars = libraryJar :: compilerJar :: otherDependencies.toList + val classLoader = state.value.classLoaderCache(allJars) + new sbt.internal.inc.ScalaInstance(scalaVersion.value, + classLoader, libraryJar, compilerJar, allJars.toArray, None) } ) @@ -450,8 +455,8 @@ object Build { }.taskValue, // get libraries onboard - libraryDependencies ++= Seq("org.scala-sbt" % "compiler-interface" % sbtVersion.value, - ("org.scala-lang.modules" %% "scala-xml" % "1.0.6").withDottyCompat(), + libraryDependencies ++= Seq("org.scala-sbt" % "compiler-interface" % "1.0.0-X16", + ("org.scala-lang.modules" % "scala-xml_2.12" % "1.0.6"), "com.novocode" % "junit-interface" % "0.11" % "test", "org.scala-lang" % "scala-library" % scalacVersion % "test"), @@ -482,7 +487,6 @@ object Build { // Override run to be able to run compiled classfiles dotr := { val args: Seq[String] = spaceDelimited("").parsed - val java: String = 
Process("which" :: "java" :: Nil).!! val attList = (dependencyClasspath in Runtime).value val _ = packageAll.value val scalaLib = attList @@ -490,13 +494,18 @@ object Build { .find(_.contains("scala-library")) .toList.mkString(":") - if (java == "") - println("Couldn't find java executable on path, please install java to a default location") - else if (scalaLib == "") { + if (scalaLib == "") { println("Couldn't find scala-library on classpath, please run using script in bin dir instead") } else { val dottyLib = packageAll.value("dotty-library") - s"""$java -classpath .:$dottyLib:$scalaLib ${args.mkString(" ")}""".! + val exitCode = new java.lang.ProcessBuilder("java", "-classpath", s""".:$dottyLib:$scalaLib ${args.mkString(" ")}""") + .inheritIO() + .start() + .waitFor() + if (exitCode != 0) + throw new FeedbackProvidedException { + override def toString = "dotr failed" + } } }, run := Def.inputTaskDyn { @@ -702,9 +711,9 @@ object Build { resolvers += Resolver.typesafeIvyRepo("releases"), // For org.scala-sbt:api libraryDependencies ++= Seq( "org.scala-sbt" % "compiler-interface" % "1.0.0-X16", - "org.scala-sbt" % "api" % sbtVersion.value % "test", - ("org.specs2" %% "specs2-core" % "3.9.1" % "test").withDottyCompat(), - ("org.specs2" %% "specs2-junit" % "3.9.1" % "test").withDottyCompat() + "org.scala-sbt" % "zinc-apiinfo_2.12" % "1.0.0-X16" % "test", + ("org.specs2" %% "specs2-core" % "3.9.1" % "test"),//.withDottyCompat() + ("org.specs2" %% "specs2-junit" % "3.9.1" % "test")//.withDottyCompat() ), // The sources should be published with crossPaths := false since they // need to be compiled by the project using the bridge. @@ -833,8 +842,13 @@ object Build { lazy val `sbt-dotty` = project.in(file("sbt-dotty")). settings(commonSettings). settings( + scalaVersion := "2.12.2", // Keep in sync with inject-sbt-dotty.sbt - libraryDependencies += Dependencies.`jackson-databind`, + libraryDependencies ++= Seq( + Dependencies.`jackson-databind`, + "org.scala-sbt" % "compiler-interface" % "1.0.0-X16", + // "org.scala-sbt" %% "scripted-plugin" % sbtVersion.value + ), unmanagedSourceDirectories in Compile += baseDirectory.value / "../language-server/src/dotty/tools/languageserver/config", @@ -873,7 +887,7 @@ object Build { val coursier = baseDirectory.value / "out/coursier" val packageJson = baseDirectory.value / "package.json" if (!coursier.exists || packageJson.lastModified > coursier.lastModified) - runProcess(Seq("npm", "run", "update-all"), wait = true, directory = baseDirectory.value) + runProcess(Seq("npm", "run", "update-all"), wait = true, directory = baseDirectory.value: @sbtUnchecked) val tsc = baseDirectory.value / "node_modules" / ".bin" / "tsc" runProcess(Seq(tsc.getAbsolutePath, "--pretty", "--project", baseDirectory.value.getAbsolutePath), wait = true) @@ -882,7 +896,7 @@ object Build { // (--extensionDevelopmentPath=...) 
runProcess(codeCommand.value ++ Seq("--install-extension", "daltonjorge.scala"), wait = true) - sbt.inc.Analysis.Empty + sbt.internal.inc.Analysis.Empty }, sbt.Keys.`package`:= { runProcess(Seq("vsce", "package"), wait = true, directory = baseDirectory.value) @@ -1161,5 +1175,4 @@ object Build { case BootstrappedOptimised => commonOptimisedSettings }) } - } diff --git a/project/inject-sbt-dotty.sbt b/project/inject-sbt-dotty.sbt index 5ccc77fc2783..d63d6da5d1d4 100644 --- a/project/inject-sbt-dotty.sbt +++ b/project/inject-sbt-dotty.sbt @@ -5,6 +5,9 @@ unmanagedSourceDirectories in Compile += baseDirectory.value / "../sbt-dotty/src" // Keep in sync with `sbt-dotty` config in Build.scala -libraryDependencies += Dependencies.`jackson-databind` +libraryDependencies ++= Seq( + Dependencies.`jackson-databind`, + "org.scala-sbt" % "compiler-interface" % "1.0.0-X16" +) unmanagedSourceDirectories in Compile += baseDirectory.value / "../language-server/src/dotty/tools/languageserver/config" diff --git a/project/scripted.sbt b/project/scripted.sbt index a7d7ecccf2a9..c3b5976d568e 100644 --- a/project/scripted.sbt +++ b/project/scripted.sbt @@ -1,2 +1,4 @@ // Used by the subproject dotty-bridge -libraryDependencies += "org.scala-sbt" %% "scripted-plugin" % sbtVersion.value +// libraryDependencies += "org.scala-sbt" %% "scripted-plugin" % sbtVersion.value +// val sbtV = sbtVersion.value +//addSbtPlugin("org.scala-sbt" %% "scripted-plugin" % "1.0.0-M6") diff --git a/sbt-dotty/src/dotty/tools/sbtplugin/DottyPlugin.scala b/sbt-dotty/src/dotty/tools/sbtplugin/DottyPlugin.scala index 04b87f8004a4..f413efb5520d 100644 --- a/sbt-dotty/src/dotty/tools/sbtplugin/DottyPlugin.scala +++ b/sbt-dotty/src/dotty/tools/sbtplugin/DottyPlugin.scala @@ -2,7 +2,9 @@ package dotty.tools.sbtplugin import sbt._ import sbt.Keys._ -import sbt.inc.{ ClassfileManager, IncOptions } +// import sbt.inc.{ ClassfileManager, IncOptions } +import xsbti.compile._ +import java.util.Optional object DottyPlugin extends AutoPlugin { object autoImport { @@ -49,44 +51,38 @@ object DottyPlugin extends AutoPlugin { nightly } - implicit class DottyCompatModuleID(moduleID: ModuleID) { - /** If this ModuleID cross-version is a Dotty version, replace it - * by the Scala 2.x version that the Dotty version is retro-compatible with, - * otherwise do nothing. - * - * This setting is useful when your build contains dependencies that have only - * been published with Scala 2.x, if you have: - * {{{ - * libraryDependencies += "a" %% "b" % "c" - * }}} - * you can replace it by: - * {{{ - * libraryDependencies += ("a" %% "b" % "c").withDottyCompat() - * }}} - * This will have no effect when compiling with Scala 2.x, but when compiling - * with Dotty this will change the cross-version to a Scala 2.x one. This - * works because Dotty is currently retro-compatible with Scala 2.x. - * - * NOTE: Dotty's retro-compatibility with Scala 2.x will be dropped before - * Dotty is released, you should not rely on it. 
- */ - def withDottyCompat(): ModuleID = - moduleID.crossVersion match { - case _: CrossVersion.Binary => - moduleID.cross(CrossVersion.binaryMapped { version => - CrossVersion.partialVersion(version) match { - case Some((0, minor)) => - // Dotty v0.4 or greater is compatible with 2.12.x - if (minor >= 4) "2.12" - else "2.11" - case _ => - version - } - }) - case _ => - moduleID - } - } + // implicit class DottyCompatModuleID(moduleID: ModuleID) { + // /** If this ModuleID cross-version is a Dotty version, replace it + // * by the Scala 2.x version that the Dotty version is retro-compatible with, + // * otherwise do nothing. + // * + // * This setting is useful when your build contains dependencies that have only + // * been published with Scala 2.x, if you have: + // * {{{ + // * libraryDependencies += "a" %% "b" % "c" + // * }}} + // * you can replace it by: + // * {{{ + // * libraryDependencies += ("a" %% "b" % "c").withDottyCompat() + // * }}} + // * This will have no effect when compiling with Scala 2.x, but when compiling + // * with Dotty this will change the cross-version to a Scala 2.x one. This + // * works because Dotty is currently retro-compatible with Scala 2.x. + // * + // * NOTE: Dotty's retro-compatibility with Scala 2.x will be dropped before + // * Dotty is released, you should not rely on it. + // */ + // def withDottyCompat(): ModuleID = + // moduleID.crossVersion match { + // case _: librarymanagement.Binary => + // moduleID.cross(CrossVersion.binaryMapped { + // case version if version.startsWith("0.") => "2.11" + // case version => version + // }) + // case _ => + // moduleID + // } + // } } import autoImport._ @@ -108,6 +104,27 @@ object DottyPlugin extends AutoPlugin { } } + // Copy-pasted from sbt where it's private + private case class WrappedClassFileManager(internal: ClassFileManager, + external: Option[ClassFileManager]) + extends ClassFileManager { + + override def delete(classes: Array[File]): Unit = { + external.foreach(_.delete(classes)) + internal.delete(classes) + } + + override def complete(success: Boolean): Unit = { + external.foreach(_.complete(success)) + internal.complete(success) + } + + override def generated(classes: Array[File]): Unit = { + external.foreach(_.generated(classes)) + internal.generated(classes) + } + } + /** Patches the IncOptions so that .tasty and .hasTasty files are pruned as needed. * * This code is adapted from `scalaJSPatchIncOptions` in Scala.js, which needs @@ -119,25 +136,34 @@ object DottyPlugin extends AutoPlugin { * corresponding .tasty or .hasTasty file is also deleted. 
*/ def dottyPatchIncOptions(incOptions: IncOptions): IncOptions = { - val inheritedNewClassfileManager = incOptions.newClassfileManager - val newClassfileManager = () => new ClassfileManager { - private[this] val inherited = inheritedNewClassfileManager() + val inheritedNewClassFileManager = ClassFileManagerUtil.getDefaultClassFileManager(incOptions) + val tastyFileManager = new ClassFileManager { + private[this] val inherited = inheritedNewClassFileManager - def delete(classes: Iterable[File]): Unit = { + def delete(classes: Array[File]): Unit = { val tastySuffixes = List(".tasty", ".hasTasty") inherited.delete(classes flatMap { classFile => - val dottyFiles = if (classFile.getPath endsWith ".class") { + if (classFile.getPath endsWith ".class") { val prefix = classFile.getAbsolutePath.stripSuffix(".class") tastySuffixes.map(suffix => new File(prefix + suffix)).filter(_.exists) } else Nil - classFile :: dottyFiles }) } - def generated(classes: Iterable[File]): Unit = inherited.generated(classes) - def complete(success: Boolean): Unit = inherited.complete(success) + def generated(classes: Array[File]): Unit = {} + def complete(success: Boolean): Unit = {} + } + val inheritedHooks = incOptions.externalHooks + val hooks = new ExternalHooks { + override def externalClassFileManager() = Option(inheritedHooks.externalClassFileManager.orElse(null)) match { + case Some(prevManager) => + Optional.of(WrappedClassFileManager(prevManager, Some(tastyFileManager))) + case None => + Optional.of(tastyFileManager) + } + override def externalLookup() = inheritedHooks.externalLookup() } - incOptions.withNewClassfileManager(newClassfileManager) + incOptions.withExternalHooks(hooks) } override def projectSettings: Seq[Setting[_]] = { @@ -161,10 +187,11 @@ object DottyPlugin extends AutoPlugin { }, incOptions in Compile := { + val inc = (incOptions in Compile).value if (isDotty.value) - dottyPatchIncOptions((incOptions in Compile).value) + dottyPatchIncOptions(inc) else - (incOptions in Compile).value + inc }, scalaBinaryVersion := { From c91171da3abf2d95b5aee318ed6de5f46a4a64ac Mon Sep 17 00:00:00 2001 From: Thierry Treyer Date: Wed, 10 May 2017 17:51:12 +0200 Subject: [PATCH 03/49] Fix sbt bridge First draft at fixing the sbt bridge to match the Zinc 1.0 API. 
--- .../dotty/tools/backend/jvm/GenBCode.scala | 4 +- .../tools/dotc/core/SymDenotations.scala | 9 ++ .../src/dotty/tools/dotc/sbt/ExtractAPI.scala | 121 ++++++++++-------- .../tools/dotc/sbt/ExtractDependencies.scala | 38 ++++-- .../src/dotty/tools/dotc/sbt/ShowAPI.scala | 26 ++-- sbt-bridge/src/xsbt/CompilerInterface.scala | 3 +- sbt-bridge/src/xsbt/DelegatingReporter.scala | 33 ++--- sbt-bridge/src/xsbt/ScaladocInterface.scala | 13 +- 8 files changed, 144 insertions(+), 103 deletions(-) diff --git a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala index ad84bde680d1..93a273af1e9b 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala @@ -407,7 +407,9 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter if (ctx.compilerCallback != null) ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(outFile), className) if (ctx.sbtCallback != null) - ctx.sbtCallback.generatedClass(sourceFile.jfile.orElse(null), outFile.file, className) + // ctx.sbtCallback.generatedClass(sourceFile.jfile.orElse(null), outFile.file, className) + // TODO: Check + ctx.sbtCallback.generatedNonLocalClass(sourceFile.jfile.orElse(null), outFile.file, jclassName, className) } catch { case e: FileConflictException => diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 145913261053..8f53fa063740 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -11,6 +11,7 @@ import collection.BitSet import dotty.tools.io.AbstractFile import Decorators.SymbolIteratorDecorator import ast._ +import ast.Trees._ import annotation.tailrec import CheckRealizable._ import util.SimpleIdentityMap @@ -334,6 +335,14 @@ object SymDenotations { case Nil => Nil } + final def children(implicit ctx: Context): List[Symbol] = + this.annotations.filter(_.symbol == ctx.definitions.ChildAnnot).map { annot => + // refer to definition of Annotation.makeChild + annot.tree match { + case Apply(TypeApply(_, List(tpTree)), _) => tpTree.symbol + } + } + /** The denotation is completed: info is not a lazy type and attributes have defined values */ final def isCompleted: Boolean = !myInfo.isInstanceOf[LazyType] diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index 532aef36b723..30401ab2a5c2 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -47,19 +47,19 @@ class ExtractAPI extends Phase { if ((ctx.sbtCallback != null || forceRun) && !unit.isJava) { val sourceFile = unit.source.file val apiTraverser = new ExtractAPICollector - val source = apiTraverser.apiSource(unit.tpdTree) + val sources = apiTraverser.apiSource(unit.tpdTree) if (dumpInc) { // Append to existing file that should have been created by ExtractDependencies val pw = new PrintWriter(Path(sourceFile.jpath).changeExtension("inc").toFile .bufferedWriter(append = true), true) try { - pw.println(DefaultShowAPI(source)) + sources.foreach(source => pw.println(DefaultShowAPI(source))) } finally pw.close() } if (ctx.sbtCallback != null) - ctx.sbtCallback.api(sourceFile.file, source) + sources.foreach(ctx.sbtCallback.api(sourceFile.file, _)) } } } @@ -113,7 +113,7 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder 
/** This cache is necessary for correctness, see the comment about inherited * members in `apiClassStructure` */ - private[this] val classLikeCache = new mutable.HashMap[ClassSymbol, api.ClassLike] + private[this] val classLikeCache = new mutable.HashMap[ClassSymbol, api.ClassLikeDef] /** This cache is optional, it avoids recomputing representations */ private[this] val typeCache = new mutable.HashMap[Type, api.Type] /** This cache is necessary to avoid unstable name hashing when `typeCache` is present, @@ -122,6 +122,8 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder */ private[this] val refinedTypeCache = new mutable.HashMap[(api.Type, api.Definition), api.Structure] + private[this] val allNonLocalClassesInSrc = new mutable.HashSet[xsbti.api.ClassLike] + private[this] object Constants { val emptyStringArray = Array[String]() val local = new api.ThisQualifier @@ -153,25 +155,25 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder /** Extract the API representation of a source file */ - def apiSource(tree: Tree): api.SourceAPI = { - val classes = new mutable.ListBuffer[api.ClassLike] + def apiSource(tree: Tree): Seq[api.ClassLike] = { def apiClasses(tree: Tree): Unit = tree match { case PackageDef(_, stats) => stats.foreach(apiClasses) case tree: TypeDef => - classes += apiClass(tree.symbol.asClass) + apiClass(tree.symbol.asClass) case _ => } apiClasses(tree) forceThunks() - new api.SourceAPI(Array(), classes.toArray) + + allNonLocalClassesInSrc.toSeq } - def apiClass(sym: ClassSymbol): api.ClassLike = + def apiClass(sym: ClassSymbol): api.ClassLikeDef = classLikeCache.getOrElseUpdate(sym, computeClass(sym)) - private def computeClass(sym: ClassSymbol): api.ClassLike = { + private def computeClass(sym: ClassSymbol): api.ClassLikeDef = { import xsbti.api.{DefinitionType => dt} val defType = if (sym.is(Trait)) dt.Trait @@ -184,14 +186,23 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder val name = if (sym.is(ModuleClass)) sym.fullName.sourceModuleName else sym.fullName - val tparams = sym.typeParams.map(apiTypeParameter) + val tparams = sym.typeParams.map(apiTypeParameter).toArray val structure = apiClassStructure(sym) + val acc = apiAccess(sym) + val modifiers = apiModifiers(sym) + val anns = apiAnnotations(sym).toArray + // from SymDenotations.topLevelClass + val topLevel = (sym.isEffectiveRoot || (sym is PackageClass) || (sym.owner is PackageClass)) + val childrenOfSealedClass = sym.children.sorted(classFirstSort).map(c => apiType(c.info)).toArray + + val cl = new api.ClassLike( + name.toString, acc, modifiers, anns, defType, strict2lzy(selfType), strict2lzy(structure), Constants.emptyStringArray, + childrenOfSealedClass, topLevel, tparams) + + allNonLocalClassesInSrc += cl - new api.ClassLike( - defType, strict2lzy(selfType), strict2lzy(structure), Constants.emptyStringArray, - tparams.toArray, name.toString, apiAccess(sym), apiModifiers(sym), - apiAnnotations(sym).toArray) + new api.ClassLikeDef(name.toString, acc, modifiers, anns, tparams, defType); } private[this] val LegacyAppClass = ctx.requiredClass("dotty.runtime.LegacyApp") @@ -240,48 +251,48 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder info.baseClasses.tail.map(ref.baseType) } - def apiDefinitions(defs: List[Symbol]): List[api.Definition] = { - // The hash generated by sbt for definitions is supposed to be symmetric so - // we shouldn't have to sort them, but it actually isn't symmetric for - 
// definitions which are classes, therefore we need to sort classes to - // ensure a stable hash. - // Modules and classes come first and are sorted by name, all other - // definitions come later and are not sorted. - object classFirstSort extends Ordering[Symbol] { - override def compare(a: Symbol, b: Symbol) = { - val aIsClass = a.isClass - val bIsClass = b.isClass - if (aIsClass == bIsClass) { - if (aIsClass) { - if (a.is(Module) == b.is(Module)) - a.fullName.toString.compareTo(b.fullName.toString) - else if (a.is(Module)) - -1 - else - 1 - } else - 0 - } else if (aIsClass) - -1 - else - 1 - } + // The hash generated by sbt for definitions is supposed to be symmetric so + // we shouldn't have to sort them, but it actually isn't symmetric for + // definitions which are classes, therefore we need to sort classes to + // ensure a stable hash. + // Modules and classes come first and are sorted by name, all other + // definitions come later and are not sorted. + private object classFirstSort extends Ordering[Symbol] { + override def compare(a: Symbol, b: Symbol) = { + val aIsClass = a.isClass + val bIsClass = b.isClass + if (aIsClass == bIsClass) { + if (aIsClass) { + if (a.is(Module) == b.is(Module)) + a.fullName.toString.compareTo(b.fullName.toString) + else if (a.is(Module)) + -1 + else + 1 + } else + 0 + } else if (aIsClass) + -1 + else + 1 } + } + def apiDefinitions(defs: List[Symbol]): List[api.ClassDefinition] = { defs.sorted(classFirstSort).map(apiDefinition) } - def apiDefinition(sym: Symbol): api.Definition = { + def apiDefinition(sym: Symbol): api.ClassDefinition = { if (sym.isClass) { apiClass(sym.asClass) } else if (sym.isType) { apiTypeMember(sym.asType) } else if (sym.is(Mutable, butNot = Accessor)) { - new api.Var(apiType(sym.info), sym.name.toString, - apiAccess(sym), apiModifiers(sym), apiAnnotations(sym).toArray) + new api.Var(sym.name.toString, apiAccess(sym), apiModifiers(sym), + apiAnnotations(sym).toArray, apiType(sym.info)) } else if (sym.isStable) { - new api.Val(apiType(sym.info), sym.name.toString, - apiAccess(sym), apiModifiers(sym), apiAnnotations(sym).toArray) + new api.Val(sym.name.toString, apiAccess(sym), apiModifiers(sym), + apiAnnotations(sym).toArray, apiType(sym.info)) } else { apiDef(sym.asTerm) } @@ -324,8 +335,8 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder val vparamss = paramLists(sym.info) val retTp = sym.info.finalResultType.widenExpr - new api.Def(vparamss.toArray, apiType(retTp), tparams.toArray, - sym.name.toString, apiAccess(sym), apiModifiers(sym), apiAnnotations(sym).toArray) + new api.Def(sym.name.toString, apiAccess(sym), apiModifiers(sym), + apiAnnotations(sym).toArray, tparams.toArray, vparamss.toArray, apiType(retTp)) } def apiTypeMember(sym: TypeSymbol): api.TypeMember = { @@ -337,10 +348,10 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder val tpe = sym.info if (sym.isAliasType) - new api.TypeAlias(apiType(tpe.bounds.hi), typeParams, name, access, modifiers, as.toArray) + new api.TypeAlias(name, access, modifiers, as.toArray, typeParams, apiType(tpe.bounds.hi)) else { assert(sym.isAbstractType) - new api.TypeDeclaration(apiType(tpe.bounds.lo), apiType(tpe.bounds.hi), typeParams, name, access, modifiers, as.to) + new api.TypeDeclaration(name, access, modifiers, as.toArray, typeParams, apiType(tpe.bounds.lo), apiType(tpe.bounds.hi)) } } @@ -405,11 +416,11 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder def typeRefinement(name: 
String, tp: TypeBounds): api.TypeMember = tp match { case TypeAlias(alias) => - new api.TypeAlias(apiType(alias), - Array(), name, Constants.public, Constants.emptyModifiers, Array()) + new api.TypeAlias(name, + Constants.public, Constants.emptyModifiers, Array(), Array(), apiType(alias)) case TypeBounds(lo, hi) => - new api.TypeDeclaration(apiType(lo), apiType(hi), - Array(), name, Constants.public, Constants.emptyModifiers, Array()) + new api.TypeDeclaration(name, + Constants.public, Constants.emptyModifiers, Array(), Array(), apiType(lo), apiType(hi)) } val decl = rt.refinedInfo match { case rinfo: TypeBounds => @@ -441,7 +452,7 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder // `apiFoo == apiBar` always imply `apiFoo eq apiBar`. This is what // `refinedTypeCache` is for. refinedTypeCache.getOrElseUpdate((parent, decl), { - val adecl: Array[api.Definition] = if (decl == null) Array() else Array(decl) + val adecl: Array[api.ClassDefinition] = if (decl == null) Array() else Array(decl) new api.Structure(strict2lzy(Array(parent)), strict2lzy(adecl), strict2lzy(Array())) }) case tp: RecType => diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index 32b9827e8535..cdec3e4b3366 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -13,7 +13,7 @@ import java.io.File import java.util.{Arrays, Comparator} -import xsbti.DependencyContext +import xsbti.api.DependencyContext /** This phase sends information on classes' dependencies to sbt via callbacks. * @@ -68,8 +68,8 @@ class ExtractDependencies extends Phase { } if (ctx.sbtCallback != null) { - extractDeps.usedNames.foreach(name => - ctx.sbtCallback.usedName(sourceFile.file, name.toString)) + extractDeps.usedNames.foreach{ case (enclosingName, names) => + names.foreach(name => ctx.sbtCallback.usedName(enclosingName.toString, name.toString)) } extractDeps.topLevelDependencies.foreach(dep => recordDependency(sourceFile.file, dep, DependencyContext.DependencyByMemberRef)) extractDeps.topLevelInheritanceDependencies.foreach(dep => @@ -93,7 +93,7 @@ class ExtractDependencies extends Phase { def className(classSegments: List[String]) = classSegments.mkString(".").stripSuffix(".class") def binaryDependency(file: File, className: String) = - ctx.sbtCallback.binaryDependency(file, className, currentSourceFile, context) + ctx.sbtCallback.binaryDependency(file, className, currentClass.fullName.toString, currentSourceFile, context) depFile match { case ze: ZipArchive#Entry => @@ -112,7 +112,7 @@ class ExtractDependencies extends Phase { ctx.warning(s"sbt-deps: Ignoring dependency $depFile of class ${depFile.getClass}") } } else if (depFile.file != currentSourceFile) { - ctx.sbtCallback.sourceDependency(depFile.file, currentSourceFile, context) + ctx.sbtCallback.classDependency(dep.enclosingClass.fullName.toString, currentClass.fullName.toString, context) } } } @@ -129,14 +129,14 @@ class ExtractDependencies extends Phase { private class ExtractDependenciesCollector(implicit val ctx: Context) extends tpd.TreeTraverser { import tpd._ - private[this] val _usedNames = new mutable.HashSet[Name] + private[this] val _usedNames = new mutable.HashMap[Name, mutable.Set[Name]].withDefault(_ => new mutable.HashSet[Name]) private[this] val _topLevelDependencies = new mutable.HashSet[Symbol] private[this] val _topLevelInheritanceDependencies = new 
mutable.HashSet[Symbol] /** The names used in this class, this does not include names which are only * defined and not referenced. */ - def usedNames: Set[Name] = _usedNames + def usedNames: collection.Map[Name, Set[Name]] = _usedNames /** The set of top-level classes that the compilation unit depends on * because it refers to these classes or something defined in them. @@ -149,15 +149,29 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp */ def topLevelInheritanceDependencies: Set[Symbol] = _topLevelInheritanceDependencies - private def addUsedName(name: Name) = - _usedNames += name + private def addUsedName(enclosingName: Name, name: Name) = + _usedNames(enclosingName) += name - private def addDependency(sym: Symbol): Unit = + private def addDependency(sym: Symbol)(implicit ctx: Context): Unit = if (!ignoreDependency(sym)) { val tlClass = sym.topLevelClass if (tlClass.ne(NoSymbol)) // Some synthetic type aliases like AnyRef do not belong to any class _topLevelDependencies += sym.topLevelClass - addUsedName(sym.name) + addUsedName(nonLocalEnclosingClass(sym).fullName, sym.name) + } + + private def isLocal(sym: Symbol)(implicit ctx: Context): Boolean = { + val owner = sym.maybeOwner + owner.isTerm || + owner.is(Trait) && isLocal(owner) || + sym.isConstructor && isLocal(owner) + } + + private def nonLocalEnclosingClass(sym: Symbol)(implicit ctx: Context): Symbol = + sym.enclosingClass match { + case NoSymbol => NoSymbol + case sym: Symbol if (isLocal(sym)) => sym + case sym: Symbol => nonLocalEnclosingClass(sym.owner) } private def ignoreDependency(sym: Symbol) = @@ -188,7 +202,7 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp case Thicket(Ident(name) :: Ident(rename) :: Nil) => addImported(name) if (rename ne nme.WILDCARD) - addUsedName(rename) + addUsedName(nonLocalEnclosingClass(ctx.owner).fullName, rename) case _ => } case Inlined(call, _, _) => diff --git a/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala index 0e6b19867950..35bdc8594066 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala @@ -18,17 +18,17 @@ object DefaultShowAPI { def apply(d: Definition) = ShowAPI.showDefinition(d)(defaultNesting) def apply(d: Type) = ShowAPI.showType(d)(defaultNesting) - def apply(a: SourceAPI) = ShowAPI.showApi(a)(defaultNesting) + def apply(a: ClassLike) = ShowAPI.showApi(a)(defaultNesting) } object ShowAPI { private lazy val numDecls = Try { java.lang.Integer.parseInt(sys.props.get("sbt.inc.apidiff.decls").get) } getOrElse 0 - private def truncateDecls(decls: Array[Definition]): Array[Definition] = if (numDecls <= 0) decls else decls.take(numDecls) + private def truncateDecls(decls: Array[ClassDefinition]): Array[ClassDefinition] = if (numDecls <= 0) decls else decls.take(numDecls) private def lines(ls: Seq[String]): String = ls.mkString("\n", "\n", "\n") - def showApi(a: SourceAPI)(implicit nesting: Int) = - a.packages.map(pkg => "package " + pkg.name).mkString("\n") + lines(truncateDecls(a.definitions).map(showDefinition)) + def showApi(c: ClassLike)(implicit nesting: Int) = + showDefinition(c) def showDefinition(d: Definition)(implicit nesting: Int): String = d match { case v: Val => showMonoDef(v, "val") + ": " + showType(v.tpe) @@ -36,7 +36,9 @@ object ShowAPI { case d: Def => showPolyDef(d, "def") + showValueParams(d.valueParameters) + ": " + showType(d.returnType) case ta: TypeAlias => showPolyDef(ta, "type") + 
" = " + showType(ta.tpe) case td: TypeDeclaration => showPolyDef(td, "type") + showBounds(td.lowerBound, td.upperBound) - case cl: ClassLike => showPolyDef(cl, showDefinitionType(cl.definitionType)) + " extends " + showTemplate(cl) + case cl: ClassLike => showMonoDef(d, showDefinitionType(cl.definitionType)) + + showTypeParameters(cl.typeParameters) + " extends " + showTemplate(cl) + case cl: ClassLikeDef => showPolyDef(cl, showDefinitionType(cl.definitionType)) } private def showTemplate(cl: ClassLike)(implicit nesting: Int) = @@ -61,14 +63,17 @@ object ShowAPI { case s: Structure => s.parents.map(showType).mkString(" with ") + ( if (nesting <= 0) "{ }" - else truncateDecls(s.declared).map(showNestedDefinition).mkString(" {", "\n", "}")) + else truncateDecls(s.declared).map(showNestedDefinition).mkString(" {", "\n", "}") + ) case e: Existential => showType(e.baseType) + ( if (nesting <= 0) " forSome { }" - else e.clause.map(t => "type " + showNestedTypeParameter(t)).mkString(" forSome { ", "; ", " }")) + else e.clause.map(t => "type " + showNestedTypeParameter(t)).mkString(" forSome { ", "; ", " }") + ) case p: Polymorphic => showType(p.baseType) + ( if (nesting <= 0) " [ ]" - else showNestedTypeParameters(p.parameters)) + else showNestedTypeParameters(p.parameters) + ) } private def showPath(p: Path): String = p.components.map(showPathComponent).mkString(".") @@ -104,9 +109,7 @@ object ShowAPI { private def showValueParams(ps: Seq[ParameterList])(implicit nesting: Int): String = ps.map(pl => pl.parameters.map(mp => - mp.name + ": " + showParameterModifier(showType(mp.tpe), mp.modifier) + (if (mp.hasDefault) "= ..." else "") - ).mkString(if (pl.isImplicit) "(implicit " else "(", ", ", ")") - ).mkString("") + mp.name + ": " + showParameterModifier(showType(mp.tpe), mp.modifier) + (if (mp.hasDefault) "= ..." 
else "")).mkString(if (pl.isImplicit) "(implicit " else "(", ", ", ")")).mkString("") private def showParameterModifier(base: String, pm: ParameterModifier): String = pm match { case ParameterModifier.Plain => base @@ -154,3 +157,4 @@ object ShowAPI { private def showNestedTypeParameters(tps: Seq[TypeParameter])(implicit nesting: Int) = showTypeParameters(tps)(nesting - 1) private def showNestedDefinition(d: Definition)(implicit nesting: Int) = showDefinition(d)(nesting - 1) } + diff --git a/sbt-bridge/src/xsbt/CompilerInterface.scala b/sbt-bridge/src/xsbt/CompilerInterface.scala index bf1488dad93b..b2418d70cd9d 100644 --- a/sbt-bridge/src/xsbt/CompilerInterface.scala +++ b/sbt-bridge/src/xsbt/CompilerInterface.scala @@ -3,8 +3,7 @@ */ package xsbt -import xsbti.{ AnalysisCallback, Logger, Problem, Reporter, Severity, DependencyContext } -import xsbti.api.SourceAPI +import xsbti.{ AnalysisCallback, Logger, Problem, Reporter, Severity } import xsbti.compile._ import Log.debug import java.io.File diff --git a/sbt-bridge/src/xsbt/DelegatingReporter.scala b/sbt-bridge/src/xsbt/DelegatingReporter.scala index ffc4792ecef0..0b096a2336e3 100644 --- a/sbt-bridge/src/xsbt/DelegatingReporter.scala +++ b/sbt-bridge/src/xsbt/DelegatingReporter.scala @@ -9,7 +9,8 @@ import reporting._ import reporting.diagnostic.MessageContainer import reporting.diagnostic.messages import core.Contexts._ -import xsbti.{Maybe, Position} +import xsbti.Position +import java.util.Optional final class DelegatingReporter(delegate: xsbti.Reporter) extends Reporter with UniqueMessagePositions @@ -32,13 +33,13 @@ final class DelegatingReporter(delegate: xsbti.Reporter) extends Reporter val pos = cont.pos val src = pos.source new Position { - val sourceFile: Maybe[java.io.File] = maybe(Option(src.file.file)) - val sourcePath: Maybe[String] = maybe(Option(src.file.path)) - val line: Maybe[Integer] = Maybe.just(pos.line) + val sourceFile: Optional[java.io.File] = maybe(Option(src.file.file)) + val sourcePath: Optional[String] = maybe(Option(src.file.path)) + val line: Optional[Integer] = Optional.of(pos.line) val lineContent: String = pos.lineContent.stripLineEnd - val offset: Maybe[Integer] = Maybe.just(pos.point) - val pointer: Maybe[Integer] = Maybe.just(pos.point - src.startOfLine(pos.point)) - val pointerSpace: Maybe[String] = Maybe.just( + val offset: Optional[Integer] = Optional.of(pos.point) + val pointer: Optional[Integer] = Optional.of(pos.point - src.startOfLine(pos.point)) + val pointerSpace: Optional[String] = Optional.of( ((lineContent: Seq[Char]).take(pointer.get).map { case '\t' => '\t'; case x => ' ' }).mkString ) } @@ -54,18 +55,18 @@ final class DelegatingReporter(delegate: xsbti.Reporter) extends Reporter delegate.log(position, sb.toString(), severity) } - private[this] def maybe[T](opt: Option[T]): Maybe[T] = opt match { - case None => Maybe.nothing[T] - case Some(s) => Maybe.just[T](s) + private[this] def maybe[T](opt: Option[T]): Optional[T] = opt match { + case None => Optional.empty[T] + case Some(s) => Optional.of[T](s) } private[this] val noPosition = new Position { - val line: Maybe[Integer] = Maybe.nothing[Integer] + val line: Optional[Integer] = Optional.empty[Integer] val lineContent: String = "" - val offset: Maybe[Integer] = Maybe.nothing[Integer] - val pointer: Maybe[Integer] = Maybe.nothing[Integer] - val pointerSpace: Maybe[String] = Maybe.nothing[String] - val sourceFile: Maybe[java.io.File] = Maybe.nothing[java.io.File] - val sourcePath: Maybe[String] = Maybe.nothing[String] + val offset: 
Optional[Integer] = Optional.empty[Integer] + val pointer: Optional[Integer] = Optional.empty[Integer] + val pointerSpace: Optional[String] = Optional.empty[String] + val sourceFile: Optional[java.io.File] = Optional.empty[java.io.File] + val sourcePath: Optional[String] = Optional.empty[String] } } diff --git a/sbt-bridge/src/xsbt/ScaladocInterface.scala b/sbt-bridge/src/xsbt/ScaladocInterface.scala index 1eae8374d28b..387b54e13673 100644 --- a/sbt-bridge/src/xsbt/ScaladocInterface.scala +++ b/sbt-bridge/src/xsbt/ScaladocInterface.scala @@ -5,6 +5,7 @@ package xsbt import xsbti.{ Logger, Severity } import java.net.URL +import java.util.Optional class ScaladocInterface { def run(args: Array[String], log: Logger, delegate: xsbti.Reporter) = @@ -20,13 +21,13 @@ class DottydocRunner(args: Array[String], log: Logger, delegate: xsbti.Reporter) ) private[this] val NoPosition = new xsbti.Position { - val line = xsbti.Maybe.nothing[Integer] + val line = Optional.empty[Integer] val lineContent = "" - val offset = xsbti.Maybe.nothing[Integer] - val sourcePath = xsbti.Maybe.nothing[String] - val sourceFile = xsbti.Maybe.nothing[java.io.File] - val pointer = xsbti.Maybe.nothing[Integer] - val pointerSpace = xsbti.Maybe.nothing[String] + val offset = Optional.empty[Integer] + val sourcePath = Optional.empty[String] + val sourceFile = Optional.empty[java.io.File] + val pointer = Optional.empty[Integer] + val pointerSpace = Optional.empty[String] } private def getStringSetting(name: String): Option[String] = From 024f5fbc1346de015b7a9030cbf50848d791c7ae Mon Sep 17 00:00:00 2001 From: Thierry Treyer Date: Wed, 10 May 2017 18:14:09 +0200 Subject: [PATCH 04/49] Add method SymDenotation.isTopLevelClass --- compiler/src/dotty/tools/dotc/core/SymDenotations.scala | 5 ++++- compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala | 3 +-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 8f53fa063740..386968e59943 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -928,13 +928,16 @@ object SymDenotations { */ final def topLevelClass(implicit ctx: Context): Symbol = { def topLevel(d: SymDenotation): Symbol = { - if (d.isEffectiveRoot || (d is PackageClass) || (d.owner is PackageClass)) d.symbol + if (d.isTopLevelClass) d.symbol else topLevel(d.owner) } val sym = topLevel(this) if (sym.isClass) sym else sym.moduleClass } + final def isTopLevelClass(implicit ctx: Context): Boolean = + this.isEffectiveRoot || (this is PackageClass) || (this.owner is PackageClass) + /** The package class containing this denotation */ final def enclosingPackageClass(implicit ctx: Context): Symbol = if (this is PackageClass) symbol else owner.enclosingPackageClass diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index 30401ab2a5c2..2390931a93cf 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -192,8 +192,7 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder val acc = apiAccess(sym) val modifiers = apiModifiers(sym) val anns = apiAnnotations(sym).toArray - // from SymDenotations.topLevelClass - val topLevel = (sym.isEffectiveRoot || (sym is PackageClass) || (sym.owner is PackageClass)) + val topLevel = sym.isTopLevelClass val childrenOfSealedClass = 
sym.children.sorted(classFirstSort).map(c => apiType(c.info)).toArray val cl = new api.ClassLike( From e1018d1ffac893ec54c5ac97844f0d286df01178 Mon Sep 17 00:00:00 2001 From: Thierry Treyer Date: Mon, 15 May 2017 11:50:28 +0200 Subject: [PATCH 05/49] Remove SimpleType After sbt/zinc#101, SimpleType simply doesn't exist anymore. --- compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index 2390931a93cf..608e05e9ec9d 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -386,7 +386,7 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder sym.owner.thisType else tp.prefix - new api.Projection(simpleType(prefix), sym.name.toString) + new api.Projection(apiType(prefix), sym.name.toString) case AppliedType(tycon, args) => def processArg(arg: Type): api.Type = arg match { case arg @ TypeBounds(lo, hi) => // Handle wildcard parameters @@ -402,7 +402,7 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder apiType(arg) } - val apiTycon = simpleType(tycon) + val apiTycon = apiType(tycon) val apiArgs = args.map(processArg) new api.Parameterized(apiTycon, apiArgs.toArray) case tl: TypeLambda => @@ -498,15 +498,6 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder } } - // TODO: Get rid of this method. See https://github.com/sbt/zinc/issues/101 - def simpleType(tp: Type): api.SimpleType = apiType(tp) match { - case tp: api.SimpleType => - tp - case _ => - ctx.debuglog("sbt-api: Not a simple type: " + tp.show) - Constants.emptyType - } - def apiLazy(tp: => Type): api.Type = { // TODO: The sbt api needs a convenient way to make a lazy type. // For now, we repurpose Structure for this. From 7865186c4b8413f6b603ff23eb729153bd02fb21 Mon Sep 17 00:00:00 2001 From: Thierry Treyer Date: Sun, 21 May 2017 21:40:05 +0200 Subject: [PATCH 06/49] Update bridge for compatibility with Zinc 1.0.0-X15 --- .../tools/dotc/sbt/ExtractDependencies.scala | 83 +++++++++++++++++-- 1 file changed, 74 insertions(+), 9 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index cdec3e4b3366..f30871b7faa0 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -11,9 +11,11 @@ import scala.collection.{Set, mutable} import dotty.tools.io.{AbstractFile, Path, ZipArchive, PlainFile} import java.io.File -import java.util.{Arrays, Comparator} +import java.util.{Arrays, Comparator, EnumSet} import xsbti.api.DependencyContext +import xsbti.UseScope + /** This phase sends information on classes' dependencies to sbt via callbacks. 
* @@ -68,8 +70,22 @@ class ExtractDependencies extends Phase { } if (ctx.sbtCallback != null) { - extractDeps.usedNames.foreach{ case (enclosingName, names) => - names.foreach(name => ctx.sbtCallback.usedName(enclosingName.toString, name.toString)) } + extractDeps.usedNames.foreach{ + case (rawClassName, usedNames) => + val className = rawClassName.toString.trim + usedNames.defaultNames.foreach { rawUsedName => + val useName = rawUsedName.decode.toString.trim + val useScopes = + usedNames.scopedNames.get(rawUsedName) match { + case None => EnumSet.of(UseScope.Default) + case Some(existingScopes) => + existingScopes.add(UseScope.Default) + existingScopes + } + + ctx.sbtCallback.usedName(className, useName, useScopes) + } + } extractDeps.topLevelDependencies.foreach(dep => recordDependency(sourceFile.file, dep, DependencyContext.DependencyByMemberRef)) extractDeps.topLevelInheritanceDependencies.foreach(dep => @@ -118,6 +134,33 @@ class ExtractDependencies extends Phase { } } +private final class NameUsedInClass { + // Default names and other scopes are separated for performance reasons + val defaultNames: mutable.Set[Name] = new mutable.HashSet[Name] + val scopedNames: mutable.Map[Name, EnumSet[UseScope]] = new mutable.HashMap[Name, EnumSet[UseScope]].withDefault(_ => EnumSet.noneOf(classOf[UseScope])) + + // We have to leave with commas on ends + override def toString(): String = { + val builder = new StringBuilder(": ") + defaultNames.foreach { name => + builder.append(name.toString.trim) + val otherScopes = scopedNames.get(name) + scopedNames.get(name) match { + case None => + case Some(otherScopes) => + builder.append(" in [") + otherScopes.forEach(new java.util.function.Consumer[UseScope]() { + def accept(scope: UseScope): Unit = + builder.append(scope.name()).append(", ") + }) + builder.append("]") + } + builder.append(", ") + } + builder.toString() + } +} + /** Extract the dependency information of a compilation unit. * * To understand why we track the used names see the section "Name hashing @@ -126,17 +169,17 @@ class ExtractDependencies extends Phase { * specially, see the subsection "Dependencies introduced by member reference and * inheritance" in the "Name hashing algorithm" section. */ -private class ExtractDependenciesCollector(implicit val ctx: Context) extends tpd.TreeTraverser { +private class ExtractDependenciesCollector(implicit val ctx: Context) extends tpd.TreeTraverser { thisTreeTraverser => import tpd._ - private[this] val _usedNames = new mutable.HashMap[Name, mutable.Set[Name]].withDefault(_ => new mutable.HashSet[Name]) + private[this] val _usedNames = new mutable.HashMap[Name, NameUsedInClass].withDefault(_ => new NameUsedInClass) private[this] val _topLevelDependencies = new mutable.HashSet[Symbol] private[this] val _topLevelInheritanceDependencies = new mutable.HashSet[Symbol] /** The names used in this class, this does not include names which are only * defined and not referenced. */ - def usedNames: collection.Map[Name, Set[Name]] = _usedNames + def usedNames: collection.Map[Name, NameUsedInClass] = _usedNames /** The set of top-level classes that the compilation unit depends on * because it refers to these classes or something defined in them. 
@@ -149,8 +192,12 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp */ def topLevelInheritanceDependencies: Set[Symbol] = _topLevelInheritanceDependencies - private def addUsedName(enclosingName: Name, name: Name) = - _usedNames(enclosingName) += name + private def addUsedName(enclosingName: Name, name: Name) = { + val nameUsed = _usedNames(enclosingName) + nameUsed.defaultNames += name + // TODO: Set correct scope + nameUsed.scopedNames(name).add(UseScope.Default) + } private def addDependency(sym: Symbol)(implicit ctx: Context): Unit = if (!ignoreDependency(sym)) { @@ -183,12 +230,25 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp private def addInheritanceDependency(sym: Symbol): Unit = _topLevelInheritanceDependencies += sym.topLevelClass + private object PatMatDependencyTraverser extends ExtractTypesCollector { + override protected def addDependency(symbol: Symbol): Unit = { + if (!ignoreDependency(symbol) && symbol.is(Sealed)) { + val nameUsed = _usedNames(nonLocalEnclosingClass(symbol).fullName) + + nameUsed.defaultNames += symbol.name + nameUsed.scopedNames(symbol.name).add(UseScope.PatMatTarget) + } + } + } + /** Traverse the tree of a source file and record the dependencies which * can be retrieved using `topLevelDependencies`, `topLevelInheritanceDependencies`, * and `usedNames` */ override def traverse(tree: Tree)(implicit ctx: Context): Unit = { tree match { + case v @ ValDef(_, tpt, _) if v.symbol.is(Case) && v.symbol.is(Synthetic) => + PatMatDependencyTraverser.traverse(tpt.tpe) case Import(expr, selectors) => def lookupImported(name: Name) = expr.tpe.member(name).symbol def addImported(name: Name) = { @@ -254,7 +314,7 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp * The tests in sbt `types-in-used-names-a`, `types-in-used-names-b`, * `as-seen-from-a` and `as-seen-from-b` rely on this. */ - private object usedTypeTraverser extends TypeTraverser { + private class ExtractTypesCollector extends TypeTraverser { val seen = new mutable.HashSet[Type] def traverse(tp: Type): Unit = if (!seen.contains(tp)) { seen += tp @@ -277,5 +337,10 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp traverseChildren(tp) } } + + protected def addDependency(symbol: Symbol): Unit = + thisTreeTraverser.addDependency(symbol) } + + private object usedTypeTraverser extends ExtractTypesCollector } From 3f8a49c6557d979450677d40bf153c31a761b212 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Tue, 6 Jun 2017 14:44:55 +0200 Subject: [PATCH 07/49] Fix for zinc 1.0.0-X16 --- sbt-bridge/src/xsbt/CompilerInterface.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sbt-bridge/src/xsbt/CompilerInterface.scala b/sbt-bridge/src/xsbt/CompilerInterface.scala index b2418d70cd9d..144dcbc6e17e 100644 --- a/sbt-bridge/src/xsbt/CompilerInterface.scala +++ b/sbt-bridge/src/xsbt/CompilerInterface.scala @@ -42,7 +42,7 @@ class CachedCompilerImpl(args: Array[String], output: Output, resident: Boolean) case multi: MultipleOutput => ??? 
case single: SingleOutput => - List("-d", single.outputDirectory.getAbsolutePath.toString) + List("-d", single.getOutputDirectory.getAbsolutePath.toString) } def commandArguments(sources: Array[File]): Array[String] = From 848e41bed8c1b5243f571da83f2ff4aeac06c307 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Tue, 6 Jun 2017 15:03:55 +0200 Subject: [PATCH 08/49] ExtractAPI: fix children of sealed classes --- compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index 608e05e9ec9d..b93d16bc73b7 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -193,7 +193,12 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder val modifiers = apiModifiers(sym) val anns = apiAnnotations(sym).toArray val topLevel = sym.isTopLevelClass - val childrenOfSealedClass = sym.children.sorted(classFirstSort).map(c => apiType(c.info)).toArray + val childrenOfSealedClass = sym.children.sorted(classFirstSort).map(c => + if (c.isClass) + apiType(c.typeRef) + else + apiType(c.valRef) + ).toArray val cl = new api.ClassLike( name.toString, acc, modifiers, anns, defType, strict2lzy(selfType), strict2lzy(structure), Constants.emptyStringArray, From bfd2b0f59fac913e37e66844d912e49d5112af6e Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Tue, 6 Jun 2017 15:37:54 +0200 Subject: [PATCH 09/49] ExtractAPI: add missing startSource callback --- compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala | 3 +++ 1 file changed, 3 insertions(+) diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index b93d16bc73b7..8ebd41e3d9c7 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -46,6 +46,9 @@ class ExtractAPI extends Phase { val forceRun = dumpInc || ctx.settings.YforceSbtPhases.value if ((ctx.sbtCallback != null || forceRun) && !unit.isJava) { val sourceFile = unit.source.file + if (ctx.sbtCallback != null) + ctx.sbtCallback.startSource(sourceFile) + val apiTraverser = new ExtractAPICollector val sources = apiTraverser.apiSource(unit.tpdTree) From d3f50a4e6af5f30368a799760fc6d450babb6441 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Tue, 6 Jun 2017 21:21:52 +0200 Subject: [PATCH 10/49] Fix various issues --- .../dotty/tools/backend/jvm/GenBCode.scala | 28 +++++++-- .../src/dotty/tools/dotc/core/Names.scala | 3 +- .../src/dotty/tools/dotc/sbt/ExtractAPI.scala | 17 +++++- .../tools/dotc/sbt/ExtractDependencies.scala | 57 ++++++++++++------- 4 files changed, 76 insertions(+), 29 deletions(-) diff --git a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala index 93a273af1e9b..ca8c5f0807fc 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala @@ -17,6 +17,7 @@ import java.util.Optional import scala.reflect.ClassTag import dotty.tools.dotc.core._ +import dotty.tools.dotc.sbt.ExtractDependencies import Periods._ import SymDenotations._ import Contexts._ @@ -134,6 +135,8 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter private val poison3 = Item3(Int.MaxValue, null, null, null, null) private val q3 = new java.util.PriorityQueue[Item3](1000, i3comparator) + private 
val srcClassNames = new mutable.HashMap[String, String] + /* * Pipeline that takes ClassDefs from queue-1, lowers them into an intermediate form, placing them on queue-2 */ @@ -230,6 +233,15 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter // ----------- hand over to pipeline-2 + val srcClassName = ctx.atPhase(ctx.typerPhase) { implicit ctx => + ExtractDependencies.extractedName(claszSymbol) + } + for (cls <- List(mirrorC, plainC, beanC)) { + if (cls != null) { + srcClassNames += (cls.name -> srcClassName) + } + } + val item2 = Item2(arrivalPos, mirrorC, plainC, beanC, @@ -403,13 +415,21 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter else getFileForClassfile(outFolder, jclassName, ".class") bytecodeWriter.writeClass(jclassName, jclassName, jclassBytes, outFile) - val className = jclassName.replace('/', '.') + val srcClassName = srcClassNames(jclassName) + if (ctx.compilerCallback != null) - ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(outFile), className) - if (ctx.sbtCallback != null) + ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(outFile), srcClassName) + if (ctx.sbtCallback != null) { // ctx.sbtCallback.generatedClass(sourceFile.jfile.orElse(null), outFile.file, className) // TODO: Check - ctx.sbtCallback.generatedNonLocalClass(sourceFile.jfile.orElse(null), outFile.file, jclassName, className) + val isLocal = srcClassName.contains("_$") + if (isLocal) + ctx.sbtCallback.generatedLocalClass(sourceFile.jfile.orElse(null), outFile.file) + else { + ctx.sbtCallback.generatedNonLocalClass(sourceFile.jfile.orElse(null), outFile.file, + jclassName, srcClassName) + } + } } catch { case e: FileConflictException => diff --git a/compiler/src/dotty/tools/dotc/core/Names.scala b/compiler/src/dotty/tools/dotc/core/Names.scala index af92bbb4ecdb..62f0e38860eb 100644 --- a/compiler/src/dotty/tools/dotc/core/Names.scala +++ b/compiler/src/dotty/tools/dotc/core/Names.scala @@ -424,7 +424,8 @@ object Names { "dotty$tools$dotc$core$NameOps$NameDecorator$$functionArityFor$extension", "dotty$tools$dotc$typer$Checking$CheckNonCyclicMap$$apply", "$plus$plus", - "readConstant") + "readConstant", + "extractedName") .contains(elem.getMethodName)) } diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index 8ebd41e3d9c7..32325b43425c 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -112,6 +112,7 @@ class ExtractAPI extends Phase { private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder { import tpd._ import xsbti.api + import ExtractDependencies.extractedName /** This cache is necessary for correctness, see the comment about inherited * members in `apiClassStructure` @@ -187,7 +188,7 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder val selfType = apiType(sym.givenSelfType) - val name = if (sym.is(ModuleClass)) sym.fullName.sourceModuleName else sym.fullName + val name = extractedName(sym) val tparams = sym.typeParams.map(apiTypeParameter).toArray @@ -204,12 +205,22 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder ).toArray val cl = new api.ClassLike( - name.toString, acc, modifiers, anns, defType, strict2lzy(selfType), strict2lzy(structure), Constants.emptyStringArray, + name, acc, modifiers, anns, defType, strict2lzy(selfType), strict2lzy(structure), 
Constants.emptyStringArray, childrenOfSealedClass, topLevel, tparams) + // if (name.toString.contains("DottyPredef")) { + // println("sym: " + sym) + // println("name: " + name) + // ctx.atPhase(ctx.flattenPhase.next) { implicit ctx => + // println("flatten: " + sym.fullName.toString) + // println("flattenm: " + sym.fullName.mangledString) + // } + // println("flattenx: " + toDenot(sym.binaryName.toString) + // } + allNonLocalClassesInSrc += cl - new api.ClassLikeDef(name.toString, acc, modifiers, anns, tparams, defType); + new api.ClassLikeDef(name, acc, modifiers, anns, tparams, defType) } private[this] val LegacyAppClass = ctx.requiredClass("dotty.runtime.LegacyApp") diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index f30871b7faa0..10954569302d 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -37,6 +37,8 @@ import xsbti.UseScope * @see ExtractAPI */ class ExtractDependencies extends Phase { + import ExtractDependencies._ + override def phaseName: String = "sbt-deps" // This phase should be run directly after `Frontend`, if it is run after @@ -69,12 +71,13 @@ class ExtractDependencies extends Phase { } finally pw.close() } + // println("extractDeps.usedNames: " + extractDeps.usedNames) if (ctx.sbtCallback != null) { extractDeps.usedNames.foreach{ case (rawClassName, usedNames) => - val className = rawClassName.toString.trim + val className = rawClassName.toString usedNames.defaultNames.foreach { rawUsedName => - val useName = rawUsedName.decode.toString.trim + val useName = rawUsedName.toString val useScopes = usedNames.scopedNames.get(rawUsedName) match { case None => EnumSet.of(UseScope.Default) @@ -109,7 +112,7 @@ class ExtractDependencies extends Phase { def className(classSegments: List[String]) = classSegments.mkString(".").stripSuffix(".class") def binaryDependency(file: File, className: String) = - ctx.sbtCallback.binaryDependency(file, className, currentClass.fullName.toString, currentSourceFile, context) + ctx.sbtCallback.binaryDependency(file, className, extractedName(currentClass), currentSourceFile, context) depFile match { case ze: ZipArchive#Entry => @@ -128,12 +131,22 @@ class ExtractDependencies extends Phase { ctx.warning(s"sbt-deps: Ignoring dependency $depFile of class ${depFile.getClass}") } } else if (depFile.file != currentSourceFile) { - ctx.sbtCallback.classDependency(dep.enclosingClass.fullName.toString, currentClass.fullName.toString, context) + ctx.sbtCallback.classDependency(extractedName(dep.enclosingClass), extractedName(currentClass), context) } } } } +object ExtractDependencies { + def extractedName(sym: Symbol)(implicit ctx: Context): String = + // ctx.atPhase(ctx.flattenPhase.next) { implicit ctx => + if (sym.is(ModuleClass)) + sym.fullName.stripModuleClassSuffix.toString + else + sym.fullName.toString + // } +} + private final class NameUsedInClass { // Default names and other scopes are separated for performance reasons val defaultNames: mutable.Set[Name] = new mutable.HashSet[Name] @@ -171,15 +184,16 @@ private final class NameUsedInClass { */ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tpd.TreeTraverser { thisTreeTraverser => import tpd._ + import ExtractDependencies._ - private[this] val _usedNames = new mutable.HashMap[Name, NameUsedInClass].withDefault(_ => new NameUsedInClass) + private[this] val _usedNames = new 
mutable.HashMap[String, NameUsedInClass] private[this] val _topLevelDependencies = new mutable.HashSet[Symbol] private[this] val _topLevelInheritanceDependencies = new mutable.HashSet[Symbol] /** The names used in this class, this does not include names which are only * defined and not referenced. */ - def usedNames: collection.Map[Name, NameUsedInClass] = _usedNames + def usedNames: collection.Map[String, NameUsedInClass] = _usedNames /** The set of top-level classes that the compilation unit depends on * because it refers to these classes or something defined in them. @@ -192,8 +206,9 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp */ def topLevelInheritanceDependencies: Set[Symbol] = _topLevelInheritanceDependencies - private def addUsedName(enclosingName: Name, name: Name) = { - val nameUsed = _usedNames(enclosingName) + private def addUsedName(enclosingSym: Symbol, name: Name) = { + val enclosingName = extractedName(enclosingSym) + val nameUsed = _usedNames.getOrElseUpdate(enclosingName, new NameUsedInClass) nameUsed.defaultNames += name // TODO: Set correct scope nameUsed.scopedNames(name).add(UseScope.Default) @@ -204,21 +219,20 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp val tlClass = sym.topLevelClass if (tlClass.ne(NoSymbol)) // Some synthetic type aliases like AnyRef do not belong to any class _topLevelDependencies += sym.topLevelClass - addUsedName(nonLocalEnclosingClass(sym).fullName, sym.name) + addUsedName(nonLocalEnclosingClass(ctx.owner), sym.name) } - private def isLocal(sym: Symbol)(implicit ctx: Context): Boolean = { - val owner = sym.maybeOwner - owner.isTerm || - owner.is(Trait) && isLocal(owner) || - sym.isConstructor && isLocal(owner) - } + private def isLocal(sym: Symbol)(implicit ctx: Context): Boolean = + sym.ownersIterator.exists(_.isTerm) private def nonLocalEnclosingClass(sym: Symbol)(implicit ctx: Context): Symbol = sym.enclosingClass match { case NoSymbol => NoSymbol - case sym: Symbol if (isLocal(sym)) => sym - case sym: Symbol => nonLocalEnclosingClass(sym.owner) + case csym => + if (isLocal(csym)) + nonLocalEnclosingClass(csym.owner) + else + csym } private def ignoreDependency(sym: Symbol) = @@ -231,9 +245,10 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp _topLevelInheritanceDependencies += sym.topLevelClass private object PatMatDependencyTraverser extends ExtractTypesCollector { - override protected def addDependency(symbol: Symbol): Unit = { + override protected def addDependency(symbol: Symbol)(implicit ctx: Context): Unit = { if (!ignoreDependency(symbol) && symbol.is(Sealed)) { - val nameUsed = _usedNames(nonLocalEnclosingClass(symbol).fullName) + val encName = nonLocalEnclosingClass(ctx.owner).fullName.stripModuleClassSuffix.mangledString + val nameUsed = _usedNames.getOrElseUpdate(encName, new NameUsedInClass) nameUsed.defaultNames += symbol.name nameUsed.scopedNames(symbol.name).add(UseScope.PatMatTarget) @@ -262,7 +277,7 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp case Thicket(Ident(name) :: Ident(rename) :: Nil) => addImported(name) if (rename ne nme.WILDCARD) - addUsedName(nonLocalEnclosingClass(ctx.owner).fullName, rename) + addUsedName(nonLocalEnclosingClass(ctx.owner), rename) case _ => } case Inlined(call, _, _) => @@ -338,7 +353,7 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp } } - protected def addDependency(symbol: Symbol): Unit = + protected def 
addDependency(symbol: Symbol)(implicit ctx: Context): Unit = thisTreeTraverser.addDependency(symbol) } From f02dacba553b0f914686976eb5bf3ad30e345551 Mon Sep 17 00:00:00 2001 From: Thierry Treyer Date: Wed, 7 Jun 2017 19:33:09 +0200 Subject: [PATCH 11/49] Propagate class files through GenBCode pipeline Some of sbt's callbacks need the full name of the class that can only be accessed during the first step of the GenBCode pipeline. So we call the callbacks in the first step, generating at the same time the class files, and keep those files through the whole pipeline so the last step can write the bytecode in them. --- .../dotty/tools/backend/jvm/GenBCode.scala | 124 ++++++++++-------- 1 file changed, 67 insertions(+), 57 deletions(-) diff --git a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala index ca8c5f0807fc..426e6a03a2e0 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala @@ -92,15 +92,17 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter /* ---------------- q2 ---------------- */ - case class Item2(arrivalPos: Int, - mirror: asm.tree.ClassNode, - plain: asm.tree.ClassNode, - bean: asm.tree.ClassNode, - outFolder: scala.tools.nsc.io.AbstractFile) { + case class SubItem2(classNode: asm.tree.ClassNode, + file: scala.tools.nsc.io.AbstractFile) + + case class Item2(arrivalPos: Int, + mirror: SubItem2, + plain: SubItem2, + bean: SubItem2) { def isPoison = { arrivalPos == Int.MaxValue } } - private val poison2 = Item2(Int.MaxValue, null, null, null, null) + private val poison2 = Item2(Int.MaxValue, null, null, null) private val q2 = new _root_.java.util.LinkedList[Item2] /* ---------------- q3 ---------------- */ @@ -114,14 +116,14 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter */ case class SubItem3( jclassName: String, - jclassBytes: Array[Byte] + jclassBytes: Array[Byte], + jclassFile: scala.tools.nsc.io.AbstractFile ) case class Item3(arrivalPos: Int, mirror: SubItem3, plain: SubItem3, - bean: SubItem3, - outFolder: scala.tools.nsc.io.AbstractFile) { + bean: SubItem3) { def isPoison = { arrivalPos == Int.MaxValue } } @@ -132,11 +134,9 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter else 1 } } - private val poison3 = Item3(Int.MaxValue, null, null, null, null) + private val poison3 = Item3(Int.MaxValue, null, null, null) private val q3 = new java.util.PriorityQueue[Item3](1000, i3comparator) - private val srcClassNames = new mutable.HashMap[String, String] - /* * Pipeline that takes ClassDefs from queue-1, lowers them into an intermediate form, placing them on queue-2 */ @@ -231,21 +231,54 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter ) } else null - // ----------- hand over to pipeline-2 + // ----------- create files + + // @smarter try/catch around getFileForClassfile needed? 
+ val mirrorFileC = + if (mirrorC != null && outF != null) + getFileForClassfile(outF, mirrorC.name, ".class") + else null + + val plainFileC = + if (outF == null) null + else getFileForClassfile(outF, plainC.name, ".class") + + val beanFileC = + if (beanC != null && outF != null) + getFileForClassfile(outF, beanC.name, ".class") + else null + + // ----------- sbt's callbacks - val srcClassName = ctx.atPhase(ctx.typerPhase) { implicit ctx => + val fullClassName = ctx.atPhase(ctx.typerPhase) { implicit ctx => ExtractDependencies.extractedName(claszSymbol) } - for (cls <- List(mirrorC, plainC, beanC)) { + val isLocal = fullClassName.contains("_$") + + for ((cls, clsFile) <- List((plainC, plainFileC), (mirrorC, mirrorFileC), (beanC, beanFileC))) { if (cls != null) { - srcClassNames += (cls.name -> srcClassName) + if (ctx.compilerCallback != null) + ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(clsFile), fullClassName) + if (ctx.sbtCallback != null) { + // ctx.sbtCallback.generatedClass(sourceFile.jfile.orElse(null), clsFile.file, fullClassName) + // TODO: Check + if (isLocal) + ctx.sbtCallback.generatedLocalClass(sourceFile.jfile.orElse(null), clsFile.file) + else { + ctx.sbtCallback.generatedNonLocalClass(sourceFile.jfile.orElse(null), clsFile.file, + cls.name, fullClassName) + } + } } } + // ----------- hand over to pipeline-2 + val item2 = Item2(arrivalPos, - mirrorC, plainC, beanC, - outF) + SubItem2(mirrorC, mirrorFileC), + SubItem2(plainC, plainFileC), + SubItem2(beanC, beanFileC)) q2 add item2 // at the very end of this method so that no Worker2 thread starts mutating before we're done. @@ -275,12 +308,12 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter } else { try { - localOptimizations(item.plain) + localOptimizations(item.plain.classNode) addToQ3(item) } catch { case ex: Throwable => ex.printStackTrace() - ctx.error(s"Error while emitting ${item.plain.name}\n${ex.getMessage}") + ctx.error(s"Error while emitting ${item.plain.classNode.name}\n${ex.getMessage}") } } } @@ -294,11 +327,14 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter cw.toByteArray } - val Item2(arrivalPos, mirror, plain, bean, outFolder) = item + val Item2(arrivalPos, + SubItem2(mirror, mirrorFile), + SubItem2(plain, plainFile), + SubItem2(bean, beanFile)) = item - val mirrorC = if (mirror == null) null else SubItem3(mirror.name, getByteArray(mirror)) - val plainC = SubItem3(plain.name, getByteArray(plain)) - val beanC = if (bean == null) null else SubItem3(bean.name, getByteArray(bean)) + val mirrorC = if (mirror == null) null else SubItem3(mirror.name, getByteArray(mirror), mirrorFile) + val plainC = SubItem3(plain.name, getByteArray(plain), plainFile) + val beanC = if (bean == null) null else SubItem3(bean.name, getByteArray(bean), beanFile) if (AsmUtils.traceSerializedClassEnabled && plain.name.contains(AsmUtils.traceSerializedClassPattern)) { if (mirrorC != null) AsmUtils.traceClass(mirrorC.jclassBytes) @@ -306,7 +342,7 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter if (beanC != null) AsmUtils.traceClass(beanC.jclassBytes) } - q3 add Item3(arrivalPos, mirrorC, plainC, beanC, outFolder) + q3 add Item3(arrivalPos, mirrorC, plainC, beanC) } @@ -406,35 +442,10 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter /* Pipeline that writes classfile representations to disk. 
    */
   private def drainQ3() = {

-    def sendToDisk(cfr: SubItem3, outFolder: scala.tools.nsc.io.AbstractFile): Unit = {
+    def sendToDisk(cfr: SubItem3): Unit = {
       if (cfr != null){
-        val SubItem3(jclassName, jclassBytes) = cfr
-        try {
-          val outFile =
-            if (outFolder == null) null
-            else getFileForClassfile(outFolder, jclassName, ".class")
-          bytecodeWriter.writeClass(jclassName, jclassName, jclassBytes, outFile)
-
-          val srcClassName = srcClassNames(jclassName)
-
-          if (ctx.compilerCallback != null)
-            ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(outFile), srcClassName)
-          if (ctx.sbtCallback != null) {
-            // ctx.sbtCallback.generatedClass(sourceFile.jfile.orElse(null), outFile.file, className)
-            // TODO: Check
-            val isLocal = srcClassName.contains("_$")
-            if (isLocal)
-              ctx.sbtCallback.generatedLocalClass(sourceFile.jfile.orElse(null), outFile.file)
-            else {
-              ctx.sbtCallback.generatedNonLocalClass(sourceFile.jfile.orElse(null), outFile.file,
-                jclassName, srcClassName)
-            }
-          }
-        }
-        catch {
-          case e: FileConflictException =>
-            ctx.error(s"error writing $jclassName: ${e.getMessage}")
-        }
+        val SubItem3(jclassName, jclassBytes, jclassFile) = cfr
+        bytecodeWriter.writeClass(jclassName, jclassName, jclassBytes, jclassFile)
       }
     }

@@ -447,10 +458,9 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter
       moreComing = !incoming.isPoison
       if (moreComing) {
         val item = incoming
-        val outFolder = item.outFolder
-        sendToDisk(item.mirror, outFolder)
-        sendToDisk(item.plain, outFolder)
-        sendToDisk(item.bean, outFolder)
+        sendToDisk(item.mirror)
+        sendToDisk(item.plain)
+        sendToDisk(item.bean)
         expected += 1
       }
     }

From d7dba97d93da8f0f597fa43a7c8697ab06a467b7 Mon Sep 17 00:00:00 2001
From: Thierry Treyer
Date: Wed, 7 Jun 2017 22:17:14 +0200
Subject: [PATCH 12/49] Catch FileConflictException creating class files

A FileConflictException may occur when creating a class file.
Add a check for this error and refactor a bit of code around it.
---
 .../dotty/tools/backend/jvm/GenBCode.scala | 34 +++++++++----------
 1 file changed, 16 insertions(+), 18 deletions(-)

diff --git a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala
index 426e6a03a2e0..40841c7a51b2 100644
--- a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala
+++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala
@@ -233,20 +233,18 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter

       // ----------- create files

-      // @smarter try/catch around getFileForClassfile needed?
- val mirrorFileC = - if (mirrorC != null && outF != null) - getFileForClassfile(outF, mirrorC.name, ".class") - else null - - val plainFileC = - if (outF == null) null - else getFileForClassfile(outF, plainC.name, ".class") - - val beanFileC = - if (beanC != null && outF != null) - getFileForClassfile(outF, beanC.name, ".class") - else null + val classNodes = List(mirrorC, plainC, beanC) + val classFiles = classNodes.map(cls => + if (outF != null && cls != null) { + try { + getFileForClassfile(outF, cls.name, ".class") + } catch { + case e: FileConflictException => + ctx.error(s"error writing ${cls.name}: ${e.getMessage}") + null + } + } else null + ) // ----------- sbt's callbacks @@ -255,7 +253,7 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter } val isLocal = fullClassName.contains("_$") - for ((cls, clsFile) <- List((plainC, plainFileC), (mirrorC, mirrorFileC), (beanC, beanFileC))) { + for ((cls, clsFile) <- classNodes.zip(classFiles)) { if (cls != null) { if (ctx.compilerCallback != null) ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(clsFile), fullClassName) @@ -276,9 +274,9 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter val item2 = Item2(arrivalPos, - SubItem2(mirrorC, mirrorFileC), - SubItem2(plainC, plainFileC), - SubItem2(beanC, beanFileC)) + SubItem2(mirrorC, classFiles(0)), + SubItem2(plainC, classFiles(1)), + SubItem2(beanC, classFiles(2))) q2 add item2 // at the very end of this method so that no Worker2 thread starts mutating before we're done. From fb9c61189a1b036596f4e6f8c817d8b589adb41a Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Thu, 12 Oct 2017 17:32:11 +0200 Subject: [PATCH 13/49] Adapt to latest Zinc --- .../src/dotty/tools/dotc/sbt/ExtractAPI.scala | 88 ++++---- .../tools/dotc/sbt/ExtractDependencies.scala | 178 ++++++++++----- .../dotty/tools/dotc/sbt/ThunkHolder.scala | 91 +------- .../tools/dotc/transform/patmat/Space.scala | 2 +- project/Build.scala | 96 ++++---- project/VersionUtil.scala | 2 +- project/build.properties | 2 +- project/plugins.sbt | 14 +- project/scripted.sbt | 4 +- sbt-bridge/src/xsbt/CompilerInterface.scala | 14 +- sbt-bridge/src/xsbt/DelegatingReporter.scala | 10 +- sbt-bridge/src/xsbt/Message.scala | 4 +- sbt-bridge/src/xsbt/Problem.scala | 12 + sbt-bridge/src/xsbt/ScaladocInterface.scala | 4 +- .../test/xsbt/DependencySpecification.scala | 136 +++++------ .../test/xsbt/ExtractAPISpecification.scala | 41 ++-- .../xsbt/ExtractUsedNamesSpecification.scala | 211 ++++++++++-------- .../xsbt/ScalaCompilerForUnitTesting.scala | 112 +++++----- sbt-bridge/test/xsbti/TestCallback.scala | 111 ++++++--- .../tools/sbtplugin/DottyIDEPlugin.scala | 8 +- .../dotty/tools/sbtplugin/DottyPlugin.scala | 102 ++++----- 21 files changed, 657 insertions(+), 585 deletions(-) create mode 100644 sbt-bridge/src/xsbt/Problem.scala diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index 32325b43425c..a8e8c2a93e83 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -47,7 +47,7 @@ class ExtractAPI extends Phase { if ((ctx.sbtCallback != null || forceRun) && !unit.isJava) { val sourceFile = unit.source.file if (ctx.sbtCallback != null) - ctx.sbtCallback.startSource(sourceFile) + ctx.sbtCallback.startSource(sourceFile.file) val apiTraverser = new ExtractAPICollector val sources = apiTraverser.apiSource(unit.tpdTree) @@ -130,13 +130,13 @@ 
private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder private[this] object Constants { val emptyStringArray = Array[String]() - val local = new api.ThisQualifier - val public = new api.Public - val privateLocal = new api.Private(local) - val protectedLocal = new api.Protected(local) - val unqualified = new api.Unqualified - val thisPath = new api.This - val emptyType = new api.EmptyType + val local = api.ThisQualifier.create() + val public = api.Public.create() + val privateLocal = api.Private.create(local) + val protectedLocal = api.Protected.create(local) + val unqualified = api.Unqualified.create() + val thisPath = api.This.create() + val emptyType = api.EmptyType.create() val emptyModifiers = new api.Modifiers(false, false, false, false, false,false, false, false) } @@ -150,9 +150,9 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder * @param marker A special annotation to differentiate our type */ private def withMarker(tp: api.Type, marker: api.Annotation) = - new api.Annotated(tp, Array(marker)) + api.Annotated.of(tp, Array(marker)) private def marker(name: String) = - new api.Annotation(new api.Constant(Constants.emptyType, name), Array()) + api.Annotation.of(api.Constant.of(Constants.emptyType, name), Array()) val typeArgRefMarker = marker("TypeArgRef") val orMarker = marker("Or") val byNameMarker = marker("ByName") @@ -201,11 +201,11 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder if (c.isClass) apiType(c.typeRef) else - apiType(c.valRef) + apiType(c.termRef) ).toArray - val cl = new api.ClassLike( - name, acc, modifiers, anns, defType, strict2lzy(selfType), strict2lzy(structure), Constants.emptyStringArray, + val cl = api.ClassLike.of( + name, acc, modifiers, anns, defType, api.SafeLazy.strict(selfType), api.SafeLazy.strict(structure), Constants.emptyStringArray, childrenOfSealedClass, topLevel, tparams) // if (name.toString.contains("DottyPredef")) { @@ -220,7 +220,7 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder allNonLocalClassesInSrc += cl - new api.ClassLikeDef(name, acc, modifiers, anns, tparams, defType) + api.ClassLikeDef.of(name, acc, modifiers, anns, tparams, defType) } private[this] val LegacyAppClass = ctx.requiredClass("dotty.runtime.LegacyApp") @@ -260,7 +260,7 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder // this works because of `classLikeCache` val apiInherited = lzy(apiDefinitions(inherited).toArray) - new api.Structure(strict2lzy(apiBases.toArray), strict2lzy(apiDecls.toArray), apiInherited) + api.Structure.of(api.SafeLazy.strict(apiBases.toArray), api.SafeLazy.strict(apiDecls.toArray), apiInherited) } def linearizedAncestorTypes(info: ClassInfo): List[Type] = { @@ -306,10 +306,10 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder } else if (sym.isType) { apiTypeMember(sym.asType) } else if (sym.is(Mutable, butNot = Accessor)) { - new api.Var(sym.name.toString, apiAccess(sym), apiModifiers(sym), + api.Var.of(sym.name.toString, apiAccess(sym), apiModifiers(sym), apiAnnotations(sym).toArray, apiType(sym.info)) } else if (sym.isStable) { - new api.Val(sym.name.toString, apiAccess(sym), apiModifiers(sym), + api.Val.of(sym.name.toString, apiAccess(sym), apiModifiers(sym), apiAnnotations(sym).toArray, apiType(sym.info)) } else { apiDef(sym.asTerm) @@ -336,9 +336,9 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder } else (0 until 
pnames.length).map(Function.const(false)) val params = (pnames, ptypes, defaults).zipped.map((pname, ptype, isDefault) => - new api.MethodParameter(pname.toString, apiType(ptype), + api.MethodParameter.of(pname.toString, apiType(ptype), isDefault, api.ParameterModifier.Plain)) - new api.ParameterList(params.toArray, mt.isImplicitMethod) :: paramLists(restpe, params.length) + api.ParameterList.of(params.toArray, mt.isImplicitMethod) :: paramLists(restpe, params.length) case _ => Nil } @@ -353,7 +353,7 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder val vparamss = paramLists(sym.info) val retTp = sym.info.finalResultType.widenExpr - new api.Def(sym.name.toString, apiAccess(sym), apiModifiers(sym), + api.Def.of(sym.name.toString, apiAccess(sym), apiModifiers(sym), apiAnnotations(sym).toArray, tparams.toArray, vparamss.toArray, apiType(retTp)) } @@ -366,17 +366,17 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder val tpe = sym.info if (sym.isAliasType) - new api.TypeAlias(name, access, modifiers, as.toArray, typeParams, apiType(tpe.bounds.hi)) + api.TypeAlias.of(name, access, modifiers, as.toArray, typeParams, apiType(tpe.bounds.hi)) else { assert(sym.isAbstractType) - new api.TypeDeclaration(name, access, modifiers, as.toArray, typeParams, apiType(tpe.bounds.lo), apiType(tpe.bounds.hi)) + api.TypeDeclaration.of(name, access, modifiers, as.toArray, typeParams, apiType(tpe.bounds.lo), apiType(tpe.bounds.hi)) } } // Hack to represent dotty types which don't have an equivalent in xsbti def combineApiTypes(apiTps: api.Type*): api.Type = { - new api.Structure(strict2lzy(apiTps.toArray), - strict2lzy(Array()), strict2lzy(Array())) + api.Structure.of(api.SafeLazy.strict(apiTps.toArray), + api.SafeLazy.strict(Array()), api.SafeLazy.strict(Array())) } def apiType(tp: Type): api.Type = { @@ -405,7 +405,7 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder sym.owner.thisType else tp.prefix - new api.Projection(apiType(prefix), sym.name.toString) + api.Projection.of(apiType(prefix), sym.name.toString) case AppliedType(tycon, args) => def processArg(arg: Type): api.Type = arg match { case arg @ TypeBounds(lo, hi) => // Handle wildcard parameters @@ -413,8 +413,8 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder Constants.emptyType else { val name = "_" - val ref = new api.ParameterRef(name) - new api.Existential(ref, + val ref = api.ParameterRef.of(name) + api.Existential.of(ref, Array(apiTypeParameter(name, 0, lo, hi))) } case _ => @@ -423,21 +423,21 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder val apiTycon = apiType(tycon) val apiArgs = args.map(processArg) - new api.Parameterized(apiTycon, apiArgs.toArray) + api.Parameterized.of(apiTycon, apiArgs.toArray) case tl: TypeLambda => val apiTparams = tl.typeParams.map(apiTypeParameter) val apiRes = apiType(tl.resType) - new api.Polymorphic(apiRes, apiTparams.toArray) + api.Polymorphic.of(apiRes, apiTparams.toArray) case rt: RefinedType => val name = rt.refinedName.toString val parent = apiType(rt.parent) def typeRefinement(name: String, tp: TypeBounds): api.TypeMember = tp match { case TypeAlias(alias) => - new api.TypeAlias(name, + api.TypeAlias.of(name, Constants.public, Constants.emptyModifiers, Array(), Array(), apiType(alias)) case TypeBounds(lo, hi) => - new api.TypeDeclaration(name, + api.TypeDeclaration.of(name, Constants.public, Constants.emptyModifiers, Array(), Array(), 
apiType(lo), apiType(hi)) } val decl = rt.refinedInfo match { @@ -471,7 +471,7 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder // `refinedTypeCache` is for. refinedTypeCache.getOrElseUpdate((parent, decl), { val adecl: Array[api.ClassDefinition] = if (decl == null) Array() else Array(decl) - new api.Structure(strict2lzy(Array(parent)), strict2lzy(adecl), strict2lzy(Array())) + api.Structure.of(api.SafeLazy.strict(Array(parent)), api.SafeLazy.strict(adecl), api.SafeLazy.strict(Array())) }) case tp: RecType => apiType(tp.parent) @@ -494,15 +494,15 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder case ExprType(resultType) => withMarker(apiType(resultType), byNameMarker) case ConstantType(constant) => - new api.Constant(apiType(constant.tpe), constant.stringValue) + api.Constant.of(apiType(constant.tpe), constant.stringValue) case AnnotatedType(tpe, annot) => - new api.Annotated(apiType(tpe), Array(apiAnnotation(annot))) + api.Annotated.of(apiType(tpe), Array(apiAnnotation(annot))) case tp: ThisType => apiThis(tp.cls) case tp: ParamRef => // TODO: Distinguishing parameters based on their names alone is not enough, // the binder is also needed (at least for type lambdas). - new api.ParameterRef(tp.paramName.toString) + api.ParameterRef.of(tp.paramName.toString) case tp: LazyRef => apiType(tp.ref) case tp: TypeVar => @@ -521,13 +521,13 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder // TODO: The sbt api needs a convenient way to make a lazy type. // For now, we repurpose Structure for this. val apiTp = lzy(Array(apiType(tp))) - new api.Structure(apiTp, strict2lzy(Array()), strict2lzy(Array())) + api.Structure.of(apiTp, api.SafeLazy.strict(Array()), api.SafeLazy.strict(Array())) } def apiThis(sym: Symbol): api.Singleton = { val pathComponents = sym.ownersIterator.takeWhile(!_.isEffectiveRoot) - .map(s => new api.Id(s.name.toString)) - new api.Singleton(new api.Path(pathComponents.toArray.reverse ++ Array(Constants.thisPath))) + .map(s => api.Id.of(s.name.toString)) + api.Singleton.of(api.Path.of(pathComponents.toArray.reverse ++ Array(Constants.thisPath))) } def apiTypeParameter(tparam: ParamInfo): api.TypeParameter = @@ -535,7 +535,7 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder tparam.paramInfo.bounds.lo, tparam.paramInfo.bounds.hi) def apiTypeParameter(name: String, variance: Int, lo: Type, hi: Type): api.TypeParameter = - new api.TypeParameter(name, Array(), Array(), apiVariance(variance), + api.TypeParameter.of(name, Array(), Array(), apiVariance(variance), apiType(lo), apiType(hi)) def apiVariance(v: Int): api.Variance = { @@ -559,11 +559,11 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder if (sym.privateWithin eq NoSymbol) Constants.unqualified else - new api.IdQualifier(sym.privateWithin.fullName.toString) + api.IdQualifier.of(sym.privateWithin.fullName.toString) if (sym.is(Protected)) - new api.Protected(qualifier) + api.Protected.of(qualifier) else - new api.Private(qualifier) + api.Private.of(qualifier) } } @@ -608,8 +608,8 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder // However, we still need to extract the annotation type in the way sbt expect // because sbt uses this information to find tests to run (for example // junit tests are annotated @org.junit.Test). 
- new api.Annotation( + api.Annotation.of( apiType(annot.tree.tpe), // Used by sbt to find tests to run - Array(new api.AnnotationArgument("FULLTREE", annot.tree.show.toString))) + Array(api.AnnotationArgument.of("FULLTREE", annot.tree.show.toString))) } } diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index 10954569302d..5d80684b4edf 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -3,6 +3,7 @@ package sbt import ast.{Trees, tpd} import core._, core.Decorators._ +import util.NoSource.{file => NoSourceFile} import Contexts._, Flags._, Phases._, Trees._, Types._, Symbols._ import Names._, NameOps._, StdNames._ @@ -52,7 +53,16 @@ class ExtractDependencies extends Phase { val forceRun = dumpInc || ctx.settings.YforceSbtPhases.value if ((ctx.sbtCallback != null || forceRun) && !unit.isJava) { val sourceFile = unit.source.file - val extractDeps = new ExtractDependenciesCollector + val responsibleOfImports = firstClassOrModule(unit.tpdTree) match { + case None => + ctx.warning("""|No class, trait or object is defined in the compilation unit. + |The incremental compiler cannot record the dependency information in such case. + |Some errors like unused import referring to a non-existent class might not be reported. + |""".stripMargin, unit.tpdTree.pos) + defn.RootClass + case Some(sym) => sym + } + val extractDeps = new ExtractDependenciesCollector(responsibleOfImports) extractDeps.traverse(unit.tpdTree) if (dumpInc) { @@ -71,7 +81,6 @@ class ExtractDependencies extends Phase { } finally pw.close() } - // println("extractDeps.usedNames: " + extractDeps.usedNames) if (ctx.sbtCallback != null) { extractDeps.usedNames.foreach{ case (rawClassName, usedNames) => @@ -90,13 +99,59 @@ class ExtractDependencies extends Phase { } } extractDeps.topLevelDependencies.foreach(dep => - recordDependency(sourceFile.file, dep, DependencyContext.DependencyByMemberRef)) + recordDependency(sourceFile.file, dep._2, DependencyContext.DependencyByMemberRef)(ctx.withOwner(dep._1))) extractDeps.topLevelInheritanceDependencies.foreach(dep => - recordDependency(sourceFile.file, dep, DependencyContext.DependencyByInheritance)) + recordDependency(sourceFile.file, dep._2, DependencyContext.DependencyByInheritance)(ctx.withOwner(dep._1))) + } + } + } + + private def firstClassOrModule(tree: tpd.Tree)(implicit ctx: Context): Option[Symbol] = { + import tpd._ + val acc = new TreeAccumulator[Option[Symbol]] { + def apply(x: Option[Symbol], t: Tree)(implicit ctx: Context) = + if (x.isDefined) x + else t match { + case moduleDef: Thicket => + Some(moduleDef.symbol) + case typeDef: TypeDef => + Some(typeDef.symbol) + case other => + foldOver(x, other) + } + } + acc(None, tree) + } + + private def classFile(sym: Symbol)(implicit ctx: Context): Option[AbstractFile] = { + // package can never have a corresponding class file; this test does not + // catch package objects (that do not have this flag set) + if (sym.is(Package)) None + else { + val file = Option(sym.associatedFile) + + Option(sym.associatedFile).flatMap { + case NoSourceFile => + if (isTopLevelModule(sym)) { + val linked = sym.companionClass + if (linked == NoSymbol) + None + else + classFile(linked) + } else + None + case file => + Some(file) } } } + protected def isTopLevelModule(sym: Symbol)(implicit ctx: Context): Boolean = + // enteringPhase(currentRun.picklerPhase.next) { + 
sym.is(ModuleClass) && sym.owner.is(PackageClass) + // } + + /** Record that `currentSourceFile` depends on the file where `dep` was loaded from. * * @param currentSourceFile The source file of the current unit @@ -105,34 +160,41 @@ class ExtractDependencies extends Phase { */ def recordDependency(currentSourceFile: File, dep: Symbol, context: DependencyContext) (implicit ctx: Context) = { - val depFile = dep.associatedFile - if (depFile != null) { - if (depFile.path.endsWith(".class")) { - /** Transform `List(java, lang, String.class)` into `java.lang.String` */ - def className(classSegments: List[String]) = - classSegments.mkString(".").stripSuffix(".class") - def binaryDependency(file: File, className: String) = - ctx.sbtCallback.binaryDependency(file, className, extractedName(currentClass), currentSourceFile, context) - - depFile match { - case ze: ZipArchive#Entry => - for (zip <- ze.underlyingSource; zipFile <- Option(zip.file)) { - val classSegments = Path(ze.path).segments - binaryDependency(zipFile, className(classSegments)) - } - case pf: PlainFile => - val packages = dep.ownersIterator - .filter(x => x.is(PackageClass) && !x.isEffectiveRoot).length - // We can recover the fully qualified name of a classfile from - // its path - val classSegments = pf.givenPath.segments.takeRight(packages + 1) - binaryDependency(pf.file, className(classSegments)) - case _ => - ctx.warning(s"sbt-deps: Ignoring dependency $depFile of class ${depFile.getClass}") - } - } else if (depFile.file != currentSourceFile) { - ctx.sbtCallback.classDependency(extractedName(dep.enclosingClass), extractedName(currentClass), context) + val onSource = dep.sourceFile + if (onSource == null) { + // Dependency is external -- source is undefined + classFile(dep) match { + case Some(at) => + def className(classSegments: List[String]) = + classSegments.mkString(".").stripSuffix(".class") + def binaryDependency(file: File, className: String) = { + ctx.sbtCallback.binaryDependency(file, className, extractedName(currentClass), currentSourceFile, context) + } + + at match { + case ze: ZipArchive#Entry => + for (zip <- ze.underlyingSource; zipFile <- Option(zip.file)) { + val classSegments = Path(ze.path).segments + binaryDependency(zipFile, className(classSegments)) + } + case pf: PlainFile => + val packages = dep.ownersIterator + .filter(x => x.is(PackageClass) && !x.isEffectiveRoot).length + // We can recover the fully qualified name of a classfile from + // its path + val classSegments = pf.givenPath.segments.takeRight(packages + 1) + binaryDependency(pf.file, className(classSegments)) + case _ => + ctx.warning(s"sbt-deps: Ignoring dependency $at of class ${at.getClass}") + } + + case None => + ctx.debuglog(s"No file for external symbol $dep") } + } else if (onSource.file != currentSourceFile) { + ctx.sbtCallback.classDependency(extractedName(dep.enclosingClass), extractedName(currentClass), context) + } else { + () } } } @@ -161,11 +223,13 @@ private final class NameUsedInClass { scopedNames.get(name) match { case None => case Some(otherScopes) => - builder.append(" in [") - otherScopes.forEach(new java.util.function.Consumer[UseScope]() { - def accept(scope: UseScope): Unit = + // Pickling tests fail when this is turned in an anonymous class + class Consumer extends java.util.function.Consumer[UseScope]() { + override def accept(scope: UseScope): Unit = builder.append(scope.name()).append(", ") - }) + } + builder.append(" in [") + otherScopes.forEach(new Consumer) builder.append("]") } builder.append(", ") @@ -182,13 
+246,13 @@ private final class NameUsedInClass { * specially, see the subsection "Dependencies introduced by member reference and * inheritance" in the "Name hashing algorithm" section. */ -private class ExtractDependenciesCollector(implicit val ctx: Context) extends tpd.TreeTraverser { thisTreeTraverser => +private class ExtractDependenciesCollector(responsibleForImports: Symbol)(implicit val ctx: Context) extends tpd.TreeTraverser { thisTreeTraverser => import tpd._ import ExtractDependencies._ private[this] val _usedNames = new mutable.HashMap[String, NameUsedInClass] - private[this] val _topLevelDependencies = new mutable.HashSet[Symbol] - private[this] val _topLevelInheritanceDependencies = new mutable.HashSet[Symbol] + private[this] val _topLevelDependencies = new mutable.HashSet[(Symbol, Symbol)] + private[this] val _topLevelInheritanceDependencies = new mutable.HashSet[(Symbol, Symbol)] /** The names used in this class, this does not include names which are only * defined and not referenced. @@ -199,15 +263,18 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp * because it refers to these classes or something defined in them. * This is always a superset of `topLevelInheritanceDependencies` by definition. */ - def topLevelDependencies: Set[Symbol] = _topLevelDependencies + def topLevelDependencies: Set[(Symbol, Symbol)] = _topLevelDependencies /** The set of top-level classes that the compilation unit extends or that * contain a non-top-level class that the compilaion unit extends. */ - def topLevelInheritanceDependencies: Set[Symbol] = _topLevelInheritanceDependencies + def topLevelInheritanceDependencies: Set[(Symbol, Symbol)] = _topLevelInheritanceDependencies private def addUsedName(enclosingSym: Symbol, name: Name) = { - val enclosingName = extractedName(enclosingSym) + val enclosingName = enclosingSym match { + case sym if sym == defn.RootClass => ExtractDependencies.extractedName(responsibleForImports) + case sym => extractedName(sym) + } val nameUsed = _usedNames.getOrElseUpdate(enclosingName, new NameUsedInClass) nameUsed.defaultNames += name // TODO: Set correct scope @@ -217,8 +284,14 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp private def addDependency(sym: Symbol)(implicit ctx: Context): Unit = if (!ignoreDependency(sym)) { val tlClass = sym.topLevelClass - if (tlClass.ne(NoSymbol)) // Some synthetic type aliases like AnyRef do not belong to any class - _topLevelDependencies += sym.topLevelClass + if (tlClass.ne(NoSymbol)) { + if (currentClass == defn.RootClass) { + _topLevelDependencies += ((responsibleForImports, tlClass)) + } else { + // Some synthetic type aliases like AnyRef do not belong to any class + _topLevelDependencies += ((currentClass, tlClass)) + } + } addUsedName(nonLocalEnclosingClass(ctx.owner), sym.name) } @@ -241,10 +314,10 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp sym.isAnonymousFunction || sym.isAnonymousClass - private def addInheritanceDependency(sym: Symbol): Unit = - _topLevelInheritanceDependencies += sym.topLevelClass + private def addInheritanceDependency(sym: Symbol)(implicit ctx: Context): Unit = + _topLevelInheritanceDependencies += ((currentClass, sym.topLevelClass)) - private object PatMatDependencyTraverser extends ExtractTypesCollector { + private class PatMatDependencyTraverser(ctx0: Context) extends ExtractTypesCollector(ctx0) { override protected def addDependency(symbol: Symbol)(implicit ctx: Context): Unit = { if 
(!ignoreDependency(symbol) && symbol.is(Sealed)) { val encName = nonLocalEnclosingClass(ctx.owner).fullName.stripModuleClassSuffix.mangledString @@ -263,7 +336,7 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp override def traverse(tree: Tree)(implicit ctx: Context): Unit = { tree match { case v @ ValDef(_, tpt, _) if v.symbol.is(Case) && v.symbol.is(Synthetic) => - PatMatDependencyTraverser.traverse(tpt.tpe) + new PatMatDependencyTraverser(ctx).traverse(tpt.tpe) case Import(expr, selectors) => def lookupImported(name: Name) = expr.tpe.member(name).symbol def addImported(name: Name) = { @@ -276,8 +349,9 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp addImported(name) case Thicket(Ident(name) :: Ident(rename) :: Nil) => addImported(name) - if (rename ne nme.WILDCARD) + if (rename ne nme.WILDCARD) { addUsedName(nonLocalEnclosingClass(ctx.owner), rename) + } case _ => } case Inlined(call, _, _) => @@ -285,10 +359,10 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp // record it as a dependency traverse(call) case t: TypeTree => - usedTypeTraverser.traverse(t.tpe) + new usedTypeTraverser(ctx).traverse(t.tpe) case ref: RefTree => addDependency(ref.symbol) - usedTypeTraverser.traverse(ref.tpe) + new usedTypeTraverser(ctx).traverse(ref.tpe) case t @ Template(_, parents, _, _) => t.parents.foreach(p => addInheritanceDependency(p.tpe.classSymbol)) case _ => @@ -329,7 +403,7 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp * The tests in sbt `types-in-used-names-a`, `types-in-used-names-b`, * `as-seen-from-a` and `as-seen-from-b` rely on this. */ - private class ExtractTypesCollector extends TypeTraverser { + private class ExtractTypesCollector(ctx0: Context) extends TypeTraverser()(ctx0) { val seen = new mutable.HashSet[Type] def traverse(tp: Type): Unit = if (!seen.contains(tp)) { seen += tp @@ -357,5 +431,5 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp thisTreeTraverser.addDependency(symbol) } - private object usedTypeTraverser extends ExtractTypesCollector + private class usedTypeTraverser(ctx0: Context) extends ExtractTypesCollector(ctx0) } diff --git a/compiler/src/dotty/tools/dotc/sbt/ThunkHolder.scala b/compiler/src/dotty/tools/dotc/sbt/ThunkHolder.scala index 350819e3a3ce..2315c40955e4 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ThunkHolder.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ThunkHolder.scala @@ -25,97 +25,8 @@ private[sbt] trait ThunkHolder { * It will be forced by the next call to `forceThunks()` */ def lzy[T <: AnyRef](t: => T): api.Lazy[T] = { - val l = SafeLazyWrapper(() => t) + val l = api.SafeLazy.apply(() => t) thunks += l l } - - /** Store the parameter `s` in a `Lazy` container, since `s` is not by-name, there - * is nothing to force. - * - * TODO: Get rid of this method. It is only needed because some xsbti.api classes - * take lazy arguments when they could be strict, but this can be fixed in sbt, - * see https://github.com/sbt/zinc/issues/114 - */ - def strict2lzy[T <: AnyRef](t: T): api.Lazy[T] = - SafeLazyWrapper.strict(t) -} - -/** Wrapper around SafeLazy implementations. - * - * `xsbti.SafeLazy` is part of sbt but it is not part of the `interface` jar - * that dotty depends on, therefore we can only access it by reflection, - * and this will only succeed when dotty is run by sbt (otherwise - * `xsbti.SafeLazy` won't be on the classpath at all). 
- * - * For testing purposes, we still want to be able to run the sbt phases outside - * of sbt, using `-Yforce-sbt-phases` and `-Ydump-sbt-inc`, therefore we - * provide a copy of SafeLazy in `dotty.tools.dotc.sbt.SafeLazy` that we use - * when `xsbti.SafeLazy` is unavailable. - * - * This raises a question: why bother with `xsbti.SafeLazy` if we have our own - * version anyway? Because sbt uses Java serialization to persist the output of - * the incremental compilation analysis when sbt is stopped and restarted. If - * we used `dotty.tools.dotc.sbt.SafeLazy` with sbt, deserialization would fail - * and every restart of sbt would require a full recompilation. - * - * Note: this won't be needed once we switch to zinc 1.0 where `SafeLazy` becomes - * part of the `interface` jar, see https://github.com/sbt/zinc/issues/113 - */ -private object SafeLazyWrapper { - - @sharable private[this] val safeLazy = - try { - Class.forName("xsbti.SafeLazy") - } catch { - case e: ClassNotFoundException => - null - } - - @sharable private[this] val safeLazyApply = - if (safeLazy != null) - safeLazy.getMethod("apply", classOf[xsbti.F0[_]]) - else - null - @sharable private[this] val safeLazyStrict = - if (safeLazy != null) - safeLazy.getMethod("strict", classOf[Object]) - else - null - - def apply[T <: AnyRef](eval: () => T): xsbti.api.Lazy[T] = - if (safeLazyApply != null) - safeLazyApply - .invoke(null, new xsbti.F0[T] { def apply() = eval() }) - .asInstanceOf[xsbti.api.Lazy[T]] - else - SafeLazy(eval) - - def strict[T <: AnyRef](value: T): xsbti.api.Lazy[T] = - if (safeLazyStrict != null) - safeLazyStrict - .invoke(null, value) - .asInstanceOf[xsbti.api.Lazy[T]] - else - SafeLazy.strict(value) -} - -// Adapted from https://github.com/sbt/sbt/blob/0.13/compile/api/src/main/scala/xsbti/SafeLazy.scala -private object SafeLazy { - def apply[T <: AnyRef](eval: () => T): xsbti.api.Lazy[T] = - new Impl(eval) - - def strict[T <: AnyRef](value: T): xsbti.api.Lazy[T] = - new Strict(value) - - private[this] final class Impl[T <: AnyRef](private[this] var eval: () => T) extends xsbti.api.AbstractLazy[T] { - private[this] lazy val _t = { - val t = eval() - eval = null // clear the reference, ensuring the only memory we hold onto is the result - t - } - def get(): T = _t - } - - private[this] final class Strict[T <: AnyRef](val get: T) extends xsbti.api.Lazy[T] with java.io.Serializable } diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index ee17927a2598..cfbfea837365 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -501,7 +501,7 @@ class SpaceEngine(implicit ctx: Context) extends SpaceLogic { /** Decompose a type into subspaces -- assume the type can be decomposed */ def decompose(tp: Type): List[Space] = { - val children = tp.classSymbol.children + val children = tp.classSymbol.denot.children debug.println(s"candidates for ${tp.show} : [${children.map(_.show).mkString(", ")}]") diff --git a/project/Build.scala b/project/Build.scala index 2210837300ee..b2cf87ea902d 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -10,7 +10,6 @@ import scala.reflect.io.Path import sbt.Package.ManifestAttributes -// import sbt.ScriptedPlugin.autoImport._ import dotty.tools.sbtplugin.DottyPlugin.autoImport._ import dotty.tools.sbtplugin.DottyIDEPlugin.{ prepareCommand, runProcess } import dotty.tools.sbtplugin.DottyIDEPlugin.autoImport._ @@ 
-18,6 +17,16 @@ import dotty.tools.sbtplugin.DottyIDEPlugin.autoImport._ import pl.project13.scala.sbt.JmhPlugin import JmhPlugin.JmhKeys.Jmh +import sbt.ScriptedPlugin.autoImport._ + +import xerial.sbt.pack.PackPlugin.packSettings +import xerial.sbt.pack.PackPlugin.autoImport._ + +import org.scalajs.sbtplugin.ScalaJSPlugin +import ScalaJSPlugin.autoImport._ + +import com.typesafe.sbteclipse.plugin.EclipsePlugin.EclipseKeys + import Modes._ /* In sbt 0.13 the Build trait would expose all vals to the shell, where you @@ -129,7 +138,7 @@ object Build { resourceDirectory in Test := baseDirectory.value / "test-resources", // Prevent sbt from rewriting our dependencies - ivyScala ~= (_.map(_.withOverrideScalaVersion(false))) + scalaModuleInfo ~= (_.map(_.withOverrideScalaVersion(false))) ) // Settings used for projects compiled only with Scala 2 @@ -151,7 +160,7 @@ object Build { // Avoid having to run `dotty-sbt-bridge/publishLocal` before compiling a bootstrapped project scalaCompilerBridgeSource := - (dottyOrganization %% "dotty-sbt-bridge" % "NOT_PUBLISHED" % Configurations.Component.name) + (dottyOrganization %% "dotty-sbt-bridge" % "NOT_PUBLISHED") .artifacts(Artifact.sources("dotty-sbt-bridge").withUrl( // We cannot use the `packageSrc` task because a setting cannot depend // on a task. Instead, we make `compile` below depend on the bridge `packageSrc` @@ -190,7 +199,7 @@ object Build { Seq( dottyOrganization % "dotty-library_2.11" % dottyNonBootstrappedVersion % Configurations.ScalaTool.name, dottyOrganization % "dotty-compiler_2.11" % dottyNonBootstrappedVersion % Configurations.ScalaTool.name - )//.map(_.withDottyCompat()) + )//.map(_.withDottyCompat(scalaVersion.value)) else Seq() }, @@ -273,8 +282,8 @@ object Build { // - publishes its own empty artifact "dotty" that depends on "dotty-library" and "dotty-compiler", // this is only necessary for compatibility with sbt which currently hardcodes the "dotty" artifact name lazy val dotty = project.in(file(".")).asDottyRoot(NonBootstrapped) - lazy val `dotty-bootstrapped` = project.asDottyRoot(Bootstrapped) - lazy val `dotty-optimised` = project.asDottyRoot(BootstrappedOptimised) + lazy val `dotty-bootstrapped` = project.asDottyRoot(Bootstrapped).disablePlugins(ScriptedPlugin) + lazy val `dotty-optimised` = project.asDottyRoot(BootstrappedOptimised).disablePlugins(ScriptedPlugin) lazy val `dotty-interfaces` = project.in(file("interfaces")). settings(commonScala2Settings). // Java-only project, so this is fine @@ -352,8 +361,8 @@ object Build { ) lazy val `dotty-doc` = project.in(file("doc-tool")).asDottyDoc(NonBootstrapped) - lazy val `dotty-doc-bootstrapped` = project.in(file("doc-tool")).asDottyDoc(Bootstrapped) - lazy val `dotty-doc-optimised` = project.in(file("doc-tool")).asDottyDoc(BootstrappedOptimised) + lazy val `dotty-doc-bootstrapped` = project.in(file("doc-tool")).asDottyDoc(Bootstrapped).disablePlugins(ScriptedPlugin) + lazy val `dotty-doc-optimised` = project.in(file("doc-tool")).asDottyDoc(BootstrappedOptimised).disablePlugins(ScriptedPlugin) def dottyDoc(implicit mode: Mode): Project = mode match { case NonBootstrapped => `dotty-doc` @@ -420,7 +429,7 @@ object Build { } catch { case _: UnsupportedOperationException | _: FileSystemException => // If the OS doesn't support symbolic links, copy the directory instead. 
- sbt.IO.copy(pairs, overwrite = true, preserveLastModified = true) + sbt.IO.copy(pairs, CopyOptions(overwrite = true, preserveLastModified = true, preserveExecutable = true)) } pairs.map(_._2) @@ -455,7 +464,7 @@ object Build { }.taskValue, // get libraries onboard - libraryDependencies ++= Seq("org.scala-sbt" % "compiler-interface" % "1.0.0-X16", + libraryDependencies ++= Seq("org.scala-sbt" % "compiler-interface" % "1.0.2", ("org.scala-lang.modules" % "scala-xml_2.12" % "1.0.6"), "com.novocode" % "junit-interface" % "0.11" % "test", "org.scala-lang" % "scala-library" % scalacVersion % "test"), @@ -498,7 +507,8 @@ object Build { println("Couldn't find scala-library on classpath, please run using script in bin dir instead") } else { val dottyLib = packageAll.value("dotty-library") - val exitCode = new java.lang.ProcessBuilder("java", "-classpath", s""".:$dottyLib:$scalaLib ${args.mkString(" ")}""") + val allArgs = Seq("java", "-classpath", s".:$dottyLib:$scalaLib") ++ args + val exitCode = new java.lang.ProcessBuilder(allArgs: _*) .inheritIO() .start() .waitFor() @@ -603,7 +613,8 @@ object Build { // used for tests that compile dotty path.contains("scala-asm") || // needed for the xsbti interface - path.contains("sbt-interface") + path.contains("compiler-interface") || + path.contains("util-interface") } yield "-Xbootclasspath/p:" + path val ci_build = // propagate if this is a ci build @@ -659,8 +670,8 @@ object Build { if (mode == NonBootstrapped) nonBootstrapedDottyCompilerSettings else bootstrapedDottyCompilerSettings lazy val `dotty-compiler` = project.in(file("compiler")).asDottyCompiler(NonBootstrapped) - lazy val `dotty-compiler-bootstrapped` = project.in(file("compiler")).asDottyCompiler(Bootstrapped) - lazy val `dotty-compiler-optimised` = project.in(file("compiler")).asDottyCompiler(BootstrappedOptimised) + lazy val `dotty-compiler-bootstrapped` = project.in(file("compiler")).asDottyCompiler(Bootstrapped).disablePlugins(ScriptedPlugin) + lazy val `dotty-compiler-optimised` = project.in(file("compiler")).asDottyCompiler(BootstrappedOptimised).disablePlugins(ScriptedPlugin) def dottyCompiler(implicit mode: Mode): Project = mode match { case NonBootstrapped => `dotty-compiler` @@ -677,8 +688,8 @@ object Build { ) lazy val `dotty-library` = project.in(file("library")).asDottyLibrary(NonBootstrapped) - lazy val `dotty-library-bootstrapped`: Project = project.in(file("library")).asDottyLibrary(Bootstrapped) - lazy val `dotty-library-optimised`: Project = project.in(file("library")).asDottyLibrary(BootstrappedOptimised) + lazy val `dotty-library-bootstrapped`: Project = project.in(file("library")).asDottyLibrary(Bootstrapped).disablePlugins(ScriptedPlugin) + lazy val `dotty-library-optimised`: Project = project.in(file("library")).asDottyLibrary(BootstrappedOptimised).disablePlugins(ScriptedPlugin) def dottyLibrary(implicit mode: Mode): Project = mode match { case NonBootstrapped => `dotty-library` @@ -710,10 +721,10 @@ object Build { description := "sbt compiler bridge for Dotty", resolvers += Resolver.typesafeIvyRepo("releases"), // For org.scala-sbt:api libraryDependencies ++= Seq( - "org.scala-sbt" % "compiler-interface" % "1.0.0-X16", - "org.scala-sbt" % "zinc-apiinfo_2.12" % "1.0.0-X16" % "test", - ("org.specs2" %% "specs2-core" % "3.9.1" % "test"),//.withDottyCompat() - ("org.specs2" %% "specs2-junit" % "3.9.1" % "test")//.withDottyCompat() + "org.scala-sbt" % "compiler-interface" % "1.0.2", + ("org.scala-sbt" %% "zinc-apiinfo" % "1.0.2" % 
"test").withDottyCompat(scalaVersion.value), + ("org.specs2" %% "specs2-core" % "3.9.1" % "test").withDottyCompat(scalaVersion.value), + ("org.specs2" %% "specs2-junit" % "3.9.1" % "test").withDottyCompat(scalaVersion.value) ), // The sources should be published with crossPaths := false since they // need to be compiled by the project using the bridge. @@ -727,7 +738,7 @@ object Build { ) lazy val `dotty-sbt-bridge` = project.in(file("sbt-bridge")).asDottySbtBridge(NonBootstrapped) - lazy val `dotty-sbt-bridge-bootstrapped` = project.in(file("sbt-bridge")).asDottySbtBridge(Bootstrapped) + lazy val `dotty-sbt-bridge-bootstrapped` = project.in(file("sbt-bridge")).asDottySbtBridge(Bootstrapped).disablePlugins(ScriptedPlugin) lazy val `dotty-language-server` = project.in(file("language-server")). dependsOn(dottyCompiler(Bootstrapped)). @@ -762,7 +773,7 @@ object Build { runTask(Runtime, mainClass, allArgs: _*) }.dependsOn(compile in (`vscode-dotty`, Compile)).evaluated - ) + ).disablePlugins(ScriptedPlugin) /** A sandbox to play with the Scala.js back-end of dotty. * @@ -807,8 +818,8 @@ object Build { ))) lazy val `dotty-bench` = project.in(file("bench")).asDottyBench(NonBootstrapped) - lazy val `dotty-bench-bootstrapped` = project.in(file("bench")).asDottyBench(Bootstrapped) - lazy val `dotty-bench-optimised` = project.in(file("bench")).asDottyBench(BootstrappedOptimised) + lazy val `dotty-bench-bootstrapped` = project.in(file("bench")).asDottyBench(Bootstrapped).disablePlugins(ScriptedPlugin) + lazy val `dotty-bench-optimised` = project.in(file("bench")).asDottyBench(BootstrappedOptimised).disablePlugins(ScriptedPlugin) // Depend on dotty-library so that sbt projects using dotty automatically // depend on the dotty-library @@ -846,22 +857,18 @@ object Build { // Keep in sync with inject-sbt-dotty.sbt libraryDependencies ++= Seq( Dependencies.`jackson-databind`, - "org.scala-sbt" % "compiler-interface" % "1.0.0-X16", - // "org.scala-sbt" %% "scripted-plugin" % sbtVersion.value + "org.scala-sbt" % "compiler-interface" % "1.0.2" ), unmanagedSourceDirectories in Compile += baseDirectory.value / "../language-server/src/dotty/tools/languageserver/config", - - sbtPlugin := true, - version := "0.1.7", - ScriptedPlugin.scriptedSettings, - ScriptedPlugin.sbtTestDirectory := baseDirectory.value / "sbt-test", - ScriptedPlugin.scriptedLaunchOpts += "-Dplugin.version=" + version.value, - ScriptedPlugin.scriptedLaunchOpts += "-Dplugin.scalaVersion=" + dottyVersion, - // By default scripted tests use $HOME/.ivy2 for the ivy cache. We need to override this value for the CI. - ScriptedPlugin.scriptedLaunchOpts ++= ivyPaths.value.ivyHome.map("-Dsbt.ivy.home=" + _.getAbsolutePath).toList, - ScriptedPlugin.scripted := ScriptedPlugin.scripted.dependsOn(Def.task { + version := "0.1.6-SNAPSHOT", + sbtTestDirectory := baseDirectory.value / "sbt-test", + scriptedLaunchOpts += "-Dplugin.version=" + version.value, + scriptedLaunchOpts += "-Dplugin.scalaVersion=" + dottyVersion, + // By default scripted tests use $HOME/.ivy2 for the ivy cache. We need to override this value for the CI. + scriptedLaunchOpts ++= ivyPaths.value.ivyHome.map("-Dsbt.ivy.home=" + _.getAbsolutePath).toList, + scripted := scripted.dependsOn(Def.task { val x0 = (publishLocal in `dotty-sbt-bridge-bootstrapped`).value val x1 = (publishLocal in `dotty-interfaces`).value val x2 = (publishLocal in `dotty-compiler-bootstrapped`).value @@ -875,7 +882,7 @@ object Build { lazy val `vscode-dotty` = project.in(file("vscode-dotty")). 
settings(commonSettings). settings( - EclipseKeys.skipProject := true, + EclipseKeys.skipProject := true, version := "0.1.2", // Keep in sync with package.json @@ -1005,7 +1012,8 @@ object Build { compile := { val inputs = (compileInputs in compile).value - import inputs.config._ + val inputOptions = inputs.options() + import inputOptions._ val s = streams.value val logger = s.log @@ -1028,6 +1036,7 @@ object Build { // Compile + val run = (runner in compile).value val cachedCompile = FileFunction.cached(cacheDir / "compile", FilesInfo.lastModified, FilesInfo.exists) { dependencies => @@ -1058,13 +1067,12 @@ object Build { } def doCompile(sourcesArgs: List[String]): Unit = { - val run = (runner in compile).value run.run("dotty.tools.dotc.Main", compilerCp, "-classpath" :: cpStr :: "-d" :: classesDirectory.getAbsolutePath() :: - options ++: + scalacOptions ++: sourcesArgs, - patchedLogger) foreach sys.error + patchedLogger) } // Work around the Windows limitation on command line length. @@ -1087,7 +1095,7 @@ object Build { cachedCompile((sources ++ allMyDependencies).toSet) // We do not have dependency analysis when compiling externally - sbt.inc.Analysis.Empty + sbt.internal.inc.Analysis.Empty } )) } @@ -1117,8 +1125,8 @@ object Build { ) lazy val dist = project.asDist(NonBootstrapped) - lazy val `dist-bootstrapped` = project.asDist(Bootstrapped) - lazy val `dist-optimised` = project.asDist(BootstrappedOptimised) + lazy val `dist-bootstrapped` = project.asDist(Bootstrapped).disablePlugins(ScriptedPlugin) + lazy val `dist-optimised` = project.asDist(BootstrappedOptimised).disablePlugins(ScriptedPlugin) implicit class ProjectDefinitions(val project: Project) extends AnyVal { diff --git a/project/VersionUtil.scala b/project/VersionUtil.scala index 399a468591c8..c127c8fee000 100644 --- a/project/VersionUtil.scala +++ b/project/VersionUtil.scala @@ -6,7 +6,7 @@ object VersionUtil { if (System.getProperty("os.name").toLowerCase.contains("windows")) s"cmd.exe /c project\\scripts\\build\\$scriptName.bat -p" else s"project/scripts/build/$scriptName" - Process(cmd).lines.head.trim + Process(cmd).lineStream.head.trim } /** Seven letters of the SHA hash is considered enough to uniquely identify a diff --git a/project/build.properties b/project/build.properties index cd66fd542cf2..b7dd3cb2ae83 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.0.0-M6 +sbt.version=1.0.2 diff --git a/project/plugins.sbt b/project/plugins.sbt index 6b24922932e3..ef60384df746 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -3,17 +3,17 @@ // e.g. 
addSbtPlugin("com.github.mpeltonen" % "sbt-idea" % "1.1.0") // Scala IDE project file generator -addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "5.1.0") +addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "5.2.3") -addSbtPlugin("org.scala-js" % "sbt-scalajs" % "0.6.14") +addSbtPlugin("org.scala-js" % "sbt-scalajs" % "0.6.21") -addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.4") +addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.5") -addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "1.1") +addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "2.0") -addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.0.0") +addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.0") -addSbtPlugin("org.xerial.sbt" % "sbt-pack" % "0.8.2") +addSbtPlugin("org.xerial.sbt" % "sbt-pack" % "0.9.1") -addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.2.24") +addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.2.27") diff --git a/project/scripted.sbt b/project/scripted.sbt index c3b5976d568e..a7d7ecccf2a9 100644 --- a/project/scripted.sbt +++ b/project/scripted.sbt @@ -1,4 +1,2 @@ // Used by the subproject dotty-bridge -// libraryDependencies += "org.scala-sbt" %% "scripted-plugin" % sbtVersion.value -// val sbtV = sbtVersion.value -//addSbtPlugin("org.scala-sbt" %% "scripted-plugin" % "1.0.0-M6") +libraryDependencies += "org.scala-sbt" %% "scripted-plugin" % sbtVersion.value diff --git a/sbt-bridge/src/xsbt/CompilerInterface.scala b/sbt-bridge/src/xsbt/CompilerInterface.scala index 144dcbc6e17e..631524e17ebb 100644 --- a/sbt-bridge/src/xsbt/CompilerInterface.scala +++ b/sbt-bridge/src/xsbt/CompilerInterface.scala @@ -3,7 +3,7 @@ */ package xsbt -import xsbti.{ AnalysisCallback, Logger, Problem, Reporter, Severity } +import xsbti.{ AnalysisCallback, Logger, Reporter, Severity } import xsbti.compile._ import Log.debug import java.io.File @@ -15,19 +15,19 @@ import dotty.tools.dotc.interfaces._ import java.net.URLClassLoader final class CompilerInterface { - def newCompiler(options: Array[String], output: Output, initialLog: Logger, - initialDelegate: Reporter, resident: Boolean): CachedCompiler = { + def newCompiler(options: Array[String], output: Output, initialLog: xsbti.Logger, + initialDelegate: xsbti.Reporter): CachedCompiler = { // The classloader that sbt uses to load the compiler bridge is broken // (see CompilerClassLoader#fixBridgeLoader for details). 
To workaround // this we construct our own ClassLoader and then run the following code // with it: - // new CachedCompilerImpl(options, output, resident) + // new CachedCompilerImpl(options, output) val bridgeLoader = getClass.getClassLoader val fixedLoader = CompilerClassLoader.fixBridgeLoader(bridgeLoader) val cciClass = fixedLoader.loadClass("xsbt.CachedCompilerImpl") cciClass.getConstructors.head - .newInstance(options, output, resident: java.lang.Boolean) + .newInstance(options, output) .asInstanceOf[CachedCompiler] } @@ -36,7 +36,7 @@ final class CompilerInterface { cached.run(sources, changes, callback, log, delegate, progress) } -class CachedCompilerImpl(args: Array[String], output: Output, resident: Boolean) extends CachedCompiler { +class CachedCompilerImpl(args: Array[String], output: Output) extends CachedCompiler { val outputArgs = output match { case multi: MultipleOutput => @@ -66,6 +66,6 @@ class CachedCompilerImpl(args: Array[String], output: Output, resident: Boolean) } } -class InterfaceCompileFailed(override val arguments: Array[String], override val problems: Array[Problem]) extends xsbti.CompileFailed { +class InterfaceCompileFailed(override val arguments: Array[String], override val problems: Array[xsbti.Problem]) extends xsbti.CompileFailed { override val toString = "Compilation failed" } diff --git a/sbt-bridge/src/xsbt/DelegatingReporter.scala b/sbt-bridge/src/xsbt/DelegatingReporter.scala index 0b096a2336e3..6ac533a9cb13 100644 --- a/sbt-bridge/src/xsbt/DelegatingReporter.scala +++ b/sbt-bridge/src/xsbt/DelegatingReporter.scala @@ -9,7 +9,7 @@ import reporting._ import reporting.diagnostic.MessageContainer import reporting.diagnostic.messages import core.Contexts._ -import xsbti.Position +import xsbti.{Position, Severity} import java.util.Optional final class DelegatingReporter(delegate: xsbti.Reporter) extends Reporter @@ -23,9 +23,9 @@ final class DelegatingReporter(delegate: xsbti.Reporter) extends Reporter def doReport(cont: MessageContainer)(implicit ctx: Context): Unit = { val severity = cont match { - case _: messages.Error => xsbti.Severity.Error - case _: messages.Warning => xsbti.Severity.Warn - case _ => xsbti.Severity.Info + case _: messages.Error => Severity.Error + case _: messages.Warning => Severity.Warn + case _ => Severity.Info } val position = @@ -52,7 +52,7 @@ final class DelegatingReporter(delegate: xsbti.Reporter) extends Reporter sb.append(explanation(cont.contained())) } - delegate.log(position, sb.toString(), severity) + delegate.log(Problem(position, sb.toString(), severity)) } private[this] def maybe[T](opt: Option[T]): Optional[T] = opt match { diff --git a/sbt-bridge/src/xsbt/Message.scala b/sbt-bridge/src/xsbt/Message.scala index 48f24f53349f..ef45577a7739 100644 --- a/sbt-bridge/src/xsbt/Message.scala +++ b/sbt-bridge/src/xsbt/Message.scala @@ -3,6 +3,8 @@ */ package xsbt +import java.util.function.Supplier + object Message { - def apply[T](s: => T) = new xsbti.F0[T] { def apply() = s } + def apply[T](s: => T) = new Supplier[T] { def get() = s } } diff --git a/sbt-bridge/src/xsbt/Problem.scala b/sbt-bridge/src/xsbt/Problem.scala new file mode 100644 index 000000000000..f92d8c284f89 --- /dev/null +++ b/sbt-bridge/src/xsbt/Problem.scala @@ -0,0 +1,12 @@ +package xsbt + +import xsbti.{Position, Severity} + +final case class Problem(override val position: Position, + override val message: String, + override val severity: Severity) extends xsbti.Problem { + override val category = "" + override def toString = s"[$severity] $position: 
$message" + +} + diff --git a/sbt-bridge/src/xsbt/ScaladocInterface.scala b/sbt-bridge/src/xsbt/ScaladocInterface.scala index 387b54e13673..c5124d8996e0 100644 --- a/sbt-bridge/src/xsbt/ScaladocInterface.scala +++ b/sbt-bridge/src/xsbt/ScaladocInterface.scala @@ -13,12 +13,12 @@ class ScaladocInterface { } class DottydocRunner(args: Array[String], log: Logger, delegate: xsbti.Reporter) { - def run(): Unit = delegate.log( + def run(): Unit = delegate.log(Problem( NoPosition, """|The dotty sbt-bridge currently does not support doc generation directly |via sbt. Please see the dotty documentation at dotty.epfl.ch""".stripMargin, Severity.Error - ) + )) private[this] val NoPosition = new xsbti.Position { val line = Optional.empty[Integer] diff --git a/sbt-bridge/test/xsbt/DependencySpecification.scala b/sbt-bridge/test/xsbt/DependencySpecification.scala index 60545091b666..0fbd285ee35d 100644 --- a/sbt-bridge/test/xsbt/DependencySpecification.scala +++ b/sbt-bridge/test/xsbt/DependencySpecification.scala @@ -8,80 +8,80 @@ import xsbt.api.SameAPI import org.specs2.mutable.Specification import org.specs2.runner.JUnitRunner -import ScalaCompilerForUnitTesting.ExtractedSourceDependencies +import xsbti.TestCallback.ExtractedClassDependencies @RunWith(classOf[JUnitRunner]) class DependencySpecification extends Specification { "Extracted source dependencies from public members" in { - val sourceDependencies = extractSourceDependenciesPublic - val memberRef = sourceDependencies.memberRef - val inheritance = sourceDependencies.inheritance - memberRef('A) === Set.empty - inheritance('A) === Set.empty - memberRef('B) === Set('A, 'D) - inheritance('B) === Set('D) - memberRef('C) === Set('A) - inheritance('C) === Set.empty - memberRef('D) === Set.empty - inheritance('D) === Set.empty - memberRef('E) === Set.empty - inheritance('E) === Set.empty - memberRef('F) === Set('A, 'B, 'C, 'D, 'E, 'G) - inheritance('F) === Set('A, 'E) - memberRef('H) === Set('B, 'E, 'G) + val classDependencies = extractClassDependenciesPublic + val memberRef = classDependencies.memberRef + val inheritance = classDependencies.inheritance + memberRef("A") === Set.empty + inheritance("A") === Set.empty + memberRef("B") === Set("A", "D") + inheritance("B") === Set("D") + memberRef("C") === Set("A") + inheritance("C") === Set.empty + memberRef("D") === Set.empty + inheritance("D") === Set.empty + memberRef("E") === Set.empty + inheritance("E") === Set.empty + memberRef("F") === Set("A", "B", "C", "D", "E", "G") + inheritance("F") === Set("A", "E") + memberRef("H") === Set("B", "E", "G") // aliases and applied type constructors are expanded so we have inheritance dependency on B - inheritance('H) === Set('B, 'E) + inheritance("H") === Set("B", "E") } "Extracted source dependencies from private members" in { - val sourceDependencies = extractSourceDependenciesPrivate - val memberRef = sourceDependencies.memberRef - val inheritance = sourceDependencies.inheritance - memberRef('A) === Set.empty - inheritance('A) === Set.empty - memberRef('B) === Set.empty - inheritance('B) === Set.empty - memberRef('C) === Set('A) - inheritance('C) === Set('A) - memberRef('D) === Set('B) - inheritance('D) === Set('B) + val classDependencies = extractClassDependenciesPrivate + val memberRef = classDependencies.memberRef + val inheritance = classDependencies.inheritance + memberRef("A") === Set.empty + inheritance("A") === Set.empty + memberRef("B") === Set.empty + inheritance("B") === Set.empty + memberRef("C.Inner1") === Set("A") + inheritance("C.Inner1") === 
Set("A") + memberRef("D._$Inner2") === Set("B") + inheritance("D._$Inner2") === Set("B") } "Extracted source dependencies with trait as first parent" in { - val sourceDependencies = extractSourceDependenciesTraitAsFirstPatent - val memberRef = sourceDependencies.memberRef - val inheritance = sourceDependencies.inheritance - memberRef('A) === Set.empty - inheritance('A) === Set.empty - memberRef('B) === Set('A) - inheritance('B) === Set('A) + val classDependencies = extractClassDependenciesTraitAsFirstPatent + val memberRef = classDependencies.memberRef + val inheritance = classDependencies.inheritance + memberRef("A") === Set.empty + inheritance("A") === Set.empty + memberRef("B") === Set("A") + inheritance("B") === Set("A") // verify that memberRef captures the oddity described in documentation of `Relations.inheritance` // we are mainly interested whether dependency on A is captured in `memberRef` relation so // the invariant that says that memberRef is superset of inheritance relation is preserved - memberRef('C) === Set('A, 'B) - inheritance('C) === Set('A, 'B) + memberRef("C") === Set("A", "B") + inheritance("C") === Set("A", "B") // same as above but indirect (C -> B -> A), note that only A is visible here - memberRef('D) === Set('A, 'C) - inheritance('D) === Set('A, 'C) + memberRef("D") === Set("A", "C") + inheritance("D") === Set("A", "C") } /* "Extracted source dependencies from macro arguments" in { - val sourceDependencies = extractSourceDependenciesFromMacroArgument - val memberRef = sourceDependencies.memberRef - val inheritance = sourceDependencies.inheritance - - memberRef('A) === Set('B, 'C) - inheritance('A) === Set.empty - memberRef('B) === Set.empty - inheritance('B) === Set.empty - memberRef('C) === Set.empty - inheritance('C) === Set.empty + val classDependencies = extractClassDependenciesFromMacroArgument + val memberRef = classDependencies.memberRef + val inheritance = classDependencies.inheritance + + memberRef("A") === Set("B", "C") + inheritance("A") === Set.empty + memberRef("B") === Set.empty + inheritance("B") === Set.empty + memberRef("C") === Set.empty + inheritance("C") === Set.empty } */ - private def extractSourceDependenciesPublic: ExtractedSourceDependencies = { + private def extractClassDependenciesPublic: ExtractedClassDependencies = { val srcA = "class A" val srcB = "class B extends D[A]" val srcC = """|class C { @@ -96,38 +96,38 @@ class DependencySpecification extends Specification { // E verifies the core type gets pulled out val srcH = "trait H extends G.T[Int] with (E[Int] @unchecked)" - val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true) - val sourceDependencies = compilerForTesting.extractDependenciesFromSrcs('A -> srcA, 'B -> srcB, 'C -> srcC, - 'D -> srcD, 'E -> srcE, 'F -> srcF, 'G -> srcG, 'H -> srcH) - sourceDependencies + val compilerForTesting = new ScalaCompilerForUnitTesting + val classDependencies = compilerForTesting.extractDependenciesFromSrcs(srcA, srcB, srcC, + srcD, srcE, srcF, srcG, srcH) + classDependencies } - private def extractSourceDependenciesPrivate: ExtractedSourceDependencies = { + private def extractClassDependenciesPrivate: ExtractedClassDependencies = { val srcA = "class A" val srcB = "class B" val srcC = "class C { private class Inner1 extends A }" val srcD = "class D { def foo: Unit = { class Inner2 extends B } }" - val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true) - val sourceDependencies = - compilerForTesting.extractDependenciesFromSrcs('A -> srcA, 'B -> srcB, 
'C -> srcC, 'D -> srcD) - sourceDependencies + val compilerForTesting = new ScalaCompilerForUnitTesting + val classDependencies = + compilerForTesting.extractDependenciesFromSrcs(srcA, srcB, srcC, srcD) + classDependencies } - private def extractSourceDependenciesTraitAsFirstPatent: ExtractedSourceDependencies = { + private def extractClassDependenciesTraitAsFirstPatent: ExtractedClassDependencies = { val srcA = "class A" val srcB = "trait B extends A" val srcC = "trait C extends B" val srcD = "class D extends C" - val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true) - val sourceDependencies = - compilerForTesting.extractDependenciesFromSrcs('A -> srcA, 'B -> srcB, 'C -> srcC, 'D -> srcD) - sourceDependencies + val compilerForTesting = new ScalaCompilerForUnitTesting + val classDependencies = + compilerForTesting.extractDependenciesFromSrcs(srcA, srcB, srcC, srcD) + classDependencies } /* - private def extractSourceDependenciesFromMacroArgument: ExtractedSourceDependencies = { + private def extractClassDependenciesFromMacroArgument: ExtractedClassDependencies = { val srcA = "class A { println(B.printTree(C.foo)) }" val srcB = """ |import scala.language.experimental.macros @@ -143,9 +143,9 @@ class DependencySpecification extends Specification { val srcC = "object C { val foo = 1 }" val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true) - val sourceDependencies = + val classDependencies = compilerForTesting.extractDependenciesFromSrcs(List(Map('B -> srcB, 'C -> srcC), Map('A -> srcA))) - sourceDependencies + classDependencies } */ } diff --git a/sbt-bridge/test/xsbt/ExtractAPISpecification.scala b/sbt-bridge/test/xsbt/ExtractAPISpecification.scala index ce87134bc16d..8d867511e899 100644 --- a/sbt-bridge/test/xsbt/ExtractAPISpecification.scala +++ b/sbt-bridge/test/xsbt/ExtractAPISpecification.scala @@ -16,26 +16,25 @@ class ExtractAPISpecification extends Specification { def stableExistentialNames: Boolean = { def compileAndGetFooMethodApi(src: String): Def = { - val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = false) + val compilerForTesting = new ScalaCompilerForUnitTesting val sourceApi = compilerForTesting.extractApiFromSrc(src) - val FooApi = sourceApi.definitions().find(_.name() == "Foo").get.asInstanceOf[ClassLike] + val FooApi = sourceApi.find(_.name() == "Foo").get.asInstanceOf[ClassLike] val fooMethodApi = FooApi.structure().declared().find(_.name == "foo").get fooMethodApi.asInstanceOf[Def] } val src1 = """ - |class Box[T] - |class Foo { - | def foo: Box[_] = null - | - }""".stripMargin + |class Box[T] + |class Foo { + | def foo: Box[_] = null + | + }""".stripMargin val fooMethodApi1 = compileAndGetFooMethodApi(src1) val src2 = """ - |class Box[T] - |class Foo { - | def bar: Box[_] = null - | def foo: Box[_] = null - | - }""".stripMargin + |class Box[T] + |class Foo { + | def bar: Box[_] = null + | def foo: Box[_] = null + |}""".stripMargin val fooMethodApi2 = compileAndGetFooMethodApi(src2) fooMethodApi1 == fooMethodApi2 @@ -52,13 +51,11 @@ class ExtractAPISpecification extends Specification { * See https://github.com/sbt/sbt/issues/2504 */ "Self variable and no self type" in { - def selectNamer(api: SourceAPI): ClassLike = { + def selectNamer(api: Seq[Definition]): ClassLike = { def selectClass(defs: Iterable[Definition], name: String): ClassLike = defs.collectFirst { case cls: ClassLike if cls.name == name => cls }.get - val global = selectClass(api.definitions, "Global") - val foo = 
selectClass(global.structure.declared, "Global.Foo") - selectClass(foo.structure.inherited, "Namers.Namer") + selectClass(api, "Namers.Namer") } val src1 = """|class Namers { @@ -70,7 +67,7 @@ class ExtractAPISpecification extends Specification { | class Foo extends Namers |} |""".stripMargin - val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = false) + val compilerForTesting = new ScalaCompilerForUnitTesting val apis = compilerForTesting.extractApisFromSrcs(reuseCompilerInstance = false)(List(src1, src2), List(src2)) val _ :: src2Api1 :: src2Api2 :: Nil = apis.toList val namerApi1 = selectNamer(src2Api1) @@ -86,7 +83,7 @@ class ExtractAPISpecification extends Specification { * with our without a self variable. */ "Self type" in { - def collectFirstClass(defs: Array[Definition]): ClassLike = defs.collectFirst { + def collectFirstClass(defs: Iterable[Definition]): ClassLike = defs.collectFirst { case c: ClassLike => c }.get val srcX = "trait X" @@ -99,11 +96,11 @@ class ExtractAPISpecification extends Specification { val srcC6 = "class C6 extends AnyRef with X { self: X with Y => }" // val srcC7 = "class C7 { _ => }" // DOTTY: Syntax not supported val srcC8 = "class C8 { self => }" - val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = false) + val compilerForTesting = new ScalaCompilerForUnitTesting val apis = compilerForTesting.extractApisFromSrcs(reuseCompilerInstance = true)( List(srcX, srcY, srcC1, srcC2, srcC3, srcC4, srcC5, srcC6, srcC8) - ).map(x => collectFirstClass(x.definitions)) - val emptyType = new EmptyType + ).map(collectFirstClass) + val emptyType = EmptyType.create() def hasSelfType(c: ClassLike): Boolean = c.selfType != emptyType val (withSelfType, withoutSelfType) = apis.partition(hasSelfType) diff --git a/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala b/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala index 5044c771872e..eda9533fcee4 100644 --- a/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala +++ b/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala @@ -26,64 +26,64 @@ class ExtractUsedNamesSpecification extends Specification { "Unit" ) - "imported name" in { - val src = """ - |package a { class A } - |package b { - | import a.{A => A2} - |}""".stripMargin - val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true) - val usedNames = compilerForTesting.extractUsedNamesFromSrc(src) - val expectedNames = standardNames ++ Set("a", "A", "A2", "b") - usedNames === expectedNames - } + "imported name" in { + val src = """ + |package a { class A } + |package b { + | import a.{A => A2} + |}""".stripMargin + val compilerForTesting = new ScalaCompilerForUnitTesting + val usedNames = compilerForTesting.extractUsedNamesFromSrc(src) + val expectedNames = standardNames ++ Set("a", "A", "A2", "b") + usedNames("a.A") === expectedNames + } // test covers https://github.com/gkossakowski/sbt/issues/6 "names in type tree" in { val srcA = """| - |package a { - | class A { - | class C { class D } - | } - | class B[T] - | class BB - |}""".stripMargin + |package a { + | class A { + | class C { class D } + | } + | class B[T] + | class BB + |}""".stripMargin val srcB = """| - |package b { - | abstract class X { - | def foo: a.A#C#D - | def bar: a.B[a.BB] - | } - |}""".stripMargin - val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true) + |package b { + | abstract class X { + | def foo: a.A#C#D + | def bar: a.B[a.BB] + | } + |}""".stripMargin + val compilerForTesting = new 
ScalaCompilerForUnitTesting val usedNames = compilerForTesting.extractUsedNamesFromSrc(srcA, srcB) // DOTTY: unlike the scalac sbt phase, this does not contain "X", I believe this is safe // TODO: report issue against sbt suggesting that they do the same val expectedNames = standardNames ++ Set("a", "A", "B", "C", "D", "b", "BB") - usedNames === expectedNames + usedNames("b.X") === expectedNames } - // test for https://github.com/gkossakowski/sbt/issues/5 - "symbolic names" in { - val srcA = """| - |class A { - | def `=`: Int = 3 - |}""".stripMargin - val srcB = """| - |class B { - | def foo(a: A) = a.`=` - |}""".stripMargin - val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true) - val usedNames = compilerForTesting.extractUsedNamesFromSrc(srcA, srcB) + // test for https://github.com/gkossakowski/sbt/issues/5 + "symbolic names" in { + val srcA = """| + |class A { + | def `=`: Int = 3 + |}""".stripMargin + val srcB = """| + |class B { + | def foo(a: A) = a.`=` + |}""".stripMargin + val compilerForTesting = new ScalaCompilerForUnitTesting + val usedNames = compilerForTesting.extractUsedNamesFromSrc(srcA, srcB) - // DOTTY TODO: "Int" is not actually used, but we collect it because - // it's the inferred return type so it appears in a TypeTree - // We could avoid this by checking if the untyped tree has a return type - // but is it worth it? Revisit this after https://github.com/sbt/sbt/issues/1104 - // has landed. - val expectedNames = standardNames ++ Set("A", "a", "=", "Int") - usedNames === expectedNames - } + // DOTTY TODO: "Int" is not actually used, but we collect it because + // it's the inferred return type so it appears in a TypeTree + // We could avoid this by checking if the untyped tree has a return type + // but is it worth it? Revisit this after https://github.com/sbt/sbt/issues/1104 + // has landed. + val expectedNames = standardNames ++ Set("A", "a", "=", "Int") + usedNames("B") === expectedNames + } "extract names in the types of trees" in { val src1 = """|class X0 @@ -104,55 +104,86 @@ class ExtractUsedNamesSpecification extends Specification { | def foo(m: M): N = ??? | def bar[Param >: P1 <: P0](p: Param): Param = ??? |}""".stripMargin - val src2 = """|object Test { + val src2 = """|object Test_lista { | val x = B.lista - | val y = B.at - | val z = B.as - | B.foo(???) - | B.bar(???) - |}""".stripMargin - val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true) + |} + |object Test_at { + | val x = B.at + |} + |object Test_as { + | val x = B.as + |} + |object Test_foo { + | val x = B.foo(???) + |} + |object Test_bar { + | val x = B.bar(???) 
+ |} + |""".stripMargin + val compilerForTesting = new ScalaCompilerForUnitTesting val usedNames = compilerForTesting.extractUsedNamesFromSrc(src1, src2) - val expectedNames = standardNames ++ Set("Test", "Test$", "B", "B$", - "Predef", "Predef$", "???", "Nothing", - "lista", "List", "A", - "at", "T", "X1", "X0", - "as", "S", "Y", - "foo", "M", "N", - "bar", "P1", "P0") - usedNames === expectedNames + val expectedNames_lista = standardNames ++ Set("Test_lista", "Test_lista$", "B", "B$", "lista", "List", "A") + val expectedNames_at = standardNames ++ Set("Test_at", "Test_at$", "B", "B$", "at", "A", "T", "X0", "X1") + val expectedNames_as = standardNames ++ Set("Test_as", "Test_as$", "B", "B$", "as", "S", "Y") + val expectedNames_foo = standardNames ++ Set("Test_foo", + "Test_foo$", + "B", + "B$", + "foo", + "M", + "N", + "Predef", + "Predef$", + "???", + "Nothing") + val expectedNames_bar = standardNames ++ Set("Test_bar", + "Test_bar$", + "B", + "B$", + "bar", + "P1", + "P0", + "Predef", + "Predef$", + "???", + "Nothing") + usedNames("Test_lista") === expectedNames_lista + usedNames("Test_at") === expectedNames_at + usedNames("Test_as") === expectedNames_as + usedNames("Test_foo") === expectedNames_foo + usedNames("Test_bar") === expectedNames_bar } - // test for https://github.com/gkossakowski/sbt/issues/3 - "used names from the same compilation unit" in { - val src = "class A { def foo: Int = 0; def bar: Int = foo }" - val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true) - val usedNames = compilerForTesting.extractUsedNamesFromSrc(src) - val expectedNames = standardNames ++ Set("A", "foo", "Int") - usedNames === expectedNames - } + // test for https://github.com/gkossakowski/sbt/issues/3 + "used names from the same compilation unit" in { + val src = "class A { def foo: Int = 0; def bar: Int = foo }" + val compilerForTesting = new ScalaCompilerForUnitTesting + val usedNames = compilerForTesting.extractUsedNamesFromSrc(src) + val expectedNames = standardNames ++ Set("A", "foo", "Int") + usedNames("A") === expectedNames + } - // pending test for https://issues.scala-lang.org/browse/SI-7173 - "names of constants" in { - val src = "class A { final val foo = 12; def bar: Int = foo }" - val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true) - val usedNames = compilerForTesting.extractUsedNamesFromSrc(src) - val expectedNames = standardNames ++ Set("A", "foo", "Int") - usedNames === expectedNames - } + // pending test for https://issues.scala-lang.org/browse/SI-7173 + "names of constants" in { + val src = "class A { final val foo = 12; def bar: Int = foo }" + val compilerForTesting = new ScalaCompilerForUnitTesting + val usedNames = compilerForTesting.extractUsedNamesFromSrc(src) + val expectedNames = standardNames ++ Set("A", "foo", "Int") + usedNames("A") === expectedNames + } - // pending test for https://github.com/gkossakowski/sbt/issues/4 - // TODO: we should fix it by having special treatment of `selectDynamic` and `applyDynamic` calls - "names from method calls on Dynamic" in { - val srcA = """|import scala.language.dynamics - |class A extends Dynamic { - | def selectDynamic(name: String): Int = name.length - |}""".stripMargin - val srcB = "class B { def foo(a: A): Int = a.bla }" - val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true) - val usedNames = compilerForTesting.extractUsedNamesFromSrc(srcA, srcB) - val expectedNames = standardNames ++ Set("B", "A", "a", "Int", "selectDynamic", "bla") - usedNames 
=== expectedNames - }.pendingUntilFixed("Call to Dynamic is desugared in type checker so Select nodes is turned into string literal.") + // pending test for https://github.com/gkossakowski/sbt/issues/4 + // TODO: we should fix it by having special treatment of `selectDynamic` and `applyDynamic` calls + "names from method calls on Dynamic" in { + val srcA = """|import scala.language.dynamics + |class A extends Dynamic { + | def selectDynamic(name: String): Int = name.length + |}""".stripMargin + val srcB = "class B { def foo(a: A): Int = a.bla }" + val compilerForTesting = new ScalaCompilerForUnitTesting + val usedNames = compilerForTesting.extractUsedNamesFromSrc(srcA, srcB) + val expectedNames = standardNames ++ Set("B", "A", "a", "Int", "selectDynamic", "bla") + usedNames("") === expectedNames + }.pendingUntilFixed("Call to Dynamic is desugared in type checker so Select nodes is turned into string literal.") } diff --git a/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala b/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala index fb27f9b9d276..e81d58a07744 100644 --- a/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala +++ b/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala @@ -4,29 +4,26 @@ package xsbt import xsbti.compile.SingleOutput import java.io.File import xsbti._ -import xsbti.api.SourceAPI -import sbt.IO._ -import xsbti.api.ClassLike -import xsbti.api.Definition -import xsbti.api.Def +import sbt.io.IO +import xsbti.api.{ ClassLike, Def, DependencyContext } +import DependencyContext._ import xsbt.api.SameAPI -import sbt.ConsoleLogger -import xsbti.DependencyContext._ +import sbt.internal.util.ConsoleLogger -import ScalaCompilerForUnitTesting.ExtractedSourceDependencies +import TestCallback.ExtractedClassDependencies /** * Provides common functionality needed for unit tests that require compiling * source code using Scala compiler. */ -class ScalaCompilerForUnitTesting(nameHashing: Boolean, includeSynthToNameHashing: Boolean = false) { +class ScalaCompilerForUnitTesting { import scala.language.reflectiveCalls /** * Compiles given source code using Scala compiler and returns API representation * extracted by ExtractAPI class. */ - def extractApiFromSrc(src: String): SourceAPI = { + def extractApiFromSrc(src: String): Seq[ClassLike] = { val (Seq(tempSrcFile), analysisCallback) = compileSrcs(src) analysisCallback.apis(tempSrcFile) } @@ -35,27 +32,50 @@ class ScalaCompilerForUnitTesting(nameHashing: Boolean, includeSynthToNameHashin * Compiles given source code using Scala compiler and returns API representation * extracted by ExtractAPI class. */ - def extractApisFromSrcs(reuseCompilerInstance: Boolean)(srcs: List[String]*): Seq[SourceAPI] = { + def extractApisFromSrcs(reuseCompilerInstance: Boolean)(srcs: List[String]*): Seq[Seq[ClassLike]] = { val (tempSrcFiles, analysisCallback) = compileSrcs(srcs.toList, reuseCompilerInstance) tempSrcFiles.map(analysisCallback.apis) } - def extractUsedNamesFromSrc(src: String): Set[String] = { - val (Seq(tempSrcFile), analysisCallback) = compileSrcs(src) - analysisCallback.usedNames(tempSrcFile) - } - /** * Extract used names from src provided as the second argument. + * If `assertDefaultScope` is set to true it will fail if there is any name used in scope other then Default * * The purpose of the first argument is to define names that the second * source is going to refer to. Both files are compiled in the same compiler * Run but only names used in the second src file are returned. 
*/ - def extractUsedNamesFromSrc(definitionSrc: String, actualSrc: String): Set[String] = { + def extractUsedNamesFromSrc( + definitionSrc: String, + actualSrc: String, + assertDefaultScope: Boolean = true + ): Map[String, Set[String]] = { // we drop temp src file corresponding to the definition src file val (Seq(_, tempSrcFile), analysisCallback) = compileSrcs(definitionSrc, actualSrc) - analysisCallback.usedNames(tempSrcFile) + + if (assertDefaultScope) for { + (className, used) <- analysisCallback.usedNamesAndScopes + analysisCallback.TestUsedName(name, scopes) <- used + } assert(scopes.size() == 1 && scopes.contains(UseScope.Default), s"$className uses $name in $scopes") + + val classesInActualSrc = analysisCallback.classNames(tempSrcFile).map(_._1) + classesInActualSrc.map(className => className -> analysisCallback.usedNames(className)).toMap + } + + /** + * Extract used names from the last source file in `sources`. + * + * The previous source files are provided to successfully compile examples. + * Only the names used in the last src file are returned. + */ + def extractUsedNamesFromSrc(sources: String*): Map[String, Set[String]] = { + val (srcFiles, analysisCallback) = compileSrcs(sources: _*) + srcFiles + .map { srcFile => + val classesInSrc = analysisCallback.classNames(srcFile).map(_._1) + classesInSrc.map(className => className -> analysisCallback.usedNames(className)).toMap + } + .reduce(_ ++ _) } /** @@ -70,42 +90,23 @@ class ScalaCompilerForUnitTesting(nameHashing: Boolean, includeSynthToNameHashin * Symbols are used to express extracted dependencies between source code snippets. This way we have * file system-independent way of testing dependencies between source code "files". */ - def extractDependenciesFromSrcs(srcs: List[Map[Symbol, String]]): ExtractedSourceDependencies = { - val rawGroupedSrcs = srcs.map(_.values.toList) - val symbols = srcs.flatMap(_.keys) - val (tempSrcFiles, testCallback) = compileSrcs(rawGroupedSrcs, reuseCompilerInstance = true) - val fileToSymbol = (tempSrcFiles zip symbols).toMap - - val memberRefFileDeps = testCallback.sourceDependencies collect { - // false indicates that those dependencies are not introduced by inheritance + def extractDependenciesFromSrcs(srcs: List[List[String]]): ExtractedClassDependencies = { + val (_, testCallback) = compileSrcs(srcs, reuseCompilerInstance = true) + + val memberRefDeps = testCallback.classDependencies collect { case (target, src, DependencyByMemberRef) => (src, target) } - val inheritanceFileDeps = testCallback.sourceDependencies collect { - // true indicates that those dependencies are introduced by inheritance + val inheritanceDeps = testCallback.classDependencies collect { case (target, src, DependencyByInheritance) => (src, target) } - def toSymbols(src: File, target: File): (Symbol, Symbol) = (fileToSymbol(src), fileToSymbol(target)) - val memberRefDeps = memberRefFileDeps map { case (src, target) => toSymbols(src, target) } - val inheritanceDeps = inheritanceFileDeps map { case (src, target) => toSymbols(src, target) } - def pairsToMultiMap[A, B](pairs: Seq[(A, B)]): Map[A, Set[B]] = { - import scala.collection.mutable.{ HashMap, MultiMap } - val emptyMultiMap = new HashMap[A, scala.collection.mutable.Set[B]] with MultiMap[A, B] - val multiMap = pairs.foldLeft(emptyMultiMap) { - case (acc, (key, value)) => - acc.addBinding(key, value) - } - // convert all collections to immutable variants - multiMap.toMap.mapValues(_.toSet).withDefaultValue(Set.empty) + val localInheritanceDeps = 
testCallback.classDependencies collect { + case (target, src, LocalDependencyByInheritance) => (src, target) } - - ExtractedSourceDependencies(pairsToMultiMap(memberRefDeps), pairsToMultiMap(inheritanceDeps)) + ExtractedClassDependencies.fromPairs(memberRefDeps, inheritanceDeps, localInheritanceDeps) } - def extractDependenciesFromSrcs(srcs: (Symbol, String)*): ExtractedSourceDependencies = { - val symbols = srcs.map(_._1) - assert(symbols.distinct.size == symbols.size, - s"Duplicate symbols for srcs detected: $symbols") - extractDependenciesFromSrcs(List(srcs.toMap)) + def extractDependenciesFromSrcs(srcs: String*): ExtractedClassDependencies = { + extractDependenciesFromSrcs(List(srcs.toList)) } /** @@ -124,12 +125,12 @@ class ScalaCompilerForUnitTesting(nameHashing: Boolean, includeSynthToNameHashin * The sequence of temporary files corresponding to passed snippets and analysis * callback is returned as a result. */ - private def compileSrcs(groupedSrcs: List[List[String]], + def compileSrcs(groupedSrcs: List[List[String]], reuseCompilerInstance: Boolean): (Seq[File], TestCallback) = { // withTemporaryDirectory { temp => { - val temp = createTemporaryDirectory - val analysisCallback = new TestCallback(nameHashing, includeSynthToNameHashing) + val temp = IO.createTemporaryDirectory + val analysisCallback = new TestCallback val classesDir = new File(temp, "classes") classesDir.mkdir() @@ -158,13 +159,13 @@ class ScalaCompilerForUnitTesting(nameHashing: Boolean, includeSynthToNameHashin } } - private def compileSrcs(srcs: String*): (Seq[File], TestCallback) = { + def compileSrcs(srcs: String*): (Seq[File], TestCallback) = { compileSrcs(List(srcs.toList), reuseCompilerInstance = true) } private def prepareSrcFile(baseDir: File, fileName: String, src: String): File = { val srcFile = new File(baseDir, fileName) - sbt.IO.write(srcFile, src) + IO.write(srcFile, src) srcFile } @@ -184,14 +185,11 @@ class ScalaCompilerForUnitTesting(nameHashing: Boolean, includeSynthToNameHashin def hasErrors: Boolean = false def hasWarnings: Boolean = false def printWarnings(): Unit = () - def problems: Array[Problem] = Array.empty - def log(pos: Position, msg: String, sev: Severity): Unit = println(msg) + def problems(): Array[xsbti.Problem] = Array.empty + def log(problem: xsbti.Problem): Unit = println(problem.message) def comment(pos: Position, msg: String): Unit = () def printSummary(): Unit = () } } -object ScalaCompilerForUnitTesting { - case class ExtractedSourceDependencies(memberRef: Map[Symbol, Set[Symbol]], inheritance: Map[Symbol, Set[Symbol]]) -} diff --git a/sbt-bridge/test/xsbti/TestCallback.scala b/sbt-bridge/test/xsbti/TestCallback.scala index 99c8d963d555..7a065d5abdc9 100644 --- a/sbt-bridge/test/xsbti/TestCallback.scala +++ b/sbt-bridge/test/xsbti/TestCallback.scala @@ -3,33 +3,90 @@ package xsbti import java.io.File import scala.collection.mutable.ArrayBuffer -import xsbti.api.SourceAPI -import xsbti.DependencyContext._ +import xsbti.api.ClassLike +import xsbti.api.DependencyContext +import DependencyContext._ +import java.util.EnumSet -class TestCallback(override val nameHashing: Boolean, override val includeSynthToNameHashing: Boolean) extends AnalysisCallback +class TestCallback extends AnalysisCallback { - val sourceDependencies = new ArrayBuffer[(File, File, DependencyContext)] - val binaryDependencies = new ArrayBuffer[(File, String, File, DependencyContext)] - val products = new ArrayBuffer[(File, File, String)] - val usedNames = scala.collection.mutable.Map.empty[File, 
Set[String]].withDefaultValue(Set.empty) - val apis: scala.collection.mutable.Map[File, SourceAPI] = scala.collection.mutable.Map.empty - - def sourceDependency(dependsOn: File, source: File, inherited: Boolean): Unit = { - val context = if(inherited) DependencyByInheritance else DependencyByMemberRef - sourceDependency(dependsOn, source, context) - } - def sourceDependency(dependsOn: File, source: File, context: DependencyContext): Unit = { sourceDependencies += ((dependsOn, source, context)) } - def binaryDependency(binary: File, name: String, source: File, inherited: Boolean): Unit = { - val context = if(inherited) DependencyByInheritance else DependencyByMemberRef - binaryDependency(binary, name, source, context) - } - def binaryDependency(binary: File, name: String, source: File, context: DependencyContext): Unit = { binaryDependencies += ((binary, name, source, context)) } - def generatedClass(source: File, module: File, name: String): Unit = { products += ((source, module, name)) } - - def usedName(source: File, name: String): Unit = { usedNames(source) += name } - def api(source: File, sourceAPI: SourceAPI): Unit = { - assert(!apis.contains(source), s"The `api` method should be called once per source file: $source") - apis(source) = sourceAPI - } - def problem(category: String, pos: xsbti.Position, message: String, severity: xsbti.Severity, reported: Boolean): Unit = () + case class TestUsedName(name: String, scopes: EnumSet[UseScope]) + val classDependencies = new ArrayBuffer[(String, String, DependencyContext)] + val binaryDependencies = new ArrayBuffer[(File, String, String, File, DependencyContext)] + val products = new ArrayBuffer[(File, File)] + val usedNamesAndScopes = scala.collection.mutable.Map.empty[String, Set[TestUsedName]].withDefaultValue(Set.empty) + val classNames = scala.collection.mutable.Map.empty[File, Set[(String, String)]].withDefaultValue(Set.empty) + val apis: scala.collection.mutable.Map[File, Seq[ClassLike]] = scala.collection.mutable.Map.empty + + def usedNames = usedNamesAndScopes.mapValues(_.map(_.name)) + + override def startSource(source: File): Unit = { + assert(!apis.contains(source), s"startSource can be called only once per source file: $source") + apis(source) = Seq.empty + } + + override def binaryDependency(binary: File, name: String, fromClassName: String, source: File, context: DependencyContext): Unit = { + binaryDependencies += ((binary, name, fromClassName, source, context)) + } + + def generatedNonLocalClass(source: File, + module: File, + binaryClassName: String, + srcClassName: String): Unit = { + products += ((source, module)) + classNames(source) += ((srcClassName, binaryClassName)) + () + } + + def generatedLocalClass(source: File, module: File): Unit = { + products += ((source, module)) + () + } + + + override def classDependency(onClassName: String, sourceClassName: String, context: DependencyContext): Unit = { + if (onClassName != sourceClassName) classDependencies += ((onClassName, sourceClassName, context)) + } + + override def usedName(className: String, name: String, scopes: EnumSet[UseScope]): Unit = { + usedNamesAndScopes(className) += TestUsedName(name, scopes) + } + override def api(source: File, classApi: ClassLike): Unit = { + apis(source) = classApi +: apis(source) + } + override def problem(category: String, pos: xsbti.Position, message: String, severity: xsbti.Severity, reported: Boolean): Unit = () + override def dependencyPhaseCompleted(): Unit = () + override def apiPhaseCompleted(): Unit = () + override def 
enabled(): Boolean = true + def mainClass(source: File, className: String): Unit = () + +} + +object TestCallback { + case class ExtractedClassDependencies(memberRef: Map[String, Set[String]], + inheritance: Map[String, Set[String]], + localInheritance: Map[String, Set[String]]) + object ExtractedClassDependencies { + def fromPairs( + memberRefPairs: Seq[(String, String)], + inheritancePairs: Seq[(String, String)], + localInheritancePairs: Seq[(String, String)] + ): ExtractedClassDependencies = { + ExtractedClassDependencies(pairsToMultiMap(memberRefPairs), + pairsToMultiMap(inheritancePairs), + pairsToMultiMap(localInheritancePairs)) + } + + private def pairsToMultiMap[A, B](pairs: Seq[(A, B)]): Map[A, Set[B]] = { + import scala.collection.mutable.{ HashMap, MultiMap } + val emptyMultiMap = new HashMap[A, scala.collection.mutable.Set[B]] with MultiMap[A, B] + val multiMap = pairs.foldLeft(emptyMultiMap) { + case (acc, (key, value)) => + acc.addBinding(key, value) + } + // convert all collections to immutable variants + multiMap.toMap.mapValues(_.toSet).withDefaultValue(Set.empty) + } + } } + diff --git a/sbt-dotty/src/dotty/tools/sbtplugin/DottyIDEPlugin.scala b/sbt-dotty/src/dotty/tools/sbtplugin/DottyIDEPlugin.scala index 240e51479ce3..cfb913bb236f 100644 --- a/sbt-dotty/src/dotty/tools/sbtplugin/DottyIDEPlugin.scala +++ b/sbt-dotty/src/dotty/tools/sbtplugin/DottyIDEPlugin.scala @@ -55,7 +55,7 @@ object DottyIDEPlugin extends AutoPlugin { else { def matchingSetting(setting: Setting[_]) = setting.key.key == scalaVersion.key && - setting.key.scope.project.fold(ref => projRefs.contains(ref), ifGlobal = true, ifThis = true) + setting.key.scope.project.fold(ref => projRefs.contains(ref), ifZero = true, ifThis = true) val newSettings = extracted.session.mergeSettings.collect { case setting if matchingSetting(setting) => @@ -205,9 +205,9 @@ object DottyIDEPlugin extends AutoPlugin { origState } - private def projectConfigTask(config: Configuration): Initialize[Task[Option[ProjectConfig]]] = Def.task { - if ((sources in config).value.isEmpty) None - else { + private def projectConfigTask(config: Configuration): Initialize[Task[Option[ProjectConfig]]] = Def.taskDyn { + if ((sources in config).value.isEmpty) Def.task { None } + else Def.task { // Not needed to generate the config, but this guarantees that the // generated config is usable by an IDE without any extra compilation // step. diff --git a/sbt-dotty/src/dotty/tools/sbtplugin/DottyPlugin.scala b/sbt-dotty/src/dotty/tools/sbtplugin/DottyPlugin.scala index f413efb5520d..611677351d0d 100644 --- a/sbt-dotty/src/dotty/tools/sbtplugin/DottyPlugin.scala +++ b/sbt-dotty/src/dotty/tools/sbtplugin/DottyPlugin.scala @@ -51,38 +51,35 @@ object DottyPlugin extends AutoPlugin { nightly } - // implicit class DottyCompatModuleID(moduleID: ModuleID) { - // /** If this ModuleID cross-version is a Dotty version, replace it - // * by the Scala 2.x version that the Dotty version is retro-compatible with, - // * otherwise do nothing. - // * - // * This setting is useful when your build contains dependencies that have only - // * been published with Scala 2.x, if you have: - // * {{{ - // * libraryDependencies += "a" %% "b" % "c" - // * }}} - // * you can replace it by: - // * {{{ - // * libraryDependencies += ("a" %% "b" % "c").withDottyCompat() - // * }}} - // * This will have no effect when compiling with Scala 2.x, but when compiling - // * with Dotty this will change the cross-version to a Scala 2.x one. 
This - // * works because Dotty is currently retro-compatible with Scala 2.x. - // * - // * NOTE: Dotty's retro-compatibility with Scala 2.x will be dropped before - // * Dotty is released, you should not rely on it. - // */ - // def withDottyCompat(): ModuleID = - // moduleID.crossVersion match { - // case _: librarymanagement.Binary => - // moduleID.cross(CrossVersion.binaryMapped { - // case version if version.startsWith("0.") => "2.11" - // case version => version - // }) - // case _ => - // moduleID - // } - // } + implicit class DottyCompatModuleID(moduleID: ModuleID) { + /** If this ModuleID cross-version is a Dotty version, replace it + * by the Scala 2.x version that the Dotty version is retro-compatible with, + * otherwise do nothing. + * + * This setting is useful when your build contains dependencies that have only + * been published with Scala 2.x, if you have: + * {{{ + * libraryDependencies += "a" %% "b" % "c" + * }}} + * you can replace it by: + * {{{ + * libraryDependencies += ("a" %% "b" % "c").withDottyCompat(scalaVersion.value) + * }}} + * This will have no effect when compiling with Scala 2.x, but when compiling + * with Dotty this will change the cross-version to a Scala 2.x one. This + * works because Dotty is currently retro-compatible with Scala 2.x. + * + * NOTE: Dotty's retro-compatibility with Scala 2.x will be dropped before + * Dotty is released, you should not rely on it. + */ + def withDottyCompat(scalaVersion: String): ModuleID = + moduleID.crossVersion match { + case _: librarymanagement.Binary if scalaVersion.startsWith("0.") => + moduleID.cross(CrossVersion.constant("2.12")) + case _ => + moduleID + } + } } import autoImport._ @@ -90,7 +87,7 @@ object DottyPlugin extends AutoPlugin { override def requires: Plugins = plugins.JvmPlugin override def trigger = allRequirements - // Adapted from CrossVersionUtil#sbtApiVersion + // Adapted from CrossVersioconstant nUtil#sbtApiVersion private def sbtFullVersion(v: String): Option[(Int, Int, Int)] = { val ReleaseV = """(\d+)\.(\d+)\.(\d+)(-\d+)?""".r @@ -104,27 +101,6 @@ object DottyPlugin extends AutoPlugin { } } - // Copy-pasted from sbt where it's private - private case class WrappedClassFileManager(internal: ClassFileManager, - external: Option[ClassFileManager]) - extends ClassFileManager { - - override def delete(classes: Array[File]): Unit = { - external.foreach(_.delete(classes)) - internal.delete(classes) - } - - override def complete(success: Boolean): Unit = { - external.foreach(_.complete(success)) - internal.complete(success) - } - - override def generated(classes: Array[File]): Unit = { - external.foreach(_.generated(classes)) - internal.generated(classes) - } - } - /** Patches the IncOptions so that .tasty and .hasTasty files are pruned as needed. 
* * This code is adapted from `scalaJSPatchIncOptions` in Scala.js, which needs @@ -154,15 +130,14 @@ object DottyPlugin extends AutoPlugin { def complete(success: Boolean): Unit = {} } val inheritedHooks = incOptions.externalHooks - val hooks = new ExternalHooks { - override def externalClassFileManager() = Option(inheritedHooks.externalClassFileManager.orElse(null)) match { + val externalClassFileManager: Optional[ClassFileManager] = Option(inheritedHooks.getExternalClassFileManager.orElse(null)) match { case Some(prevManager) => - Optional.of(WrappedClassFileManager(prevManager, Some(tastyFileManager))) + Optional.of(WrappedClassFileManager.of(prevManager, Optional.of(tastyFileManager))) case None => Optional.of(tastyFileManager) } - override def externalLookup() = inheritedHooks.externalLookup() - } + + val hooks = new DefaultExternalHooks(inheritedHooks.getExternalLookup, externalClassFileManager) incOptions.withExternalHooks(hooks) } @@ -194,6 +169,15 @@ object DottyPlugin extends AutoPlugin { inc }, + scalaCompilerBridgeSource := { + val scalaBridge = scalaCompilerBridgeSource.value + val dottyBridge = (scalaOrganization.value % "dotty-sbt-bridge" % scalaVersion.value).withConfigurations(Some(Configurations.Compile.name)).sources() + if (isDotty.value) + dottyBridge + else + scalaBridge + }, + scalaBinaryVersion := { if (isDotty.value) scalaVersion.value.split("\\.").take(2).mkString(".") // Not needed with sbt >= 0.13.16 From aef68c286f949adeabb0825a2dc0b439cfd1d947 Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Sat, 28 Oct 2017 19:02:45 +0200 Subject: [PATCH 14/49] Compile only the bridge, not the whole compiler --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index b2cf87ea902d..fef3ad48ed97 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -1161,7 +1161,7 @@ object Build { settings(dottyDocSettings) def asDottySbtBridge(implicit mode: Mode): Project = project.withCommonSettings. - dependsOn(dottyCompiler). + dependsOn(dottyCompiler % Provided). settings(dottySbtBridgeSettings) def asDottyBench(implicit mode: Mode): Project = project.withCommonSettings. 
From 0f13174d61dc55e66ecafd9eed15f27b7d6a6856 Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Fri, 20 Oct 2017 12:05:46 +0200 Subject: [PATCH 15/49] Adapt to new main class detection in Zinc 1 --- .../src/dotty/tools/dotc/sbt/ExtractAPI.scala | 35 ++++++++++++++++--- 1 file changed, 30 insertions(+), 5 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index a8e8c2a93e83..08e4ee4c6323 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -2,17 +2,28 @@ package dotty.tools.dotc package sbt import ast.{Trees, tpd} -import core._, core.Decorators._ -import Annotations._, Contexts._, Flags._, Phases._, Trees._, Types._, Symbols._ -import Names._, NameOps._, StdNames._ +import core._ +import core.Decorators._ +import Annotations._ +import Contexts._ +import Flags._ +import Phases._ +import Trees._ +import Types._ +import Symbols._ +import Names._ +import NameOps._ +import StdNames._ import NameKinds.DefaultGetterName import typer.Inliner import typer.ErrorReporting.cyclicErrorMsg import transform.SymUtils._ - import dotty.tools.io.Path import java.io.PrintWriter +import dotty.tools.dotc.config.JavaPlatform +import xsbti.api.DefinitionType + import scala.collection.mutable /** This phase sends a representation of the API of classes to sbt via callbacks. @@ -51,6 +62,7 @@ class ExtractAPI extends Phase { val apiTraverser = new ExtractAPICollector val sources = apiTraverser.apiSource(unit.tpdTree) + val mainClasses = apiTraverser.mainClasses if (dumpInc) { // Append to existing file that should have been created by ExtractDependencies @@ -61,8 +73,10 @@ class ExtractAPI extends Phase { } finally pw.close() } - if (ctx.sbtCallback != null) + if (ctx.sbtCallback != null) { sources.foreach(ctx.sbtCallback.api(sourceFile.file, _)) + mainClasses.foreach(ctx.sbtCallback.mainClass(sourceFile.file, _)) + } } } } @@ -127,6 +141,7 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder private[this] val refinedTypeCache = new mutable.HashMap[(api.Type, api.Definition), api.Structure] private[this] val allNonLocalClassesInSrc = new mutable.HashSet[xsbti.api.ClassLike] + private[this] val _mainClasses = new mutable.HashSet[String] private[this] object Constants { val emptyStringArray = Array[String]() @@ -177,6 +192,11 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder def apiClass(sym: ClassSymbol): api.ClassLikeDef = classLikeCache.getOrElseUpdate(sym, computeClass(sym)) + def mainClasses: Set[String] = { + forceThunks() + _mainClasses.toSet + } + private def computeClass(sym: ClassSymbol): api.ClassLikeDef = { import xsbti.api.{DefinitionType => dt} val defType = @@ -220,6 +240,11 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder allNonLocalClassesInSrc += cl + val javaPlatform = ctx.platform.asInstanceOf[JavaPlatform] + if (sym.isStatic && defType == DefinitionType.Module && javaPlatform.hasJavaMainMethod(sym)) { + _mainClasses += name + } + api.ClassLikeDef.of(name, acc, modifiers, anns, tparams, defType) } From 323b4512c365e6db7cc5712773671184e2513857 Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Mon, 23 Oct 2017 09:34:44 +0200 Subject: [PATCH 16/49] Re-enable `dist-*` projects --- build.sbt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/build.sbt b/build.sbt index b547156c2d76..9fbaee50a7f4 100644 --- a/build.sbt +++ b/build.sbt @@ 
-22,9 +22,9 @@ val `scala-library` = Build.`scala-library` val `scala-compiler` = Build.`scala-compiler` val `scala-reflect` = Build.`scala-reflect` val scalap = Build.scalap -// val dist = Build.dist -// val `dist-bootstrapped` = Build.`dist-bootstrapped` -// val `dist-optimised` = Build.`dist-optimised` +val dist = Build.dist +val `dist-bootstrapped` = Build.`dist-bootstrapped` +val `dist-optimised` = Build.`dist-optimised` val `sbt-dotty` = Build.`sbt-dotty` val `vscode-dotty` = Build.`vscode-dotty` From c05353aaefcae16f6c3559f0ae954dab469fa53a Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Mon, 23 Oct 2017 09:35:05 +0200 Subject: [PATCH 17/49] Fix `bin/dot*` scripts --- dist/bin/common | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dist/bin/common b/dist/bin/common index b5239ce28af0..660b25f220b3 100755 --- a/dist/bin/common +++ b/dist/bin/common @@ -118,7 +118,7 @@ DOTTY_LIB=$(find_lib "*dotty-library*") SCALA_ASM=$(find_lib "*scala-asm*") SCALA_LIB=$(find_lib "*scala-library*") SCALA_XML=$(find_lib "*scala-xml*") -SBT_INTF=$(find_lib "*sbt-interface*") +SBT_INTF=$(find_lib "*compiler-interface*") # debug DEBUG_STR=-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005 From 4c91ed29a5674e8d5d845e4256cab69d3c6d4e2e Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Fri, 20 Oct 2017 11:27:26 +0200 Subject: [PATCH 18/49] Fix `source-dependencies/abstract-type-override` --- .../abstract-type-override/build.sbt | 14 ++++++++------ .../abstract-type-override/test | 2 +- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/sbt-dotty/sbt-test/source-dependencies/abstract-type-override/build.sbt b/sbt-dotty/sbt-test/source-dependencies/abstract-type-override/build.sbt index 92d2093771e4..6c2c11e0926a 100644 --- a/sbt-dotty/sbt-test/source-dependencies/abstract-type-override/build.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/abstract-type-override/build.sbt @@ -1,7 +1,9 @@ -InputKey[Unit]("check-number-of-compiler-iterations") <<= inputTask { (argTask: TaskKey[Seq[String]]) => - (argTask, compile in Compile) map { (args: Seq[String], a: sbt.inc.Analysis) => - assert(args.size == 1) - val expectedIterationsNumber = args(0).toInt - assert(a.compilations.allCompilations.size == expectedIterationsNumber, "a.compilations.allCompilations.size = %d (expected %d)".format(a.compilations.allCompilations.size, expectedIterationsNumber)) - } +import complete.DefaultParsers._ + +InputKey[Unit]("check-number-of-compiler-iterations") := { + val args = spaceDelimited("").parsed + val a = (compile in Compile).value.asInstanceOf[sbt.internal.inc.Analysis] + assert(args.size == 1) + val expectedIterationsNumber = args(0).toInt + assert(a.compilations.allCompilations.size == expectedIterationsNumber, "a.compilations.allCompilations.size = %d (expected %d)".format(a.compilations.allCompilations.size, expectedIterationsNumber)) } diff --git a/sbt-dotty/sbt-test/source-dependencies/abstract-type-override/test b/sbt-dotty/sbt-test/source-dependencies/abstract-type-override/test index b0bec415eadb..9ffa4fb17ccd 100644 --- a/sbt-dotty/sbt-test/source-dependencies/abstract-type-override/test +++ b/sbt-dotty/sbt-test/source-dependencies/abstract-type-override/test @@ -11,4 +11,4 @@ $ copy-file changes/Bar1.scala src/main/scala/Bar.scala # second iteration #> compile # check if there are only two compile iterations performed -> check-number-of-compiler-iterations 2 +> checkNumberOfCompilerIterations 2 From 2cf0abe34fda81f43e45915456fc5d3b48c68150 Mon Sep 17 00:00:00 2001 From: 
Martin Duhem Date: Fri, 20 Oct 2017 11:27:49 +0200 Subject: [PATCH 19/49] Fix `source-dependencies/transitive-memberRef` --- .../transitive-memberRef/build.sbt | 40 +++++++++---------- .../transitive-memberRef/test | 2 +- 2 files changed, 19 insertions(+), 23 deletions(-) diff --git a/sbt-dotty/sbt-test/source-dependencies/transitive-memberRef/build.sbt b/sbt-dotty/sbt-test/source-dependencies/transitive-memberRef/build.sbt index d24e304b1bc1..5180981ce13c 100644 --- a/sbt-dotty/sbt-test/source-dependencies/transitive-memberRef/build.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/transitive-memberRef/build.sbt @@ -1,40 +1,36 @@ logLevel := Level.Debug -incOptions := incOptions.value.withNameHashing(true) - -// disable sbt's heauristic which recompiles everything in case +// disable sbt's heuristic which recompiles everything in case // some fraction (e.g. 50%) of files is scheduled to be recompiled // in this test we want precise information about recompiled files // which that heuristic would distort -incOptions := incOptions.value.copy(recompileAllFraction = 1.0) +incOptions := incOptions.value.withRecompileAllFraction(1.0) /* Performs checks related to compilations: * a) checks in which compilation given set of files was recompiled * b) checks overall number of compilations performed */ TaskKey[Unit]("check-compilations") := { - val analysis = (compile in Compile).value + val analysis = (compile in Compile).value.asInstanceOf[sbt.internal.inc.Analysis] val srcDir = (scalaSource in Compile).value - def relative(f: java.io.File): java.io.File = f.relativeTo(srcDir) getOrElse f val allCompilations = analysis.compilations.allCompilations - val recompiledFiles: Seq[Set[java.io.File]] = allCompilations map { c => - val recompiledFiles = analysis.apis.internal.collect { - case (file, api) if api.compilation.startTime == c.startTime => relative(file) + val recompiledClasses: Seq[Set[String]] = allCompilations map { c => + val recompiledClasses = analysis.apis.internal.collect { + case (clazz, api) if api.compilationTimestamp() == c.getStartTime() => clazz } - recompiledFiles.toSet + recompiledClasses.toSet } - def recompiledFilesInIteration(iteration: Int, fileNames: Set[String]) = { - val files = fileNames.map(new java.io.File(_)) - assert(recompiledFiles(iteration) == files, "%s != %s".format(recompiledFiles(iteration), files)) + def recompiledClassesInIteration(iteration: Int, classNames: Set[String]): Unit = { + assert(recompiledClasses(iteration) == classNames, "%s != %s".format(recompiledClasses(iteration), classNames)) } - // Y.scala is compiled only at the beginning as changes to A.scala do not affect it - recompiledFilesInIteration(0, Set("X.scala", "Y.scala")) - // A.scala is changed and recompiled - recompiledFilesInIteration(1, Set("A.scala")) - // change in A.scala causes recompilation of B.scala, C.scala, D.scala which depend on transtiviely - // and by inheritance on A.scala - // X.scala is also recompiled because it depends by member reference on B.scala - // Note that Y.scala is not recompiled because it depends just on X through member reference dependency - recompiledFilesInIteration(2, Set("B.scala", "C.scala", "D.scala")) + // test.Y is compiled only at the beginning as changes to test.A do not affect it + recompiledClassesInIteration(0, Set("test.X", "test.Y")) + // test.A is changed and recompiled + recompiledClassesInIteration(1, Set("test.A")) + // change in test.A causes recompilation of test.B, test.C, test.D which depend on transitively + // and by inheritance on 
test.A + // test.X is also recompiled because it depends by member reference on test.B + // Note that test.Y is not recompiled because it depends just on X through member reference dependency + recompiledClassesInIteration(2, Set("test.B", "test.C", "test.D")) assert(allCompilations.size == 3) } diff --git a/sbt-dotty/sbt-test/source-dependencies/transitive-memberRef/test b/sbt-dotty/sbt-test/source-dependencies/transitive-memberRef/test index 395f90229b5c..a39fe13a99ed 100644 --- a/sbt-dotty/sbt-test/source-dependencies/transitive-memberRef/test +++ b/sbt-dotty/sbt-test/source-dependencies/transitive-memberRef/test @@ -8,4 +8,4 @@ $ copy-file changes/A1.scala src/main/scala/A.scala # second iteration > compile # check in which compile iteration given source file got recompiled -> check-compilations +> checkCompilations From f232fbe9723b1ab38a4220da6d95f252d6a22fac Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Fri, 20 Oct 2017 12:11:36 +0200 Subject: [PATCH 20/49] Disable `source-dependencies/inherited-deps-java` That test depended on NameHashing being disabled, which is no longer supported with Zinc 1.0 --- .../source-dependencies/inherited-deps-java/{test => disabled} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename sbt-dotty/sbt-test/source-dependencies/inherited-deps-java/{test => disabled} (100%) diff --git a/sbt-dotty/sbt-test/source-dependencies/inherited-deps-java/test b/sbt-dotty/sbt-test/source-dependencies/inherited-deps-java/disabled similarity index 100% rename from sbt-dotty/sbt-test/source-dependencies/inherited-deps-java/test rename to sbt-dotty/sbt-test/source-dependencies/inherited-deps-java/disabled From 494beb96d99b51ae719ee056e8c2e2585750de18 Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Fri, 20 Oct 2017 12:25:33 +0200 Subject: [PATCH 21/49] Fix test `source-dependencies/compactify` --- .../sbt-test/source-dependencies/compactify/build.sbt | 7 ++++--- sbt-dotty/sbt-test/source-dependencies/compactify/test | 6 +++--- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/sbt-dotty/sbt-test/source-dependencies/compactify/build.sbt b/sbt-dotty/sbt-test/source-dependencies/compactify/build.sbt index f44ca08623f2..121f59cd756b 100644 --- a/sbt-dotty/sbt-test/source-dependencies/compactify/build.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/compactify/build.sbt @@ -1,6 +1,7 @@ -TaskKey[Unit]("output-empty") <<= classDirectory in Configurations.Compile map { outputDirectory => - def classes = (outputDirectory ** "*.class").get - if(!classes.isEmpty) sys.error("Classes existed:\n\t" + classes.mkString("\n\t")) else () +TaskKey[Unit]("output-empty") := { + val outputDirectory = (classDirectory in Compile).value + val classes = (outputDirectory ** "*.class").get + if (classes.nonEmpty) sys.error("Classes existed:\n\t" + classes.mkString("\n\t")) else () } // apparently Travis CI stopped allowing long file names diff --git a/sbt-dotty/sbt-test/source-dependencies/compactify/test b/sbt-dotty/sbt-test/source-dependencies/compactify/test index e2abf578b8c0..b56be3e5d4aa 100644 --- a/sbt-dotty/sbt-test/source-dependencies/compactify/test +++ b/sbt-dotty/sbt-test/source-dependencies/compactify/test @@ -1,8 +1,8 @@ # Marked pending due to https://github.com/sbt/sbt/issues/1553 -> output-empty +> outputEmpty > compile --> output-empty +-> outputEmpty $ delete src/main/scala/For.scala src/main/scala/Nested.scala > compile -> output-empty \ No newline at end of file +> outputEmpty \ No newline at end of file From b749648d94bae6df7fbbbe12bcaf5c1e186140b5 
Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Fri, 20 Oct 2017 12:31:43 +0200 Subject: [PATCH 22/49] Mark test `source-dependencies/binary` pending It isn't passing in sbt/sbt either. See sbt/sbt#3691 --- .../sbt-test/source-dependencies/binary/build.sbt | 5 +++++ .../source-dependencies/binary/{test => pending} | 0 .../source-dependencies/binary/project/P.scala | 10 ---------- 3 files changed, 5 insertions(+), 10 deletions(-) create mode 100644 sbt-dotty/sbt-test/source-dependencies/binary/build.sbt rename sbt-dotty/sbt-test/source-dependencies/binary/{test => pending} (100%) diff --git a/sbt-dotty/sbt-test/source-dependencies/binary/build.sbt b/sbt-dotty/sbt-test/source-dependencies/binary/build.sbt new file mode 100644 index 000000000000..40d7ea353760 --- /dev/null +++ b/sbt-dotty/sbt-test/source-dependencies/binary/build.sbt @@ -0,0 +1,5 @@ +lazy val dep = project.in(file("dep")) +lazy val use = project.in(file("use")). + settings( + unmanagedJars in Compile := Attributed.blank(packageBin.in(dep, Compile).value) :: Nil + ) diff --git a/sbt-dotty/sbt-test/source-dependencies/binary/test b/sbt-dotty/sbt-test/source-dependencies/binary/pending similarity index 100% rename from sbt-dotty/sbt-test/source-dependencies/binary/test rename to sbt-dotty/sbt-test/source-dependencies/binary/pending diff --git a/sbt-dotty/sbt-test/source-dependencies/binary/project/P.scala b/sbt-dotty/sbt-test/source-dependencies/binary/project/P.scala index 9cabc95a4e82..e69de29bb2d1 100644 --- a/sbt-dotty/sbt-test/source-dependencies/binary/project/P.scala +++ b/sbt-dotty/sbt-test/source-dependencies/binary/project/P.scala @@ -1,10 +0,0 @@ -import sbt._ -import Keys._ - -object B extends Build -{ - lazy val dep = Project("dep", file("dep")) - lazy val use = Project("use", file("use")) settings( - unmanagedJars in Compile <+= packageBin in (dep, Compile) map Attributed.blank - ) -} From dd7841dcdf1db4303afca8634262c3de4609cf45 Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Fri, 20 Oct 2017 12:37:48 +0200 Subject: [PATCH 23/49] Fix test source-dependencies/export-jars --- .../sbt-test/source-dependencies/export-jars/build.sbt | 2 ++ .../source-dependencies/export-jars/changes/build2.sbt | 3 +++ .../source-dependencies/export-jars/project/Build.scala | 7 ------- 3 files changed, 5 insertions(+), 7 deletions(-) create mode 100644 sbt-dotty/sbt-test/source-dependencies/export-jars/build.sbt delete mode 100644 sbt-dotty/sbt-test/source-dependencies/export-jars/project/Build.scala diff --git a/sbt-dotty/sbt-test/source-dependencies/export-jars/build.sbt b/sbt-dotty/sbt-test/source-dependencies/export-jars/build.sbt new file mode 100644 index 000000000000..8b39c7cf680e --- /dev/null +++ b/sbt-dotty/sbt-test/source-dependencies/export-jars/build.sbt @@ -0,0 +1,2 @@ +lazy val root = project.in(file(".")).dependsOn(a) +lazy val a = project.in(file("a")) diff --git a/sbt-dotty/sbt-test/source-dependencies/export-jars/changes/build2.sbt b/sbt-dotty/sbt-test/source-dependencies/export-jars/changes/build2.sbt index 0f5735bc81d0..3fa7cbb370fb 100644 --- a/sbt-dotty/sbt-test/source-dependencies/export-jars/changes/build2.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/export-jars/changes/build2.sbt @@ -1 +1,4 @@ +lazy val root = Project("root", file(".")) dependsOn(a) +lazy val a = Project("a", file("a")) + exportJars := true \ No newline at end of file diff --git a/sbt-dotty/sbt-test/source-dependencies/export-jars/project/Build.scala b/sbt-dotty/sbt-test/source-dependencies/export-jars/project/Build.scala deleted file 
mode 100644 index 4a783acbe158..000000000000 --- a/sbt-dotty/sbt-test/source-dependencies/export-jars/project/Build.scala +++ /dev/null @@ -1,7 +0,0 @@ -import sbt._ - -object Build extends Build -{ - lazy val root = Project("root", file(".")) dependsOn(a) - lazy val a = Project("a", file("a")) -} \ No newline at end of file From 98c9e8da83a54ce47089c0ad2c6489f88f72631b Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Tue, 7 Nov 2017 11:26:56 +0100 Subject: [PATCH 24/49] Update test `source-dependencies/import-class` It has been changed in sbt/zinc. --- sbt-dotty/sbt-test/source-dependencies/import-class/B.scala | 2 ++ 1 file changed, 2 insertions(+) diff --git a/sbt-dotty/sbt-test/source-dependencies/import-class/B.scala b/sbt-dotty/sbt-test/source-dependencies/import-class/B.scala index 0489f4a26c29..eb81ff6bd6e7 100644 --- a/sbt-dotty/sbt-test/source-dependencies/import-class/B.scala +++ b/sbt-dotty/sbt-test/source-dependencies/import-class/B.scala @@ -1 +1,3 @@ import a.A + +class B From 479c6c2f39470959637b869a8076552cfa77aecb Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Fri, 20 Oct 2017 13:24:47 +0200 Subject: [PATCH 25/49] Fix test source-dependencies/trait-member-modified --- .../trait-member-modified/build.sbt | 21 ++++++++----------- .../trait-member-modified/test | 2 +- 2 files changed, 10 insertions(+), 13 deletions(-) diff --git a/sbt-dotty/sbt-test/source-dependencies/trait-member-modified/build.sbt b/sbt-dotty/sbt-test/source-dependencies/trait-member-modified/build.sbt index 949d782317c0..1ebe8fad1a41 100644 --- a/sbt-dotty/sbt-test/source-dependencies/trait-member-modified/build.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/trait-member-modified/build.sbt @@ -3,25 +3,22 @@ * b) checks overall number of compilations performed */ TaskKey[Unit]("check-compilations") := { - val analysis = (compile in Compile).value - val srcDir = (scalaSource in Compile).value - def relative(f: java.io.File): java.io.File = f.relativeTo(srcDir) getOrElse f + val analysis = (compile in Compile).value.asInstanceOf[sbt.internal.inc.Analysis] val allCompilations = analysis.compilations.allCompilations - val recompiledFiles: Seq[Set[java.io.File]] = allCompilations map { c => - val recompiledFiles = analysis.apis.internal.collect { - case (file, api) if api.compilation.startTime == c.startTime => relative(file) + val recompiledClasses: Seq[Set[String]] = allCompilations map { c => + val recompiledClasses = analysis.apis.internal.collect { + case (clazz, api) if api.compilationTimestamp() == c.getStartTime() => clazz } - recompiledFiles.toSet + recompiledClasses.toSet } - def recompiledFilesInIteration(iteration: Int, fileNames: Set[String]) = { - val files = fileNames.map(new java.io.File(_)) - assert(recompiledFiles(iteration) == files, "%s != %s".format(recompiledFiles(iteration), files)) + def recompiledFilesInIteration(iteration: Int, classNames: Set[String]): Unit = { + assert(recompiledClasses(iteration) == classNames, "%s != %s".format(recompiledClasses(iteration), classNames)) } assert(allCompilations.size == 2) // B.scala is just compiled at the beginning - recompiledFilesInIteration(0, Set("B.scala")) + recompiledFilesInIteration(0, Set("B")) // A.scala is changed and recompiled - recompiledFilesInIteration(1, Set("A.scala")) + recompiledFilesInIteration(1, Set("A")) } logLevel := Level.Debug diff --git a/sbt-dotty/sbt-test/source-dependencies/trait-member-modified/test b/sbt-dotty/sbt-test/source-dependencies/trait-member-modified/test index f8f7cb076b2b..183e1d40e805 100644 --- 
a/sbt-dotty/sbt-test/source-dependencies/trait-member-modified/test +++ b/sbt-dotty/sbt-test/source-dependencies/trait-member-modified/test @@ -6,4 +6,4 @@ $ copy-file changes/A1.scala src/main/scala/A.scala # only A.scala should be recompiled > compile # check if there are only two compile iterations performed -> check-compilations +> checkCompilations From 240e133f7390ee7565ea03bfc692ba630a182581 Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Fri, 20 Oct 2017 13:29:31 +0200 Subject: [PATCH 26/49] Fix test source-dependencies/java-analyis-serialization-error --- .../java-analysis-serialization-error/build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sbt-dotty/sbt-test/source-dependencies/java-analysis-serialization-error/build.sbt b/sbt-dotty/sbt-test/source-dependencies/java-analysis-serialization-error/build.sbt index 1b1ddefb98ce..f78f33916fce 100644 --- a/sbt-dotty/sbt-test/source-dependencies/java-analysis-serialization-error/build.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/java-analysis-serialization-error/build.sbt @@ -1 +1 @@ -incOptions := incOptions.value.withNameHashing(true).withApiDebug(true) +incOptions := incOptions.value.withApiDebug(true) From 08e09b8800857bd0eb3af67ce2a4d534dd4eb63f Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Fri, 20 Oct 2017 13:38:59 +0200 Subject: [PATCH 27/49] Fix test source-dependencies/canon --- sbt-dotty/sbt-test/source-dependencies/canon/build.sbt | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/sbt-dotty/sbt-test/source-dependencies/canon/build.sbt b/sbt-dotty/sbt-test/source-dependencies/canon/build.sbt index d23dff7054d2..d7524d433978 100644 --- a/sbt-dotty/sbt-test/source-dependencies/canon/build.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/canon/build.sbt @@ -3,8 +3,10 @@ import complete.DefaultParsers._ val checkIterations = inputKey[Unit]("Verifies the accumlated number of iterations of incremental compilation.") checkIterations := { - val expected: Int = (Space ~> NatBasic).parsed - val actual: Int = (compile in Compile).value.compilations.allCompilations.size - assert(expected == actual, s"Expected $expected compilations, got $actual") + val analysis = (compile in Compile).value.asInstanceOf[sbt.internal.inc.Analysis] + + val expected: Int = (Space ~> NatBasic).parsed + val actual: Int = analysis.compilations.allCompilations.size + assert(expected == actual, s"Expected $expected compilations, got $actual") } From 939dd314678130b431d8a5d4fc76a841a0b27800 Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Fri, 20 Oct 2017 13:44:40 +0200 Subject: [PATCH 28/49] Fix test source-dependencies/restore-classes --- .../source-dependencies/restore-classes/build.sbt | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/sbt-dotty/sbt-test/source-dependencies/restore-classes/build.sbt b/sbt-dotty/sbt-test/source-dependencies/restore-classes/build.sbt index 2231204ea3bf..cf38564cc570 100644 --- a/sbt-dotty/sbt-test/source-dependencies/restore-classes/build.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/restore-classes/build.sbt @@ -5,8 +5,10 @@ crossTarget in Compile := target.value val checkIterations = inputKey[Unit]("Verifies the accumlated number of iterations of incremental compilation.") checkIterations := { - val expected: Int = (Space ~> NatBasic).parsed - val actual: Int = (compile in Compile).value.compilations.allCompilations.size - assert(expected == actual, s"Expected $expected compilations, got $actual") + val analysis = (compile in 
Compile).value.asInstanceOf[sbt.internal.inc.Analysis] + + val expected: Int = (Space ~> NatBasic).parsed + val actual: Int = analysis.compilations.allCompilations.size + assert(expected == actual, s"Expected $expected compilations, got $actual") } From 4ee6fd528c2472cc7b404dfbadbca8c02409c9e8 Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Fri, 20 Oct 2017 13:50:57 +0200 Subject: [PATCH 29/49] Fix test source-dependencies/type-alias --- sbt-dotty/sbt-test/source-dependencies/type-alias/build.sbt | 2 -- 1 file changed, 2 deletions(-) diff --git a/sbt-dotty/sbt-test/source-dependencies/type-alias/build.sbt b/sbt-dotty/sbt-test/source-dependencies/type-alias/build.sbt index c5a1099aacad..00edfde1d056 100644 --- a/sbt-dotty/sbt-test/source-dependencies/type-alias/build.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/type-alias/build.sbt @@ -1,3 +1 @@ logLevel in compile := Level.Debug - -incOptions := incOptions.value.withNameHashing(true) From 159a19e10a7fe4a547a56617a10b755e35fa203a Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Fri, 20 Oct 2017 13:57:40 +0200 Subject: [PATCH 30/49] Fix test source-dependencies/typeref-only --- sbt-dotty/sbt-test/source-dependencies/typeref-only/build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sbt-dotty/sbt-test/source-dependencies/typeref-only/build.sbt b/sbt-dotty/sbt-test/source-dependencies/typeref-only/build.sbt index 02813797f231..ea80ab8d990d 100644 --- a/sbt-dotty/sbt-test/source-dependencies/typeref-only/build.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/typeref-only/build.sbt @@ -2,4 +2,4 @@ logLevel := Level.Debug // disable recompile all which causes full recompile which // makes it more difficult to test dependency tracking -incOptions ~= { _.copy(recompileAllFraction = 1.0) } +incOptions := incOptions.value.withRecompileAllFraction(1.0) From 149c8b1b2277a2593aa72f8a13b61f90073698b5 Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Fri, 20 Oct 2017 14:01:45 +0200 Subject: [PATCH 31/49] Fix test source-dependencies/less-inter-inv-java --- .../source-dependencies/less-inter-inv-java/build.sbt | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/sbt-dotty/sbt-test/source-dependencies/less-inter-inv-java/build.sbt b/sbt-dotty/sbt-test/source-dependencies/less-inter-inv-java/build.sbt index d23dff7054d2..d7524d433978 100644 --- a/sbt-dotty/sbt-test/source-dependencies/less-inter-inv-java/build.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/less-inter-inv-java/build.sbt @@ -3,8 +3,10 @@ import complete.DefaultParsers._ val checkIterations = inputKey[Unit]("Verifies the accumlated number of iterations of incremental compilation.") checkIterations := { - val expected: Int = (Space ~> NatBasic).parsed - val actual: Int = (compile in Compile).value.compilations.allCompilations.size - assert(expected == actual, s"Expected $expected compilations, got $actual") + val analysis = (compile in Compile).value.asInstanceOf[sbt.internal.inc.Analysis] + + val expected: Int = (Space ~> NatBasic).parsed + val actual: Int = analysis.compilations.allCompilations.size + assert(expected == actual, s"Expected $expected compilations, got $actual") } From 389cdcc99686baa3c7c8287b833081474bd8f4ca Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Fri, 20 Oct 2017 14:10:50 +0200 Subject: [PATCH 32/49] Fix test source-dependencies/backtick-qupted-names --- .../sbt-test/source-dependencies/backtick-quoted-names/build.sbt | 1 - 1 file changed, 1 deletion(-) delete mode 100644 
sbt-dotty/sbt-test/source-dependencies/backtick-quoted-names/build.sbt diff --git a/sbt-dotty/sbt-test/source-dependencies/backtick-quoted-names/build.sbt b/sbt-dotty/sbt-test/source-dependencies/backtick-quoted-names/build.sbt deleted file mode 100644 index 8a38ef41424b..000000000000 --- a/sbt-dotty/sbt-test/source-dependencies/backtick-quoted-names/build.sbt +++ /dev/null @@ -1 +0,0 @@ -incOptions := incOptions.value.withNameHashing(true) From ca7774385dded4462942e462c580e468ce6a094c Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Fri, 20 Oct 2017 14:35:04 +0200 Subject: [PATCH 33/49] Fix test source-dependencies/replace-test-a --- .../replace-test-a/build.sbt | 14 ++++++++++++++ .../replace-test-a/project/Build.scala | 19 ------------------- .../source-dependencies/replace-test-a/test | 8 ++++---- 3 files changed, 18 insertions(+), 23 deletions(-) create mode 100644 sbt-dotty/sbt-test/source-dependencies/replace-test-a/build.sbt delete mode 100644 sbt-dotty/sbt-test/source-dependencies/replace-test-a/project/Build.scala diff --git a/sbt-dotty/sbt-test/source-dependencies/replace-test-a/build.sbt b/sbt-dotty/sbt-test/source-dependencies/replace-test-a/build.sbt new file mode 100644 index 000000000000..c63b7bc8a91a --- /dev/null +++ b/sbt-dotty/sbt-test/source-dependencies/replace-test-a/build.sbt @@ -0,0 +1,14 @@ +import java.net.URLClassLoader + +lazy val root = project.in(file(".")). + settings( + TaskKey[Unit]("check-first") := checkTask("First").value, + TaskKey[Unit]("check-second") := checkTask("Second").value + ) + +def checkTask(className: String) = Def.task { + val runClasspath = (fullClasspath in Runtime).value + val cp = runClasspath.map(_.data.toURI.toURL).toArray + Class.forName(className, false, new URLClassLoader(cp)) + () +} diff --git a/sbt-dotty/sbt-test/source-dependencies/replace-test-a/project/Build.scala b/sbt-dotty/sbt-test/source-dependencies/replace-test-a/project/Build.scala deleted file mode 100644 index 9c2678540442..000000000000 --- a/sbt-dotty/sbt-test/source-dependencies/replace-test-a/project/Build.scala +++ /dev/null @@ -1,19 +0,0 @@ -import sbt._ -import Keys._ -import java.net.URLClassLoader - -object B extends Build -{ - lazy val root = Project("root", file(".")) settings( ss : _*) - - def ss = Seq( - TaskKey[Unit]("check-first") <<= checkTask("First"), - TaskKey[Unit]("check-second") <<= checkTask("Second") - ) - private def checkTask(className: String) = - fullClasspath in Configurations.Runtime map { runClasspath => - val cp = runClasspath.map(_.data.toURI.toURL).toArray - Class.forName(className, false, new URLClassLoader(cp)) - () - } -} diff --git a/sbt-dotty/sbt-test/source-dependencies/replace-test-a/test b/sbt-dotty/sbt-test/source-dependencies/replace-test-a/test index 4b4ad3a2b953..21dec1db9924 100644 --- a/sbt-dotty/sbt-test/source-dependencies/replace-test-a/test +++ b/sbt-dotty/sbt-test/source-dependencies/replace-test-a/test @@ -1,9 +1,9 @@ $ copy-file changes/first.scala src/main/scala/A.scala > compile -> check-first --> check-second +> checkFirst +-> checkSecond $ copy-file changes/second.scala src/main/scala/A.scala > compile --> check-first -> check-second \ No newline at end of file +-> checkFirst +> checkSecond \ No newline at end of file From e4f092dddf421b96d78dee1699a4cfe3607fc1ef Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Fri, 20 Oct 2017 14:43:08 +0200 Subject: [PATCH 34/49] Fix test source-dependencies/inherited_type_params --- .../inherited_type_params/build.sbt | 11 ++++++----- 
.../source-dependencies/inherited_type_params/test | 2 +- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/sbt-dotty/sbt-test/source-dependencies/inherited_type_params/build.sbt b/sbt-dotty/sbt-test/source-dependencies/inherited_type_params/build.sbt index a5982f901fd7..e25f1beeaaba 100644 --- a/sbt-dotty/sbt-test/source-dependencies/inherited_type_params/build.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/inherited_type_params/build.sbt @@ -1,7 +1,8 @@ name := "test" -TaskKey[Unit]("check-same") <<= compile in Configurations.Compile map { analysis => - analysis.apis.internal foreach { case (_, api) => - assert( xsbt.api.SameAPI(api.api, api.api) ) - } -} \ No newline at end of file +TaskKey[Unit]("check-same") := { + val analysis = (compile in Compile).value.asInstanceOf[sbt.internal.inc.Analysis] + analysis.apis.internal.foreach { case (_, api) => + assert(xsbt.api.SameAPI(api.api, api.api)) + } +} diff --git a/sbt-dotty/sbt-test/source-dependencies/inherited_type_params/test b/sbt-dotty/sbt-test/source-dependencies/inherited_type_params/test index 8434347c5a23..353461049b5f 100644 --- a/sbt-dotty/sbt-test/source-dependencies/inherited_type_params/test +++ b/sbt-dotty/sbt-test/source-dependencies/inherited_type_params/test @@ -1 +1 @@ -> check-same \ No newline at end of file +> checkSame \ No newline at end of file From ce237c749571ef14d013db4a98d2fa6472619185 Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Fri, 20 Oct 2017 14:45:32 +0200 Subject: [PATCH 35/49] Fix test source-dependencies/same-file-used-names --- .../sbt-test/source-dependencies/same-file-used-names/build.sbt | 1 - 1 file changed, 1 deletion(-) delete mode 100644 sbt-dotty/sbt-test/source-dependencies/same-file-used-names/build.sbt diff --git a/sbt-dotty/sbt-test/source-dependencies/same-file-used-names/build.sbt b/sbt-dotty/sbt-test/source-dependencies/same-file-used-names/build.sbt deleted file mode 100644 index 8a38ef41424b..000000000000 --- a/sbt-dotty/sbt-test/source-dependencies/same-file-used-names/build.sbt +++ /dev/null @@ -1 +0,0 @@ -incOptions := incOptions.value.withNameHashing(true) From b46a4b3652d5e8f5d7b962292442c6048e16fdc3 Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Fri, 20 Oct 2017 14:48:27 +0200 Subject: [PATCH 36/49] Fix test source-dependencies/ext --- sbt-dotty/sbt-test/source-dependencies/ext/build.sbt | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/sbt-dotty/sbt-test/source-dependencies/ext/build.sbt b/sbt-dotty/sbt-test/source-dependencies/ext/build.sbt index 8aaec76ecfc2..bbc053fa6385 100644 --- a/sbt-dotty/sbt-test/source-dependencies/ext/build.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/ext/build.sbt @@ -3,7 +3,9 @@ import complete.DefaultParsers._ val checkIterations = inputKey[Unit]("Verifies the accumlated number of iterations of incremental compilation.") checkIterations := { - val expected: Int = (Space ~> NatBasic).parsed - val actual: Int = (compile in Compile).value.compilations.allCompilations.size - assert(expected == actual, s"Expected $expected compilations, got $actual") + val analysis = (compile in Compile).value.asInstanceOf[sbt.internal.inc.Analysis] + + val expected: Int = (Space ~> NatBasic).parsed + val actual: Int = analysis.compilations.allCompilations.size + assert(expected == actual, s"Expected $expected compilations, got $actual") } \ No newline at end of file From 9281d1ca3fb590874403e42a82316bc2010fee48 Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Fri, 20 Oct 2017 14:53:23 +0200 Subject: [PATCH 37/49] Fix 
test source-dependencies/less-inter-inv --- .../sbt-test/source-dependencies/less-inter-inv/build.sbt | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/sbt-dotty/sbt-test/source-dependencies/less-inter-inv/build.sbt b/sbt-dotty/sbt-test/source-dependencies/less-inter-inv/build.sbt index d23dff7054d2..d7524d433978 100644 --- a/sbt-dotty/sbt-test/source-dependencies/less-inter-inv/build.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/less-inter-inv/build.sbt @@ -3,8 +3,10 @@ import complete.DefaultParsers._ val checkIterations = inputKey[Unit]("Verifies the accumlated number of iterations of incremental compilation.") checkIterations := { - val expected: Int = (Space ~> NatBasic).parsed - val actual: Int = (compile in Compile).value.compilations.allCompilations.size - assert(expected == actual, s"Expected $expected compilations, got $actual") + val analysis = (compile in Compile).value.asInstanceOf[sbt.internal.inc.Analysis] + + val expected: Int = (Space ~> NatBasic).parsed + val actual: Int = analysis.compilations.allCompilations.size + assert(expected == actual, s"Expected $expected compilations, got $actual") } From ff34abb401a276278faa778c4daa1a170d343141 Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Fri, 20 Oct 2017 14:57:55 +0200 Subject: [PATCH 38/49] Fix test sbt-dotty/example-project --- sbt-dotty/sbt-test/sbt-dotty/example-project/build.sbt | 3 ++- .../sbt-dotty/example-project/project/build.properties | 1 - 2 files changed, 2 insertions(+), 2 deletions(-) delete mode 100644 sbt-dotty/sbt-test/sbt-dotty/example-project/project/build.properties diff --git a/sbt-dotty/sbt-test/sbt-dotty/example-project/build.sbt b/sbt-dotty/sbt-test/sbt-dotty/example-project/build.sbt index 07fd238624a3..6e0adfaf4d08 100644 --- a/sbt-dotty/sbt-test/sbt-dotty/example-project/build.sbt +++ b/sbt-dotty/sbt-test/sbt-dotty/example-project/build.sbt @@ -1,3 +1,4 @@ scalaVersion := sys.props("plugin.scalaVersion") -libraryDependencies += ("org.scala-lang.modules" %% "scala-xml" % "1.0.6").withDottyCompat() +libraryDependencies += +("org.scala-lang.modules" %% "scala-xml" % "1.0.6").withDottyCompat(scalaVersion.value) diff --git a/sbt-dotty/sbt-test/sbt-dotty/example-project/project/build.properties b/sbt-dotty/sbt-test/sbt-dotty/example-project/project/build.properties deleted file mode 100644 index 64317fdae59f..000000000000 --- a/sbt-dotty/sbt-test/sbt-dotty/example-project/project/build.properties +++ /dev/null @@ -1 +0,0 @@ -sbt.version=0.13.15 From 805ecebaa413d43ff5a6fc2210f36732aaf3c972 Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Fri, 20 Oct 2017 15:10:09 +0200 Subject: [PATCH 39/49] Fix test compilerReporter/simple --- .../simple/project/Reporter.scala | 29 +++++++------------ 1 file changed, 10 insertions(+), 19 deletions(-) diff --git a/sbt-dotty/sbt-test/compilerReporter/simple/project/Reporter.scala b/sbt-dotty/sbt-test/compilerReporter/simple/project/Reporter.scala index d2dae1fc202a..1d72caa02cab 100644 --- a/sbt-dotty/sbt-test/compilerReporter/simple/project/Reporter.scala +++ b/sbt-dotty/sbt-test/compilerReporter/simple/project/Reporter.scala @@ -3,38 +3,29 @@ import Keys._ import KeyRanks.DTask object Reporter { - import xsbti.{Reporter, Problem, Position, Severity, Maybe} + import xsbti.{Reporter, Problem, Position, Severity} lazy val check = TaskKey[Unit]("check", "make sure compilation info are forwared to sbt") // compilerReporter is marked private in sbt - lazy val compilerReporter = TaskKey[Option[xsbti.Reporter]]("compilerReporter", "Experimental 
hook to listen (or send) compilation failure messages.", DTask) - - lazy val reporter = - Some(new xsbti.Reporter { + lazy val compilerReporter = TaskKey[xsbti.Reporter]("compilerReporter", "Experimental hook to listen (or send) compilation failure messages.", DTask) + + lazy val reporter = + new xsbti.Reporter { private val buffer = collection.mutable.ArrayBuffer.empty[Problem] def reset(): Unit = buffer.clear() def hasErrors: Boolean = buffer.exists(_.severity == Severity.Error) def hasWarnings: Boolean = buffer.exists(_.severity == Severity.Warn) def printSummary(): Unit = println(problems.mkString(System.lineSeparator)) def problems: Array[Problem] = buffer.toArray - def log(pos: Position, msg: String, sev: Severity): Unit = { - object MyProblem extends Problem { - def category: String = null - def severity: Severity = sev - def message: String = msg - def position: Position = pos - override def toString = s"custom: $position:$severity: $message" - } - buffer.append(MyProblem) - } + def log(problem: Problem): Unit = buffer.append(problem) def comment(pos: xsbti.Position, msg: String): Unit = () - }) + } lazy val checkSettings = Seq( compilerReporter in (Compile, compile) := reporter, - check <<= (compile in Compile).mapFailure( _ => { - val problems = reporter.get.problems + check := (compile in Compile).failure.map(_ => { + val problems = reporter.problems println(problems.toList) assert(problems.size == 1) @@ -43,6 +34,6 @@ object Reporter { // assert(problems.forall(_.position.offset.isDefined)) assert(problems.count(_.severity == Severity.Error) == 1) // not found: er1, - }) + }).value ) } From 4475e9e3f158b95ccb3a4da04e5d2d7d29d9b88e Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Mon, 23 Oct 2017 07:03:14 +0200 Subject: [PATCH 40/49] Fix test source-dependencies/value-class-underlying --- .../value-class-underlying/C.scala | 1 + .../project/DottyInjectedPlugin.scala | 12 ++++++++++++ .../value-class-underlying/project/plugins.sbt | 1 + 3 files changed, 14 insertions(+) create mode 100644 sbt-dotty/sbt-test/source-dependencies/value-class-underlying/project/DottyInjectedPlugin.scala create mode 100644 sbt-dotty/sbt-test/source-dependencies/value-class-underlying/project/plugins.sbt diff --git a/sbt-dotty/sbt-test/source-dependencies/value-class-underlying/C.scala b/sbt-dotty/sbt-test/source-dependencies/value-class-underlying/C.scala index 1a9a42bde96f..0c09ea764fe5 100644 --- a/sbt-dotty/sbt-test/source-dependencies/value-class-underlying/C.scala +++ b/sbt-dotty/sbt-test/source-dependencies/value-class-underlying/C.scala @@ -1,5 +1,6 @@ object C { def main(args: Array[String]): Unit = { val x = B.foo + println("x: " + x) // Need to use x in an expression to see if it crashes or not } } diff --git a/sbt-dotty/sbt-test/source-dependencies/value-class-underlying/project/DottyInjectedPlugin.scala b/sbt-dotty/sbt-test/source-dependencies/value-class-underlying/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..ce3d46d79921 --- /dev/null +++ b/sbt-dotty/sbt-test/source-dependencies/value-class-underlying/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-language:Scala2" + ) +} diff --git a/sbt-dotty/sbt-test/source-dependencies/value-class-underlying/project/plugins.sbt 
b/sbt-dotty/sbt-test/source-dependencies/value-class-underlying/project/plugins.sbt new file mode 100644 index 000000000000..c17caab2d98c --- /dev/null +++ b/sbt-dotty/sbt-test/source-dependencies/value-class-underlying/project/plugins.sbt @@ -0,0 +1 @@ +addSbtPlugin("ch.epfl.lamp" % "sbt-dotty" % sys.props("plugin.version")) From 5b5b260e3cf0ea51849f6bcef5a40e87fa14c6bb Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Fri, 20 Oct 2017 15:30:44 +0200 Subject: [PATCH 41/49] Disable test source-dependencies/trait-private-var Reverted while implementing class-based NameHashing. See: - https://github.com/sbt/zinc/commit/189a2a52042535dcb830cece29f26ad2a99df759 - https://github.com/sbt/zinc/pull/86 --- .../source-dependencies/trait-private-var/{test => pending} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename sbt-dotty/sbt-test/source-dependencies/trait-private-var/{test => pending} (100%) diff --git a/sbt-dotty/sbt-test/source-dependencies/trait-private-var/test b/sbt-dotty/sbt-test/source-dependencies/trait-private-var/pending similarity index 100% rename from sbt-dotty/sbt-test/source-dependencies/trait-private-var/test rename to sbt-dotty/sbt-test/source-dependencies/trait-private-var/pending From df2ac081c77209432a98190ee7457e3af79cb0f1 Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Mon, 6 Nov 2017 21:32:24 +0100 Subject: [PATCH 42/49] Always run scripted tests --- .drone.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.drone.yml b/.drone.yml index eb340cae3d24..8835e0f9a5ce 100644 --- a/.drone.yml +++ b/.drone.yml @@ -47,9 +47,9 @@ pipeline: commands: - cp -R . /tmp/4/ && cd /tmp/4/ - ./project/scripts/sbt sbt-dotty/scripted - when: + # when: # sbt scripted tests are slow and don't run on PRs - event: [ push, tag, deployment ] + # event: [ push, tag, deployment ] # DOCUMENTATION: documentation: From d818a2ce61029c07c0b5746fa6e85bc6990961aa Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Tue, 7 Nov 2017 09:06:43 +0100 Subject: [PATCH 43/49] Split scripted tests in two groups --- .drone.yml | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/.drone.yml b/.drone.yml index 8835e0f9a5ce..384bc45cb0c9 100644 --- a/.drone.yml +++ b/.drone.yml @@ -41,12 +41,22 @@ pipeline: - cp -R . /tmp/3/ && cd /tmp/3/ - ./project/scripts/sbt dotty-optimised/testAll - test_sbt: + test_sbt_1of2: group: test image: lampepfl/dotty:2017-10-20 commands: - cp -R . /tmp/4/ && cd /tmp/4/ - - ./project/scripts/sbt sbt-dotty/scripted + - ./project/scripts/sbt "sbt-dotty/scripted compilerReporter/* discovery/* sbt-dotty/* source-dependencies/*1of2" + # when: + # sbt scripted tests are slow and don't run on PRs + # event: [ push, tag, deployment ] + + test_sbt_2of2: + group: test + image: lampepfl/dotty:2017-10-20 + commands: + - cp -R . /tmp/5/ && cd /tmp/5/ + - ./project/scripts/sbt "sbt-dotty/scripted source-dependencies/*2of2" # when: # sbt scripted tests are slow and don't run on PRs # event: [ push, tag, deployment ] From 2075f13a2219b0ac4c8539a5fbdd75fa5a1e1131 Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Tue, 7 Nov 2017 14:47:20 +0100 Subject: [PATCH 44/49] Remove `Message` and `Log` Their goal was to offer a simpler alternative to sbt's `F0`. Since `F0` got replaced by `Supplier`, they are no longer required. 
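For context, this is the pattern the deleted `Message` helper provided: wrapping a by-name Scala expression in a `java.util.function.Supplier`, the type that sbt 1.x's `xsbti.Logger` methods accept for lazily built messages. A minimal sketch, not part of this patch, assuming only the `xsbti.Logger` interface from Zinc 1.x on the classpath:

import java.util.function.Supplier
import xsbti.Logger

object LazyLogging {
  // Wrap a by-name expression in a Supplier, the type expected by sbt 1.x logging methods.
  private def supplied[T](s: => T): Supplier[T] = new Supplier[T] { def get(): T = s }

  def debugArgs(log: Logger, args: Array[String]): Unit =
    // The message string is only built if debug logging is actually enabled.
    log.debug(supplied(args.mkString("Calling the compiler with arguments:\n\t", "\n\t", "")))
}

With Scala 2.12's SAM conversion the wrapper is not even needed, which is why the change below simply passes `() => ...` to `log.debug`.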
--- sbt-bridge/src/xsbt/CompilerInterface.scala | 3 +-- sbt-bridge/src/xsbt/Log.scala | 10 ---------- sbt-bridge/src/xsbt/Message.scala | 10 ---------- 3 files changed, 1 insertion(+), 22 deletions(-) delete mode 100644 sbt-bridge/src/xsbt/Log.scala delete mode 100644 sbt-bridge/src/xsbt/Message.scala diff --git a/sbt-bridge/src/xsbt/CompilerInterface.scala b/sbt-bridge/src/xsbt/CompilerInterface.scala index 631524e17ebb..ac828b996957 100644 --- a/sbt-bridge/src/xsbt/CompilerInterface.scala +++ b/sbt-bridge/src/xsbt/CompilerInterface.scala @@ -5,7 +5,6 @@ package xsbt import xsbti.{ AnalysisCallback, Logger, Reporter, Severity } import xsbti.compile._ -import Log.debug import java.io.File import dotty.tools.dotc.core.Contexts.ContextBase @@ -52,7 +51,7 @@ class CachedCompilerImpl(args: Array[String], output: Output) extends CachedComp run(sources.toList, changes, callback, log, delegate, progress) } private[this] def run(sources: List[File], changes: DependencyChanges, callback: AnalysisCallback, log: Logger, delegate: Reporter, compileProgress: CompileProgress): Unit = { - debug(log, args.mkString("Calling Dotty compiler with arguments (CompilerInterface):\n\t", "\n\t", "")) + log.debug(() => args.mkString("Calling Dotty compiler with arguments (CompilerInterface):\n\t", "\n\t", "")) val ctx = (new ContextBase).initialCtx.fresh .setSbtCallback(callback) .setReporter(new DelegatingReporter(delegate)) diff --git a/sbt-bridge/src/xsbt/Log.scala b/sbt-bridge/src/xsbt/Log.scala deleted file mode 100644 index e514d7abbc46..000000000000 --- a/sbt-bridge/src/xsbt/Log.scala +++ /dev/null @@ -1,10 +0,0 @@ -/* sbt -- Simple Build Tool - * Copyright 2008, 2009 Mark Harrah - */ -package xsbt - -object Log { - def debug(log: xsbti.Logger, msg: => String) = log.debug(Message(msg)) - def settingsError(log: xsbti.Logger): String => Unit = - s => log.error(Message(s)) -} diff --git a/sbt-bridge/src/xsbt/Message.scala b/sbt-bridge/src/xsbt/Message.scala deleted file mode 100644 index ef45577a7739..000000000000 --- a/sbt-bridge/src/xsbt/Message.scala +++ /dev/null @@ -1,10 +0,0 @@ -/* sbt -- Simple Build Tool - * Copyright 2008, 2009 Mark Harrah - */ -package xsbt - -import java.util.function.Supplier - -object Message { - def apply[T](s: => T) = new Supplier[T] { def get() = s } -} From 39ac06b6c43d42d20a987736d8f554b3cae387ce Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Tue, 7 Nov 2017 15:02:21 +0100 Subject: [PATCH 45/49] Upgrade to sbt 1.0.3 --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index b7dd3cb2ae83..9abea1294a1f 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.0.2 +sbt.version=1.0.3 From f35894c5d7d000867d709b4994bb0f309aa6a4fa Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Tue, 7 Nov 2017 15:28:01 +0100 Subject: [PATCH 46/49] Change name of scala-library jar in tests For many tests, we need the `scala-library` jar on the classpath, and we used to search through the classpath for a JAR whose name contained `scala-library-2.`. This works great if the `scala-library` on the classpath comes from Ivy. However, if we use the same version of Scala as sbt, the `scala-library` will not be taken ouf of the Ivy cache but from sbt's boot directory. In this case, the JAR will simply be named `scala-library.jar`. 
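To illustrate the lookup described above, here is a hypothetical sketch (not the project's actual `findJarFromRuntime` implementation) of matching a runtime classpath entry by a name fragment; with the fragment `scala-library` it matches both a versioned `scala-library-2.*.jar` from the Ivy cache and the plain `scala-library.jar` from sbt's boot directory:

import java.io.File

object JarLookup {
  // Hypothetical helper, for illustration only: return the first runtime
  // classpath entry whose file name contains the given fragment.
  def findJarFromRuntime(partialName: String): String = {
    val classpath = sys.props("java.class.path").split(File.pathSeparator).toList
    classpath
      .find(entry => new File(entry).getName.contains(partialName))
      .getOrElse(sys.error(s"Unable to find a jar matching: $partialName"))
  }
}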
--- compiler/test/dotty/Jars.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/test/dotty/Jars.scala b/compiler/test/dotty/Jars.scala index dd06dc2a6fee..588cbd7167da 100644 --- a/compiler/test/dotty/Jars.scala +++ b/compiler/test/dotty/Jars.scala @@ -26,7 +26,7 @@ object Jars { dottyLib :: dottyCompiler :: dottyInterfaces :: dottyExtras def scalaLibrary: String = sys.env.get("DOTTY_SCALA_LIBRARY") - .getOrElse(findJarFromRuntime("scala-library-2.")) + .getOrElse(findJarFromRuntime("scala-library")) /** Gets the scala 2.* library at runtime, note that doing this is unsafe * unless you know that the library will be on the classpath of the running From ef10d7cc59efc7a02d55b346e74caed8dc7f65b7 Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Wed, 8 Nov 2017 10:18:33 +0100 Subject: [PATCH 47/49] Address review comments --- project/Build.scala | 4 +- .../dotty/tools/sbtplugin/DottyPlugin.scala | 60 +++++++++---------- 2 files changed, 32 insertions(+), 32 deletions(-) diff --git a/project/Build.scala b/project/Build.scala index fef3ad48ed97..33803f5246c7 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -160,7 +160,7 @@ object Build { // Avoid having to run `dotty-sbt-bridge/publishLocal` before compiling a bootstrapped project scalaCompilerBridgeSource := - (dottyOrganization %% "dotty-sbt-bridge" % "NOT_PUBLISHED") + (dottyOrganization %% "dotty-sbt-bridge" % dottyVersion) .artifacts(Artifact.sources("dotty-sbt-bridge").withUrl( // We cannot use the `packageSrc` task because a setting cannot depend // on a task. Instead, we make `compile` below depend on the bridge `packageSrc` @@ -199,7 +199,7 @@ object Build { Seq( dottyOrganization % "dotty-library_2.11" % dottyNonBootstrappedVersion % Configurations.ScalaTool.name, dottyOrganization % "dotty-compiler_2.11" % dottyNonBootstrappedVersion % Configurations.ScalaTool.name - )//.map(_.withDottyCompat(scalaVersion.value)) + ).map(_.withDottyCompat(scalaVersion.value)) else Seq() }, diff --git a/sbt-dotty/src/dotty/tools/sbtplugin/DottyPlugin.scala b/sbt-dotty/src/dotty/tools/sbtplugin/DottyPlugin.scala index 611677351d0d..201273b9026d 100644 --- a/sbt-dotty/src/dotty/tools/sbtplugin/DottyPlugin.scala +++ b/sbt-dotty/src/dotty/tools/sbtplugin/DottyPlugin.scala @@ -51,35 +51,35 @@ object DottyPlugin extends AutoPlugin { nightly } - implicit class DottyCompatModuleID(moduleID: ModuleID) { - /** If this ModuleID cross-version is a Dotty version, replace it - * by the Scala 2.x version that the Dotty version is retro-compatible with, - * otherwise do nothing. - * - * This setting is useful when your build contains dependencies that have only - * been published with Scala 2.x, if you have: - * {{{ - * libraryDependencies += "a" %% "b" % "c" - * }}} - * you can replace it by: - * {{{ - * libraryDependencies += ("a" %% "b" % "c").withDottyCompat(scalaVersion.value) - * }}} - * This will have no effect when compiling with Scala 2.x, but when compiling - * with Dotty this will change the cross-version to a Scala 2.x one. This - * works because Dotty is currently retro-compatible with Scala 2.x. - * - * NOTE: Dotty's retro-compatibility with Scala 2.x will be dropped before - * Dotty is released, you should not rely on it. 
- */ - def withDottyCompat(scalaVersion: String): ModuleID = - moduleID.crossVersion match { - case _: librarymanagement.Binary if scalaVersion.startsWith("0.") => - moduleID.cross(CrossVersion.constant("2.12")) - case _ => - moduleID - } - } + implicit class DottyCompatModuleID(moduleID: ModuleID) { + /** If this ModuleID cross-version is a Dotty version, replace it + * by the Scala 2.x version that the Dotty version is retro-compatible with, + * otherwise do nothing. + * + * This setting is useful when your build contains dependencies that have only + * been published with Scala 2.x, if you have: + * {{{ + * libraryDependencies += "a" %% "b" % "c" + * }}} + * you can replace it by: + * {{{ + * libraryDependencies += ("a" %% "b" % "c").withDottyCompat(scalaVersion.value) + * }}} + * This will have no effect when compiling with Scala 2.x, but when compiling + * with Dotty this will change the cross-version to a Scala 2.x one. This + * works because Dotty is currently retro-compatible with Scala 2.x. + * + * NOTE: Dotty's retro-compatibility with Scala 2.x will be dropped before + * Dotty is released, you should not rely on it. + */ + def withDottyCompat(scalaVersion: String): ModuleID = + moduleID.crossVersion match { + case _: librarymanagement.Binary if scalaVersion.startsWith("0.") => + moduleID.cross(CrossVersion.constant("2.12")) + case _ => + moduleID + } + } } import autoImport._ @@ -87,7 +87,7 @@ object DottyPlugin extends AutoPlugin { override def requires: Plugins = plugins.JvmPlugin override def trigger = allRequirements - // Adapted from CrossVersioconstant nUtil#sbtApiVersion + // Adapted from CrossVersionUtil#sbtApiVersion private def sbtFullVersion(v: String): Option[(Int, Int, Int)] = { val ReleaseV = """(\d+)\.(\d+)\.(\d+)(-\d+)?""".r From 80d36d521819a72d21522dceab0411e220338f6d Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Wed, 8 Nov 2017 10:34:55 +0100 Subject: [PATCH 48/49] Port sbt/zinc#444 --- compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala | 14 ++++++++++++-- .../value-class-underlying/C.scala | 4 ++-- 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index 08e4ee4c6323..a91edbbb041b 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -17,6 +17,7 @@ import StdNames._ import NameKinds.DefaultGetterName import typer.Inliner import typer.ErrorReporting.cyclicErrorMsg +import transform.ValueClasses import transform.SymUtils._ import dotty.tools.io.Path import java.io.PrintWriter @@ -254,8 +255,17 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder val cinfo = csym.classInfo val bases = - try linearizedAncestorTypes(cinfo) - catch { + try { + val ancestorTypes0 = linearizedAncestorTypes(cinfo) + if (ValueClasses.isDerivedValueClass(csym)) { + val underlying = ValueClasses.valueClassUnbox(csym).info.finalResultType + // The underlying type of a value class should be part of the name hash + // of the value class (see the test `value-class-underlying`), this is accomplished + // by adding the underlying type to the list of parent types. + underlying :: ancestorTypes0 + } else + ancestorTypes0 + } catch { case ex: CyclicReference => // See neg/i1750a for an example where a cyclic error can arise. 
// The root cause in this example is an illegal "override" of an inner trait diff --git a/sbt-dotty/sbt-test/source-dependencies/value-class-underlying/C.scala b/sbt-dotty/sbt-test/source-dependencies/value-class-underlying/C.scala index 0c09ea764fe5..9a6a97533086 100644 --- a/sbt-dotty/sbt-test/source-dependencies/value-class-underlying/C.scala +++ b/sbt-dotty/sbt-test/source-dependencies/value-class-underlying/C.scala @@ -1,6 +1,6 @@ object C { def main(args: Array[String]): Unit = { - val x = B.foo - println("x: " + x) // Need to use x in an expression to see if it crashes or not + val duck = B.foo + println("duck: " + duck) // Need to use duck in an expression to see if it crashes or not } } From 77051245455087ad89f36fc825c842427116c64e Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Thu, 9 Nov 2017 10:14:14 +0100 Subject: [PATCH 49/49] Address review comments --- .../src/dotty/tools/dotc/sbt/ExtractAPI.scala | 37 ++++++++++--------- project/Build.scala | 6 +-- 2 files changed, 22 insertions(+), 21 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index a91edbbb041b..3b9135dbdd93 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -254,24 +254,25 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder def apiClassStructure(csym: ClassSymbol): api.Structure = { val cinfo = csym.classInfo - val bases = - try { - val ancestorTypes0 = linearizedAncestorTypes(cinfo) - if (ValueClasses.isDerivedValueClass(csym)) { - val underlying = ValueClasses.valueClassUnbox(csym).info.finalResultType - // The underlying type of a value class should be part of the name hash - // of the value class (see the test `value-class-underlying`), this is accomplished - // by adding the underlying type to the list of parent types. - underlying :: ancestorTypes0 - } else - ancestorTypes0 - } catch { - case ex: CyclicReference => - // See neg/i1750a for an example where a cyclic error can arise. - // The root cause in this example is an illegal "override" of an inner trait - ctx.error(cyclicErrorMsg(ex), csym.pos) - defn.ObjectType :: Nil - } + val bases = { + val ancestorTypes0 = + try linearizedAncestorTypes(cinfo) + catch { + case ex: CyclicReference => + // See neg/i1750a for an example where a cyclic error can arise. + // The root cause in this example is an illegal "override" of an inner trait + ctx.error(cyclicErrorMsg(ex), csym.pos) + defn.ObjectType :: Nil + } + if (ValueClasses.isDerivedValueClass(csym)) { + val underlying = ValueClasses.valueClassUnbox(csym).info.finalResultType + // The underlying type of a value class should be part of the name hash + // of the value class (see the test `value-class-underlying`), this is accomplished + // by adding the underlying type to the list of parent types. 
+ underlying :: ancestorTypes0 + } else + ancestorTypes0 + } val apiBases = bases.map(apiType) diff --git a/project/Build.scala b/project/Build.scala index 33803f5246c7..62681ccefde8 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -197,8 +197,8 @@ object Build { libraryDependencies ++= { if (bootstrapFromPublishedJars.value) Seq( - dottyOrganization % "dotty-library_2.11" % dottyNonBootstrappedVersion % Configurations.ScalaTool.name, - dottyOrganization % "dotty-compiler_2.11" % dottyNonBootstrappedVersion % Configurations.ScalaTool.name + dottyOrganization %% "dotty-library" % dottyNonBootstrappedVersion % Configurations.ScalaTool.name, + dottyOrganization %% "dotty-compiler" % dottyNonBootstrappedVersion % Configurations.ScalaTool.name ).map(_.withDottyCompat(scalaVersion.value)) else Seq() @@ -862,7 +862,7 @@ object Build { unmanagedSourceDirectories in Compile += baseDirectory.value / "../language-server/src/dotty/tools/languageserver/config", sbtPlugin := true, - version := "0.1.6-SNAPSHOT", + version := "0.1.8", sbtTestDirectory := baseDirectory.value / "sbt-test", scriptedLaunchOpts += "-Dplugin.version=" + version.value, scriptedLaunchOpts += "-Dplugin.scalaVersion=" + dottyVersion,