diff --git a/.drone.yml b/.drone.yml index eb340cae3d24..384bc45cb0c9 100644 --- a/.drone.yml +++ b/.drone.yml @@ -41,15 +41,25 @@ pipeline: - cp -R . /tmp/3/ && cd /tmp/3/ - ./project/scripts/sbt dotty-optimised/testAll - test_sbt: + test_sbt_1of2: group: test image: lampepfl/dotty:2017-10-20 commands: - cp -R . /tmp/4/ && cd /tmp/4/ - - ./project/scripts/sbt sbt-dotty/scripted - when: + - ./project/scripts/sbt "sbt-dotty/scripted compilerReporter/* discovery/* sbt-dotty/* source-dependencies/*1of2" + # when: # sbt scripted tests are slow and don't run on PRs - event: [ push, tag, deployment ] + # event: [ push, tag, deployment ] + + test_sbt_2of2: + group: test + image: lampepfl/dotty:2017-10-20 + commands: + - cp -R . /tmp/5/ && cd /tmp/5/ + - ./project/scripts/sbt "sbt-dotty/scripted source-dependencies/*2of2" + # when: + # sbt scripted tests are slow and don't run on PRs + # event: [ push, tag, deployment ] # DOCUMENTATION: documentation: diff --git a/build.sbt b/build.sbt index 93e194a97d37..9fbaee50a7f4 100644 --- a/build.sbt +++ b/build.sbt @@ -15,7 +15,6 @@ val `dotty-library-optimised` = Build.`dotty-library-optimised` val `dotty-sbt-bridge` = Build.`dotty-sbt-bridge` val `dotty-sbt-bridge-bootstrapped` = Build.`dotty-sbt-bridge-bootstrapped` val `dotty-language-server` = Build.`dotty-language-server` -val sjsSandbox = Build.sjsSandbox val `dotty-bench` = Build.`dotty-bench` val `dotty-bench-bootstrapped` = Build.`dotty-bench-bootstrapped` val `dotty-bench-optimised` = Build.`dotty-bench-optimised` diff --git a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala index ad84bde680d1..40841c7a51b2 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala @@ -17,6 +17,7 @@ import java.util.Optional import scala.reflect.ClassTag import dotty.tools.dotc.core._ +import dotty.tools.dotc.sbt.ExtractDependencies import Periods._ import SymDenotations._ import Contexts._ @@ -91,15 +92,17 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter /* ---------------- q2 ---------------- */ - case class Item2(arrivalPos: Int, - mirror: asm.tree.ClassNode, - plain: asm.tree.ClassNode, - bean: asm.tree.ClassNode, - outFolder: scala.tools.nsc.io.AbstractFile) { + case class SubItem2(classNode: asm.tree.ClassNode, + file: scala.tools.nsc.io.AbstractFile) + + case class Item2(arrivalPos: Int, + mirror: SubItem2, + plain: SubItem2, + bean: SubItem2) { def isPoison = { arrivalPos == Int.MaxValue } } - private val poison2 = Item2(Int.MaxValue, null, null, null, null) + private val poison2 = Item2(Int.MaxValue, null, null, null) private val q2 = new _root_.java.util.LinkedList[Item2] /* ---------------- q3 ---------------- */ @@ -113,14 +116,14 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter */ case class SubItem3( jclassName: String, - jclassBytes: Array[Byte] + jclassBytes: Array[Byte], + jclassFile: scala.tools.nsc.io.AbstractFile ) case class Item3(arrivalPos: Int, mirror: SubItem3, plain: SubItem3, - bean: SubItem3, - outFolder: scala.tools.nsc.io.AbstractFile) { + bean: SubItem3) { def isPoison = { arrivalPos == Int.MaxValue } } @@ -131,7 +134,7 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter else 1 } } - private val poison3 = Item3(Int.MaxValue, null, null, null, null) + private val poison3 = Item3(Int.MaxValue, null, null, null) private val q3 = new 
java.util.PriorityQueue[Item3](1000, i3comparator) /* @@ -228,12 +231,52 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter ) } else null + // ----------- create files + + val classNodes = List(mirrorC, plainC, beanC) + val classFiles = classNodes.map(cls => + if (outF != null && cls != null) { + try { + getFileForClassfile(outF, cls.name, ".class") + } catch { + case e: FileConflictException => + ctx.error(s"error writing ${cls.name}: ${e.getMessage}") + null + } + } else null + ) + + // ----------- sbt's callbacks + + val fullClassName = ctx.atPhase(ctx.typerPhase) { implicit ctx => + ExtractDependencies.extractedName(claszSymbol) + } + val isLocal = fullClassName.contains("_$") + + for ((cls, clsFile) <- classNodes.zip(classFiles)) { + if (cls != null) { + if (ctx.compilerCallback != null) + ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(clsFile), fullClassName) + if (ctx.sbtCallback != null) { + // ctx.sbtCallback.generatedClass(sourceFile.jfile.orElse(null), clsFile.file, fullClassName) + // TODO: Check + if (isLocal) + ctx.sbtCallback.generatedLocalClass(sourceFile.jfile.orElse(null), clsFile.file) + else { + ctx.sbtCallback.generatedNonLocalClass(sourceFile.jfile.orElse(null), clsFile.file, + cls.name, fullClassName) + } + } + } + } + // ----------- hand over to pipeline-2 val item2 = Item2(arrivalPos, - mirrorC, plainC, beanC, - outF) + SubItem2(mirrorC, classFiles(0)), + SubItem2(plainC, classFiles(1)), + SubItem2(beanC, classFiles(2))) q2 add item2 // at the very end of this method so that no Worker2 thread starts mutating before we're done. @@ -263,12 +306,12 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter } else { try { - localOptimizations(item.plain) + localOptimizations(item.plain.classNode) addToQ3(item) } catch { case ex: Throwable => ex.printStackTrace() - ctx.error(s"Error while emitting ${item.plain.name}\n${ex.getMessage}") + ctx.error(s"Error while emitting ${item.plain.classNode.name}\n${ex.getMessage}") } } } @@ -282,11 +325,14 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter cw.toByteArray } - val Item2(arrivalPos, mirror, plain, bean, outFolder) = item + val Item2(arrivalPos, + SubItem2(mirror, mirrorFile), + SubItem2(plain, plainFile), + SubItem2(bean, beanFile)) = item - val mirrorC = if (mirror == null) null else SubItem3(mirror.name, getByteArray(mirror)) - val plainC = SubItem3(plain.name, getByteArray(plain)) - val beanC = if (bean == null) null else SubItem3(bean.name, getByteArray(bean)) + val mirrorC = if (mirror == null) null else SubItem3(mirror.name, getByteArray(mirror), mirrorFile) + val plainC = SubItem3(plain.name, getByteArray(plain), plainFile) + val beanC = if (bean == null) null else SubItem3(bean.name, getByteArray(bean), beanFile) if (AsmUtils.traceSerializedClassEnabled && plain.name.contains(AsmUtils.traceSerializedClassPattern)) { if (mirrorC != null) AsmUtils.traceClass(mirrorC.jclassBytes) @@ -294,7 +340,7 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter if (beanC != null) AsmUtils.traceClass(beanC.jclassBytes) } - q3 add Item3(arrivalPos, mirrorC, plainC, beanC, outFolder) + q3 add Item3(arrivalPos, mirrorC, plainC, beanC) } @@ -394,25 +440,10 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter /* Pipeline that writes classfile representations to disk. 
*/ private def drainQ3() = { - def sendToDisk(cfr: SubItem3, outFolder: scala.tools.nsc.io.AbstractFile): Unit = { + def sendToDisk(cfr: SubItem3): Unit = { if (cfr != null){ - val SubItem3(jclassName, jclassBytes) = cfr - try { - val outFile = - if (outFolder == null) null - else getFileForClassfile(outFolder, jclassName, ".class") - bytecodeWriter.writeClass(jclassName, jclassName, jclassBytes, outFile) - - val className = jclassName.replace('/', '.') - if (ctx.compilerCallback != null) - ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(outFile), className) - if (ctx.sbtCallback != null) - ctx.sbtCallback.generatedClass(sourceFile.jfile.orElse(null), outFile.file, className) - } - catch { - case e: FileConflictException => - ctx.error(s"error writing $jclassName: ${e.getMessage}") - } + val SubItem3(jclassName, jclassBytes, jclassFile) = cfr + bytecodeWriter.writeClass(jclassName, jclassName, jclassBytes, jclassFile) } } @@ -425,10 +456,9 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter moreComing = !incoming.isPoison if (moreComing) { val item = incoming - val outFolder = item.outFolder - sendToDisk(item.mirror, outFolder) - sendToDisk(item.plain, outFolder) - sendToDisk(item.bean, outFolder) + sendToDisk(item.mirror) + sendToDisk(item.plain) + sendToDisk(item.bean) expected += 1 } } diff --git a/compiler/src/dotty/tools/dotc/core/Names.scala b/compiler/src/dotty/tools/dotc/core/Names.scala index af92bbb4ecdb..62f0e38860eb 100644 --- a/compiler/src/dotty/tools/dotc/core/Names.scala +++ b/compiler/src/dotty/tools/dotc/core/Names.scala @@ -424,7 +424,8 @@ object Names { "dotty$tools$dotc$core$NameOps$NameDecorator$$functionArityFor$extension", "dotty$tools$dotc$typer$Checking$CheckNonCyclicMap$$apply", "$plus$plus", - "readConstant") + "readConstant", + "extractedName") .contains(elem.getMethodName)) } diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 145913261053..386968e59943 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -11,6 +11,7 @@ import collection.BitSet import dotty.tools.io.AbstractFile import Decorators.SymbolIteratorDecorator import ast._ +import ast.Trees._ import annotation.tailrec import CheckRealizable._ import util.SimpleIdentityMap @@ -334,6 +335,14 @@ object SymDenotations { case Nil => Nil } + final def children(implicit ctx: Context): List[Symbol] = + this.annotations.filter(_.symbol == ctx.definitions.ChildAnnot).map { annot => + // refer to definition of Annotation.makeChild + annot.tree match { + case Apply(TypeApply(_, List(tpTree)), _) => tpTree.symbol + } + } + /** The denotation is completed: info is not a lazy type and attributes have defined values */ final def isCompleted: Boolean = !myInfo.isInstanceOf[LazyType] @@ -919,13 +928,16 @@ object SymDenotations { */ final def topLevelClass(implicit ctx: Context): Symbol = { def topLevel(d: SymDenotation): Symbol = { - if (d.isEffectiveRoot || (d is PackageClass) || (d.owner is PackageClass)) d.symbol + if (d.isTopLevelClass) d.symbol else topLevel(d.owner) } val sym = topLevel(this) if (sym.isClass) sym else sym.moduleClass } + final def isTopLevelClass(implicit ctx: Context): Boolean = + this.isEffectiveRoot || (this is PackageClass) || (this.owner is PackageClass) + /** The package class containing this denotation */ final def enclosingPackageClass(implicit ctx: Context): Symbol = if 
(this is PackageClass) symbol else owner.enclosingPackageClass diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index 532aef36b723..3b9135dbdd93 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -2,17 +2,29 @@ package dotty.tools.dotc package sbt import ast.{Trees, tpd} -import core._, core.Decorators._ -import Annotations._, Contexts._, Flags._, Phases._, Trees._, Types._, Symbols._ -import Names._, NameOps._, StdNames._ +import core._ +import core.Decorators._ +import Annotations._ +import Contexts._ +import Flags._ +import Phases._ +import Trees._ +import Types._ +import Symbols._ +import Names._ +import NameOps._ +import StdNames._ import NameKinds.DefaultGetterName import typer.Inliner import typer.ErrorReporting.cyclicErrorMsg +import transform.ValueClasses import transform.SymUtils._ - import dotty.tools.io.Path import java.io.PrintWriter +import dotty.tools.dotc.config.JavaPlatform +import xsbti.api.DefinitionType + import scala.collection.mutable /** This phase sends a representation of the API of classes to sbt via callbacks. @@ -46,20 +58,26 @@ class ExtractAPI extends Phase { val forceRun = dumpInc || ctx.settings.YforceSbtPhases.value if ((ctx.sbtCallback != null || forceRun) && !unit.isJava) { val sourceFile = unit.source.file + if (ctx.sbtCallback != null) + ctx.sbtCallback.startSource(sourceFile.file) + val apiTraverser = new ExtractAPICollector - val source = apiTraverser.apiSource(unit.tpdTree) + val sources = apiTraverser.apiSource(unit.tpdTree) + val mainClasses = apiTraverser.mainClasses if (dumpInc) { // Append to existing file that should have been created by ExtractDependencies val pw = new PrintWriter(Path(sourceFile.jpath).changeExtension("inc").toFile .bufferedWriter(append = true), true) try { - pw.println(DefaultShowAPI(source)) + sources.foreach(source => pw.println(DefaultShowAPI(source))) } finally pw.close() } - if (ctx.sbtCallback != null) - ctx.sbtCallback.api(sourceFile.file, source) + if (ctx.sbtCallback != null) { + sources.foreach(ctx.sbtCallback.api(sourceFile.file, _)) + mainClasses.foreach(ctx.sbtCallback.mainClass(sourceFile.file, _)) + } } } } @@ -109,11 +127,12 @@ class ExtractAPI extends Phase { private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder { import tpd._ import xsbti.api + import ExtractDependencies.extractedName /** This cache is necessary for correctness, see the comment about inherited * members in `apiClassStructure` */ - private[this] val classLikeCache = new mutable.HashMap[ClassSymbol, api.ClassLike] + private[this] val classLikeCache = new mutable.HashMap[ClassSymbol, api.ClassLikeDef] /** This cache is optional, it avoids recomputing representations */ private[this] val typeCache = new mutable.HashMap[Type, api.Type] /** This cache is necessary to avoid unstable name hashing when `typeCache` is present, @@ -122,15 +141,18 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder */ private[this] val refinedTypeCache = new mutable.HashMap[(api.Type, api.Definition), api.Structure] + private[this] val allNonLocalClassesInSrc = new mutable.HashSet[xsbti.api.ClassLike] + private[this] val _mainClasses = new mutable.HashSet[String] + private[this] object Constants { val emptyStringArray = Array[String]() - val local = new api.ThisQualifier - val public = new api.Public - val privateLocal = new api.Private(local) - val protectedLocal = new 
api.Protected(local) - val unqualified = new api.Unqualified - val thisPath = new api.This - val emptyType = new api.EmptyType + val local = api.ThisQualifier.create() + val public = api.Public.create() + val privateLocal = api.Private.create(local) + val protectedLocal = api.Protected.create(local) + val unqualified = api.Unqualified.create() + val thisPath = api.This.create() + val emptyType = api.EmptyType.create() val emptyModifiers = new api.Modifiers(false, false, false, false, false,false, false, false) } @@ -144,34 +166,39 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder * @param marker A special annotation to differentiate our type */ private def withMarker(tp: api.Type, marker: api.Annotation) = - new api.Annotated(tp, Array(marker)) + api.Annotated.of(tp, Array(marker)) private def marker(name: String) = - new api.Annotation(new api.Constant(Constants.emptyType, name), Array()) + api.Annotation.of(api.Constant.of(Constants.emptyType, name), Array()) val typeArgRefMarker = marker("TypeArgRef") val orMarker = marker("Or") val byNameMarker = marker("ByName") /** Extract the API representation of a source file */ - def apiSource(tree: Tree): api.SourceAPI = { - val classes = new mutable.ListBuffer[api.ClassLike] + def apiSource(tree: Tree): Seq[api.ClassLike] = { def apiClasses(tree: Tree): Unit = tree match { case PackageDef(_, stats) => stats.foreach(apiClasses) case tree: TypeDef => - classes += apiClass(tree.symbol.asClass) + apiClass(tree.symbol.asClass) case _ => } apiClasses(tree) forceThunks() - new api.SourceAPI(Array(), classes.toArray) + + allNonLocalClassesInSrc.toSeq } - def apiClass(sym: ClassSymbol): api.ClassLike = + def apiClass(sym: ClassSymbol): api.ClassLikeDef = classLikeCache.getOrElseUpdate(sym, computeClass(sym)) - private def computeClass(sym: ClassSymbol): api.ClassLike = { + def mainClasses: Set[String] = { + forceThunks() + _mainClasses.toSet + } + + private def computeClass(sym: ClassSymbol): api.ClassLikeDef = { import xsbti.api.{DefinitionType => dt} val defType = if (sym.is(Trait)) dt.Trait @@ -182,16 +209,44 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder val selfType = apiType(sym.givenSelfType) - val name = if (sym.is(ModuleClass)) sym.fullName.sourceModuleName else sym.fullName + val name = extractedName(sym) - val tparams = sym.typeParams.map(apiTypeParameter) + val tparams = sym.typeParams.map(apiTypeParameter).toArray val structure = apiClassStructure(sym) + val acc = apiAccess(sym) + val modifiers = apiModifiers(sym) + val anns = apiAnnotations(sym).toArray + val topLevel = sym.isTopLevelClass + val childrenOfSealedClass = sym.children.sorted(classFirstSort).map(c => + if (c.isClass) + apiType(c.typeRef) + else + apiType(c.termRef) + ).toArray + + val cl = api.ClassLike.of( + name, acc, modifiers, anns, defType, api.SafeLazy.strict(selfType), api.SafeLazy.strict(structure), Constants.emptyStringArray, + childrenOfSealedClass, topLevel, tparams) + + // if (name.toString.contains("DottyPredef")) { + // println("sym: " + sym) + // println("name: " + name) + // ctx.atPhase(ctx.flattenPhase.next) { implicit ctx => + // println("flatten: " + sym.fullName.toString) + // println("flattenm: " + sym.fullName.mangledString) + // } + // println("flattenx: " + toDenot(sym.binaryName.toString) + // } + + allNonLocalClassesInSrc += cl + + val javaPlatform = ctx.platform.asInstanceOf[JavaPlatform] + if (sym.isStatic && defType == DefinitionType.Module && 
javaPlatform.hasJavaMainMethod(sym)) { + _mainClasses += name + } - new api.ClassLike( - defType, strict2lzy(selfType), strict2lzy(structure), Constants.emptyStringArray, - tparams.toArray, name.toString, apiAccess(sym), apiModifiers(sym), - apiAnnotations(sym).toArray) + api.ClassLikeDef.of(name, acc, modifiers, anns, tparams, defType) } private[this] val LegacyAppClass = ctx.requiredClass("dotty.runtime.LegacyApp") @@ -199,15 +254,25 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder def apiClassStructure(csym: ClassSymbol): api.Structure = { val cinfo = csym.classInfo - val bases = - try linearizedAncestorTypes(cinfo) - catch { - case ex: CyclicReference => - // See neg/i1750a for an example where a cyclic error can arise. - // The root cause in this example is an illegal "override" of an inner trait - ctx.error(cyclicErrorMsg(ex), csym.pos) - defn.ObjectType :: Nil - } + val bases = { + val ancestorTypes0 = + try linearizedAncestorTypes(cinfo) + catch { + case ex: CyclicReference => + // See neg/i1750a for an example where a cyclic error can arise. + // The root cause in this example is an illegal "override" of an inner trait + ctx.error(cyclicErrorMsg(ex), csym.pos) + defn.ObjectType :: Nil + } + if (ValueClasses.isDerivedValueClass(csym)) { + val underlying = ValueClasses.valueClassUnbox(csym).info.finalResultType + // The underlying type of a value class should be part of the name hash + // of the value class (see the test `value-class-underlying`), this is accomplished + // by adding the underlying type to the list of parent types. + underlying :: ancestorTypes0 + } else + ancestorTypes0 + } val apiBases = bases.map(apiType) @@ -231,7 +296,7 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder // this works because of `classLikeCache` val apiInherited = lzy(apiDefinitions(inherited).toArray) - new api.Structure(strict2lzy(apiBases.toArray), strict2lzy(apiDecls.toArray), apiInherited) + api.Structure.of(api.SafeLazy.strict(apiBases.toArray), api.SafeLazy.strict(apiDecls.toArray), apiInherited) } def linearizedAncestorTypes(info: ClassInfo): List[Type] = { @@ -240,48 +305,48 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder info.baseClasses.tail.map(ref.baseType) } - def apiDefinitions(defs: List[Symbol]): List[api.Definition] = { - // The hash generated by sbt for definitions is supposed to be symmetric so - // we shouldn't have to sort them, but it actually isn't symmetric for - // definitions which are classes, therefore we need to sort classes to - // ensure a stable hash. - // Modules and classes come first and are sorted by name, all other - // definitions come later and are not sorted. - object classFirstSort extends Ordering[Symbol] { - override def compare(a: Symbol, b: Symbol) = { - val aIsClass = a.isClass - val bIsClass = b.isClass - if (aIsClass == bIsClass) { - if (aIsClass) { - if (a.is(Module) == b.is(Module)) - a.fullName.toString.compareTo(b.fullName.toString) - else if (a.is(Module)) - -1 - else - 1 - } else - 0 - } else if (aIsClass) - -1 - else - 1 - } + // The hash generated by sbt for definitions is supposed to be symmetric so + // we shouldn't have to sort them, but it actually isn't symmetric for + // definitions which are classes, therefore we need to sort classes to + // ensure a stable hash. + // Modules and classes come first and are sorted by name, all other + // definitions come later and are not sorted. 
+ private object classFirstSort extends Ordering[Symbol] { + override def compare(a: Symbol, b: Symbol) = { + val aIsClass = a.isClass + val bIsClass = b.isClass + if (aIsClass == bIsClass) { + if (aIsClass) { + if (a.is(Module) == b.is(Module)) + a.fullName.toString.compareTo(b.fullName.toString) + else if (a.is(Module)) + -1 + else + 1 + } else + 0 + } else if (aIsClass) + -1 + else + 1 } + } + def apiDefinitions(defs: List[Symbol]): List[api.ClassDefinition] = { defs.sorted(classFirstSort).map(apiDefinition) } - def apiDefinition(sym: Symbol): api.Definition = { + def apiDefinition(sym: Symbol): api.ClassDefinition = { if (sym.isClass) { apiClass(sym.asClass) } else if (sym.isType) { apiTypeMember(sym.asType) } else if (sym.is(Mutable, butNot = Accessor)) { - new api.Var(apiType(sym.info), sym.name.toString, - apiAccess(sym), apiModifiers(sym), apiAnnotations(sym).toArray) + api.Var.of(sym.name.toString, apiAccess(sym), apiModifiers(sym), + apiAnnotations(sym).toArray, apiType(sym.info)) } else if (sym.isStable) { - new api.Val(apiType(sym.info), sym.name.toString, - apiAccess(sym), apiModifiers(sym), apiAnnotations(sym).toArray) + api.Val.of(sym.name.toString, apiAccess(sym), apiModifiers(sym), + apiAnnotations(sym).toArray, apiType(sym.info)) } else { apiDef(sym.asTerm) } @@ -307,9 +372,9 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder } else (0 until pnames.length).map(Function.const(false)) val params = (pnames, ptypes, defaults).zipped.map((pname, ptype, isDefault) => - new api.MethodParameter(pname.toString, apiType(ptype), + api.MethodParameter.of(pname.toString, apiType(ptype), isDefault, api.ParameterModifier.Plain)) - new api.ParameterList(params.toArray, mt.isImplicitMethod) :: paramLists(restpe, params.length) + api.ParameterList.of(params.toArray, mt.isImplicitMethod) :: paramLists(restpe, params.length) case _ => Nil } @@ -324,8 +389,8 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder val vparamss = paramLists(sym.info) val retTp = sym.info.finalResultType.widenExpr - new api.Def(vparamss.toArray, apiType(retTp), tparams.toArray, - sym.name.toString, apiAccess(sym), apiModifiers(sym), apiAnnotations(sym).toArray) + api.Def.of(sym.name.toString, apiAccess(sym), apiModifiers(sym), + apiAnnotations(sym).toArray, tparams.toArray, vparamss.toArray, apiType(retTp)) } def apiTypeMember(sym: TypeSymbol): api.TypeMember = { @@ -337,17 +402,17 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder val tpe = sym.info if (sym.isAliasType) - new api.TypeAlias(apiType(tpe.bounds.hi), typeParams, name, access, modifiers, as.toArray) + api.TypeAlias.of(name, access, modifiers, as.toArray, typeParams, apiType(tpe.bounds.hi)) else { assert(sym.isAbstractType) - new api.TypeDeclaration(apiType(tpe.bounds.lo), apiType(tpe.bounds.hi), typeParams, name, access, modifiers, as.to) + api.TypeDeclaration.of(name, access, modifiers, as.toArray, typeParams, apiType(tpe.bounds.lo), apiType(tpe.bounds.hi)) } } // Hack to represent dotty types which don't have an equivalent in xsbti def combineApiTypes(apiTps: api.Type*): api.Type = { - new api.Structure(strict2lzy(apiTps.toArray), - strict2lzy(Array()), strict2lzy(Array())) + api.Structure.of(api.SafeLazy.strict(apiTps.toArray), + api.SafeLazy.strict(Array()), api.SafeLazy.strict(Array())) } def apiType(tp: Type): api.Type = { @@ -376,7 +441,7 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder sym.owner.thisType else 
tp.prefix - new api.Projection(simpleType(prefix), sym.name.toString) + api.Projection.of(apiType(prefix), sym.name.toString) case AppliedType(tycon, args) => def processArg(arg: Type): api.Type = arg match { case arg @ TypeBounds(lo, hi) => // Handle wildcard parameters @@ -384,32 +449,32 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder Constants.emptyType else { val name = "_" - val ref = new api.ParameterRef(name) - new api.Existential(ref, + val ref = api.ParameterRef.of(name) + api.Existential.of(ref, Array(apiTypeParameter(name, 0, lo, hi))) } case _ => apiType(arg) } - val apiTycon = simpleType(tycon) + val apiTycon = apiType(tycon) val apiArgs = args.map(processArg) - new api.Parameterized(apiTycon, apiArgs.toArray) + api.Parameterized.of(apiTycon, apiArgs.toArray) case tl: TypeLambda => val apiTparams = tl.typeParams.map(apiTypeParameter) val apiRes = apiType(tl.resType) - new api.Polymorphic(apiRes, apiTparams.toArray) + api.Polymorphic.of(apiRes, apiTparams.toArray) case rt: RefinedType => val name = rt.refinedName.toString val parent = apiType(rt.parent) def typeRefinement(name: String, tp: TypeBounds): api.TypeMember = tp match { case TypeAlias(alias) => - new api.TypeAlias(apiType(alias), - Array(), name, Constants.public, Constants.emptyModifiers, Array()) + api.TypeAlias.of(name, + Constants.public, Constants.emptyModifiers, Array(), Array(), apiType(alias)) case TypeBounds(lo, hi) => - new api.TypeDeclaration(apiType(lo), apiType(hi), - Array(), name, Constants.public, Constants.emptyModifiers, Array()) + api.TypeDeclaration.of(name, + Constants.public, Constants.emptyModifiers, Array(), Array(), apiType(lo), apiType(hi)) } val decl = rt.refinedInfo match { case rinfo: TypeBounds => @@ -441,8 +506,8 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder // `apiFoo == apiBar` always imply `apiFoo eq apiBar`. This is what // `refinedTypeCache` is for. refinedTypeCache.getOrElseUpdate((parent, decl), { - val adecl: Array[api.Definition] = if (decl == null) Array() else Array(decl) - new api.Structure(strict2lzy(Array(parent)), strict2lzy(adecl), strict2lzy(Array())) + val adecl: Array[api.ClassDefinition] = if (decl == null) Array() else Array(decl) + api.Structure.of(api.SafeLazy.strict(Array(parent)), api.SafeLazy.strict(adecl), api.SafeLazy.strict(Array())) }) case tp: RecType => apiType(tp.parent) @@ -465,15 +530,15 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder case ExprType(resultType) => withMarker(apiType(resultType), byNameMarker) case ConstantType(constant) => - new api.Constant(apiType(constant.tpe), constant.stringValue) + api.Constant.of(apiType(constant.tpe), constant.stringValue) case AnnotatedType(tpe, annot) => - new api.Annotated(apiType(tpe), Array(apiAnnotation(annot))) + api.Annotated.of(apiType(tpe), Array(apiAnnotation(annot))) case tp: ThisType => apiThis(tp.cls) case tp: ParamRef => // TODO: Distinguishing parameters based on their names alone is not enough, // the binder is also needed (at least for type lambdas). - new api.ParameterRef(tp.paramName.toString) + api.ParameterRef.of(tp.paramName.toString) case tp: LazyRef => apiType(tp.ref) case tp: TypeVar => @@ -488,26 +553,17 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder } } - // TODO: Get rid of this method. 
See https://github.com/sbt/zinc/issues/101 - def simpleType(tp: Type): api.SimpleType = apiType(tp) match { - case tp: api.SimpleType => - tp - case _ => - ctx.debuglog("sbt-api: Not a simple type: " + tp.show) - Constants.emptyType - } - def apiLazy(tp: => Type): api.Type = { // TODO: The sbt api needs a convenient way to make a lazy type. // For now, we repurpose Structure for this. val apiTp = lzy(Array(apiType(tp))) - new api.Structure(apiTp, strict2lzy(Array()), strict2lzy(Array())) + api.Structure.of(apiTp, api.SafeLazy.strict(Array()), api.SafeLazy.strict(Array())) } def apiThis(sym: Symbol): api.Singleton = { val pathComponents = sym.ownersIterator.takeWhile(!_.isEffectiveRoot) - .map(s => new api.Id(s.name.toString)) - new api.Singleton(new api.Path(pathComponents.toArray.reverse ++ Array(Constants.thisPath))) + .map(s => api.Id.of(s.name.toString)) + api.Singleton.of(api.Path.of(pathComponents.toArray.reverse ++ Array(Constants.thisPath))) } def apiTypeParameter(tparam: ParamInfo): api.TypeParameter = @@ -515,7 +571,7 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder tparam.paramInfo.bounds.lo, tparam.paramInfo.bounds.hi) def apiTypeParameter(name: String, variance: Int, lo: Type, hi: Type): api.TypeParameter = - new api.TypeParameter(name, Array(), Array(), apiVariance(variance), + api.TypeParameter.of(name, Array(), Array(), apiVariance(variance), apiType(lo), apiType(hi)) def apiVariance(v: Int): api.Variance = { @@ -539,11 +595,11 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder if (sym.privateWithin eq NoSymbol) Constants.unqualified else - new api.IdQualifier(sym.privateWithin.fullName.toString) + api.IdQualifier.of(sym.privateWithin.fullName.toString) if (sym.is(Protected)) - new api.Protected(qualifier) + api.Protected.of(qualifier) else - new api.Private(qualifier) + api.Private.of(qualifier) } } @@ -588,8 +644,8 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder // However, we still need to extract the annotation type in the way sbt expect // because sbt uses this information to find tests to run (for example // junit tests are annotated @org.junit.Test). - new api.Annotation( + api.Annotation.of( apiType(annot.tree.tpe), // Used by sbt to find tests to run - Array(new api.AnnotationArgument("FULLTREE", annot.tree.show.toString))) + Array(api.AnnotationArgument.of("FULLTREE", annot.tree.show.toString))) } } diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index 32b9827e8535..5d80684b4edf 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -3,6 +3,7 @@ package sbt import ast.{Trees, tpd} import core._, core.Decorators._ +import util.NoSource.{file => NoSourceFile} import Contexts._, Flags._, Phases._, Trees._, Types._, Symbols._ import Names._, NameOps._, StdNames._ @@ -11,9 +12,11 @@ import scala.collection.{Set, mutable} import dotty.tools.io.{AbstractFile, Path, ZipArchive, PlainFile} import java.io.File -import java.util.{Arrays, Comparator} +import java.util.{Arrays, Comparator, EnumSet} + +import xsbti.api.DependencyContext +import xsbti.UseScope -import xsbti.DependencyContext /** This phase sends information on classes' dependencies to sbt via callbacks. 
* @@ -35,6 +38,8 @@ import xsbti.DependencyContext * @see ExtractAPI */ class ExtractDependencies extends Phase { + import ExtractDependencies._ + override def phaseName: String = "sbt-deps" // This phase should be run directly after `Frontend`, if it is run after @@ -48,7 +53,16 @@ class ExtractDependencies extends Phase { val forceRun = dumpInc || ctx.settings.YforceSbtPhases.value if ((ctx.sbtCallback != null || forceRun) && !unit.isJava) { val sourceFile = unit.source.file - val extractDeps = new ExtractDependenciesCollector + val responsibleOfImports = firstClassOrModule(unit.tpdTree) match { + case None => + ctx.warning("""|No class, trait or object is defined in the compilation unit. + |The incremental compiler cannot record the dependency information in such case. + |Some errors like unused import referring to a non-existent class might not be reported. + |""".stripMargin, unit.tpdTree.pos) + defn.RootClass + case Some(sym) => sym + } + val extractDeps = new ExtractDependenciesCollector(responsibleOfImports) extractDeps.traverse(unit.tpdTree) if (dumpInc) { @@ -68,16 +82,76 @@ class ExtractDependencies extends Phase { } if (ctx.sbtCallback != null) { - extractDeps.usedNames.foreach(name => - ctx.sbtCallback.usedName(sourceFile.file, name.toString)) + extractDeps.usedNames.foreach{ + case (rawClassName, usedNames) => + val className = rawClassName.toString + usedNames.defaultNames.foreach { rawUsedName => + val useName = rawUsedName.toString + val useScopes = + usedNames.scopedNames.get(rawUsedName) match { + case None => EnumSet.of(UseScope.Default) + case Some(existingScopes) => + existingScopes.add(UseScope.Default) + existingScopes + } + + ctx.sbtCallback.usedName(className, useName, useScopes) + } + } extractDeps.topLevelDependencies.foreach(dep => - recordDependency(sourceFile.file, dep, DependencyContext.DependencyByMemberRef)) + recordDependency(sourceFile.file, dep._2, DependencyContext.DependencyByMemberRef)(ctx.withOwner(dep._1))) extractDeps.topLevelInheritanceDependencies.foreach(dep => - recordDependency(sourceFile.file, dep, DependencyContext.DependencyByInheritance)) + recordDependency(sourceFile.file, dep._2, DependencyContext.DependencyByInheritance)(ctx.withOwner(dep._1))) + } + } + } + + private def firstClassOrModule(tree: tpd.Tree)(implicit ctx: Context): Option[Symbol] = { + import tpd._ + val acc = new TreeAccumulator[Option[Symbol]] { + def apply(x: Option[Symbol], t: Tree)(implicit ctx: Context) = + if (x.isDefined) x + else t match { + case moduleDef: Thicket => + Some(moduleDef.symbol) + case typeDef: TypeDef => + Some(typeDef.symbol) + case other => + foldOver(x, other) + } + } + acc(None, tree) + } + + private def classFile(sym: Symbol)(implicit ctx: Context): Option[AbstractFile] = { + // package can never have a corresponding class file; this test does not + // catch package objects (that do not have this flag set) + if (sym.is(Package)) None + else { + val file = Option(sym.associatedFile) + + Option(sym.associatedFile).flatMap { + case NoSourceFile => + if (isTopLevelModule(sym)) { + val linked = sym.companionClass + if (linked == NoSymbol) + None + else + classFile(linked) + } else + None + case file => + Some(file) } } } + protected def isTopLevelModule(sym: Symbol)(implicit ctx: Context): Boolean = + // enteringPhase(currentRun.picklerPhase.next) { + sym.is(ModuleClass) && sym.owner.is(PackageClass) + // } + + /** Record that `currentSourceFile` depends on the file where `dep` was loaded from. 
* * @param currentSourceFile The source file of the current unit @@ -86,38 +160,84 @@ class ExtractDependencies extends Phase { */ def recordDependency(currentSourceFile: File, dep: Symbol, context: DependencyContext) (implicit ctx: Context) = { - val depFile = dep.associatedFile - if (depFile != null) { - if (depFile.path.endsWith(".class")) { - /** Transform `List(java, lang, String.class)` into `java.lang.String` */ - def className(classSegments: List[String]) = - classSegments.mkString(".").stripSuffix(".class") - def binaryDependency(file: File, className: String) = - ctx.sbtCallback.binaryDependency(file, className, currentSourceFile, context) - - depFile match { - case ze: ZipArchive#Entry => - for (zip <- ze.underlyingSource; zipFile <- Option(zip.file)) { - val classSegments = Path(ze.path).segments - binaryDependency(zipFile, className(classSegments)) - } - case pf: PlainFile => - val packages = dep.ownersIterator - .filter(x => x.is(PackageClass) && !x.isEffectiveRoot).length - // We can recover the fully qualified name of a classfile from - // its path - val classSegments = pf.givenPath.segments.takeRight(packages + 1) - binaryDependency(pf.file, className(classSegments)) - case _ => - ctx.warning(s"sbt-deps: Ignoring dependency $depFile of class ${depFile.getClass}") - } - } else if (depFile.file != currentSourceFile) { - ctx.sbtCallback.sourceDependency(depFile.file, currentSourceFile, context) + val onSource = dep.sourceFile + if (onSource == null) { + // Dependency is external -- source is undefined + classFile(dep) match { + case Some(at) => + def className(classSegments: List[String]) = + classSegments.mkString(".").stripSuffix(".class") + def binaryDependency(file: File, className: String) = { + ctx.sbtCallback.binaryDependency(file, className, extractedName(currentClass), currentSourceFile, context) + } + + at match { + case ze: ZipArchive#Entry => + for (zip <- ze.underlyingSource; zipFile <- Option(zip.file)) { + val classSegments = Path(ze.path).segments + binaryDependency(zipFile, className(classSegments)) + } + case pf: PlainFile => + val packages = dep.ownersIterator + .filter(x => x.is(PackageClass) && !x.isEffectiveRoot).length + // We can recover the fully qualified name of a classfile from + // its path + val classSegments = pf.givenPath.segments.takeRight(packages + 1) + binaryDependency(pf.file, className(classSegments)) + case _ => + ctx.warning(s"sbt-deps: Ignoring dependency $at of class ${at.getClass}") + } + + case None => + ctx.debuglog(s"No file for external symbol $dep") } + } else if (onSource.file != currentSourceFile) { + ctx.sbtCallback.classDependency(extractedName(dep.enclosingClass), extractedName(currentClass), context) + } else { + () } } } +object ExtractDependencies { + def extractedName(sym: Symbol)(implicit ctx: Context): String = + // ctx.atPhase(ctx.flattenPhase.next) { implicit ctx => + if (sym.is(ModuleClass)) + sym.fullName.stripModuleClassSuffix.toString + else + sym.fullName.toString + // } +} + +private final class NameUsedInClass { + // Default names and other scopes are separated for performance reasons + val defaultNames: mutable.Set[Name] = new mutable.HashSet[Name] + val scopedNames: mutable.Map[Name, EnumSet[UseScope]] = new mutable.HashMap[Name, EnumSet[UseScope]].withDefault(_ => EnumSet.noneOf(classOf[UseScope])) + + // We have to leave with commas on ends + override def toString(): String = { + val builder = new StringBuilder(": ") + defaultNames.foreach { name => + builder.append(name.toString.trim) + val otherScopes = 
scopedNames.get(name) + scopedNames.get(name) match { + case None => + case Some(otherScopes) => + // Pickling tests fail when this is turned in an anonymous class + class Consumer extends java.util.function.Consumer[UseScope]() { + override def accept(scope: UseScope): Unit = + builder.append(scope.name()).append(", ") + } + builder.append(" in [") + otherScopes.forEach(new Consumer) + builder.append("]") + } + builder.append(", ") + } + builder.toString() + } +} + /** Extract the dependency information of a compilation unit. * * To understand why we track the used names see the section "Name hashing @@ -126,38 +246,66 @@ class ExtractDependencies extends Phase { * specially, see the subsection "Dependencies introduced by member reference and * inheritance" in the "Name hashing algorithm" section. */ -private class ExtractDependenciesCollector(implicit val ctx: Context) extends tpd.TreeTraverser { +private class ExtractDependenciesCollector(responsibleForImports: Symbol)(implicit val ctx: Context) extends tpd.TreeTraverser { thisTreeTraverser => import tpd._ + import ExtractDependencies._ - private[this] val _usedNames = new mutable.HashSet[Name] - private[this] val _topLevelDependencies = new mutable.HashSet[Symbol] - private[this] val _topLevelInheritanceDependencies = new mutable.HashSet[Symbol] + private[this] val _usedNames = new mutable.HashMap[String, NameUsedInClass] + private[this] val _topLevelDependencies = new mutable.HashSet[(Symbol, Symbol)] + private[this] val _topLevelInheritanceDependencies = new mutable.HashSet[(Symbol, Symbol)] /** The names used in this class, this does not include names which are only * defined and not referenced. */ - def usedNames: Set[Name] = _usedNames + def usedNames: collection.Map[String, NameUsedInClass] = _usedNames /** The set of top-level classes that the compilation unit depends on * because it refers to these classes or something defined in them. * This is always a superset of `topLevelInheritanceDependencies` by definition. */ - def topLevelDependencies: Set[Symbol] = _topLevelDependencies + def topLevelDependencies: Set[(Symbol, Symbol)] = _topLevelDependencies /** The set of top-level classes that the compilation unit extends or that * contain a non-top-level class that the compilaion unit extends. 
*/ - def topLevelInheritanceDependencies: Set[Symbol] = _topLevelInheritanceDependencies + def topLevelInheritanceDependencies: Set[(Symbol, Symbol)] = _topLevelInheritanceDependencies - private def addUsedName(name: Name) = - _usedNames += name + private def addUsedName(enclosingSym: Symbol, name: Name) = { + val enclosingName = enclosingSym match { + case sym if sym == defn.RootClass => ExtractDependencies.extractedName(responsibleForImports) + case sym => extractedName(sym) + } + val nameUsed = _usedNames.getOrElseUpdate(enclosingName, new NameUsedInClass) + nameUsed.defaultNames += name + // TODO: Set correct scope + nameUsed.scopedNames(name).add(UseScope.Default) + } - private def addDependency(sym: Symbol): Unit = + private def addDependency(sym: Symbol)(implicit ctx: Context): Unit = if (!ignoreDependency(sym)) { val tlClass = sym.topLevelClass - if (tlClass.ne(NoSymbol)) // Some synthetic type aliases like AnyRef do not belong to any class - _topLevelDependencies += sym.topLevelClass - addUsedName(sym.name) + if (tlClass.ne(NoSymbol)) { + if (currentClass == defn.RootClass) { + _topLevelDependencies += ((responsibleForImports, tlClass)) + } else { + // Some synthetic type aliases like AnyRef do not belong to any class + _topLevelDependencies += ((currentClass, tlClass)) + } + } + addUsedName(nonLocalEnclosingClass(ctx.owner), sym.name) + } + + private def isLocal(sym: Symbol)(implicit ctx: Context): Boolean = + sym.ownersIterator.exists(_.isTerm) + + private def nonLocalEnclosingClass(sym: Symbol)(implicit ctx: Context): Symbol = + sym.enclosingClass match { + case NoSymbol => NoSymbol + case csym => + if (isLocal(csym)) + nonLocalEnclosingClass(csym.owner) + else + csym } private def ignoreDependency(sym: Symbol) = @@ -166,8 +314,20 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp sym.isAnonymousFunction || sym.isAnonymousClass - private def addInheritanceDependency(sym: Symbol): Unit = - _topLevelInheritanceDependencies += sym.topLevelClass + private def addInheritanceDependency(sym: Symbol)(implicit ctx: Context): Unit = + _topLevelInheritanceDependencies += ((currentClass, sym.topLevelClass)) + + private class PatMatDependencyTraverser(ctx0: Context) extends ExtractTypesCollector(ctx0) { + override protected def addDependency(symbol: Symbol)(implicit ctx: Context): Unit = { + if (!ignoreDependency(symbol) && symbol.is(Sealed)) { + val encName = nonLocalEnclosingClass(ctx.owner).fullName.stripModuleClassSuffix.mangledString + val nameUsed = _usedNames.getOrElseUpdate(encName, new NameUsedInClass) + + nameUsed.defaultNames += symbol.name + nameUsed.scopedNames(symbol.name).add(UseScope.PatMatTarget) + } + } + } /** Traverse the tree of a source file and record the dependencies which * can be retrieved using `topLevelDependencies`, `topLevelInheritanceDependencies`, @@ -175,6 +335,8 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp */ override def traverse(tree: Tree)(implicit ctx: Context): Unit = { tree match { + case v @ ValDef(_, tpt, _) if v.symbol.is(Case) && v.symbol.is(Synthetic) => + new PatMatDependencyTraverser(ctx).traverse(tpt.tpe) case Import(expr, selectors) => def lookupImported(name: Name) = expr.tpe.member(name).symbol def addImported(name: Name) = { @@ -187,8 +349,9 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp addImported(name) case Thicket(Ident(name) :: Ident(rename) :: Nil) => addImported(name) - if (rename ne nme.WILDCARD) - addUsedName(rename) + if 
(rename ne nme.WILDCARD) { + addUsedName(nonLocalEnclosingClass(ctx.owner), rename) + } case _ => } case Inlined(call, _, _) => @@ -196,10 +359,10 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp // record it as a dependency traverse(call) case t: TypeTree => - usedTypeTraverser.traverse(t.tpe) + new usedTypeTraverser(ctx).traverse(t.tpe) case ref: RefTree => addDependency(ref.symbol) - usedTypeTraverser.traverse(ref.tpe) + new usedTypeTraverser(ctx).traverse(ref.tpe) case t @ Template(_, parents, _, _) => t.parents.foreach(p => addInheritanceDependency(p.tpe.classSymbol)) case _ => @@ -240,7 +403,7 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp * The tests in sbt `types-in-used-names-a`, `types-in-used-names-b`, * `as-seen-from-a` and `as-seen-from-b` rely on this. */ - private object usedTypeTraverser extends TypeTraverser { + private class ExtractTypesCollector(ctx0: Context) extends TypeTraverser()(ctx0) { val seen = new mutable.HashSet[Type] def traverse(tp: Type): Unit = if (!seen.contains(tp)) { seen += tp @@ -263,5 +426,10 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp traverseChildren(tp) } } + + protected def addDependency(symbol: Symbol)(implicit ctx: Context): Unit = + thisTreeTraverser.addDependency(symbol) } + + private class usedTypeTraverser(ctx0: Context) extends ExtractTypesCollector(ctx0) } diff --git a/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala index 0e6b19867950..35bdc8594066 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala @@ -18,17 +18,17 @@ object DefaultShowAPI { def apply(d: Definition) = ShowAPI.showDefinition(d)(defaultNesting) def apply(d: Type) = ShowAPI.showType(d)(defaultNesting) - def apply(a: SourceAPI) = ShowAPI.showApi(a)(defaultNesting) + def apply(a: ClassLike) = ShowAPI.showApi(a)(defaultNesting) } object ShowAPI { private lazy val numDecls = Try { java.lang.Integer.parseInt(sys.props.get("sbt.inc.apidiff.decls").get) } getOrElse 0 - private def truncateDecls(decls: Array[Definition]): Array[Definition] = if (numDecls <= 0) decls else decls.take(numDecls) + private def truncateDecls(decls: Array[ClassDefinition]): Array[ClassDefinition] = if (numDecls <= 0) decls else decls.take(numDecls) private def lines(ls: Seq[String]): String = ls.mkString("\n", "\n", "\n") - def showApi(a: SourceAPI)(implicit nesting: Int) = - a.packages.map(pkg => "package " + pkg.name).mkString("\n") + lines(truncateDecls(a.definitions).map(showDefinition)) + def showApi(c: ClassLike)(implicit nesting: Int) = + showDefinition(c) def showDefinition(d: Definition)(implicit nesting: Int): String = d match { case v: Val => showMonoDef(v, "val") + ": " + showType(v.tpe) @@ -36,7 +36,9 @@ object ShowAPI { case d: Def => showPolyDef(d, "def") + showValueParams(d.valueParameters) + ": " + showType(d.returnType) case ta: TypeAlias => showPolyDef(ta, "type") + " = " + showType(ta.tpe) case td: TypeDeclaration => showPolyDef(td, "type") + showBounds(td.lowerBound, td.upperBound) - case cl: ClassLike => showPolyDef(cl, showDefinitionType(cl.definitionType)) + " extends " + showTemplate(cl) + case cl: ClassLike => showMonoDef(d, showDefinitionType(cl.definitionType)) + + showTypeParameters(cl.typeParameters) + " extends " + showTemplate(cl) + case cl: ClassLikeDef => showPolyDef(cl, showDefinitionType(cl.definitionType)) } private def showTemplate(cl: 
ClassLike)(implicit nesting: Int) = @@ -61,14 +63,17 @@ object ShowAPI { case s: Structure => s.parents.map(showType).mkString(" with ") + ( if (nesting <= 0) "{ }" - else truncateDecls(s.declared).map(showNestedDefinition).mkString(" {", "\n", "}")) + else truncateDecls(s.declared).map(showNestedDefinition).mkString(" {", "\n", "}") + ) case e: Existential => showType(e.baseType) + ( if (nesting <= 0) " forSome { }" - else e.clause.map(t => "type " + showNestedTypeParameter(t)).mkString(" forSome { ", "; ", " }")) + else e.clause.map(t => "type " + showNestedTypeParameter(t)).mkString(" forSome { ", "; ", " }") + ) case p: Polymorphic => showType(p.baseType) + ( if (nesting <= 0) " [ ]" - else showNestedTypeParameters(p.parameters)) + else showNestedTypeParameters(p.parameters) + ) } private def showPath(p: Path): String = p.components.map(showPathComponent).mkString(".") @@ -104,9 +109,7 @@ object ShowAPI { private def showValueParams(ps: Seq[ParameterList])(implicit nesting: Int): String = ps.map(pl => pl.parameters.map(mp => - mp.name + ": " + showParameterModifier(showType(mp.tpe), mp.modifier) + (if (mp.hasDefault) "= ..." else "") - ).mkString(if (pl.isImplicit) "(implicit " else "(", ", ", ")") - ).mkString("") + mp.name + ": " + showParameterModifier(showType(mp.tpe), mp.modifier) + (if (mp.hasDefault) "= ..." else "")).mkString(if (pl.isImplicit) "(implicit " else "(", ", ", ")")).mkString("") private def showParameterModifier(base: String, pm: ParameterModifier): String = pm match { case ParameterModifier.Plain => base @@ -154,3 +157,4 @@ object ShowAPI { private def showNestedTypeParameters(tps: Seq[TypeParameter])(implicit nesting: Int) = showTypeParameters(tps)(nesting - 1) private def showNestedDefinition(d: Definition)(implicit nesting: Int) = showDefinition(d)(nesting - 1) } + diff --git a/compiler/src/dotty/tools/dotc/sbt/ThunkHolder.scala b/compiler/src/dotty/tools/dotc/sbt/ThunkHolder.scala index 350819e3a3ce..2315c40955e4 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ThunkHolder.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ThunkHolder.scala @@ -25,97 +25,8 @@ private[sbt] trait ThunkHolder { * It will be forced by the next call to `forceThunks()` */ def lzy[T <: AnyRef](t: => T): api.Lazy[T] = { - val l = SafeLazyWrapper(() => t) + val l = api.SafeLazy.apply(() => t) thunks += l l } - - /** Store the parameter `s` in a `Lazy` container, since `s` is not by-name, there - * is nothing to force. - * - * TODO: Get rid of this method. It is only needed because some xsbti.api classes - * take lazy arguments when they could be strict, but this can be fixed in sbt, - * see https://github.com/sbt/zinc/issues/114 - */ - def strict2lzy[T <: AnyRef](t: T): api.Lazy[T] = - SafeLazyWrapper.strict(t) -} - -/** Wrapper around SafeLazy implementations. - * - * `xsbti.SafeLazy` is part of sbt but it is not part of the `interface` jar - * that dotty depends on, therefore we can only access it by reflection, - * and this will only succeed when dotty is run by sbt (otherwise - * `xsbti.SafeLazy` won't be on the classpath at all). - * - * For testing purposes, we still want to be able to run the sbt phases outside - * of sbt, using `-Yforce-sbt-phases` and `-Ydump-sbt-inc`, therefore we - * provide a copy of SafeLazy in `dotty.tools.dotc.sbt.SafeLazy` that we use - * when `xsbti.SafeLazy` is unavailable. - * - * This raises a question: why bother with `xsbti.SafeLazy` if we have our own - * version anyway? 
Because sbt uses Java serialization to persist the output of - * the incremental compilation analysis when sbt is stopped and restarted. If - * we used `dotty.tools.dotc.sbt.SafeLazy` with sbt, deserialization would fail - * and every restart of sbt would require a full recompilation. - * - * Note: this won't be needed once we switch to zinc 1.0 where `SafeLazy` becomes - * part of the `interface` jar, see https://github.com/sbt/zinc/issues/113 - */ -private object SafeLazyWrapper { - - @sharable private[this] val safeLazy = - try { - Class.forName("xsbti.SafeLazy") - } catch { - case e: ClassNotFoundException => - null - } - - @sharable private[this] val safeLazyApply = - if (safeLazy != null) - safeLazy.getMethod("apply", classOf[xsbti.F0[_]]) - else - null - @sharable private[this] val safeLazyStrict = - if (safeLazy != null) - safeLazy.getMethod("strict", classOf[Object]) - else - null - - def apply[T <: AnyRef](eval: () => T): xsbti.api.Lazy[T] = - if (safeLazyApply != null) - safeLazyApply - .invoke(null, new xsbti.F0[T] { def apply() = eval() }) - .asInstanceOf[xsbti.api.Lazy[T]] - else - SafeLazy(eval) - - def strict[T <: AnyRef](value: T): xsbti.api.Lazy[T] = - if (safeLazyStrict != null) - safeLazyStrict - .invoke(null, value) - .asInstanceOf[xsbti.api.Lazy[T]] - else - SafeLazy.strict(value) -} - -// Adapted from https://github.com/sbt/sbt/blob/0.13/compile/api/src/main/scala/xsbti/SafeLazy.scala -private object SafeLazy { - def apply[T <: AnyRef](eval: () => T): xsbti.api.Lazy[T] = - new Impl(eval) - - def strict[T <: AnyRef](value: T): xsbti.api.Lazy[T] = - new Strict(value) - - private[this] final class Impl[T <: AnyRef](private[this] var eval: () => T) extends xsbti.api.AbstractLazy[T] { - private[this] lazy val _t = { - val t = eval() - eval = null // clear the reference, ensuring the only memory we hold onto is the result - t - } - def get(): T = _t - } - - private[this] final class Strict[T <: AnyRef](val get: T) extends xsbti.api.Lazy[T] with java.io.Serializable } diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index ee17927a2598..cfbfea837365 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -501,7 +501,7 @@ class SpaceEngine(implicit ctx: Context) extends SpaceLogic { /** Decompose a type into subspaces -- assume the type can be decomposed */ def decompose(tp: Type): List[Space] = { - val children = tp.classSymbol.children + val children = tp.classSymbol.denot.children debug.println(s"candidates for ${tp.show} : [${children.map(_.show).mkString(", ")}]") diff --git a/compiler/test/dotty/Jars.scala b/compiler/test/dotty/Jars.scala index dd06dc2a6fee..588cbd7167da 100644 --- a/compiler/test/dotty/Jars.scala +++ b/compiler/test/dotty/Jars.scala @@ -26,7 +26,7 @@ object Jars { dottyLib :: dottyCompiler :: dottyInterfaces :: dottyExtras def scalaLibrary: String = sys.env.get("DOTTY_SCALA_LIBRARY") - .getOrElse(findJarFromRuntime("scala-library-2.")) + .getOrElse(findJarFromRuntime("scala-library")) /** Gets the scala 2.* library at runtime, note that doing this is unsafe * unless you know that the library will be on the classpath of the running diff --git a/dist/bin/common b/dist/bin/common index b5239ce28af0..660b25f220b3 100755 --- a/dist/bin/common +++ b/dist/bin/common @@ -118,7 +118,7 @@ DOTTY_LIB=$(find_lib "*dotty-library*") SCALA_ASM=$(find_lib "*scala-asm*") SCALA_LIB=$(find_lib 
"*scala-library*") SCALA_XML=$(find_lib "*scala-xml*") -SBT_INTF=$(find_lib "*sbt-interface*") +SBT_INTF=$(find_lib "*compiler-interface*") # debug DEBUG_STR=-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005 diff --git a/project/Build.scala b/project/Build.scala index 0e28abc47f51..62681ccefde8 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -7,22 +7,26 @@ import java.nio.file.{ Files, FileSystemException } import java.util.Calendar import scala.reflect.io.Path -import sbtassembly.AssemblyKeys.assembly -import xerial.sbt.Pack._ import sbt.Package.ManifestAttributes -import com.typesafe.sbteclipse.plugin.EclipsePlugin._ - import dotty.tools.sbtplugin.DottyPlugin.autoImport._ import dotty.tools.sbtplugin.DottyIDEPlugin.{ prepareCommand, runProcess } import dotty.tools.sbtplugin.DottyIDEPlugin.autoImport._ -import org.scalajs.sbtplugin.ScalaJSPlugin -import org.scalajs.sbtplugin.ScalaJSPlugin.autoImport._ import pl.project13.scala.sbt.JmhPlugin import JmhPlugin.JmhKeys.Jmh +import sbt.ScriptedPlugin.autoImport._ + +import xerial.sbt.pack.PackPlugin.packSettings +import xerial.sbt.pack.PackPlugin.autoImport._ + +import org.scalajs.sbtplugin.ScalaJSPlugin +import ScalaJSPlugin.autoImport._ + +import com.typesafe.sbteclipse.plugin.EclipsePlugin.EclipseKeys + import Modes._ /* In sbt 0.13 the Build trait would expose all vals to the shell, where you @@ -134,7 +138,7 @@ object Build { resourceDirectory in Test := baseDirectory.value / "test-resources", // Prevent sbt from rewriting our dependencies - ivyScala ~= (_ map (_ copy (overrideScalaVersion = false))) + scalaModuleInfo ~= (_.map(_.withOverrideScalaVersion(false))) ) // Settings used for projects compiled only with Scala 2 @@ -151,14 +155,13 @@ object Build { // Settings used when compiling dotty with a non-bootstrapped dotty lazy val commonBootstrappedSettings = commonSettings ++ Seq( - EclipseKeys.skipProject := true, version := dottyVersion, scalaVersion := dottyNonBootstrappedVersion, // Avoid having to run `dotty-sbt-bridge/publishLocal` before compiling a bootstrapped project scalaCompilerBridgeSource := - (dottyOrganization %% "dotty-sbt-bridge" % dottyVersion % Configurations.Component.name) - .artifacts(Artifact.sources("dotty-sbt-bridge").copy(url = + (dottyOrganization %% "dotty-sbt-bridge" % dottyVersion) + .artifacts(Artifact.sources("dotty-sbt-bridge").withUrl( // We cannot use the `packageSrc` task because a setting cannot depend // on a task. Instead, we make `compile` below depend on the bridge `packageSrc` Some((artifactPath in (`dotty-sbt-bridge`, Compile, packageSrc)).value.toURI.toURL))), @@ -172,7 +175,7 @@ object Build { // contain `scalaInstance.value.libraryJar` which in our case is the // non-bootstrapped dotty-library that will then take priority over // the bootstrapped dotty-library on the classpath or sourcepath. 
- classpathOptions ~= (_.copy(autoBoot = false)), + classpathOptions ~= (_.withAutoBoot(false)), // We still need a Scala bootclasspath equal to the JVM bootclasspath, // otherwise sbt 0.13 incremental compilation breaks (https://github.com/sbt/sbt/issues/3142) scalacOptions ++= Seq("-bootclasspath", sys.props("sun.boot.class.path")), @@ -196,7 +199,7 @@ object Build { Seq( dottyOrganization %% "dotty-library" % dottyNonBootstrappedVersion % Configurations.ScalaTool.name, dottyOrganization %% "dotty-compiler" % dottyNonBootstrappedVersion % Configurations.ScalaTool.name - ).map(_.withDottyCompat()) + ).map(_.withDottyCompat(scalaVersion.value)) else Seq() }, @@ -204,25 +207,29 @@ object Build { // Compile using the non-bootstrapped and non-published dotty managedScalaInstance := false, scalaInstance := { + val updateResult = update.value val (libraryJar, compilerJar) = if (bootstrapFromPublishedJars.value) { - val jars = update.value.select( + val jars = updateResult.select( configuration = configurationFilter(Configurations.ScalaTool.name), + module = moduleFilter(), artifact = artifactFilter(extension = "jar") ) (jars.find(_.getName.startsWith("dotty-library_2.12")).get, jars.find(_.getName.startsWith("dotty-compiler_2.12")).get) } else - ((packageBin in (`dotty-library`, Compile)).value, - (packageBin in (`dotty-compiler`, Compile)).value) + ((packageBin in (`dotty-library`, Compile)).value: @sbtUnchecked, + (packageBin in (`dotty-compiler`, Compile)).value: @sbtUnchecked) // All compiler dependencies except the library val otherDependencies = (dependencyClasspath in (`dotty-compiler`, Compile)).value .filterNot(_.get(artifact.key).exists(_.name == "dotty-library")) .map(_.data) - val loader = state.value.classLoaderCache(libraryJar :: compilerJar :: otherDependencies.toList) - new ScalaInstance(scalaVersion.value, loader, libraryJar, compilerJar, otherDependencies, None) + val allJars = libraryJar :: compilerJar :: otherDependencies.toList + val classLoader = state.value.classLoaderCache(allJars) + new sbt.internal.inc.ScalaInstance(scalaVersion.value, + classLoader, libraryJar, compilerJar, allJars.toArray, None) } ) @@ -275,8 +282,8 @@ object Build { // - publishes its own empty artifact "dotty" that depends on "dotty-library" and "dotty-compiler", // this is only necessary for compatibility with sbt which currently hardcodes the "dotty" artifact name lazy val dotty = project.in(file(".")).asDottyRoot(NonBootstrapped) - lazy val `dotty-bootstrapped` = project.asDottyRoot(Bootstrapped) - lazy val `dotty-optimised` = project.asDottyRoot(BootstrappedOptimised) + lazy val `dotty-bootstrapped` = project.asDottyRoot(Bootstrapped).disablePlugins(ScriptedPlugin) + lazy val `dotty-optimised` = project.asDottyRoot(BootstrappedOptimised).disablePlugins(ScriptedPlugin) lazy val `dotty-interfaces` = project.in(file("interfaces")). settings(commonScala2Settings). 
// Java-only project, so this is fine @@ -285,8 +292,6 @@ object Build { crossPaths := false, // Do not depend on the Scala library autoScalaLibrary := false, - // Let the sbt eclipse plugin know that this is a Java-only project - EclipseKeys.projectFlavor := EclipseProjectFlavor.Java, //Remove javac invalid options in Compile doc javacOptions in (Compile, doc) --= Seq("-Xlint:unchecked", "-Xlint:deprecation") ) @@ -356,8 +361,8 @@ object Build { ) lazy val `dotty-doc` = project.in(file("doc-tool")).asDottyDoc(NonBootstrapped) - lazy val `dotty-doc-bootstrapped` = project.in(file("doc-tool")).asDottyDoc(Bootstrapped) - lazy val `dotty-doc-optimised` = project.in(file("doc-tool")).asDottyDoc(BootstrappedOptimised) + lazy val `dotty-doc-bootstrapped` = project.in(file("doc-tool")).asDottyDoc(Bootstrapped).disablePlugins(ScriptedPlugin) + lazy val `dotty-doc-optimised` = project.in(file("doc-tool")).asDottyDoc(BootstrappedOptimised).disablePlugins(ScriptedPlugin) def dottyDoc(implicit mode: Mode): Project = mode match { case NonBootstrapped => `dotty-doc` @@ -370,10 +375,6 @@ object Build { settings( resourceDirectory in Test := baseDirectory.value / "test" / "resources", - // specify main and ignore tests when assembling - mainClass in assembly := Some("dotty.tools.bot.Main"), - test in assembly := {}, - libraryDependencies ++= { val circeVersion = "0.7.0" val http4sVersion = "0.15.3" @@ -428,7 +429,7 @@ object Build { } catch { case _: UnsupportedOperationException | _: FileSystemException => // If the OS doesn't support symbolic links, copy the directory instead. - sbt.IO.copy(pairs, overwrite = true, preserveLastModified = true) + sbt.IO.copy(pairs, CopyOptions(overwrite = true, preserveLastModified = true, preserveExecutable = true)) } pairs.map(_._2) @@ -462,19 +463,12 @@ object Build { Seq(file) }.taskValue, - // include sources in eclipse (downloads source code for all dependencies) - //http://stackoverflow.com/questions/10472840/how-to-attach-sources-to-sbt-managed-dependencies-in-scala-ide#answer-11683728 - com.typesafe.sbteclipse.plugin.EclipsePlugin.EclipseKeys.withSource := true, - // get libraries onboard - libraryDependencies ++= Seq("com.typesafe.sbt" % "sbt-interface" % sbtVersion.value, - ("org.scala-lang.modules" %% "scala-xml" % "1.0.6").withDottyCompat(), + libraryDependencies ++= Seq("org.scala-sbt" % "compiler-interface" % "1.0.2", + ("org.scala-lang.modules" % "scala-xml_2.12" % "1.0.6"), "com.novocode" % "junit-interface" % "0.11" % "test", "org.scala-lang" % "scala-library" % scalacVersion % "test"), - // enable improved incremental compilation algorithm - incOptions := incOptions.value.withNameHashing(true), - // For convenience, change the baseDirectory when running the compiler baseDirectory in (Compile, run) := baseDirectory.value / "..", // .. but not when running test @@ -502,7 +496,6 @@ object Build { // Override run to be able to run compiled classfiles dotr := { val args: Seq[String] = spaceDelimited("").parsed - val java: String = Process("which" :: "java" :: Nil).!! 
val attList = (dependencyClasspath in Runtime).value val _ = packageAll.value val scalaLib = attList @@ -510,13 +503,19 @@ object Build { .find(_.contains("scala-library")) .toList.mkString(":") - if (java == "") - println("Couldn't find java executable on path, please install java to a default location") - else if (scalaLib == "") { + if (scalaLib == "") { println("Couldn't find scala-library on classpath, please run using script in bin dir instead") } else { val dottyLib = packageAll.value("dotty-library") - s"""$java -classpath .:$dottyLib:$scalaLib ${args.mkString(" ")}""".! + val allArgs = Seq("java", "-classpath", s".:$dottyLib:$scalaLib") ++ args + val exitCode = new java.lang.ProcessBuilder(allArgs: _*) + .inheritIO() + .start() + .waitFor() + if (exitCode != 0) + throw new FeedbackProvidedException { + override def toString = "dotr failed" + } } }, run := Def.inputTaskDyn { @@ -614,7 +613,8 @@ object Build { // used for tests that compile dotty path.contains("scala-asm") || // needed for the xsbti interface - path.contains("sbt-interface") + path.contains("compiler-interface") || + path.contains("util-interface") } yield "-Xbootclasspath/p:" + path val ci_build = // propagate if this is a ci build @@ -670,8 +670,8 @@ object Build { if (mode == NonBootstrapped) nonBootstrapedDottyCompilerSettings else bootstrapedDottyCompilerSettings lazy val `dotty-compiler` = project.in(file("compiler")).asDottyCompiler(NonBootstrapped) - lazy val `dotty-compiler-bootstrapped` = project.in(file("compiler")).asDottyCompiler(Bootstrapped) - lazy val `dotty-compiler-optimised` = project.in(file("compiler")).asDottyCompiler(BootstrappedOptimised) + lazy val `dotty-compiler-bootstrapped` = project.in(file("compiler")).asDottyCompiler(Bootstrapped).disablePlugins(ScriptedPlugin) + lazy val `dotty-compiler-optimised` = project.in(file("compiler")).asDottyCompiler(BootstrappedOptimised).disablePlugins(ScriptedPlugin) def dottyCompiler(implicit mode: Mode): Project = mode match { case NonBootstrapped => `dotty-compiler` @@ -688,8 +688,8 @@ object Build { ) lazy val `dotty-library` = project.in(file("library")).asDottyLibrary(NonBootstrapped) - lazy val `dotty-library-bootstrapped`: Project = project.in(file("library")).asDottyLibrary(Bootstrapped) - lazy val `dotty-library-optimised`: Project = project.in(file("library")).asDottyLibrary(BootstrappedOptimised) + lazy val `dotty-library-bootstrapped`: Project = project.in(file("library")).asDottyLibrary(Bootstrapped).disablePlugins(ScriptedPlugin) + lazy val `dotty-library-optimised`: Project = project.in(file("library")).asDottyLibrary(BootstrappedOptimised).disablePlugins(ScriptedPlugin) def dottyLibrary(implicit mode: Mode): Project = mode match { case NonBootstrapped => `dotty-library` @@ -721,10 +721,10 @@ object Build { description := "sbt compiler bridge for Dotty", resolvers += Resolver.typesafeIvyRepo("releases"), // For org.scala-sbt:api libraryDependencies ++= Seq( - "com.typesafe.sbt" % "sbt-interface" % sbtVersion.value, - "org.scala-sbt" % "api" % sbtVersion.value % "test", - ("org.specs2" %% "specs2-core" % "3.9.1" % "test").withDottyCompat(), - ("org.specs2" %% "specs2-junit" % "3.9.1" % "test").withDottyCompat() + "org.scala-sbt" % "compiler-interface" % "1.0.2", + ("org.scala-sbt" %% "zinc-apiinfo" % "1.0.2" % "test").withDottyCompat(scalaVersion.value), + ("org.specs2" %% "specs2-core" % "3.9.1" % "test").withDottyCompat(scalaVersion.value), + ("org.specs2" %% "specs2-junit" % "3.9.1" % "test").withDottyCompat(scalaVersion.value) ), // 
The sources should be published with crossPaths := false since they // need to be compiled by the project using the bridge. @@ -738,7 +738,7 @@ object Build { ) lazy val `dotty-sbt-bridge` = project.in(file("sbt-bridge")).asDottySbtBridge(NonBootstrapped) - lazy val `dotty-sbt-bridge-bootstrapped` = project.in(file("sbt-bridge")).asDottySbtBridge(Bootstrapped) + lazy val `dotty-sbt-bridge-bootstrapped` = project.in(file("sbt-bridge")).asDottySbtBridge(Bootstrapped).disablePlugins(ScriptedPlugin) lazy val `dotty-language-server` = project.in(file("language-server")). dependsOn(dottyCompiler(Bootstrapped)). @@ -773,7 +773,7 @@ object Build { runTask(Runtime, mainClass, allArgs: _*) }.dependsOn(compile in (`vscode-dotty`, Compile)).evaluated - ) + ).disablePlugins(ScriptedPlugin) /** A sandbox to play with the Scala.js back-end of dotty. * @@ -818,8 +818,8 @@ object Build { ))) lazy val `dotty-bench` = project.in(file("bench")).asDottyBench(NonBootstrapped) - lazy val `dotty-bench-bootstrapped` = project.in(file("bench")).asDottyBench(Bootstrapped) - lazy val `dotty-bench-optimised` = project.in(file("bench")).asDottyBench(BootstrappedOptimised) + lazy val `dotty-bench-bootstrapped` = project.in(file("bench")).asDottyBench(Bootstrapped).disablePlugins(ScriptedPlugin) + lazy val `dotty-bench-optimised` = project.in(file("bench")).asDottyBench(BootstrappedOptimised).disablePlugins(ScriptedPlugin) // Depend on dotty-library so that sbt projects using dotty automatically // depend on the dotty-library @@ -853,21 +853,22 @@ object Build { lazy val `sbt-dotty` = project.in(file("sbt-dotty")). settings(commonSettings). settings( + scalaVersion := "2.12.2", // Keep in sync with inject-sbt-dotty.sbt - libraryDependencies += Dependencies.`jackson-databind`, + libraryDependencies ++= Seq( + Dependencies.`jackson-databind`, + "org.scala-sbt" % "compiler-interface" % "1.0.2" + ), unmanagedSourceDirectories in Compile += baseDirectory.value / "../language-server/src/dotty/tools/languageserver/config", - - sbtPlugin := true, - version := "0.1.7", - ScriptedPlugin.scriptedSettings, - ScriptedPlugin.sbtTestDirectory := baseDirectory.value / "sbt-test", - ScriptedPlugin.scriptedLaunchOpts += "-Dplugin.version=" + version.value, - ScriptedPlugin.scriptedLaunchOpts += "-Dplugin.scalaVersion=" + dottyVersion, - // By default scripted tests use $HOME/.ivy2 for the ivy cache. We need to override this value for the CI. - ScriptedPlugin.scriptedLaunchOpts ++= ivyPaths.value.ivyHome.map("-Dsbt.ivy.home=" + _.getAbsolutePath).toList, - ScriptedPlugin.scripted := ScriptedPlugin.scripted.dependsOn(Def.task { + version := "0.1.8", + sbtTestDirectory := baseDirectory.value / "sbt-test", + scriptedLaunchOpts += "-Dplugin.version=" + version.value, + scriptedLaunchOpts += "-Dplugin.scalaVersion=" + dottyVersion, + // By default scripted tests use $HOME/.ivy2 for the ivy cache. We need to override this value for the CI. + scriptedLaunchOpts ++= ivyPaths.value.ivyHome.map("-Dsbt.ivy.home=" + _.getAbsolutePath).toList, + scripted := scripted.dependsOn(Def.task { val x0 = (publishLocal in `dotty-sbt-bridge-bootstrapped`).value val x1 = (publishLocal in `dotty-interfaces`).value val x2 = (publishLocal in `dotty-compiler-bootstrapped`).value @@ -881,7 +882,7 @@ object Build { lazy val `vscode-dotty` = project.in(file("vscode-dotty")). settings(commonSettings). 
settings( - EclipseKeys.skipProject := true, + EclipseKeys.skipProject := true, version := "0.1.2", // Keep in sync with package.json @@ -893,7 +894,7 @@ object Build { val coursier = baseDirectory.value / "out/coursier" val packageJson = baseDirectory.value / "package.json" if (!coursier.exists || packageJson.lastModified > coursier.lastModified) - runProcess(Seq("npm", "run", "update-all"), wait = true, directory = baseDirectory.value) + runProcess(Seq("npm", "run", "update-all"), wait = true, directory = baseDirectory.value: @sbtUnchecked) val tsc = baseDirectory.value / "node_modules" / ".bin" / "tsc" runProcess(Seq(tsc.getAbsolutePath, "--pretty", "--project", baseDirectory.value.getAbsolutePath), wait = true) @@ -902,7 +903,7 @@ object Build { // (--extensionDevelopmentPath=...) runProcess(codeCommand.value ++ Seq("--install-extension", "daltonjorge.scala"), wait = true) - sbt.inc.Analysis.Empty + sbt.internal.inc.Analysis.Empty }, sbt.Keys.`package`:= { runProcess(Seq("vsce", "package"), wait = true, directory = baseDirectory.value) @@ -1011,7 +1012,8 @@ object Build { compile := { val inputs = (compileInputs in compile).value - import inputs.config._ + val inputOptions = inputs.options() + import inputOptions._ val s = streams.value val logger = s.log @@ -1034,6 +1036,7 @@ object Build { // Compile + val run = (runner in compile).value val cachedCompile = FileFunction.cached(cacheDir / "compile", FilesInfo.lastModified, FilesInfo.exists) { dependencies => @@ -1064,13 +1067,12 @@ object Build { } def doCompile(sourcesArgs: List[String]): Unit = { - val run = (runner in compile).value run.run("dotty.tools.dotc.Main", compilerCp, "-classpath" :: cpStr :: "-d" :: classesDirectory.getAbsolutePath() :: - options ++: + scalacOptions ++: sourcesArgs, - patchedLogger) foreach sys.error + patchedLogger) } // Work around the Windows limitation on command line length. @@ -1093,7 +1095,7 @@ object Build { cachedCompile((sources ++ allMyDependencies).toSet) // We do not have dependency analysis when compiling externally - sbt.inc.Analysis.Empty + sbt.internal.inc.Analysis.Empty } )) } @@ -1123,8 +1125,8 @@ object Build { ) lazy val dist = project.asDist(NonBootstrapped) - lazy val `dist-bootstrapped` = project.asDist(Bootstrapped) - lazy val `dist-optimised` = project.asDist(BootstrappedOptimised) + lazy val `dist-bootstrapped` = project.asDist(Bootstrapped).disablePlugins(ScriptedPlugin) + lazy val `dist-optimised` = project.asDist(BootstrappedOptimised).disablePlugins(ScriptedPlugin) implicit class ProjectDefinitions(val project: Project) extends AnyVal { @@ -1159,7 +1161,7 @@ object Build { settings(dottyDocSettings) def asDottySbtBridge(implicit mode: Mode): Project = project.withCommonSettings. - dependsOn(dottyCompiler). + dependsOn(dottyCompiler % Provided). settings(dottySbtBridgeSettings) def asDottyBench(implicit mode: Mode): Project = project.withCommonSettings. 
@@ -1181,5 +1183,4 @@ object Build { case BootstrappedOptimised => commonOptimisedSettings }) } - } diff --git a/project/VersionUtil.scala b/project/VersionUtil.scala index 399a468591c8..c127c8fee000 100644 --- a/project/VersionUtil.scala +++ b/project/VersionUtil.scala @@ -6,7 +6,7 @@ object VersionUtil { if (System.getProperty("os.name").toLowerCase.contains("windows")) s"cmd.exe /c project\\scripts\\build\\$scriptName.bat -p" else s"project/scripts/build/$scriptName" - Process(cmd).lines.head.trim + Process(cmd).lineStream.head.trim } /** Seven letters of the SHA hash is considered enough to uniquely identify a diff --git a/project/build.properties b/project/build.properties index 64317fdae59f..9abea1294a1f 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=0.13.15 +sbt.version=1.0.3 diff --git a/project/inject-sbt-dotty.sbt b/project/inject-sbt-dotty.sbt index 5ccc77fc2783..d63d6da5d1d4 100644 --- a/project/inject-sbt-dotty.sbt +++ b/project/inject-sbt-dotty.sbt @@ -5,6 +5,9 @@ unmanagedSourceDirectories in Compile += baseDirectory.value / "../sbt-dotty/src" // Keep in sync with `sbt-dotty` config in Build.scala -libraryDependencies += Dependencies.`jackson-databind` +libraryDependencies ++= Seq( + Dependencies.`jackson-databind`, + "org.scala-sbt" % "compiler-interface" % "1.0.0-X16" +) unmanagedSourceDirectories in Compile += baseDirectory.value / "../language-server/src/dotty/tools/languageserver/config" diff --git a/project/plugins.sbt b/project/plugins.sbt index 6b24922932e3..ef60384df746 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -3,17 +3,17 @@ // e.g. addSbtPlugin("com.github.mpeltonen" % "sbt-idea" % "1.1.0") // Scala IDE project file generator -addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "5.1.0") +addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "5.2.3") -addSbtPlugin("org.scala-js" % "sbt-scalajs" % "0.6.14") +addSbtPlugin("org.scala-js" % "sbt-scalajs" % "0.6.21") -addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.4") +addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.5") -addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "1.1") +addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "2.0") -addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.0.0") +addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.0") -addSbtPlugin("org.xerial.sbt" % "sbt-pack" % "0.8.2") +addSbtPlugin("org.xerial.sbt" % "sbt-pack" % "0.9.1") -addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.2.24") +addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.2.27") diff --git a/project/scripted.sbt b/project/scripted.sbt index 76fdf267ff0d..a7d7ecccf2a9 100644 --- a/project/scripted.sbt +++ b/project/scripted.sbt @@ -1,2 +1,2 @@ // Used by the subproject dotty-bridge -libraryDependencies += "org.scala-sbt" % "scripted-plugin" % sbtVersion.value +libraryDependencies += "org.scala-sbt" %% "scripted-plugin" % sbtVersion.value diff --git a/sbt-bridge/src/xsbt/CompilerInterface.scala b/sbt-bridge/src/xsbt/CompilerInterface.scala index bf1488dad93b..ac828b996957 100644 --- a/sbt-bridge/src/xsbt/CompilerInterface.scala +++ b/sbt-bridge/src/xsbt/CompilerInterface.scala @@ -3,10 +3,8 @@ */ package xsbt -import xsbti.{ AnalysisCallback, Logger, Problem, Reporter, Severity, DependencyContext } -import xsbti.api.SourceAPI +import xsbti.{ AnalysisCallback, Logger, Reporter, Severity } import xsbti.compile._ -import Log.debug import java.io.File import dotty.tools.dotc.core.Contexts.ContextBase @@ -16,19 +14,19 @@ import 
dotty.tools.dotc.interfaces._ import java.net.URLClassLoader final class CompilerInterface { - def newCompiler(options: Array[String], output: Output, initialLog: Logger, - initialDelegate: Reporter, resident: Boolean): CachedCompiler = { + def newCompiler(options: Array[String], output: Output, initialLog: xsbti.Logger, + initialDelegate: xsbti.Reporter): CachedCompiler = { // The classloader that sbt uses to load the compiler bridge is broken // (see CompilerClassLoader#fixBridgeLoader for details). To workaround // this we construct our own ClassLoader and then run the following code // with it: - // new CachedCompilerImpl(options, output, resident) + // new CachedCompilerImpl(options, output) val bridgeLoader = getClass.getClassLoader val fixedLoader = CompilerClassLoader.fixBridgeLoader(bridgeLoader) val cciClass = fixedLoader.loadClass("xsbt.CachedCompilerImpl") cciClass.getConstructors.head - .newInstance(options, output, resident: java.lang.Boolean) + .newInstance(options, output) .asInstanceOf[CachedCompiler] } @@ -37,13 +35,13 @@ final class CompilerInterface { cached.run(sources, changes, callback, log, delegate, progress) } -class CachedCompilerImpl(args: Array[String], output: Output, resident: Boolean) extends CachedCompiler { +class CachedCompilerImpl(args: Array[String], output: Output) extends CachedCompiler { val outputArgs = output match { case multi: MultipleOutput => ??? case single: SingleOutput => - List("-d", single.outputDirectory.getAbsolutePath.toString) + List("-d", single.getOutputDirectory.getAbsolutePath.toString) } def commandArguments(sources: Array[File]): Array[String] = @@ -53,7 +51,7 @@ class CachedCompilerImpl(args: Array[String], output: Output, resident: Boolean) run(sources.toList, changes, callback, log, delegate, progress) } private[this] def run(sources: List[File], changes: DependencyChanges, callback: AnalysisCallback, log: Logger, delegate: Reporter, compileProgress: CompileProgress): Unit = { - debug(log, args.mkString("Calling Dotty compiler with arguments (CompilerInterface):\n\t", "\n\t", "")) + log.debug(() => args.mkString("Calling Dotty compiler with arguments (CompilerInterface):\n\t", "\n\t", "")) val ctx = (new ContextBase).initialCtx.fresh .setSbtCallback(callback) .setReporter(new DelegatingReporter(delegate)) @@ -67,6 +65,6 @@ class CachedCompilerImpl(args: Array[String], output: Output, resident: Boolean) } } -class InterfaceCompileFailed(override val arguments: Array[String], override val problems: Array[Problem]) extends xsbti.CompileFailed { +class InterfaceCompileFailed(override val arguments: Array[String], override val problems: Array[xsbti.Problem]) extends xsbti.CompileFailed { override val toString = "Compilation failed" } diff --git a/sbt-bridge/src/xsbt/DelegatingReporter.scala b/sbt-bridge/src/xsbt/DelegatingReporter.scala index ffc4792ecef0..6ac533a9cb13 100644 --- a/sbt-bridge/src/xsbt/DelegatingReporter.scala +++ b/sbt-bridge/src/xsbt/DelegatingReporter.scala @@ -9,7 +9,8 @@ import reporting._ import reporting.diagnostic.MessageContainer import reporting.diagnostic.messages import core.Contexts._ -import xsbti.{Maybe, Position} +import xsbti.{Position, Severity} +import java.util.Optional final class DelegatingReporter(delegate: xsbti.Reporter) extends Reporter with UniqueMessagePositions @@ -22,9 +23,9 @@ final class DelegatingReporter(delegate: xsbti.Reporter) extends Reporter def doReport(cont: MessageContainer)(implicit ctx: Context): Unit = { val severity = cont match { - case _: messages.Error => 
xsbti.Severity.Error - case _: messages.Warning => xsbti.Severity.Warn - case _ => xsbti.Severity.Info + case _: messages.Error => Severity.Error + case _: messages.Warning => Severity.Warn + case _ => Severity.Info } val position = @@ -32,13 +33,13 @@ final class DelegatingReporter(delegate: xsbti.Reporter) extends Reporter val pos = cont.pos val src = pos.source new Position { - val sourceFile: Maybe[java.io.File] = maybe(Option(src.file.file)) - val sourcePath: Maybe[String] = maybe(Option(src.file.path)) - val line: Maybe[Integer] = Maybe.just(pos.line) + val sourceFile: Optional[java.io.File] = maybe(Option(src.file.file)) + val sourcePath: Optional[String] = maybe(Option(src.file.path)) + val line: Optional[Integer] = Optional.of(pos.line) val lineContent: String = pos.lineContent.stripLineEnd - val offset: Maybe[Integer] = Maybe.just(pos.point) - val pointer: Maybe[Integer] = Maybe.just(pos.point - src.startOfLine(pos.point)) - val pointerSpace: Maybe[String] = Maybe.just( + val offset: Optional[Integer] = Optional.of(pos.point) + val pointer: Optional[Integer] = Optional.of(pos.point - src.startOfLine(pos.point)) + val pointerSpace: Optional[String] = Optional.of( ((lineContent: Seq[Char]).take(pointer.get).map { case '\t' => '\t'; case x => ' ' }).mkString ) } @@ -51,21 +52,21 @@ final class DelegatingReporter(delegate: xsbti.Reporter) extends Reporter sb.append(explanation(cont.contained())) } - delegate.log(position, sb.toString(), severity) + delegate.log(Problem(position, sb.toString(), severity)) } - private[this] def maybe[T](opt: Option[T]): Maybe[T] = opt match { - case None => Maybe.nothing[T] - case Some(s) => Maybe.just[T](s) + private[this] def maybe[T](opt: Option[T]): Optional[T] = opt match { + case None => Optional.empty[T] + case Some(s) => Optional.of[T](s) } private[this] val noPosition = new Position { - val line: Maybe[Integer] = Maybe.nothing[Integer] + val line: Optional[Integer] = Optional.empty[Integer] val lineContent: String = "" - val offset: Maybe[Integer] = Maybe.nothing[Integer] - val pointer: Maybe[Integer] = Maybe.nothing[Integer] - val pointerSpace: Maybe[String] = Maybe.nothing[String] - val sourceFile: Maybe[java.io.File] = Maybe.nothing[java.io.File] - val sourcePath: Maybe[String] = Maybe.nothing[String] + val offset: Optional[Integer] = Optional.empty[Integer] + val pointer: Optional[Integer] = Optional.empty[Integer] + val pointerSpace: Optional[String] = Optional.empty[String] + val sourceFile: Optional[java.io.File] = Optional.empty[java.io.File] + val sourcePath: Optional[String] = Optional.empty[String] } } diff --git a/sbt-bridge/src/xsbt/Log.scala b/sbt-bridge/src/xsbt/Log.scala deleted file mode 100644 index e514d7abbc46..000000000000 --- a/sbt-bridge/src/xsbt/Log.scala +++ /dev/null @@ -1,10 +0,0 @@ -/* sbt -- Simple Build Tool - * Copyright 2008, 2009 Mark Harrah - */ -package xsbt - -object Log { - def debug(log: xsbti.Logger, msg: => String) = log.debug(Message(msg)) - def settingsError(log: xsbti.Logger): String => Unit = - s => log.error(Message(s)) -} diff --git a/sbt-bridge/src/xsbt/Message.scala b/sbt-bridge/src/xsbt/Message.scala deleted file mode 100644 index 48f24f53349f..000000000000 --- a/sbt-bridge/src/xsbt/Message.scala +++ /dev/null @@ -1,8 +0,0 @@ -/* sbt -- Simple Build Tool - * Copyright 2008, 2009 Mark Harrah - */ -package xsbt - -object Message { - def apply[T](s: => T) = new xsbti.F0[T] { def apply() = s } -} diff --git a/sbt-bridge/src/xsbt/Problem.scala b/sbt-bridge/src/xsbt/Problem.scala new file mode 100644 
index 000000000000..f92d8c284f89 --- /dev/null +++ b/sbt-bridge/src/xsbt/Problem.scala @@ -0,0 +1,12 @@ +package xsbt + +import xsbti.{Position, Severity} + +final case class Problem(override val position: Position, + override val message: String, + override val severity: Severity) extends xsbti.Problem { + override val category = "" + override def toString = s"[$severity] $position: $message" + +} + diff --git a/sbt-bridge/src/xsbt/ScaladocInterface.scala b/sbt-bridge/src/xsbt/ScaladocInterface.scala index 1eae8374d28b..c5124d8996e0 100644 --- a/sbt-bridge/src/xsbt/ScaladocInterface.scala +++ b/sbt-bridge/src/xsbt/ScaladocInterface.scala @@ -5,6 +5,7 @@ package xsbt import xsbti.{ Logger, Severity } import java.net.URL +import java.util.Optional class ScaladocInterface { def run(args: Array[String], log: Logger, delegate: xsbti.Reporter) = @@ -12,21 +13,21 @@ class ScaladocInterface { } class DottydocRunner(args: Array[String], log: Logger, delegate: xsbti.Reporter) { - def run(): Unit = delegate.log( + def run(): Unit = delegate.log(Problem( NoPosition, """|The dotty sbt-bridge currently does not support doc generation directly |via sbt. Please see the dotty documentation at dotty.epfl.ch""".stripMargin, Severity.Error - ) + )) private[this] val NoPosition = new xsbti.Position { - val line = xsbti.Maybe.nothing[Integer] + val line = Optional.empty[Integer] val lineContent = "" - val offset = xsbti.Maybe.nothing[Integer] - val sourcePath = xsbti.Maybe.nothing[String] - val sourceFile = xsbti.Maybe.nothing[java.io.File] - val pointer = xsbti.Maybe.nothing[Integer] - val pointerSpace = xsbti.Maybe.nothing[String] + val offset = Optional.empty[Integer] + val sourcePath = Optional.empty[String] + val sourceFile = Optional.empty[java.io.File] + val pointer = Optional.empty[Integer] + val pointerSpace = Optional.empty[String] } private def getStringSetting(name: String): Option[String] = diff --git a/sbt-bridge/test/xsbt/DependencySpecification.scala b/sbt-bridge/test/xsbt/DependencySpecification.scala index 60545091b666..0fbd285ee35d 100644 --- a/sbt-bridge/test/xsbt/DependencySpecification.scala +++ b/sbt-bridge/test/xsbt/DependencySpecification.scala @@ -8,80 +8,80 @@ import xsbt.api.SameAPI import org.specs2.mutable.Specification import org.specs2.runner.JUnitRunner -import ScalaCompilerForUnitTesting.ExtractedSourceDependencies +import xsbti.TestCallback.ExtractedClassDependencies @RunWith(classOf[JUnitRunner]) class DependencySpecification extends Specification { "Extracted source dependencies from public members" in { - val sourceDependencies = extractSourceDependenciesPublic - val memberRef = sourceDependencies.memberRef - val inheritance = sourceDependencies.inheritance - memberRef('A) === Set.empty - inheritance('A) === Set.empty - memberRef('B) === Set('A, 'D) - inheritance('B) === Set('D) - memberRef('C) === Set('A) - inheritance('C) === Set.empty - memberRef('D) === Set.empty - inheritance('D) === Set.empty - memberRef('E) === Set.empty - inheritance('E) === Set.empty - memberRef('F) === Set('A, 'B, 'C, 'D, 'E, 'G) - inheritance('F) === Set('A, 'E) - memberRef('H) === Set('B, 'E, 'G) + val classDependencies = extractClassDependenciesPublic + val memberRef = classDependencies.memberRef + val inheritance = classDependencies.inheritance + memberRef("A") === Set.empty + inheritance("A") === Set.empty + memberRef("B") === Set("A", "D") + inheritance("B") === Set("D") + memberRef("C") === Set("A") + inheritance("C") === Set.empty + memberRef("D") === Set.empty + inheritance("D") === 
Set.empty + memberRef("E") === Set.empty + inheritance("E") === Set.empty + memberRef("F") === Set("A", "B", "C", "D", "E", "G") + inheritance("F") === Set("A", "E") + memberRef("H") === Set("B", "E", "G") // aliases and applied type constructors are expanded so we have inheritance dependency on B - inheritance('H) === Set('B, 'E) + inheritance("H") === Set("B", "E") } "Extracted source dependencies from private members" in { - val sourceDependencies = extractSourceDependenciesPrivate - val memberRef = sourceDependencies.memberRef - val inheritance = sourceDependencies.inheritance - memberRef('A) === Set.empty - inheritance('A) === Set.empty - memberRef('B) === Set.empty - inheritance('B) === Set.empty - memberRef('C) === Set('A) - inheritance('C) === Set('A) - memberRef('D) === Set('B) - inheritance('D) === Set('B) + val classDependencies = extractClassDependenciesPrivate + val memberRef = classDependencies.memberRef + val inheritance = classDependencies.inheritance + memberRef("A") === Set.empty + inheritance("A") === Set.empty + memberRef("B") === Set.empty + inheritance("B") === Set.empty + memberRef("C.Inner1") === Set("A") + inheritance("C.Inner1") === Set("A") + memberRef("D._$Inner2") === Set("B") + inheritance("D._$Inner2") === Set("B") } "Extracted source dependencies with trait as first parent" in { - val sourceDependencies = extractSourceDependenciesTraitAsFirstPatent - val memberRef = sourceDependencies.memberRef - val inheritance = sourceDependencies.inheritance - memberRef('A) === Set.empty - inheritance('A) === Set.empty - memberRef('B) === Set('A) - inheritance('B) === Set('A) + val classDependencies = extractClassDependenciesTraitAsFirstPatent + val memberRef = classDependencies.memberRef + val inheritance = classDependencies.inheritance + memberRef("A") === Set.empty + inheritance("A") === Set.empty + memberRef("B") === Set("A") + inheritance("B") === Set("A") // verify that memberRef captures the oddity described in documentation of `Relations.inheritance` // we are mainly interested whether dependency on A is captured in `memberRef` relation so // the invariant that says that memberRef is superset of inheritance relation is preserved - memberRef('C) === Set('A, 'B) - inheritance('C) === Set('A, 'B) + memberRef("C") === Set("A", "B") + inheritance("C") === Set("A", "B") // same as above but indirect (C -> B -> A), note that only A is visible here - memberRef('D) === Set('A, 'C) - inheritance('D) === Set('A, 'C) + memberRef("D") === Set("A", "C") + inheritance("D") === Set("A", "C") } /* "Extracted source dependencies from macro arguments" in { - val sourceDependencies = extractSourceDependenciesFromMacroArgument - val memberRef = sourceDependencies.memberRef - val inheritance = sourceDependencies.inheritance - - memberRef('A) === Set('B, 'C) - inheritance('A) === Set.empty - memberRef('B) === Set.empty - inheritance('B) === Set.empty - memberRef('C) === Set.empty - inheritance('C) === Set.empty + val classDependencies = extractClassDependenciesFromMacroArgument + val memberRef = classDependencies.memberRef + val inheritance = classDependencies.inheritance + + memberRef("A") === Set("B", "C") + inheritance("A") === Set.empty + memberRef("B") === Set.empty + inheritance("B") === Set.empty + memberRef("C") === Set.empty + inheritance("C") === Set.empty } */ - private def extractSourceDependenciesPublic: ExtractedSourceDependencies = { + private def extractClassDependenciesPublic: ExtractedClassDependencies = { val srcA = "class A" val srcB = "class B extends D[A]" val srcC 
= """|class C { @@ -96,38 +96,38 @@ class DependencySpecification extends Specification { // E verifies the core type gets pulled out val srcH = "trait H extends G.T[Int] with (E[Int] @unchecked)" - val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true) - val sourceDependencies = compilerForTesting.extractDependenciesFromSrcs('A -> srcA, 'B -> srcB, 'C -> srcC, - 'D -> srcD, 'E -> srcE, 'F -> srcF, 'G -> srcG, 'H -> srcH) - sourceDependencies + val compilerForTesting = new ScalaCompilerForUnitTesting + val classDependencies = compilerForTesting.extractDependenciesFromSrcs(srcA, srcB, srcC, + srcD, srcE, srcF, srcG, srcH) + classDependencies } - private def extractSourceDependenciesPrivate: ExtractedSourceDependencies = { + private def extractClassDependenciesPrivate: ExtractedClassDependencies = { val srcA = "class A" val srcB = "class B" val srcC = "class C { private class Inner1 extends A }" val srcD = "class D { def foo: Unit = { class Inner2 extends B } }" - val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true) - val sourceDependencies = - compilerForTesting.extractDependenciesFromSrcs('A -> srcA, 'B -> srcB, 'C -> srcC, 'D -> srcD) - sourceDependencies + val compilerForTesting = new ScalaCompilerForUnitTesting + val classDependencies = + compilerForTesting.extractDependenciesFromSrcs(srcA, srcB, srcC, srcD) + classDependencies } - private def extractSourceDependenciesTraitAsFirstPatent: ExtractedSourceDependencies = { + private def extractClassDependenciesTraitAsFirstPatent: ExtractedClassDependencies = { val srcA = "class A" val srcB = "trait B extends A" val srcC = "trait C extends B" val srcD = "class D extends C" - val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true) - val sourceDependencies = - compilerForTesting.extractDependenciesFromSrcs('A -> srcA, 'B -> srcB, 'C -> srcC, 'D -> srcD) - sourceDependencies + val compilerForTesting = new ScalaCompilerForUnitTesting + val classDependencies = + compilerForTesting.extractDependenciesFromSrcs(srcA, srcB, srcC, srcD) + classDependencies } /* - private def extractSourceDependenciesFromMacroArgument: ExtractedSourceDependencies = { + private def extractClassDependenciesFromMacroArgument: ExtractedClassDependencies = { val srcA = "class A { println(B.printTree(C.foo)) }" val srcB = """ |import scala.language.experimental.macros @@ -143,9 +143,9 @@ class DependencySpecification extends Specification { val srcC = "object C { val foo = 1 }" val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true) - val sourceDependencies = + val classDependencies = compilerForTesting.extractDependenciesFromSrcs(List(Map('B -> srcB, 'C -> srcC), Map('A -> srcA))) - sourceDependencies + classDependencies } */ } diff --git a/sbt-bridge/test/xsbt/ExtractAPISpecification.scala b/sbt-bridge/test/xsbt/ExtractAPISpecification.scala index ce87134bc16d..8d867511e899 100644 --- a/sbt-bridge/test/xsbt/ExtractAPISpecification.scala +++ b/sbt-bridge/test/xsbt/ExtractAPISpecification.scala @@ -16,26 +16,25 @@ class ExtractAPISpecification extends Specification { def stableExistentialNames: Boolean = { def compileAndGetFooMethodApi(src: String): Def = { - val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = false) + val compilerForTesting = new ScalaCompilerForUnitTesting val sourceApi = compilerForTesting.extractApiFromSrc(src) - val FooApi = sourceApi.definitions().find(_.name() == "Foo").get.asInstanceOf[ClassLike] + val FooApi = sourceApi.find(_.name() == 
"Foo").get.asInstanceOf[ClassLike] val fooMethodApi = FooApi.structure().declared().find(_.name == "foo").get fooMethodApi.asInstanceOf[Def] } val src1 = """ - |class Box[T] - |class Foo { - | def foo: Box[_] = null - | - }""".stripMargin + |class Box[T] + |class Foo { + | def foo: Box[_] = null + | + }""".stripMargin val fooMethodApi1 = compileAndGetFooMethodApi(src1) val src2 = """ - |class Box[T] - |class Foo { - | def bar: Box[_] = null - | def foo: Box[_] = null - | - }""".stripMargin + |class Box[T] + |class Foo { + | def bar: Box[_] = null + | def foo: Box[_] = null + |}""".stripMargin val fooMethodApi2 = compileAndGetFooMethodApi(src2) fooMethodApi1 == fooMethodApi2 @@ -52,13 +51,11 @@ class ExtractAPISpecification extends Specification { * See https://github.com/sbt/sbt/issues/2504 */ "Self variable and no self type" in { - def selectNamer(api: SourceAPI): ClassLike = { + def selectNamer(api: Seq[Definition]): ClassLike = { def selectClass(defs: Iterable[Definition], name: String): ClassLike = defs.collectFirst { case cls: ClassLike if cls.name == name => cls }.get - val global = selectClass(api.definitions, "Global") - val foo = selectClass(global.structure.declared, "Global.Foo") - selectClass(foo.structure.inherited, "Namers.Namer") + selectClass(api, "Namers.Namer") } val src1 = """|class Namers { @@ -70,7 +67,7 @@ class ExtractAPISpecification extends Specification { | class Foo extends Namers |} |""".stripMargin - val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = false) + val compilerForTesting = new ScalaCompilerForUnitTesting val apis = compilerForTesting.extractApisFromSrcs(reuseCompilerInstance = false)(List(src1, src2), List(src2)) val _ :: src2Api1 :: src2Api2 :: Nil = apis.toList val namerApi1 = selectNamer(src2Api1) @@ -86,7 +83,7 @@ class ExtractAPISpecification extends Specification { * with our without a self variable. 
*/ "Self type" in { - def collectFirstClass(defs: Array[Definition]): ClassLike = defs.collectFirst { + def collectFirstClass(defs: Iterable[Definition]): ClassLike = defs.collectFirst { case c: ClassLike => c }.get val srcX = "trait X" @@ -99,11 +96,11 @@ class ExtractAPISpecification extends Specification { val srcC6 = "class C6 extends AnyRef with X { self: X with Y => }" // val srcC7 = "class C7 { _ => }" // DOTTY: Syntax not supported val srcC8 = "class C8 { self => }" - val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = false) + val compilerForTesting = new ScalaCompilerForUnitTesting val apis = compilerForTesting.extractApisFromSrcs(reuseCompilerInstance = true)( List(srcX, srcY, srcC1, srcC2, srcC3, srcC4, srcC5, srcC6, srcC8) - ).map(x => collectFirstClass(x.definitions)) - val emptyType = new EmptyType + ).map(collectFirstClass) + val emptyType = EmptyType.create() def hasSelfType(c: ClassLike): Boolean = c.selfType != emptyType val (withSelfType, withoutSelfType) = apis.partition(hasSelfType) diff --git a/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala b/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala index 5044c771872e..eda9533fcee4 100644 --- a/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala +++ b/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala @@ -26,64 +26,64 @@ class ExtractUsedNamesSpecification extends Specification { "Unit" ) - "imported name" in { - val src = """ - |package a { class A } - |package b { - | import a.{A => A2} - |}""".stripMargin - val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true) - val usedNames = compilerForTesting.extractUsedNamesFromSrc(src) - val expectedNames = standardNames ++ Set("a", "A", "A2", "b") - usedNames === expectedNames - } + "imported name" in { + val src = """ + |package a { class A } + |package b { + | import a.{A => A2} + |}""".stripMargin + val compilerForTesting = new ScalaCompilerForUnitTesting + val usedNames = compilerForTesting.extractUsedNamesFromSrc(src) + val expectedNames = standardNames ++ Set("a", "A", "A2", "b") + usedNames("a.A") === expectedNames + } // test covers https://github.com/gkossakowski/sbt/issues/6 "names in type tree" in { val srcA = """| - |package a { - | class A { - | class C { class D } - | } - | class B[T] - | class BB - |}""".stripMargin + |package a { + | class A { + | class C { class D } + | } + | class B[T] + | class BB + |}""".stripMargin val srcB = """| - |package b { - | abstract class X { - | def foo: a.A#C#D - | def bar: a.B[a.BB] - | } - |}""".stripMargin - val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true) + |package b { + | abstract class X { + | def foo: a.A#C#D + | def bar: a.B[a.BB] + | } + |}""".stripMargin + val compilerForTesting = new ScalaCompilerForUnitTesting val usedNames = compilerForTesting.extractUsedNamesFromSrc(srcA, srcB) // DOTTY: unlike the scalac sbt phase, this does not contain "X", I believe this is safe // TODO: report issue against sbt suggesting that they do the same val expectedNames = standardNames ++ Set("a", "A", "B", "C", "D", "b", "BB") - usedNames === expectedNames + usedNames("b.X") === expectedNames } - // test for https://github.com/gkossakowski/sbt/issues/5 - "symbolic names" in { - val srcA = """| - |class A { - | def `=`: Int = 3 - |}""".stripMargin - val srcB = """| - |class B { - | def foo(a: A) = a.`=` - |}""".stripMargin - val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true) - val usedNames = 
compilerForTesting.extractUsedNamesFromSrc(srcA, srcB) + // test for https://github.com/gkossakowski/sbt/issues/5 + "symbolic names" in { + val srcA = """| + |class A { + | def `=`: Int = 3 + |}""".stripMargin + val srcB = """| + |class B { + | def foo(a: A) = a.`=` + |}""".stripMargin + val compilerForTesting = new ScalaCompilerForUnitTesting + val usedNames = compilerForTesting.extractUsedNamesFromSrc(srcA, srcB) - // DOTTY TODO: "Int" is not actually used, but we collect it because - // it's the inferred return type so it appears in a TypeTree - // We could avoid this by checking if the untyped tree has a return type - // but is it worth it? Revisit this after https://github.com/sbt/sbt/issues/1104 - // has landed. - val expectedNames = standardNames ++ Set("A", "a", "=", "Int") - usedNames === expectedNames - } + // DOTTY TODO: "Int" is not actually used, but we collect it because + // it's the inferred return type so it appears in a TypeTree + // We could avoid this by checking if the untyped tree has a return type + // but is it worth it? Revisit this after https://github.com/sbt/sbt/issues/1104 + // has landed. + val expectedNames = standardNames ++ Set("A", "a", "=", "Int") + usedNames("B") === expectedNames + } "extract names in the types of trees" in { val src1 = """|class X0 @@ -104,55 +104,86 @@ class ExtractUsedNamesSpecification extends Specification { | def foo(m: M): N = ??? | def bar[Param >: P1 <: P0](p: Param): Param = ??? |}""".stripMargin - val src2 = """|object Test { + val src2 = """|object Test_lista { | val x = B.lista - | val y = B.at - | val z = B.as - | B.foo(???) - | B.bar(???) - |}""".stripMargin - val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true) + |} + |object Test_at { + | val x = B.at + |} + |object Test_as { + | val x = B.as + |} + |object Test_foo { + | val x = B.foo(???) + |} + |object Test_bar { + | val x = B.bar(???) 
+ |} + |""".stripMargin + val compilerForTesting = new ScalaCompilerForUnitTesting val usedNames = compilerForTesting.extractUsedNamesFromSrc(src1, src2) - val expectedNames = standardNames ++ Set("Test", "Test$", "B", "B$", - "Predef", "Predef$", "???", "Nothing", - "lista", "List", "A", - "at", "T", "X1", "X0", - "as", "S", "Y", - "foo", "M", "N", - "bar", "P1", "P0") - usedNames === expectedNames + val expectedNames_lista = standardNames ++ Set("Test_lista", "Test_lista$", "B", "B$", "lista", "List", "A") + val expectedNames_at = standardNames ++ Set("Test_at", "Test_at$", "B", "B$", "at", "A", "T", "X0", "X1") + val expectedNames_as = standardNames ++ Set("Test_as", "Test_as$", "B", "B$", "as", "S", "Y") + val expectedNames_foo = standardNames ++ Set("Test_foo", + "Test_foo$", + "B", + "B$", + "foo", + "M", + "N", + "Predef", + "Predef$", + "???", + "Nothing") + val expectedNames_bar = standardNames ++ Set("Test_bar", + "Test_bar$", + "B", + "B$", + "bar", + "P1", + "P0", + "Predef", + "Predef$", + "???", + "Nothing") + usedNames("Test_lista") === expectedNames_lista + usedNames("Test_at") === expectedNames_at + usedNames("Test_as") === expectedNames_as + usedNames("Test_foo") === expectedNames_foo + usedNames("Test_bar") === expectedNames_bar } - // test for https://github.com/gkossakowski/sbt/issues/3 - "used names from the same compilation unit" in { - val src = "class A { def foo: Int = 0; def bar: Int = foo }" - val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true) - val usedNames = compilerForTesting.extractUsedNamesFromSrc(src) - val expectedNames = standardNames ++ Set("A", "foo", "Int") - usedNames === expectedNames - } + // test for https://github.com/gkossakowski/sbt/issues/3 + "used names from the same compilation unit" in { + val src = "class A { def foo: Int = 0; def bar: Int = foo }" + val compilerForTesting = new ScalaCompilerForUnitTesting + val usedNames = compilerForTesting.extractUsedNamesFromSrc(src) + val expectedNames = standardNames ++ Set("A", "foo", "Int") + usedNames("A") === expectedNames + } - // pending test for https://issues.scala-lang.org/browse/SI-7173 - "names of constants" in { - val src = "class A { final val foo = 12; def bar: Int = foo }" - val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true) - val usedNames = compilerForTesting.extractUsedNamesFromSrc(src) - val expectedNames = standardNames ++ Set("A", "foo", "Int") - usedNames === expectedNames - } + // pending test for https://issues.scala-lang.org/browse/SI-7173 + "names of constants" in { + val src = "class A { final val foo = 12; def bar: Int = foo }" + val compilerForTesting = new ScalaCompilerForUnitTesting + val usedNames = compilerForTesting.extractUsedNamesFromSrc(src) + val expectedNames = standardNames ++ Set("A", "foo", "Int") + usedNames("A") === expectedNames + } - // pending test for https://github.com/gkossakowski/sbt/issues/4 - // TODO: we should fix it by having special treatment of `selectDynamic` and `applyDynamic` calls - "names from method calls on Dynamic" in { - val srcA = """|import scala.language.dynamics - |class A extends Dynamic { - | def selectDynamic(name: String): Int = name.length - |}""".stripMargin - val srcB = "class B { def foo(a: A): Int = a.bla }" - val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true) - val usedNames = compilerForTesting.extractUsedNamesFromSrc(srcA, srcB) - val expectedNames = standardNames ++ Set("B", "A", "a", "Int", "selectDynamic", "bla") - usedNames 
=== expectedNames - }.pendingUntilFixed("Call to Dynamic is desugared in type checker so Select nodes is turned into string literal.") + // pending test for https://github.com/gkossakowski/sbt/issues/4 + // TODO: we should fix it by having special treatment of `selectDynamic` and `applyDynamic` calls + "names from method calls on Dynamic" in { + val srcA = """|import scala.language.dynamics + |class A extends Dynamic { + | def selectDynamic(name: String): Int = name.length + |}""".stripMargin + val srcB = "class B { def foo(a: A): Int = a.bla }" + val compilerForTesting = new ScalaCompilerForUnitTesting + val usedNames = compilerForTesting.extractUsedNamesFromSrc(srcA, srcB) + val expectedNames = standardNames ++ Set("B", "A", "a", "Int", "selectDynamic", "bla") + usedNames("") === expectedNames + }.pendingUntilFixed("Call to Dynamic is desugared in type checker so Select nodes is turned into string literal.") } diff --git a/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala b/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala index fb27f9b9d276..e81d58a07744 100644 --- a/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala +++ b/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala @@ -4,29 +4,26 @@ package xsbt import xsbti.compile.SingleOutput import java.io.File import xsbti._ -import xsbti.api.SourceAPI -import sbt.IO._ -import xsbti.api.ClassLike -import xsbti.api.Definition -import xsbti.api.Def +import sbt.io.IO +import xsbti.api.{ ClassLike, Def, DependencyContext } +import DependencyContext._ import xsbt.api.SameAPI -import sbt.ConsoleLogger -import xsbti.DependencyContext._ +import sbt.internal.util.ConsoleLogger -import ScalaCompilerForUnitTesting.ExtractedSourceDependencies +import TestCallback.ExtractedClassDependencies /** * Provides common functionality needed for unit tests that require compiling * source code using Scala compiler. */ -class ScalaCompilerForUnitTesting(nameHashing: Boolean, includeSynthToNameHashing: Boolean = false) { +class ScalaCompilerForUnitTesting { import scala.language.reflectiveCalls /** * Compiles given source code using Scala compiler and returns API representation * extracted by ExtractAPI class. */ - def extractApiFromSrc(src: String): SourceAPI = { + def extractApiFromSrc(src: String): Seq[ClassLike] = { val (Seq(tempSrcFile), analysisCallback) = compileSrcs(src) analysisCallback.apis(tempSrcFile) } @@ -35,27 +32,50 @@ class ScalaCompilerForUnitTesting(nameHashing: Boolean, includeSynthToNameHashin * Compiles given source code using Scala compiler and returns API representation * extracted by ExtractAPI class. */ - def extractApisFromSrcs(reuseCompilerInstance: Boolean)(srcs: List[String]*): Seq[SourceAPI] = { + def extractApisFromSrcs(reuseCompilerInstance: Boolean)(srcs: List[String]*): Seq[Seq[ClassLike]] = { val (tempSrcFiles, analysisCallback) = compileSrcs(srcs.toList, reuseCompilerInstance) tempSrcFiles.map(analysisCallback.apis) } - def extractUsedNamesFromSrc(src: String): Set[String] = { - val (Seq(tempSrcFile), analysisCallback) = compileSrcs(src) - analysisCallback.usedNames(tempSrcFile) - } - /** * Extract used names from src provided as the second argument. + * If `assertDefaultScope` is set to true it will fail if there is any name used in scope other then Default * * The purpose of the first argument is to define names that the second * source is going to refer to. Both files are compiled in the same compiler * Run but only names used in the second src file are returned. 
*/ - def extractUsedNamesFromSrc(definitionSrc: String, actualSrc: String): Set[String] = { + def extractUsedNamesFromSrc( + definitionSrc: String, + actualSrc: String, + assertDefaultScope: Boolean = true + ): Map[String, Set[String]] = { // we drop temp src file corresponding to the definition src file val (Seq(_, tempSrcFile), analysisCallback) = compileSrcs(definitionSrc, actualSrc) - analysisCallback.usedNames(tempSrcFile) + + if (assertDefaultScope) for { + (className, used) <- analysisCallback.usedNamesAndScopes + analysisCallback.TestUsedName(name, scopes) <- used + } assert(scopes.size() == 1 && scopes.contains(UseScope.Default), s"$className uses $name in $scopes") + + val classesInActualSrc = analysisCallback.classNames(tempSrcFile).map(_._1) + classesInActualSrc.map(className => className -> analysisCallback.usedNames(className)).toMap + } + + /** + * Extract used names from the last source file in `sources`. + * + * The previous source files are provided to successfully compile examples. + * Only the names used in the last src file are returned. + */ + def extractUsedNamesFromSrc(sources: String*): Map[String, Set[String]] = { + val (srcFiles, analysisCallback) = compileSrcs(sources: _*) + srcFiles + .map { srcFile => + val classesInSrc = analysisCallback.classNames(srcFile).map(_._1) + classesInSrc.map(className => className -> analysisCallback.usedNames(className)).toMap + } + .reduce(_ ++ _) } /** @@ -70,42 +90,23 @@ class ScalaCompilerForUnitTesting(nameHashing: Boolean, includeSynthToNameHashin * Symbols are used to express extracted dependencies between source code snippets. This way we have * file system-independent way of testing dependencies between source code "files". */ - def extractDependenciesFromSrcs(srcs: List[Map[Symbol, String]]): ExtractedSourceDependencies = { - val rawGroupedSrcs = srcs.map(_.values.toList) - val symbols = srcs.flatMap(_.keys) - val (tempSrcFiles, testCallback) = compileSrcs(rawGroupedSrcs, reuseCompilerInstance = true) - val fileToSymbol = (tempSrcFiles zip symbols).toMap - - val memberRefFileDeps = testCallback.sourceDependencies collect { - // false indicates that those dependencies are not introduced by inheritance + def extractDependenciesFromSrcs(srcs: List[List[String]]): ExtractedClassDependencies = { + val (_, testCallback) = compileSrcs(srcs, reuseCompilerInstance = true) + + val memberRefDeps = testCallback.classDependencies collect { case (target, src, DependencyByMemberRef) => (src, target) } - val inheritanceFileDeps = testCallback.sourceDependencies collect { - // true indicates that those dependencies are introduced by inheritance + val inheritanceDeps = testCallback.classDependencies collect { case (target, src, DependencyByInheritance) => (src, target) } - def toSymbols(src: File, target: File): (Symbol, Symbol) = (fileToSymbol(src), fileToSymbol(target)) - val memberRefDeps = memberRefFileDeps map { case (src, target) => toSymbols(src, target) } - val inheritanceDeps = inheritanceFileDeps map { case (src, target) => toSymbols(src, target) } - def pairsToMultiMap[A, B](pairs: Seq[(A, B)]): Map[A, Set[B]] = { - import scala.collection.mutable.{ HashMap, MultiMap } - val emptyMultiMap = new HashMap[A, scala.collection.mutable.Set[B]] with MultiMap[A, B] - val multiMap = pairs.foldLeft(emptyMultiMap) { - case (acc, (key, value)) => - acc.addBinding(key, value) - } - // convert all collections to immutable variants - multiMap.toMap.mapValues(_.toSet).withDefaultValue(Set.empty) + val localInheritanceDeps = 
testCallback.classDependencies collect { + case (target, src, LocalDependencyByInheritance) => (src, target) } - - ExtractedSourceDependencies(pairsToMultiMap(memberRefDeps), pairsToMultiMap(inheritanceDeps)) + ExtractedClassDependencies.fromPairs(memberRefDeps, inheritanceDeps, localInheritanceDeps) } - def extractDependenciesFromSrcs(srcs: (Symbol, String)*): ExtractedSourceDependencies = { - val symbols = srcs.map(_._1) - assert(symbols.distinct.size == symbols.size, - s"Duplicate symbols for srcs detected: $symbols") - extractDependenciesFromSrcs(List(srcs.toMap)) + def extractDependenciesFromSrcs(srcs: String*): ExtractedClassDependencies = { + extractDependenciesFromSrcs(List(srcs.toList)) } /** @@ -124,12 +125,12 @@ class ScalaCompilerForUnitTesting(nameHashing: Boolean, includeSynthToNameHashin * The sequence of temporary files corresponding to passed snippets and analysis * callback is returned as a result. */ - private def compileSrcs(groupedSrcs: List[List[String]], + def compileSrcs(groupedSrcs: List[List[String]], reuseCompilerInstance: Boolean): (Seq[File], TestCallback) = { // withTemporaryDirectory { temp => { - val temp = createTemporaryDirectory - val analysisCallback = new TestCallback(nameHashing, includeSynthToNameHashing) + val temp = IO.createTemporaryDirectory + val analysisCallback = new TestCallback val classesDir = new File(temp, "classes") classesDir.mkdir() @@ -158,13 +159,13 @@ class ScalaCompilerForUnitTesting(nameHashing: Boolean, includeSynthToNameHashin } } - private def compileSrcs(srcs: String*): (Seq[File], TestCallback) = { + def compileSrcs(srcs: String*): (Seq[File], TestCallback) = { compileSrcs(List(srcs.toList), reuseCompilerInstance = true) } private def prepareSrcFile(baseDir: File, fileName: String, src: String): File = { val srcFile = new File(baseDir, fileName) - sbt.IO.write(srcFile, src) + IO.write(srcFile, src) srcFile } @@ -184,14 +185,11 @@ class ScalaCompilerForUnitTesting(nameHashing: Boolean, includeSynthToNameHashin def hasErrors: Boolean = false def hasWarnings: Boolean = false def printWarnings(): Unit = () - def problems: Array[Problem] = Array.empty - def log(pos: Position, msg: String, sev: Severity): Unit = println(msg) + def problems(): Array[xsbti.Problem] = Array.empty + def log(problem: xsbti.Problem): Unit = println(problem.message) def comment(pos: Position, msg: String): Unit = () def printSummary(): Unit = () } } -object ScalaCompilerForUnitTesting { - case class ExtractedSourceDependencies(memberRef: Map[Symbol, Set[Symbol]], inheritance: Map[Symbol, Set[Symbol]]) -} diff --git a/sbt-bridge/test/xsbti/TestCallback.scala b/sbt-bridge/test/xsbti/TestCallback.scala index 99c8d963d555..7a065d5abdc9 100644 --- a/sbt-bridge/test/xsbti/TestCallback.scala +++ b/sbt-bridge/test/xsbti/TestCallback.scala @@ -3,33 +3,90 @@ package xsbti import java.io.File import scala.collection.mutable.ArrayBuffer -import xsbti.api.SourceAPI -import xsbti.DependencyContext._ +import xsbti.api.ClassLike +import xsbti.api.DependencyContext +import DependencyContext._ +import java.util.EnumSet -class TestCallback(override val nameHashing: Boolean, override val includeSynthToNameHashing: Boolean) extends AnalysisCallback +class TestCallback extends AnalysisCallback { - val sourceDependencies = new ArrayBuffer[(File, File, DependencyContext)] - val binaryDependencies = new ArrayBuffer[(File, String, File, DependencyContext)] - val products = new ArrayBuffer[(File, File, String)] - val usedNames = scala.collection.mutable.Map.empty[File, 
Set[String]].withDefaultValue(Set.empty) - val apis: scala.collection.mutable.Map[File, SourceAPI] = scala.collection.mutable.Map.empty - - def sourceDependency(dependsOn: File, source: File, inherited: Boolean): Unit = { - val context = if(inherited) DependencyByInheritance else DependencyByMemberRef - sourceDependency(dependsOn, source, context) - } - def sourceDependency(dependsOn: File, source: File, context: DependencyContext): Unit = { sourceDependencies += ((dependsOn, source, context)) } - def binaryDependency(binary: File, name: String, source: File, inherited: Boolean): Unit = { - val context = if(inherited) DependencyByInheritance else DependencyByMemberRef - binaryDependency(binary, name, source, context) - } - def binaryDependency(binary: File, name: String, source: File, context: DependencyContext): Unit = { binaryDependencies += ((binary, name, source, context)) } - def generatedClass(source: File, module: File, name: String): Unit = { products += ((source, module, name)) } - - def usedName(source: File, name: String): Unit = { usedNames(source) += name } - def api(source: File, sourceAPI: SourceAPI): Unit = { - assert(!apis.contains(source), s"The `api` method should be called once per source file: $source") - apis(source) = sourceAPI - } - def problem(category: String, pos: xsbti.Position, message: String, severity: xsbti.Severity, reported: Boolean): Unit = () + case class TestUsedName(name: String, scopes: EnumSet[UseScope]) + val classDependencies = new ArrayBuffer[(String, String, DependencyContext)] + val binaryDependencies = new ArrayBuffer[(File, String, String, File, DependencyContext)] + val products = new ArrayBuffer[(File, File)] + val usedNamesAndScopes = scala.collection.mutable.Map.empty[String, Set[TestUsedName]].withDefaultValue(Set.empty) + val classNames = scala.collection.mutable.Map.empty[File, Set[(String, String)]].withDefaultValue(Set.empty) + val apis: scala.collection.mutable.Map[File, Seq[ClassLike]] = scala.collection.mutable.Map.empty + + def usedNames = usedNamesAndScopes.mapValues(_.map(_.name)) + + override def startSource(source: File): Unit = { + assert(!apis.contains(source), s"startSource can be called only once per source file: $source") + apis(source) = Seq.empty + } + + override def binaryDependency(binary: File, name: String, fromClassName: String, source: File, context: DependencyContext): Unit = { + binaryDependencies += ((binary, name, fromClassName, source, context)) + } + + def generatedNonLocalClass(source: File, + module: File, + binaryClassName: String, + srcClassName: String): Unit = { + products += ((source, module)) + classNames(source) += ((srcClassName, binaryClassName)) + () + } + + def generatedLocalClass(source: File, module: File): Unit = { + products += ((source, module)) + () + } + + + override def classDependency(onClassName: String, sourceClassName: String, context: DependencyContext): Unit = { + if (onClassName != sourceClassName) classDependencies += ((onClassName, sourceClassName, context)) + } + + override def usedName(className: String, name: String, scopes: EnumSet[UseScope]): Unit = { + usedNamesAndScopes(className) += TestUsedName(name, scopes) + } + override def api(source: File, classApi: ClassLike): Unit = { + apis(source) = classApi +: apis(source) + } + override def problem(category: String, pos: xsbti.Position, message: String, severity: xsbti.Severity, reported: Boolean): Unit = () + override def dependencyPhaseCompleted(): Unit = () + override def apiPhaseCompleted(): Unit = () + override def 
enabled(): Boolean = true + def mainClass(source: File, className: String): Unit = () + +} + +object TestCallback { + case class ExtractedClassDependencies(memberRef: Map[String, Set[String]], + inheritance: Map[String, Set[String]], + localInheritance: Map[String, Set[String]]) + object ExtractedClassDependencies { + def fromPairs( + memberRefPairs: Seq[(String, String)], + inheritancePairs: Seq[(String, String)], + localInheritancePairs: Seq[(String, String)] + ): ExtractedClassDependencies = { + ExtractedClassDependencies(pairsToMultiMap(memberRefPairs), + pairsToMultiMap(inheritancePairs), + pairsToMultiMap(localInheritancePairs)) + } + + private def pairsToMultiMap[A, B](pairs: Seq[(A, B)]): Map[A, Set[B]] = { + import scala.collection.mutable.{ HashMap, MultiMap } + val emptyMultiMap = new HashMap[A, scala.collection.mutable.Set[B]] with MultiMap[A, B] + val multiMap = pairs.foldLeft(emptyMultiMap) { + case (acc, (key, value)) => + acc.addBinding(key, value) + } + // convert all collections to immutable variants + multiMap.toMap.mapValues(_.toSet).withDefaultValue(Set.empty) + } + } } + diff --git a/sbt-dotty/sbt-test/compilerReporter/simple/project/Reporter.scala b/sbt-dotty/sbt-test/compilerReporter/simple/project/Reporter.scala index d2dae1fc202a..1d72caa02cab 100644 --- a/sbt-dotty/sbt-test/compilerReporter/simple/project/Reporter.scala +++ b/sbt-dotty/sbt-test/compilerReporter/simple/project/Reporter.scala @@ -3,38 +3,29 @@ import Keys._ import KeyRanks.DTask object Reporter { - import xsbti.{Reporter, Problem, Position, Severity, Maybe} + import xsbti.{Reporter, Problem, Position, Severity} lazy val check = TaskKey[Unit]("check", "make sure compilation info are forwared to sbt") // compilerReporter is marked private in sbt - lazy val compilerReporter = TaskKey[Option[xsbti.Reporter]]("compilerReporter", "Experimental hook to listen (or send) compilation failure messages.", DTask) - - lazy val reporter = - Some(new xsbti.Reporter { + lazy val compilerReporter = TaskKey[xsbti.Reporter]("compilerReporter", "Experimental hook to listen (or send) compilation failure messages.", DTask) + + lazy val reporter = + new xsbti.Reporter { private val buffer = collection.mutable.ArrayBuffer.empty[Problem] def reset(): Unit = buffer.clear() def hasErrors: Boolean = buffer.exists(_.severity == Severity.Error) def hasWarnings: Boolean = buffer.exists(_.severity == Severity.Warn) def printSummary(): Unit = println(problems.mkString(System.lineSeparator)) def problems: Array[Problem] = buffer.toArray - def log(pos: Position, msg: String, sev: Severity): Unit = { - object MyProblem extends Problem { - def category: String = null - def severity: Severity = sev - def message: String = msg - def position: Position = pos - override def toString = s"custom: $position:$severity: $message" - } - buffer.append(MyProblem) - } + def log(problem: Problem): Unit = buffer.append(problem) def comment(pos: xsbti.Position, msg: String): Unit = () - }) + } lazy val checkSettings = Seq( compilerReporter in (Compile, compile) := reporter, - check <<= (compile in Compile).mapFailure( _ => { - val problems = reporter.get.problems + check := (compile in Compile).failure.map(_ => { + val problems = reporter.problems println(problems.toList) assert(problems.size == 1) @@ -43,6 +34,6 @@ object Reporter { // assert(problems.forall(_.position.offset.isDefined)) assert(problems.count(_.severity == Severity.Error) == 1) // not found: er1, - }) + }).value ) } diff --git a/sbt-dotty/sbt-test/sbt-dotty/example-project/build.sbt 
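
The rewritten TestCallback above records used names per class name together with their UseScope, and class-to-class dependencies instead of file-to-file ones; its companion turns dependency pairs into immutable multimaps. A minimal usage sketch relying only on the members shown above — the class names and the used name "foo" are made up for illustration:

    import java.util.EnumSet
    import xsbti.api.DependencyContext.DependencyByMemberRef

    object TestCallbackUsageSketch {
      def main(args: Array[String]): Unit = {
        val callback = new xsbti.TestCallback
        // Used names are now keyed by class name and carry their use scope.
        callback.usedName("a.A", "foo", EnumSet.of(xsbti.UseScope.Default))
        // Dependencies are class-name pairs, not file pairs.
        callback.classDependency("a.B", "a.A", DependencyByMemberRef)

        assert(callback.usedNamesAndScopes("a.A").map(_.name) == Set("foo"))
        assert(callback.classDependencies.toList == List(("a.B", "a.A", DependencyByMemberRef)))

        // fromPairs builds immutable multimaps with an empty-set default.
        val deps = xsbti.TestCallback.ExtractedClassDependencies.fromPairs(
          Seq("a.A" -> "a.B"), Seq.empty, Seq.empty)
        assert(deps.memberRef("a.A") == Set("a.B"))
        assert(deps.inheritance("a.C") == Set.empty) // default value for absent keys
      }
    }
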
b/sbt-dotty/sbt-test/sbt-dotty/example-project/build.sbt index 07fd238624a3..6e0adfaf4d08 100644 --- a/sbt-dotty/sbt-test/sbt-dotty/example-project/build.sbt +++ b/sbt-dotty/sbt-test/sbt-dotty/example-project/build.sbt @@ -1,3 +1,4 @@ scalaVersion := sys.props("plugin.scalaVersion") -libraryDependencies += ("org.scala-lang.modules" %% "scala-xml" % "1.0.6").withDottyCompat() +libraryDependencies += +("org.scala-lang.modules" %% "scala-xml" % "1.0.6").withDottyCompat(scalaVersion.value) diff --git a/sbt-dotty/sbt-test/sbt-dotty/example-project/project/build.properties b/sbt-dotty/sbt-test/sbt-dotty/example-project/project/build.properties deleted file mode 100644 index 64317fdae59f..000000000000 --- a/sbt-dotty/sbt-test/sbt-dotty/example-project/project/build.properties +++ /dev/null @@ -1 +0,0 @@ -sbt.version=0.13.15 diff --git a/sbt-dotty/sbt-test/source-dependencies/abstract-type-override/build.sbt b/sbt-dotty/sbt-test/source-dependencies/abstract-type-override/build.sbt index 92d2093771e4..6c2c11e0926a 100644 --- a/sbt-dotty/sbt-test/source-dependencies/abstract-type-override/build.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/abstract-type-override/build.sbt @@ -1,7 +1,9 @@ -InputKey[Unit]("check-number-of-compiler-iterations") <<= inputTask { (argTask: TaskKey[Seq[String]]) => - (argTask, compile in Compile) map { (args: Seq[String], a: sbt.inc.Analysis) => - assert(args.size == 1) - val expectedIterationsNumber = args(0).toInt - assert(a.compilations.allCompilations.size == expectedIterationsNumber, "a.compilations.allCompilations.size = %d (expected %d)".format(a.compilations.allCompilations.size, expectedIterationsNumber)) - } +import complete.DefaultParsers._ + +InputKey[Unit]("check-number-of-compiler-iterations") := { + val args = spaceDelimited("").parsed + val a = (compile in Compile).value.asInstanceOf[sbt.internal.inc.Analysis] + assert(args.size == 1) + val expectedIterationsNumber = args(0).toInt + assert(a.compilations.allCompilations.size == expectedIterationsNumber, "a.compilations.allCompilations.size = %d (expected %d)".format(a.compilations.allCompilations.size, expectedIterationsNumber)) } diff --git a/sbt-dotty/sbt-test/source-dependencies/abstract-type-override/test b/sbt-dotty/sbt-test/source-dependencies/abstract-type-override/test index b0bec415eadb..9ffa4fb17ccd 100644 --- a/sbt-dotty/sbt-test/source-dependencies/abstract-type-override/test +++ b/sbt-dotty/sbt-test/source-dependencies/abstract-type-override/test @@ -11,4 +11,4 @@ $ copy-file changes/Bar1.scala src/main/scala/Bar.scala # second iteration #> compile # check if there are only two compile iterations performed -> check-number-of-compiler-iterations 2 +> checkNumberOfCompilerIterations 2 diff --git a/sbt-dotty/sbt-test/source-dependencies/backtick-quoted-names/build.sbt b/sbt-dotty/sbt-test/source-dependencies/backtick-quoted-names/build.sbt deleted file mode 100644 index 8a38ef41424b..000000000000 --- a/sbt-dotty/sbt-test/source-dependencies/backtick-quoted-names/build.sbt +++ /dev/null @@ -1 +0,0 @@ -incOptions := incOptions.value.withNameHashing(true) diff --git a/sbt-dotty/sbt-test/source-dependencies/binary/build.sbt b/sbt-dotty/sbt-test/source-dependencies/binary/build.sbt new file mode 100644 index 000000000000..40d7ea353760 --- /dev/null +++ b/sbt-dotty/sbt-test/source-dependencies/binary/build.sbt @@ -0,0 +1,5 @@ +lazy val dep = project.in(file("dep")) +lazy val use = project.in(file("use")). 
+ settings( + unmanagedJars in Compile := Attributed.blank(packageBin.in(dep, Compile).value) :: Nil + ) diff --git a/sbt-dotty/sbt-test/source-dependencies/binary/test b/sbt-dotty/sbt-test/source-dependencies/binary/pending similarity index 100% rename from sbt-dotty/sbt-test/source-dependencies/binary/test rename to sbt-dotty/sbt-test/source-dependencies/binary/pending diff --git a/sbt-dotty/sbt-test/source-dependencies/binary/project/P.scala b/sbt-dotty/sbt-test/source-dependencies/binary/project/P.scala index 9cabc95a4e82..e69de29bb2d1 100644 --- a/sbt-dotty/sbt-test/source-dependencies/binary/project/P.scala +++ b/sbt-dotty/sbt-test/source-dependencies/binary/project/P.scala @@ -1,10 +0,0 @@ -import sbt._ -import Keys._ - -object B extends Build -{ - lazy val dep = Project("dep", file("dep")) - lazy val use = Project("use", file("use")) settings( - unmanagedJars in Compile <+= packageBin in (dep, Compile) map Attributed.blank - ) -} diff --git a/sbt-dotty/sbt-test/source-dependencies/canon/build.sbt b/sbt-dotty/sbt-test/source-dependencies/canon/build.sbt index d23dff7054d2..d7524d433978 100644 --- a/sbt-dotty/sbt-test/source-dependencies/canon/build.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/canon/build.sbt @@ -3,8 +3,10 @@ import complete.DefaultParsers._ val checkIterations = inputKey[Unit]("Verifies the accumlated number of iterations of incremental compilation.") checkIterations := { - val expected: Int = (Space ~> NatBasic).parsed - val actual: Int = (compile in Compile).value.compilations.allCompilations.size - assert(expected == actual, s"Expected $expected compilations, got $actual") + val analysis = (compile in Compile).value.asInstanceOf[sbt.internal.inc.Analysis] + + val expected: Int = (Space ~> NatBasic).parsed + val actual: Int = analysis.compilations.allCompilations.size + assert(expected == actual, s"Expected $expected compilations, got $actual") } diff --git a/sbt-dotty/sbt-test/source-dependencies/compactify/build.sbt b/sbt-dotty/sbt-test/source-dependencies/compactify/build.sbt index f44ca08623f2..121f59cd756b 100644 --- a/sbt-dotty/sbt-test/source-dependencies/compactify/build.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/compactify/build.sbt @@ -1,6 +1,7 @@ -TaskKey[Unit]("output-empty") <<= classDirectory in Configurations.Compile map { outputDirectory => - def classes = (outputDirectory ** "*.class").get - if(!classes.isEmpty) sys.error("Classes existed:\n\t" + classes.mkString("\n\t")) else () +TaskKey[Unit]("output-empty") := { + val outputDirectory = (classDirectory in Compile).value + val classes = (outputDirectory ** "*.class").get + if (classes.nonEmpty) sys.error("Classes existed:\n\t" + classes.mkString("\n\t")) else () } // apparently Travis CI stopped allowing long file names diff --git a/sbt-dotty/sbt-test/source-dependencies/compactify/test b/sbt-dotty/sbt-test/source-dependencies/compactify/test index e2abf578b8c0..b56be3e5d4aa 100644 --- a/sbt-dotty/sbt-test/source-dependencies/compactify/test +++ b/sbt-dotty/sbt-test/source-dependencies/compactify/test @@ -1,8 +1,8 @@ # Marked pending due to https://github.com/sbt/sbt/issues/1553 -> output-empty +> outputEmpty > compile --> output-empty +-> outputEmpty $ delete src/main/scala/For.scala src/main/scala/Nested.scala > compile -> output-empty \ No newline at end of file +> outputEmpty \ No newline at end of file diff --git a/sbt-dotty/sbt-test/source-dependencies/export-jars/build.sbt b/sbt-dotty/sbt-test/source-dependencies/export-jars/build.sbt new file mode 100644 index 
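
Several of the build.sbt rewrites above follow the same sbt 0.13 to 1.x migration: the deprecated `<<=`/`inputTask`/`map` DSL becomes `:=` with `.value`, an explicit parser, and a cast of the compile result to `sbt.internal.inc.Analysis`, since sbt 1's compile task only exposes the opaque `CompileAnalysis` interface; the scripted `test` scripts correspondingly invoke the camelCase form of each key. A condensed sketch of that pattern — the key name below is illustrative, not one of the real test keys:

    import complete.DefaultParsers._

    InputKey[Unit]("checkIterationCount") := {
      val args = spaceDelimited("<expected>").parsed
      // sbt 1 returns xsbti.compile.CompileAnalysis; cast to zinc's Analysis for compilations.
      val analysis = (compile in Compile).value.asInstanceOf[sbt.internal.inc.Analysis]
      assert(args.size == 1)
      val expected = args(0).toInt
      val actual = analysis.compilations.allCompilations.size
      assert(actual == expected, s"expected $expected compilations, got $actual")
    }
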
000000000000..8b39c7cf680e --- /dev/null +++ b/sbt-dotty/sbt-test/source-dependencies/export-jars/build.sbt @@ -0,0 +1,2 @@ +lazy val root = project.in(file(".")).dependsOn(a) +lazy val a = project.in(file("a")) diff --git a/sbt-dotty/sbt-test/source-dependencies/export-jars/changes/build2.sbt b/sbt-dotty/sbt-test/source-dependencies/export-jars/changes/build2.sbt index 0f5735bc81d0..3fa7cbb370fb 100644 --- a/sbt-dotty/sbt-test/source-dependencies/export-jars/changes/build2.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/export-jars/changes/build2.sbt @@ -1 +1,4 @@ +lazy val root = Project("root", file(".")) dependsOn(a) +lazy val a = Project("a", file("a")) + exportJars := true \ No newline at end of file diff --git a/sbt-dotty/sbt-test/source-dependencies/export-jars/project/Build.scala b/sbt-dotty/sbt-test/source-dependencies/export-jars/project/Build.scala deleted file mode 100644 index 4a783acbe158..000000000000 --- a/sbt-dotty/sbt-test/source-dependencies/export-jars/project/Build.scala +++ /dev/null @@ -1,7 +0,0 @@ -import sbt._ - -object Build extends Build -{ - lazy val root = Project("root", file(".")) dependsOn(a) - lazy val a = Project("a", file("a")) -} \ No newline at end of file diff --git a/sbt-dotty/sbt-test/source-dependencies/ext/build.sbt b/sbt-dotty/sbt-test/source-dependencies/ext/build.sbt index 8aaec76ecfc2..bbc053fa6385 100644 --- a/sbt-dotty/sbt-test/source-dependencies/ext/build.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/ext/build.sbt @@ -3,7 +3,9 @@ import complete.DefaultParsers._ val checkIterations = inputKey[Unit]("Verifies the accumlated number of iterations of incremental compilation.") checkIterations := { - val expected: Int = (Space ~> NatBasic).parsed - val actual: Int = (compile in Compile).value.compilations.allCompilations.size - assert(expected == actual, s"Expected $expected compilations, got $actual") + val analysis = (compile in Compile).value.asInstanceOf[sbt.internal.inc.Analysis] + + val expected: Int = (Space ~> NatBasic).parsed + val actual: Int = analysis.compilations.allCompilations.size + assert(expected == actual, s"Expected $expected compilations, got $actual") } \ No newline at end of file diff --git a/sbt-dotty/sbt-test/source-dependencies/import-class/B.scala b/sbt-dotty/sbt-test/source-dependencies/import-class/B.scala index 0489f4a26c29..eb81ff6bd6e7 100644 --- a/sbt-dotty/sbt-test/source-dependencies/import-class/B.scala +++ b/sbt-dotty/sbt-test/source-dependencies/import-class/B.scala @@ -1 +1,3 @@ import a.A + +class B diff --git a/sbt-dotty/sbt-test/source-dependencies/inherited-deps-java/test b/sbt-dotty/sbt-test/source-dependencies/inherited-deps-java/disabled similarity index 100% rename from sbt-dotty/sbt-test/source-dependencies/inherited-deps-java/test rename to sbt-dotty/sbt-test/source-dependencies/inherited-deps-java/disabled diff --git a/sbt-dotty/sbt-test/source-dependencies/inherited_type_params/build.sbt b/sbt-dotty/sbt-test/source-dependencies/inherited_type_params/build.sbt index a5982f901fd7..e25f1beeaaba 100644 --- a/sbt-dotty/sbt-test/source-dependencies/inherited_type_params/build.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/inherited_type_params/build.sbt @@ -1,7 +1,8 @@ name := "test" -TaskKey[Unit]("check-same") <<= compile in Configurations.Compile map { analysis => - analysis.apis.internal foreach { case (_, api) => - assert( xsbt.api.SameAPI(api.api, api.api) ) - } -} \ No newline at end of file +TaskKey[Unit]("check-same") := { + val analysis = (compile in 
Compile).value.asInstanceOf[sbt.internal.inc.Analysis] + analysis.apis.internal.foreach { case (_, api) => + assert(xsbt.api.SameAPI(api.api, api.api)) + } +} diff --git a/sbt-dotty/sbt-test/source-dependencies/inherited_type_params/test b/sbt-dotty/sbt-test/source-dependencies/inherited_type_params/test index 8434347c5a23..353461049b5f 100644 --- a/sbt-dotty/sbt-test/source-dependencies/inherited_type_params/test +++ b/sbt-dotty/sbt-test/source-dependencies/inherited_type_params/test @@ -1 +1 @@ -> check-same \ No newline at end of file +> checkSame \ No newline at end of file diff --git a/sbt-dotty/sbt-test/source-dependencies/java-analysis-serialization-error/build.sbt b/sbt-dotty/sbt-test/source-dependencies/java-analysis-serialization-error/build.sbt index 1b1ddefb98ce..f78f33916fce 100644 --- a/sbt-dotty/sbt-test/source-dependencies/java-analysis-serialization-error/build.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/java-analysis-serialization-error/build.sbt @@ -1 +1 @@ -incOptions := incOptions.value.withNameHashing(true).withApiDebug(true) +incOptions := incOptions.value.withApiDebug(true) diff --git a/sbt-dotty/sbt-test/source-dependencies/less-inter-inv-java/build.sbt b/sbt-dotty/sbt-test/source-dependencies/less-inter-inv-java/build.sbt index d23dff7054d2..d7524d433978 100644 --- a/sbt-dotty/sbt-test/source-dependencies/less-inter-inv-java/build.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/less-inter-inv-java/build.sbt @@ -3,8 +3,10 @@ import complete.DefaultParsers._ val checkIterations = inputKey[Unit]("Verifies the accumlated number of iterations of incremental compilation.") checkIterations := { - val expected: Int = (Space ~> NatBasic).parsed - val actual: Int = (compile in Compile).value.compilations.allCompilations.size - assert(expected == actual, s"Expected $expected compilations, got $actual") + val analysis = (compile in Compile).value.asInstanceOf[sbt.internal.inc.Analysis] + + val expected: Int = (Space ~> NatBasic).parsed + val actual: Int = analysis.compilations.allCompilations.size + assert(expected == actual, s"Expected $expected compilations, got $actual") } diff --git a/sbt-dotty/sbt-test/source-dependencies/less-inter-inv/build.sbt b/sbt-dotty/sbt-test/source-dependencies/less-inter-inv/build.sbt index d23dff7054d2..d7524d433978 100644 --- a/sbt-dotty/sbt-test/source-dependencies/less-inter-inv/build.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/less-inter-inv/build.sbt @@ -3,8 +3,10 @@ import complete.DefaultParsers._ val checkIterations = inputKey[Unit]("Verifies the accumlated number of iterations of incremental compilation.") checkIterations := { - val expected: Int = (Space ~> NatBasic).parsed - val actual: Int = (compile in Compile).value.compilations.allCompilations.size - assert(expected == actual, s"Expected $expected compilations, got $actual") + val analysis = (compile in Compile).value.asInstanceOf[sbt.internal.inc.Analysis] + + val expected: Int = (Space ~> NatBasic).parsed + val actual: Int = analysis.compilations.allCompilations.size + assert(expected == actual, s"Expected $expected compilations, got $actual") } diff --git a/sbt-dotty/sbt-test/source-dependencies/replace-test-a/build.sbt b/sbt-dotty/sbt-test/source-dependencies/replace-test-a/build.sbt new file mode 100644 index 000000000000..c63b7bc8a91a --- /dev/null +++ b/sbt-dotty/sbt-test/source-dependencies/replace-test-a/build.sbt @@ -0,0 +1,14 @@ +import java.net.URLClassLoader + +lazy val root = project.in(file(".")). 
+ settings( + TaskKey[Unit]("check-first") := checkTask("First").value, + TaskKey[Unit]("check-second") := checkTask("Second").value + ) + +def checkTask(className: String) = Def.task { + val runClasspath = (fullClasspath in Runtime).value + val cp = runClasspath.map(_.data.toURI.toURL).toArray + Class.forName(className, false, new URLClassLoader(cp)) + () +} diff --git a/sbt-dotty/sbt-test/source-dependencies/replace-test-a/project/Build.scala b/sbt-dotty/sbt-test/source-dependencies/replace-test-a/project/Build.scala deleted file mode 100644 index 9c2678540442..000000000000 --- a/sbt-dotty/sbt-test/source-dependencies/replace-test-a/project/Build.scala +++ /dev/null @@ -1,19 +0,0 @@ -import sbt._ -import Keys._ -import java.net.URLClassLoader - -object B extends Build -{ - lazy val root = Project("root", file(".")) settings( ss : _*) - - def ss = Seq( - TaskKey[Unit]("check-first") <<= checkTask("First"), - TaskKey[Unit]("check-second") <<= checkTask("Second") - ) - private def checkTask(className: String) = - fullClasspath in Configurations.Runtime map { runClasspath => - val cp = runClasspath.map(_.data.toURI.toURL).toArray - Class.forName(className, false, new URLClassLoader(cp)) - () - } -} diff --git a/sbt-dotty/sbt-test/source-dependencies/replace-test-a/test b/sbt-dotty/sbt-test/source-dependencies/replace-test-a/test index 4b4ad3a2b953..21dec1db9924 100644 --- a/sbt-dotty/sbt-test/source-dependencies/replace-test-a/test +++ b/sbt-dotty/sbt-test/source-dependencies/replace-test-a/test @@ -1,9 +1,9 @@ $ copy-file changes/first.scala src/main/scala/A.scala > compile -> check-first --> check-second +> checkFirst +-> checkSecond $ copy-file changes/second.scala src/main/scala/A.scala > compile --> check-first -> check-second \ No newline at end of file +-> checkFirst +> checkSecond \ No newline at end of file diff --git a/sbt-dotty/sbt-test/source-dependencies/restore-classes/build.sbt b/sbt-dotty/sbt-test/source-dependencies/restore-classes/build.sbt index 2231204ea3bf..cf38564cc570 100644 --- a/sbt-dotty/sbt-test/source-dependencies/restore-classes/build.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/restore-classes/build.sbt @@ -5,8 +5,10 @@ crossTarget in Compile := target.value val checkIterations = inputKey[Unit]("Verifies the accumlated number of iterations of incremental compilation.") checkIterations := { - val expected: Int = (Space ~> NatBasic).parsed - val actual: Int = (compile in Compile).value.compilations.allCompilations.size - assert(expected == actual, s"Expected $expected compilations, got $actual") + val analysis = (compile in Compile).value.asInstanceOf[sbt.internal.inc.Analysis] + + val expected: Int = (Space ~> NatBasic).parsed + val actual: Int = analysis.compilations.allCompilations.size + assert(expected == actual, s"Expected $expected compilations, got $actual") } diff --git a/sbt-dotty/sbt-test/source-dependencies/same-file-used-names/build.sbt b/sbt-dotty/sbt-test/source-dependencies/same-file-used-names/build.sbt deleted file mode 100644 index 8a38ef41424b..000000000000 --- a/sbt-dotty/sbt-test/source-dependencies/same-file-used-names/build.sbt +++ /dev/null @@ -1 +0,0 @@ -incOptions := incOptions.value.withNameHashing(true) diff --git a/sbt-dotty/sbt-test/source-dependencies/trait-member-modified/build.sbt b/sbt-dotty/sbt-test/source-dependencies/trait-member-modified/build.sbt index 949d782317c0..1ebe8fad1a41 100644 --- a/sbt-dotty/sbt-test/source-dependencies/trait-member-modified/build.sbt +++ 
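
The replace-test-a rewrite above shows how a parameterized task helper is expressed in sbt 1: instead of wiring `checkTask("First")` with `<<=`, the helper returns a `Def.task`, and calling `.value` on it inside `:=` splices its dependencies into the surrounding task. A self-contained sketch of the same shape, with a hypothetical helper name:

    import java.net.URLClassLoader

    // Helper returning a task; evaluated wherever it is wired in with `.value`.
    def checkClassOnClasspath(className: String) = Def.task {
      val cp = (fullClasspath in Runtime).value.map(_.data.toURI.toURL).toArray
      // Throws ClassNotFoundException (failing the task) if the class is missing.
      Class.forName(className, false, new URLClassLoader(cp))
      ()
    }

    lazy val root = project.in(file(".")).settings(
      TaskKey[Unit]("check-first") := checkClassOnClasspath("First").value,
      TaskKey[Unit]("check-second") := checkClassOnClasspath("Second").value
    )
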
b/sbt-dotty/sbt-test/source-dependencies/trait-member-modified/build.sbt @@ -3,25 +3,22 @@ * b) checks overall number of compilations performed */ TaskKey[Unit]("check-compilations") := { - val analysis = (compile in Compile).value - val srcDir = (scalaSource in Compile).value - def relative(f: java.io.File): java.io.File = f.relativeTo(srcDir) getOrElse f + val analysis = (compile in Compile).value.asInstanceOf[sbt.internal.inc.Analysis] val allCompilations = analysis.compilations.allCompilations - val recompiledFiles: Seq[Set[java.io.File]] = allCompilations map { c => - val recompiledFiles = analysis.apis.internal.collect { - case (file, api) if api.compilation.startTime == c.startTime => relative(file) + val recompiledClasses: Seq[Set[String]] = allCompilations map { c => + val recompiledClasses = analysis.apis.internal.collect { + case (clazz, api) if api.compilationTimestamp() == c.getStartTime() => clazz } - recompiledFiles.toSet + recompiledClasses.toSet } - def recompiledFilesInIteration(iteration: Int, fileNames: Set[String]) = { - val files = fileNames.map(new java.io.File(_)) - assert(recompiledFiles(iteration) == files, "%s != %s".format(recompiledFiles(iteration), files)) + def recompiledFilesInIteration(iteration: Int, classNames: Set[String]): Unit = { + assert(recompiledClasses(iteration) == classNames, "%s != %s".format(recompiledClasses(iteration), classNames)) } assert(allCompilations.size == 2) // B.scala is just compiled at the beginning - recompiledFilesInIteration(0, Set("B.scala")) + recompiledFilesInIteration(0, Set("B")) // A.scala is changed and recompiled - recompiledFilesInIteration(1, Set("A.scala")) + recompiledFilesInIteration(1, Set("A")) } logLevel := Level.Debug diff --git a/sbt-dotty/sbt-test/source-dependencies/trait-member-modified/test b/sbt-dotty/sbt-test/source-dependencies/trait-member-modified/test index f8f7cb076b2b..183e1d40e805 100644 --- a/sbt-dotty/sbt-test/source-dependencies/trait-member-modified/test +++ b/sbt-dotty/sbt-test/source-dependencies/trait-member-modified/test @@ -6,4 +6,4 @@ $ copy-file changes/A1.scala src/main/scala/A.scala # only A.scala should be recompiled > compile # check if there are only two compile iterations performed -> check-compilations +> checkCompilations diff --git a/sbt-dotty/sbt-test/source-dependencies/trait-private-var/test b/sbt-dotty/sbt-test/source-dependencies/trait-private-var/pending similarity index 100% rename from sbt-dotty/sbt-test/source-dependencies/trait-private-var/test rename to sbt-dotty/sbt-test/source-dependencies/trait-private-var/pending diff --git a/sbt-dotty/sbt-test/source-dependencies/transitive-memberRef/build.sbt b/sbt-dotty/sbt-test/source-dependencies/transitive-memberRef/build.sbt index d24e304b1bc1..5180981ce13c 100644 --- a/sbt-dotty/sbt-test/source-dependencies/transitive-memberRef/build.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/transitive-memberRef/build.sbt @@ -1,40 +1,36 @@ logLevel := Level.Debug -incOptions := incOptions.value.withNameHashing(true) - -// disable sbt's heauristic which recompiles everything in case +// disable sbt's heuristic which recompiles everything in case // some fraction (e.g. 
50%) of files is scheduled to be recompiled // in this test we want precise information about recompiled files // which that heuristic would distort -incOptions := incOptions.value.copy(recompileAllFraction = 1.0) +incOptions := incOptions.value.withRecompileAllFraction(1.0) /* Performs checks related to compilations: * a) checks in which compilation given set of files was recompiled * b) checks overall number of compilations performed */ TaskKey[Unit]("check-compilations") := { - val analysis = (compile in Compile).value + val analysis = (compile in Compile).value.asInstanceOf[sbt.internal.inc.Analysis] val srcDir = (scalaSource in Compile).value - def relative(f: java.io.File): java.io.File = f.relativeTo(srcDir) getOrElse f val allCompilations = analysis.compilations.allCompilations - val recompiledFiles: Seq[Set[java.io.File]] = allCompilations map { c => - val recompiledFiles = analysis.apis.internal.collect { - case (file, api) if api.compilation.startTime == c.startTime => relative(file) + val recompiledClasses: Seq[Set[String]] = allCompilations map { c => + val recompiledClasses = analysis.apis.internal.collect { + case (clazz, api) if api.compilationTimestamp() == c.getStartTime() => clazz } - recompiledFiles.toSet + recompiledClasses.toSet } - def recompiledFilesInIteration(iteration: Int, fileNames: Set[String]) = { - val files = fileNames.map(new java.io.File(_)) - assert(recompiledFiles(iteration) == files, "%s != %s".format(recompiledFiles(iteration), files)) + def recompiledClassesInIteration(iteration: Int, classNames: Set[String]): Unit = { + assert(recompiledClasses(iteration) == classNames, "%s != %s".format(recompiledClasses(iteration), classNames)) } - // Y.scala is compiled only at the beginning as changes to A.scala do not affect it - recompiledFilesInIteration(0, Set("X.scala", "Y.scala")) - // A.scala is changed and recompiled - recompiledFilesInIteration(1, Set("A.scala")) - // change in A.scala causes recompilation of B.scala, C.scala, D.scala which depend on transtiviely - // and by inheritance on A.scala - // X.scala is also recompiled because it depends by member reference on B.scala - // Note that Y.scala is not recompiled because it depends just on X through member reference dependency - recompiledFilesInIteration(2, Set("B.scala", "C.scala", "D.scala")) + // test.Y is compiled only at the beginning as changes to test.A do not affect it + recompiledClassesInIteration(0, Set("test.X", "test.Y")) + // test.A is changed and recompiled + recompiledClassesInIteration(1, Set("test.A")) + // change in test.A causes recompilation of test.B, test.C, test.D which depend on transitively + // and by inheritance on test.A + // test.X is also recompiled because it depends by member reference on test.B + // Note that test.Y is not recompiled because it depends just on X through member reference dependency + recompiledClassesInIteration(2, Set("test.B", "test.C", "test.D")) assert(allCompilations.size == 3) } diff --git a/sbt-dotty/sbt-test/source-dependencies/transitive-memberRef/test b/sbt-dotty/sbt-test/source-dependencies/transitive-memberRef/test index 395f90229b5c..a39fe13a99ed 100644 --- a/sbt-dotty/sbt-test/source-dependencies/transitive-memberRef/test +++ b/sbt-dotty/sbt-test/source-dependencies/transitive-memberRef/test @@ -8,4 +8,4 @@ $ copy-file changes/A1.scala src/main/scala/A.scala # second iteration > compile # check in which compile iteration given source file got recompiled -> check-compilations +> checkCompilations diff --git 
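
Both checkCompilations tasks above move from file-based to class-based recompilation tracking: zinc 1 keys its API store by fully qualified class name, and each incremental iteration is identified by matching `compilationTimestamp()` against the iteration's start time. A sketch of the helper both tests effectively implement, assuming an `analysis` already cast to `sbt.internal.inc.Analysis`:

    // For each incremental compilation run, collect the names of the classes whose
    // API was (re)extracted during that run.
    def recompiledClassesPerIteration(analysis: sbt.internal.inc.Analysis): Seq[Set[String]] =
      analysis.compilations.allCompilations.map { c =>
        analysis.apis.internal.collect {
          case (clazz, api) if api.compilationTimestamp() == c.getStartTime() => clazz
        }.toSet
      }

    // e.g. for the transitive-memberRef test this yields
    //   Seq(Set("test.X", "test.Y"), Set("test.A"), Set("test.B", "test.C", "test.D"))
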
a/sbt-dotty/sbt-test/source-dependencies/type-alias/build.sbt b/sbt-dotty/sbt-test/source-dependencies/type-alias/build.sbt index c5a1099aacad..00edfde1d056 100644 --- a/sbt-dotty/sbt-test/source-dependencies/type-alias/build.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/type-alias/build.sbt @@ -1,3 +1 @@ logLevel in compile := Level.Debug - -incOptions := incOptions.value.withNameHashing(true) diff --git a/sbt-dotty/sbt-test/source-dependencies/typeref-only/build.sbt b/sbt-dotty/sbt-test/source-dependencies/typeref-only/build.sbt index 02813797f231..ea80ab8d990d 100644 --- a/sbt-dotty/sbt-test/source-dependencies/typeref-only/build.sbt +++ b/sbt-dotty/sbt-test/source-dependencies/typeref-only/build.sbt @@ -2,4 +2,4 @@ logLevel := Level.Debug // disable recompile all which causes full recompile which // makes it more difficult to test dependency tracking -incOptions ~= { _.copy(recompileAllFraction = 1.0) } +incOptions := incOptions.value.withRecompileAllFraction(1.0) diff --git a/sbt-dotty/sbt-test/source-dependencies/value-class-underlying/C.scala b/sbt-dotty/sbt-test/source-dependencies/value-class-underlying/C.scala index 1a9a42bde96f..9a6a97533086 100644 --- a/sbt-dotty/sbt-test/source-dependencies/value-class-underlying/C.scala +++ b/sbt-dotty/sbt-test/source-dependencies/value-class-underlying/C.scala @@ -1,5 +1,6 @@ object C { def main(args: Array[String]): Unit = { - val x = B.foo + val duck = B.foo + println("duck: " + duck) // Need to use duck in an expression to see if it crashes or not } } diff --git a/sbt-dotty/sbt-test/source-dependencies/value-class-underlying/project/DottyInjectedPlugin.scala b/sbt-dotty/sbt-test/source-dependencies/value-class-underlying/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..ce3d46d79921 --- /dev/null +++ b/sbt-dotty/sbt-test/source-dependencies/value-class-underlying/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-language:Scala2" + ) +} diff --git a/sbt-dotty/sbt-test/source-dependencies/value-class-underlying/project/plugins.sbt b/sbt-dotty/sbt-test/source-dependencies/value-class-underlying/project/plugins.sbt new file mode 100644 index 000000000000..c17caab2d98c --- /dev/null +++ b/sbt-dotty/sbt-test/source-dependencies/value-class-underlying/project/plugins.sbt @@ -0,0 +1 @@ +addSbtPlugin("ch.epfl.lamp" % "sbt-dotty" % sys.props("plugin.version")) diff --git a/sbt-dotty/src/dotty/tools/sbtplugin/DottyIDEPlugin.scala b/sbt-dotty/src/dotty/tools/sbtplugin/DottyIDEPlugin.scala index 240e51479ce3..cfb913bb236f 100644 --- a/sbt-dotty/src/dotty/tools/sbtplugin/DottyIDEPlugin.scala +++ b/sbt-dotty/src/dotty/tools/sbtplugin/DottyIDEPlugin.scala @@ -55,7 +55,7 @@ object DottyIDEPlugin extends AutoPlugin { else { def matchingSetting(setting: Setting[_]) = setting.key.key == scalaVersion.key && - setting.key.scope.project.fold(ref => projRefs.contains(ref), ifGlobal = true, ifThis = true) + setting.key.scope.project.fold(ref => projRefs.contains(ref), ifZero = true, ifThis = true) val newSettings = extracted.session.mergeSettings.collect { case setting if matchingSetting(setting) => @@ -205,9 +205,9 @@ object DottyIDEPlugin extends AutoPlugin { origState } - private def projectConfigTask(config: Configuration): 
Initialize[Task[Option[ProjectConfig]]] = Def.task { - if ((sources in config).value.isEmpty) None - else { + private def projectConfigTask(config: Configuration): Initialize[Task[Option[ProjectConfig]]] = Def.taskDyn { + if ((sources in config).value.isEmpty) Def.task { None } + else Def.task { // Not needed to generate the config, but this guarantees that the // generated config is usable by an IDE without any extra compilation // step. diff --git a/sbt-dotty/src/dotty/tools/sbtplugin/DottyPlugin.scala b/sbt-dotty/src/dotty/tools/sbtplugin/DottyPlugin.scala index 04b87f8004a4..201273b9026d 100644 --- a/sbt-dotty/src/dotty/tools/sbtplugin/DottyPlugin.scala +++ b/sbt-dotty/src/dotty/tools/sbtplugin/DottyPlugin.scala @@ -2,7 +2,9 @@ package dotty.tools.sbtplugin import sbt._ import sbt.Keys._ -import sbt.inc.{ ClassfileManager, IncOptions } +// import sbt.inc.{ ClassfileManager, IncOptions } +import xsbti.compile._ +import java.util.Optional object DottyPlugin extends AutoPlugin { object autoImport { @@ -61,7 +63,7 @@ object DottyPlugin extends AutoPlugin { * }}} * you can replace it by: * {{{ - * libraryDependencies += ("a" %% "b" % "c").withDottyCompat() + * libraryDependencies += ("a" %% "b" % "c").withDottyCompat(scalaVersion.value) * }}} * This will have no effect when compiling with Scala 2.x, but when compiling * with Dotty this will change the cross-version to a Scala 2.x one. This @@ -70,19 +72,10 @@ object DottyPlugin extends AutoPlugin { * NOTE: Dotty's retro-compatibility with Scala 2.x will be dropped before * Dotty is released, you should not rely on it. */ - def withDottyCompat(): ModuleID = - moduleID.crossVersion match { - case _: CrossVersion.Binary => - moduleID.cross(CrossVersion.binaryMapped { version => - CrossVersion.partialVersion(version) match { - case Some((0, minor)) => - // Dotty v0.4 or greater is compatible with 2.12.x - if (minor >= 4) "2.12" - else "2.11" - case _ => - version - } - }) + def withDottyCompat(scalaVersion: String): ModuleID = + moduleID.crossVersion match { + case _: librarymanagement.Binary if scalaVersion.startsWith("0.") => + moduleID.cross(CrossVersion.constant("2.12")) case _ => moduleID } @@ -119,25 +112,33 @@ object DottyPlugin extends AutoPlugin { * corresponding .tasty or .hasTasty file is also deleted. 
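
The projectConfigTask change above swaps Def.task for Def.taskDyn, presumably because with plain Def.task every `.value` in the body is registered as a dependency up front, so the expensive branch would run even when `sources in config` is empty; taskDyn lets the body return whichever task the condition selects. A minimal sketch of the shape, with a simplified result type standing in for ProjectConfig:

    def projectConfigSketch(config: Configuration): Def.Initialize[Task[Option[java.io.File]]] =
      Def.taskDyn {
        if ((sources in config).value.isEmpty)
          Def.task { Option.empty[java.io.File] } // nothing to do, no extra dependencies pulled in
        else
          Def.task { Some((classDirectory in config).value) } // this branch's dependencies run only if selected
      }
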
*/ def dottyPatchIncOptions(incOptions: IncOptions): IncOptions = { - val inheritedNewClassfileManager = incOptions.newClassfileManager - val newClassfileManager = () => new ClassfileManager { - private[this] val inherited = inheritedNewClassfileManager() + val inheritedNewClassFileManager = ClassFileManagerUtil.getDefaultClassFileManager(incOptions) + val tastyFileManager = new ClassFileManager { + private[this] val inherited = inheritedNewClassFileManager - def delete(classes: Iterable[File]): Unit = { + def delete(classes: Array[File]): Unit = { val tastySuffixes = List(".tasty", ".hasTasty") inherited.delete(classes flatMap { classFile => - val dottyFiles = if (classFile.getPath endsWith ".class") { + if (classFile.getPath endsWith ".class") { val prefix = classFile.getAbsolutePath.stripSuffix(".class") tastySuffixes.map(suffix => new File(prefix + suffix)).filter(_.exists) } else Nil - classFile :: dottyFiles }) } - def generated(classes: Iterable[File]): Unit = inherited.generated(classes) - def complete(success: Boolean): Unit = inherited.complete(success) + def generated(classes: Array[File]): Unit = {} + def complete(success: Boolean): Unit = {} } - incOptions.withNewClassfileManager(newClassfileManager) + val inheritedHooks = incOptions.externalHooks + val externalClassFileManager: Optional[ClassFileManager] = Option(inheritedHooks.getExternalClassFileManager.orElse(null)) match { + case Some(prevManager) => + Optional.of(WrappedClassFileManager.of(prevManager, Optional.of(tastyFileManager))) + case None => + Optional.of(tastyFileManager) + } + + val hooks = new DefaultExternalHooks(inheritedHooks.getExternalLookup, externalClassFileManager) + incOptions.withExternalHooks(hooks) } override def projectSettings: Seq[Setting[_]] = { @@ -161,10 +162,20 @@ object DottyPlugin extends AutoPlugin { }, incOptions in Compile := { + val inc = (incOptions in Compile).value + if (isDotty.value) + dottyPatchIncOptions(inc) + else + inc + }, + + scalaCompilerBridgeSource := { + val scalaBridge = scalaCompilerBridgeSource.value + val dottyBridge = (scalaOrganization.value % "dotty-sbt-bridge" % scalaVersion.value).withConfigurations(Some(Configurations.Compile.name)).sources() if (isDotty.value) - dottyPatchIncOptions((incOptions in Compile).value) + dottyBridge else - (incOptions in Compile).value + scalaBridge }, scalaBinaryVersion := {
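
The new dottyPatchIncOptions wires a Tasty-aware ClassFileManager into zinc's external hooks: whenever zinc deletes an invalidated .class file, the matching .tasty/.hasTasty files are deleted with it, and the manager is composed with any pre-existing one via WrappedClassFileManager. The core mapping it relies on can be sketched in isolation; the file paths below are illustrative:

    import java.io.File

    // For a .class file scheduled for deletion, find the sibling Tasty artifacts that
    // exist on disk and should be deleted together with it.
    def tastySiblings(classFile: File): Seq[File] =
      if (classFile.getPath.endsWith(".class")) {
        val prefix = classFile.getAbsolutePath.stripSuffix(".class")
        Seq(".tasty", ".hasTasty").map(suffix => new File(prefix + suffix)).filter(_.exists)
      } else Nil

    // e.g. tastySiblings(new File("target/classes/Foo.class"))
    //   -> the subset of target/classes/Foo.tasty and target/classes/Foo.hasTasty that exists
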