diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index b4b8da1b82a7..368766349bab 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -2,8 +2,6 @@ name: Dotty on: push: - branches-ignore: - - 'language-reference-stable' tags: - '**' pull_request: diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/ContendedInitialization.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/ContendedInitialization.scala new file mode 100644 index 000000000000..fb2cedbb7d41 --- /dev/null +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/ContendedInitialization.scala @@ -0,0 +1,49 @@ +package dotty.tools.benchmarks.lazyvals + +import org.openjdk.jmh.annotations._ +import LazyVals.LazyHolder +import org.openjdk.jmh.infra.Blackhole +import java.util.concurrent.TimeUnit +import java.util.concurrent.{Executors, ExecutorService} + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 5) +@Measurement(iterations = 5) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@State(Scope.Benchmark) +class ContendedInitialization { + + @Param(Array("2000000", "5000000")) + var size: Int = _ + + @Param(Array("2", "4", "8")) + var nThreads: Int = _ + + var executor: ExecutorService = _ + + @Setup + def prepare: Unit = { + executor = Executors.newFixedThreadPool(nThreads) + } + + @TearDown + def cleanup: Unit = { + executor.shutdown() + executor = null + } + + @Benchmark + def measureContended(bh: Blackhole): Unit = { + val array = Array.fill(size)(new LazyHolder) + val task: Runnable = () => + for (elem <- array) bh.consume(elem.value) + + val futures = + for (_ <- 0 until nThreads) yield + executor.submit(task) + + futures.foreach(_.get()) + } +} diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccess.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccess.scala new file mode 100644 index 000000000000..a2aaf3e88570 --- /dev/null 
+++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccess.scala @@ -0,0 +1,30 @@ +package dotty.tools.benchmarks.lazyvals + +import org.openjdk.jmh.annotations._ +import LazyVals.LazyHolder +import org.openjdk.jmh.infra.Blackhole +import java.util.concurrent.TimeUnit + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 5) +@Measurement(iterations = 5) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class InitializedAccess { + + var holder: LazyHolder = _ + + @Setup + def prepare: Unit = { + holder = new LazyHolder + holder.value + } + + @Benchmark + def measureInitialized(bh: Blackhole) = { + bh.consume(holder) + bh.consume(holder.value) + } +} diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessAny.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessAny.scala new file mode 100644 index 000000000000..5a6b4ae1686d --- /dev/null +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessAny.scala @@ -0,0 +1,30 @@ +package dotty.tools.benchmarks.lazyvals + +import org.openjdk.jmh.annotations._ +import LazyVals.LazyAnyHolder +import org.openjdk.jmh.infra.Blackhole +import java.util.concurrent.TimeUnit + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 5) +@Measurement(iterations = 5) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class InitializedAccessAny { + + var holder: LazyAnyHolder = _ + + @Setup + def prepare: Unit = { + holder = new LazyAnyHolder + holder.value + } + + @Benchmark + def measureInitialized(bh: Blackhole) = { + bh.consume(holder) + bh.consume(holder.value) + } +} diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessGeneric.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessGeneric.scala new file mode 100644 index 000000000000..a95cb1de2980 --- /dev/null +++ 
b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessGeneric.scala @@ -0,0 +1,30 @@ +package dotty.tools.benchmarks.lazyvals + +import org.openjdk.jmh.annotations._ +import LazyVals.LazyGenericHolder +import org.openjdk.jmh.infra.Blackhole +import java.util.concurrent.TimeUnit + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 5) +@Measurement(iterations = 5) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class InitializedAccessGeneric { + + var holder: LazyGenericHolder[String] = _ + + @Setup + def prepare: Unit = { + holder = new LazyGenericHolder[String]("foo") + holder.value + } + + @Benchmark + def measureInitialized(bh: Blackhole) = { + bh.consume(holder) + bh.consume(holder.value) + } +} diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessMultiple.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessMultiple.scala new file mode 100644 index 000000000000..4f3c75fd920b --- /dev/null +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessMultiple.scala @@ -0,0 +1,34 @@ +package dotty.tools.benchmarks.lazyvals + +import org.openjdk.jmh.annotations._ +import LazyVals.LazyHolder +import org.openjdk.jmh.infra.Blackhole +import java.util.concurrent.TimeUnit + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 5) +@Measurement(iterations = 5) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class InitializedAccessMultiple { + + var holders: Array[LazyHolder] = _ + + @Setup + def prepare: Unit = { + holders = Array.fill(100){ new LazyHolder } + } + + @Benchmark + def measureInitialized(bh: Blackhole) = { + var i = 0 + while(i < 100) { + val currentHolder = holders(i) + bh.consume(currentHolder) + bh.consume(currentHolder.value) + i = i + 1 + } + } +} diff --git 
a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessString.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessString.scala new file mode 100644 index 000000000000..25cc0f9b288d --- /dev/null +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessString.scala @@ -0,0 +1,30 @@ +package dotty.tools.benchmarks.lazyvals + +import org.openjdk.jmh.annotations._ +import LazyVals.LazyStringHolder +import org.openjdk.jmh.infra.Blackhole +import java.util.concurrent.TimeUnit + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 5) +@Measurement(iterations = 5) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class InitializedAccessString { + + var holder: LazyStringHolder = _ + + @Setup + def prepare: Unit = { + holder = new LazyStringHolder + holder.value + } + + @Benchmark + def measureInitialized(bh: Blackhole) = { + bh.consume(holder) + bh.consume(holder.value) + } +} diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/LazyVals.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/LazyVals.scala new file mode 100644 index 000000000000..0afd93d086be --- /dev/null +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/LazyVals.scala @@ -0,0 +1,53 @@ +package dotty.tools.benchmarks.lazyvals +import java.util.concurrent.CountDownLatch +object LazyVals { + + trait Foo + class Bar1 extends Foo + class Bar2 extends Foo + class Bar3 extends Foo + class Bar4 extends Foo + class Bar5 extends Bar4 + + class LazyStringHolder { + + lazy val value: String = { + System.nanoTime() % 5 match { + case 0 => "abc" + case 1 => "def" + case 2 => "ghi" + case 3 => "jkl" + case 4 => "mno" + } + } + } + + class LazyHolder { + + lazy val value: List[Int] = { + System.nanoTime() % 5 match { + case 0 => 1 :: 2 :: Nil + case 1 => Nil + case 2 => 1 :: Nil + case 3 => Nil + case 4 => 1 :: 2 :: 3 :: Nil + } + } + } + + class 
LazyGenericHolder[A](v: => A) { + lazy val value: A = v + } + + class LazyAnyHolder { + lazy val value: Any = { + System.nanoTime() % 5 match { + case 0 => new Bar1 + case 1 => new Bar2 + case 2 => new Bar3 + case 3 => new Bar4 + case 4 => new Bar4 + } + } + } +} diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/UninitializedAccess.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/UninitializedAccess.scala new file mode 100644 index 000000000000..417d22f67d48 --- /dev/null +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/UninitializedAccess.scala @@ -0,0 +1,25 @@ +package dotty.tools.benchmarks.lazyvals + +import org.openjdk.jmh.annotations._ +import LazyVals.LazyHolder +import org.openjdk.jmh.infra.Blackhole +import java.util.concurrent.TimeUnit + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 5) +@Measurement(iterations = 5) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class UninitializedAccess { + + @Benchmark + def measureInitialized(bh: Blackhole) = { + var i = 0 + val holder = new LazyHolder + bh.consume(holder) + bh.consume(holder.value) + i = i + 1 + } +} diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/UninitializedAccessMultiple.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/UninitializedAccessMultiple.scala new file mode 100644 index 000000000000..133a0932bf51 --- /dev/null +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/UninitializedAccessMultiple.scala @@ -0,0 +1,27 @@ +package dotty.tools.benchmarks.lazyvals + +import org.openjdk.jmh.annotations._ +import LazyVals.LazyHolder +import org.openjdk.jmh.infra.Blackhole +import java.util.concurrent.TimeUnit + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 5) +@Measurement(iterations = 5) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class UninitializedAccessMultiple { + + 
@Benchmark + def measureInitialized(bh: Blackhole) = { + var i = 0 + while(i < 100) { + val holder = new LazyHolder + bh.consume(holder) + bh.consume(holder.value) + i = i + 1 + } + } +} diff --git a/community-build/community-projects/PPrint b/community-build/community-projects/PPrint index bb5162249f9e..2203dc6081f5 160000 --- a/community-build/community-projects/PPrint +++ b/community-build/community-projects/PPrint @@ -1 +1 @@ -Subproject commit bb5162249f9e40e925df49ad1467efd260a3eb2c +Subproject commit 2203dc6081f5e8fa89f552b155724b0a8fdcec03 diff --git a/community-build/community-projects/akka b/community-build/community-projects/akka index ed97fe5233cb..7f5115ebc9cd 160000 --- a/community-build/community-projects/akka +++ b/community-build/community-projects/akka @@ -1 +1 @@ -Subproject commit ed97fe5233cbda2da02abad50d48c310077b313c +Subproject commit 7f5115ebc9cde408433040f11834f5218b4a3357 diff --git a/community-build/community-projects/cask b/community-build/community-projects/cask index 03b6a24ab597..d5fa6d47da5e 160000 --- a/community-build/community-projects/cask +++ b/community-build/community-projects/cask @@ -1 +1 @@ -Subproject commit 03b6a24ab59796ff8f7a3dc5f28041fce816aaf9 +Subproject commit d5fa6d47da5ea99d94887fafd555696ba07aa205 diff --git a/community-build/community-projects/fansi b/community-build/community-projects/fansi index 9f2881ff73aa..953306f8139f 160000 --- a/community-build/community-projects/fansi +++ b/community-build/community-projects/fansi @@ -1 +1 @@ -Subproject commit 9f2881ff73aae30bc860e349bedeed0b4a8b590a +Subproject commit 953306f8139f6eaabf9f4ae7707906f9d2ba236a diff --git a/community-build/community-projects/geny b/community-build/community-projects/geny index 474fc0c12314..d981da16a05f 160000 --- a/community-build/community-projects/geny +++ b/community-build/community-projects/geny @@ -1 +1 @@ -Subproject commit 474fc0c123144aef5e930be46d63e48b4571af3a +Subproject commit d981da16a05ff4978857e7aa88489c873b8d8922 diff 
--git a/community-build/community-projects/os-lib b/community-build/community-projects/os-lib index 94d229becfed..a4400deb3bec 160000 --- a/community-build/community-projects/os-lib +++ b/community-build/community-projects/os-lib @@ -1 +1 @@ -Subproject commit 94d229becfeda1a3b8bb05e3fade3e4fc9d6cf3b +Subproject commit a4400deb3bec415fd82d331fc1f8b749f3d64e60 diff --git a/community-build/community-projects/requests-scala b/community-build/community-projects/requests-scala index 162fcedd9a9d..6d4a223bc33d 160000 --- a/community-build/community-projects/requests-scala +++ b/community-build/community-projects/requests-scala @@ -1 +1 @@ -Subproject commit 162fcedd9a9d6ff2f5d75a2ec5dc2792ef1980a6 +Subproject commit 6d4a223bc33def14ae9a4def24a3f5c258451e8e diff --git a/community-build/community-projects/scalacheck b/community-build/community-projects/scalacheck index 0ac8005753ab..fbfaabd7b628 160000 --- a/community-build/community-projects/scalacheck +++ b/community-build/community-projects/scalacheck @@ -1 +1 @@ -Subproject commit 0ac8005753ab98b6494fd631502201b97a103638 +Subproject commit fbfaabd7b628e9b0d8f78ed8a91a0672cf56ba15 diff --git a/community-build/community-projects/scalaz b/community-build/community-projects/scalaz index ee85b0925809..6e7f3d9caf64 160000 --- a/community-build/community-projects/scalaz +++ b/community-build/community-projects/scalaz @@ -1 +1 @@ -Subproject commit ee85b0925809f6e04808a6124ae04dd89adba0d6 +Subproject commit 6e7f3d9caf64d8ad1c82804cf418882345f41930 diff --git a/community-build/community-projects/sourcecode b/community-build/community-projects/sourcecode index 518af8532f3d..28b4f6c21fdd 160000 --- a/community-build/community-projects/sourcecode +++ b/community-build/community-projects/sourcecode @@ -1 +1 @@ -Subproject commit 518af8532f3df346cf57d8e05d55c55ab5ad325d +Subproject commit 28b4f6c21fddeb33d694dc210f51b0535a2ada68 diff --git a/community-build/community-projects/specs2 b/community-build/community-projects/specs2 index 
e1ae96e7a55f..2bfe446a4e91 160000 --- a/community-build/community-projects/specs2 +++ b/community-build/community-projects/specs2 @@ -1 +1 @@ -Subproject commit e1ae96e7a55fed2268f9ccd391687a5ac96ee4df +Subproject commit 2bfe446a4e9122b1122a7e13a3d100b3749b8630 diff --git a/community-build/community-projects/spire b/community-build/community-projects/spire index 6869620975fa..7f630c0209e3 160000 --- a/community-build/community-projects/spire +++ b/community-build/community-projects/spire @@ -1 +1 @@ -Subproject commit 6869620975fa84dd1ef78c2711d6a4f8197060ae +Subproject commit 7f630c0209e327bdc782ade2210d8e4b916fddcc diff --git a/community-build/community-projects/upickle b/community-build/community-projects/upickle index 0213eea95b28..aa3bc0e43ec7 160000 --- a/community-build/community-projects/upickle +++ b/community-build/community-projects/upickle @@ -1 +1 @@ -Subproject commit 0213eea95b282b1e961b1d5ad68031365c9a8bb2 +Subproject commit aa3bc0e43ec7b618eb087753878f3d845e58277a diff --git a/community-build/community-projects/utest b/community-build/community-projects/utest index b5a04a2f3e35..eae17c7a4d0d 160000 --- a/community-build/community-projects/utest +++ b/community-build/community-projects/utest @@ -1 +1 @@ -Subproject commit b5a04a2f3e35ae340e0821d604dafe9cb10a7fc7 +Subproject commit eae17c7a4d0d63bab1406ca75791d3cb6394233d diff --git a/community-build/src/scala/dotty/communitybuild/projects.scala b/community-build/src/scala/dotty/communitybuild/projects.scala index 5f0302e8bdf8..52155189a31f 100644 --- a/community-build/src/scala/dotty/communitybuild/projects.scala +++ b/community-build/src/scala/dotty/communitybuild/projects.scala @@ -99,7 +99,7 @@ final case class MillCommunityProject( // uncomment once mill is released // if ignoreDocs then null else s"$baseCommand.docJar" override val runCommandsArgs = List("-i", "-D", s"dottyVersion=$compilerVersion") - override val environment = Map("MILL_VERSION" -> "0.9.6-16-a5da34") + override val environment 
= Map("MILL_VERSION" -> "0.10.5") final case class SbtCommunityProject( project: String, diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala b/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala index b6d898b3b221..c49b7d9556c9 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala @@ -61,7 +61,6 @@ trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { @threadUnsafe lazy val AnnotationRetentionSourceAttr: TermSymbol = requiredClass("java.lang.annotation.RetentionPolicy").linkedClass.requiredValue("SOURCE") @threadUnsafe lazy val AnnotationRetentionClassAttr: TermSymbol = requiredClass("java.lang.annotation.RetentionPolicy").linkedClass.requiredValue("CLASS") @threadUnsafe lazy val AnnotationRetentionRuntimeAttr: TermSymbol = requiredClass("java.lang.annotation.RetentionPolicy").linkedClass.requiredValue("RUNTIME") - @threadUnsafe lazy val JavaAnnotationClass: ClassSymbol = requiredClass("java.lang.annotation.Annotation") val bCodeAsmCommon: BCodeAsmCommon[int.type] = new BCodeAsmCommon(int) @@ -415,7 +414,7 @@ trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { arrAnnotV.visitEnd() } // for the lazy val in ScalaSigBytes to be GC'ed, the invoker of emitAnnotations() should hold the ScalaSigBytes in a method-local var that doesn't escape. 
*/ - case t @ Apply(constr, args) if t.tpe.derivesFrom(JavaAnnotationClass) => + case t @ Apply(constr, args) if t.tpe.classSymbol.is(JavaAnnotation) => val typ = t.tpe.classSymbol.denot.info val assocs = assocsFromApply(t) val desc = innerClasesStore.typeDescriptor(typ) // the class descriptor of the nested annotation class diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala b/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala index 02268c2919ba..2d4b22a10527 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala @@ -54,6 +54,7 @@ trait BCodeIdiomatic { case "17" => asm.Opcodes.V17 case "18" => asm.Opcodes.V18 case "19" => asm.Opcodes.V19 + case "20" => asm.Opcodes.V20 } lazy val majorVersion: Int = (classfileVersion & 0xFF) diff --git a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala index 8ec19bb994b8..6714f664620b 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala @@ -3144,7 +3144,23 @@ class JSCodeGen()(using genCtx: Context) { val tpe = atPhase(elimErasedValueTypePhase) { sym.info.finalResultType } - unbox(boxedResult, tpe) + if (tpe.isRef(defn.BoxedUnitClass) && sym.isGetter) { + /* Work around to reclaim Scala 2 erasure behavior, assumed by the test + * NonNativeJSTypeTest.defaultValuesForFields. + * Scala 2 erases getters of `Unit`-typed fields as returning `Unit` + * (not `BoxedUnit`). Therefore, when called in expression position, + * the call site introduces an explicit `BoxedUnit.UNIT`. Even if the + * field has not been initialized at all (with `= _`), this results in + * an actual `()` value. + * In Scala 3, the same pattern returns `null`, as a `BoxedUnit`, so we + * introduce here an explicit `()` value. 
+ * TODO We should remove this branch if the upstream test is updated + * not to assume such a strict interpretation of erasure. + */ + js.Block(boxedResult, js.Undefined()) + } else { + unbox(boxedResult, tpe) + } } } diff --git a/compiler/src/dotty/tools/dotc/CompilationUnit.scala b/compiler/src/dotty/tools/dotc/CompilationUnit.scala index a6069e2749a9..44ca582c3c61 100644 --- a/compiler/src/dotty/tools/dotc/CompilationUnit.scala +++ b/compiler/src/dotty/tools/dotc/CompilationUnit.scala @@ -9,10 +9,12 @@ import util.{FreshNameCreator, SourceFile, NoSource} import util.Spans.Span import ast.{tpd, untpd} import tpd.{Tree, TreeTraverser} +import ast.Trees.{Import, Ident} import typer.Nullables import transform.SymUtils._ import core.Decorators._ -import config.SourceVersion +import config.{SourceVersion, Feature} +import StdNames.nme import scala.annotation.internal.sharable class CompilationUnit protected (val source: SourceFile) { @@ -51,6 +53,12 @@ class CompilationUnit protected (val source: SourceFile) { */ var needsStaging: Boolean = false + /** Will be set to true if the unit contains a captureChecking language import */ + var needsCaptureChecking: Boolean = false + + /** Will be set to true if the unit contains a pureFunctions language import */ + var knowsPureFuns: Boolean = false + var suspended: Boolean = false var suspendedAtInliningPhase: Boolean = false @@ -138,11 +146,20 @@ object CompilationUnit { private class Force extends TreeTraverser { var containsQuote = false var containsInline = false + var containsCaptureChecking = false def traverse(tree: Tree)(using Context): Unit = { if (tree.symbol.isQuote) containsQuote = true if tree.symbol.is(Flags.Inline) then containsInline = true + tree match + case Import(qual, selectors) => + tpd.languageImport(qual) match + case Some(prefix) => + for case untpd.ImportSelector(untpd.Ident(imported), untpd.EmptyTree, _) <- selectors do + Feature.handleGlobalLanguageImport(prefix, imported) + case _ => + case _ => 
traverseChildren(tree) } } diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala index 46d36c4412c7..b121a47781e1 100644 --- a/compiler/src/dotty/tools/dotc/Compiler.scala +++ b/compiler/src/dotty/tools/dotc/Compiler.scala @@ -81,8 +81,8 @@ class Compiler { new PatternMatcher) :: // Compile pattern matches List(new TestRecheck.Pre) :: // Test only: run rechecker, enabled under -Yrecheck-test List(new TestRecheck) :: // Test only: run rechecker, enabled under -Yrecheck-test - List(new CheckCaptures.Pre) :: // Preparations for check captures phase, enabled under -Ycc - List(new CheckCaptures) :: // Check captures, enabled under -Ycc + List(new CheckCaptures.Pre) :: // Preparations for check captures phase, enabled under captureChecking + List(new CheckCaptures) :: // Check captures, enabled under captureChecking List(new ElimOpaque, // Turn opaque into normal aliases new sjs.ExplicitJSClasses, // Make all JS classes explicit (Scala.js only) new ExplicitOuter, // Add accessors to outer classes from nested ones. diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index f9152e8294c6..f7a08d1640ee 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -163,6 +163,16 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint /** Actions that need to be performed at the end of the current compilation run */ private var finalizeActions = mutable.ListBuffer[() => Unit]() + /** Will be set to true if any of the compiled compilation units contains + * a pureFunctions language import. + */ + var pureFunsImportEncountered = false + + /** Will be set to true if any of the compiled compilation units contains + * a captureChecking language import. 
+ */ + var ccImportEncountered = false + def compile(files: List[AbstractFile]): Unit = try val sources = files.map(runContext.getSource(_)) @@ -224,6 +234,7 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint def runPhases(using Context) = { var lastPrintedTree: PrintedTree = NoPrintedTree val profiler = ctx.profiler + var phasesWereAdjusted = false for (phase <- ctx.base.allPhases) if (phase.isRunnable) @@ -242,6 +253,11 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint Stats.record(s"retained typed trees at end of $phase", unit.tpdTree.treeSize) ctx.typerState.gc() } + if !phasesWereAdjusted then + phasesWereAdjusted = true + if !Feature.ccEnabledSomewhere then + ctx.base.unlinkPhaseAsDenotTransformer(Phases.checkCapturesPhase.prev) + ctx.base.unlinkPhaseAsDenotTransformer(Phases.checkCapturesPhase) profiler.finished() } diff --git a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala index bcedc4dfa50b..054ffe66f323 100644 --- a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala +++ b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala @@ -111,7 +111,12 @@ object NavigateAST { p.forceIfLazy case _ => } - childPath(p.productIterator, p :: path) + val iterator = p match + case defdef: DefTree[?] => + p.productIterator ++ defdef.mods.productIterator + case _ => + p.productIterator + childPath(iterator, p :: path) } else { p match { diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 083a92b26d11..d17bfd0f7564 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -14,10 +14,7 @@ import scala.collection.mutable import scala.annotation.tailrec -trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] => - - // Note: the <: Type constraint looks necessary (and is needed to make the file compile in dotc). 
- // But Scalac accepts the program happily without it. Need to find out why. +trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => def unsplice(tree: Trees.Tree[T]): Trees.Tree[T] = tree @@ -195,11 +192,11 @@ trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] => case arg => arg.typeOpt.widen.isRepeatedParam } - /** Is tree a type tree of the form `=> T` or (under -Ycc) `{refs}-> T`? */ + /** Is tree a type tree of the form `=> T` or (under pureFunctions) `{refs}-> T`? */ def isByNameType(tree: Tree)(using Context): Boolean = stripByNameType(tree) ne tree - /** Strip `=> T` to `T` and (under -Ycc) `{refs}-> T` to `T` */ + /** Strip `=> T` to `T` and (under pureFunctions) `{refs}-> T` to `T` */ def stripByNameType(tree: Tree)(using Context): Tree = unsplice(tree) match case ByNameTypeTree(t1) => t1 case untpd.CapturingTypeTree(_, parent) => @@ -400,18 +397,18 @@ trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] } } - /** Under -Ycc: A builder and extractor for `=> T`, which is an alias for `{*}-> T`. + /** Under pureFunctions: A builder and extractor for `=> T`, which is an alias for `{*}-> T`. * Only trees of the form `=> T` are matched; trees written directly as `{*}-> T` * are ignored by the extractor. 
*/ object ImpureByNameTypeTree: - + def apply(tp: ByNameTypeTree)(using Context): untpd.CapturingTypeTree = untpd.CapturingTypeTree( - Ident(nme.CAPTURE_ROOT).withSpan(tp.span.startPos) :: Nil, tp) + untpd.captureRoot.withSpan(tp.span.startPos) :: Nil, tp) def unapply(tp: Tree)(using Context): Option[ByNameTypeTree] = tp match - case untpd.CapturingTypeTree(id @ Ident(nme.CAPTURE_ROOT) :: Nil, bntp: ByNameTypeTree) + case untpd.CapturingTypeTree(id @ Select(_, nme.CAPTURE_ROOT) :: Nil, bntp: ByNameTypeTree) if id.span == bntp.span.startPos => Some(bntp) case _ => None end ImpureByNameTypeTree @@ -512,7 +509,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => sym.owner.isPrimitiveValueClass || sym.owner == defn.StringClass || defn.pureMethods.contains(sym) - tree.tpe.isInstanceOf[ConstantType] && isKnownPureOp(tree.symbol) // A constant expression with pure arguments is pure. + tree.tpe.isInstanceOf[ConstantType] && tree.symbol != NoSymbol && isKnownPureOp(tree.symbol) // A constant expression with pure arguments is pure. || fn.symbol.isStableMember && !fn.symbol.is(Lazy) // constructors of no-inits classes are stable /** The purity level of this reference. diff --git a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala index 87974218fb0f..71998aff9304 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala @@ -56,7 +56,7 @@ class TreeTypeMap( /** Replace occurrences of `This(oldOwner)` in some prefix of a type * by the corresponding `This(newOwner)`. 
*/ - private val mapOwnerThis = new TypeMap { + private val mapOwnerThis = new TypeMap with cc.CaptureSet.IdempotentCaptRefMap { private def mapPrefix(from: List[Symbol], to: List[Symbol], tp: Type): Type = from match { case Nil => tp case (cls: ClassSymbol) :: from1 => mapPrefix(from1, to.tail, tp.substThis(cls, to.head.thisType)) diff --git a/compiler/src/dotty/tools/dotc/ast/Trees.scala b/compiler/src/dotty/tools/dotc/ast/Trees.scala index 1159d13d5aef..253477c5382c 100644 --- a/compiler/src/dotty/tools/dotc/ast/Trees.scala +++ b/compiler/src/dotty/tools/dotc/ast/Trees.scala @@ -15,11 +15,12 @@ import config.Printers.overload import annotation.internal.sharable import annotation.unchecked.uncheckedVariance import annotation.constructorOnly +import compiletime.uninitialized import Decorators._ object Trees { - type Untyped = Nothing + type Untyped = Type | Null /** The total number of created tree nodes, maintained if Stats.enabled */ @sharable var ntrees: Int = 0 @@ -45,36 +46,34 @@ object Trees { * - Type checking an untyped tree should remove all embedded `TypedSplice` * nodes. */ - abstract class Tree[-T >: Untyped](implicit @constructorOnly src: SourceFile) + abstract class Tree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends Positioned, SrcPos, Product, Attachment.Container, printing.Showable { if (Stats.enabled) ntrees += 1 /** The type constructor at the root of the tree */ - type ThisTree[T >: Untyped] <: Tree[T] + type ThisTree[T <: Untyped] <: Tree[T] - protected var myTpe: T @uncheckedVariance = _ + protected var myTpe: T @uncheckedVariance = uninitialized /** Destructively set the type of the tree. This should be called only when it is known that * it is safe under sharing to do so. One use-case is in the withType method below * which implements copy-on-write. Another use-case is in method interpolateAndAdapt in Typer, * where we overwrite with a simplified version of the type itself. 
*/ - private[dotc] def overwriteType(tpe: T): Unit = + private[dotc] def overwriteType(tpe: T @uncheckedVariance): Unit = myTpe = tpe /** The type of the tree. In case of an untyped tree, * an UnAssignedTypeException is thrown. (Overridden by empty trees) */ - final def tpe: T @uncheckedVariance = { - if (myTpe == null) - throw UnAssignedTypeException(this) - myTpe - } + final def tpe: T = + if myTpe == null then throw UnAssignedTypeException(this) + myTpe.uncheckedNN /** Copy `tpe` attribute from tree `from` into this tree, independently * whether it is null or not. - final def copyAttr[U >: Untyped](from: Tree[U]): ThisTree[T] = { + final def copyAttr[U <: Untyped](from: Tree[U]): ThisTree[T] = { val t1 = this.withSpan(from.span) val t2 = if (from.myTpe != null) t1.withType(from.myTpe.asInstanceOf[Type]) @@ -131,10 +130,9 @@ object Trees { */ final def hasType: Boolean = myTpe != null - final def typeOpt: Type = myTpe match { + final def typeOpt: Type = myTpe match case tp: Type => tp - case _ => NoType - } + case null => NoType /** The denotation referred to by this tree. 
* Defined for `DenotingTree`s and `ProxyTree`s, NoDenotation for other @@ -166,7 +164,7 @@ object Trees { def toList: List[Tree[T]] = this :: Nil /** if this tree is the empty tree, the alternative, else this tree */ - inline def orElse[U >: Untyped <: T](inline that: Tree[U]): Tree[U] = + inline def orElse[U >: T <: Untyped](inline that: Tree[U]): Tree[U] = if (this eq genericEmptyTree) that else this /** The number of nodes in this tree */ @@ -217,42 +215,42 @@ object Trees { override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef] } - class UnAssignedTypeException[T >: Untyped](tree: Tree[T]) extends RuntimeException { + class UnAssignedTypeException[T <: Untyped](tree: Tree[T]) extends RuntimeException { override def getMessage: String = s"type of $tree is not assigned" } - type LazyTree[-T >: Untyped] = Tree[T] | Lazy[Tree[T]] - type LazyTreeList[-T >: Untyped] = List[Tree[T]] | Lazy[List[Tree[T]]] + type LazyTree[+T <: Untyped] = Tree[T] | Lazy[Tree[T]] + type LazyTreeList[+T <: Untyped] = List[Tree[T]] | Lazy[List[Tree[T]]] // ------ Categories of trees ----------------------------------- /** Instances of this class are trees for which isType is definitely true. * Note that some trees have isType = true without being TypTrees (e.g. Ident, Annotated) */ - trait TypTree[-T >: Untyped] extends Tree[T] { - type ThisTree[-T >: Untyped] <: TypTree[T] + trait TypTree[+T <: Untyped] extends Tree[T] { + type ThisTree[+T <: Untyped] <: TypTree[T] override def isType: Boolean = true } /** Instances of this class are trees for which isTerm is definitely true. * Note that some trees have isTerm = true without being TermTrees (e.g. 
Ident, Annotated) */ - trait TermTree[-T >: Untyped] extends Tree[T] { - type ThisTree[-T >: Untyped] <: TermTree[T] + trait TermTree[+T <: Untyped] extends Tree[T] { + type ThisTree[+T <: Untyped] <: TermTree[T] override def isTerm: Boolean = true } /** Instances of this class are trees which are not terms but are legal * parts of patterns. */ - trait PatternTree[-T >: Untyped] extends Tree[T] { - type ThisTree[-T >: Untyped] <: PatternTree[T] + trait PatternTree[+T <: Untyped] extends Tree[T] { + type ThisTree[+T <: Untyped] <: PatternTree[T] override def isPattern: Boolean = true } /** Tree's denotation can be derived from its type */ - abstract class DenotingTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] <: DenotingTree[T] + abstract class DenotingTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { + type ThisTree[+T <: Untyped] <: DenotingTree[T] override def denot(using Context): Denotation = typeOpt.stripped match case tpe: NamedType => tpe.denot case tpe: ThisType => tpe.cls.denot @@ -262,8 +260,8 @@ object Trees { /** Tree's denot/isType/isTerm properties come from a subtree * identified by `forwardTo`. 
*/ - abstract class ProxyTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] <: ProxyTree[T] + abstract class ProxyTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { + type ThisTree[+T <: Untyped] <: ProxyTree[T] def forwardTo: Tree[T] override def denot(using Context): Denotation = forwardTo.denot override def isTerm: Boolean = forwardTo.isTerm @@ -271,24 +269,24 @@ object Trees { } /** Tree has a name */ - abstract class NameTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends DenotingTree[T] { - type ThisTree[-T >: Untyped] <: NameTree[T] + abstract class NameTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends DenotingTree[T] { + type ThisTree[+T <: Untyped] <: NameTree[T] def name: Name } /** Tree refers by name to a denotation */ - abstract class RefTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends NameTree[T] { - type ThisTree[-T >: Untyped] <: RefTree[T] + abstract class RefTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends NameTree[T] { + type ThisTree[+T <: Untyped] <: RefTree[T] def qualifier: Tree[T] override def isType: Boolean = name.isTypeName override def isTerm: Boolean = name.isTermName } /** Tree defines a new symbol */ - trait DefTree[-T >: Untyped] extends DenotingTree[T] { - type ThisTree[-T >: Untyped] <: DefTree[T] + trait DefTree[+T <: Untyped] extends DenotingTree[T] { + type ThisTree[+T <: Untyped] <: DefTree[T] - private var myMods: untpd.Modifiers | Null = _ + private var myMods: untpd.Modifiers | Null = uninitialized private[dotc] def rawMods: untpd.Modifiers = if (myMods == null) untpd.EmptyModifiers else myMods.uncheckedNN @@ -313,7 +311,7 @@ object Trees { extension (mdef: untpd.DefTree) def mods: untpd.Modifiers = mdef.rawMods - sealed trait WithEndMarker[-T >: Untyped]: + sealed trait WithEndMarker[+T <: Untyped]: self: PackageDef[T] | NamedDefTree[T] => import 
WithEndMarker.* @@ -356,9 +354,9 @@ object Trees { end WithEndMarker - abstract class NamedDefTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) + abstract class NamedDefTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends NameTree[T] with DefTree[T] with WithEndMarker[T] { - type ThisTree[-T >: Untyped] <: NamedDefTree[T] + type ThisTree[+T <: Untyped] <: NamedDefTree[T] protected def srcName(using Context): Name = if name == nme.CONSTRUCTOR then nme.this_ @@ -395,8 +393,8 @@ object Trees { * The envelope of a MemberDef contains the whole definition and has its point * on the opening keyword (or the next token after that if keyword is missing). */ - abstract class MemberDef[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends NamedDefTree[T] { - type ThisTree[-T >: Untyped] <: MemberDef[T] + abstract class MemberDef[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends NamedDefTree[T] { + type ThisTree[+T <: Untyped] <: MemberDef[T] def rawComment: Option[Comment] = getAttachment(DocComment) @@ -409,40 +407,40 @@ object Trees { } /** A ValDef or DefDef tree */ - abstract class ValOrDefDef[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends MemberDef[T] with WithLazyField[Tree[T]] { - type ThisTree[-T >: Untyped] <: ValOrDefDef[T] + abstract class ValOrDefDef[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends MemberDef[T] with WithLazyField[Tree[T]] { + type ThisTree[+T <: Untyped] <: ValOrDefDef[T] def name: TermName def tpt: Tree[T] def unforcedRhs: LazyTree[T] = unforced def rhs(using Context): Tree[T] = forceIfLazy } - trait ValOrTypeDef[-T >: Untyped] extends MemberDef[T]: - type ThisTree[-T >: Untyped] <: ValOrTypeDef[T] + trait ValOrTypeDef[+T <: Untyped] extends MemberDef[T]: + type ThisTree[+T <: Untyped] <: ValOrTypeDef[T] - type ParamClause[T >: Untyped] = List[ValDef[T]] | List[TypeDef[T]] + type ParamClause[T <: Untyped] = List[ValDef[T]] | List[TypeDef[T]] // 
----------- Tree case classes ------------------------------------ /** name */ - case class Ident[-T >: Untyped] private[ast] (name: Name)(implicit @constructorOnly src: SourceFile) + case class Ident[+T <: Untyped] private[ast] (name: Name)(implicit @constructorOnly src: SourceFile) extends RefTree[T] { - type ThisTree[-T >: Untyped] = Ident[T] + type ThisTree[+T <: Untyped] = Ident[T] def qualifier: Tree[T] = genericEmptyTree def isBackquoted: Boolean = hasAttachment(Backquoted) } - class SearchFailureIdent[-T >: Untyped] private[ast] (name: Name, expl: => String)(implicit @constructorOnly src: SourceFile) + class SearchFailureIdent[+T <: Untyped] private[ast] (name: Name, expl: => String)(implicit @constructorOnly src: SourceFile) extends Ident[T](name) { def explanation = expl override def toString: String = s"SearchFailureIdent($explanation)" } /** qualifier.name, or qualifier#name, if qualifier is a type */ - case class Select[-T >: Untyped] private[ast] (qualifier: Tree[T], name: Name)(implicit @constructorOnly src: SourceFile) + case class Select[+T <: Untyped] private[ast] (qualifier: Tree[T], name: Name)(implicit @constructorOnly src: SourceFile) extends RefTree[T] { - type ThisTree[-T >: Untyped] = Select[T] + type ThisTree[+T <: Untyped] = Select[T] override def denot(using Context): Denotation = typeOpt match case ConstantType(_) if ConstFold.foldedUnops.contains(name) => @@ -464,15 +462,15 @@ object Trees { else span } - class SelectWithSig[-T >: Untyped] private[ast] (qualifier: Tree[T], name: Name, val sig: Signature)(implicit @constructorOnly src: SourceFile) + class SelectWithSig[+T <: Untyped] private[ast] (qualifier: Tree[T], name: Name, val sig: Signature)(implicit @constructorOnly src: SourceFile) extends Select[T](qualifier, name) { override def toString: String = s"SelectWithSig($qualifier, $name, $sig)" } /** qual.this */ - case class This[-T >: Untyped] private[ast] (qual: untpd.Ident)(implicit @constructorOnly src: SourceFile) + case 
class This[+T <: Untyped] private[ast] (qual: untpd.Ident)(implicit @constructorOnly src: SourceFile) extends DenotingTree[T] with TermTree[T] { - type ThisTree[-T >: Untyped] = This[T] + type ThisTree[+T <: Untyped] = This[T] // Denotation of a This tree is always the underlying class; needs correction for modules. override def denot(using Context): Denotation = typeOpt match { @@ -484,21 +482,21 @@ object Trees { } /** C.super[mix], where qual = C.this */ - case class Super[-T >: Untyped] private[ast] (qual: Tree[T], mix: untpd.Ident)(implicit @constructorOnly src: SourceFile) + case class Super[+T <: Untyped] private[ast] (qual: Tree[T], mix: untpd.Ident)(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TermTree[T] { - type ThisTree[-T >: Untyped] = Super[T] + type ThisTree[+T <: Untyped] = Super[T] def forwardTo: Tree[T] = qual } - abstract class GenericApply[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TermTree[T] { - type ThisTree[-T >: Untyped] <: GenericApply[T] + abstract class GenericApply[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TermTree[T] { + type ThisTree[+T <: Untyped] <: GenericApply[T] val fun: Tree[T] val args: List[Tree[T]] def forwardTo: Tree[T] = fun } object GenericApply: - def unapply[T >: Untyped](tree: Tree[T]): Option[(Tree[T], List[Tree[T]])] = tree match + def unapply[T <: Untyped](tree: Tree[T]): Option[(Tree[T], List[Tree[T]])] = tree match case tree: GenericApply[T] => Some((tree.fun, tree.args)) case _ => None @@ -509,9 +507,9 @@ object Trees { case InfixTuple // r f (x1, ..., xN) where N != 1; needs to be treated specially for an error message in typedApply /** fun(args) */ - case class Apply[-T >: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class Apply[+T <: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends 
GenericApply[T] { - type ThisTree[-T >: Untyped] = Apply[T] + type ThisTree[+T <: Untyped] = Apply[T] def setApplyKind(kind: ApplyKind) = putAttachment(untpd.KindOfApply, kind) @@ -525,57 +523,57 @@ object Trees { } /** fun[args] */ - case class TypeApply[-T >: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class TypeApply[+T <: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends GenericApply[T] { - type ThisTree[-T >: Untyped] = TypeApply[T] + type ThisTree[+T <: Untyped] = TypeApply[T] } /** const */ - case class Literal[-T >: Untyped] private[ast] (const: Constant)(implicit @constructorOnly src: SourceFile) + case class Literal[+T <: Untyped] private[ast] (const: Constant)(implicit @constructorOnly src: SourceFile) extends Tree[T] with TermTree[T] { - type ThisTree[-T >: Untyped] = Literal[T] + type ThisTree[+T <: Untyped] = Literal[T] } /** new tpt, but no constructor call */ - case class New[-T >: Untyped] private[ast] (tpt: Tree[T])(implicit @constructorOnly src: SourceFile) + case class New[+T <: Untyped] private[ast] (tpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] with TermTree[T] { - type ThisTree[-T >: Untyped] = New[T] + type ThisTree[+T <: Untyped] = New[T] } /** expr : tpt */ - case class Typed[-T >: Untyped] private[ast] (expr: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Typed[+T <: Untyped] private[ast] (expr: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TermTree[T] { - type ThisTree[-T >: Untyped] = Typed[T] + type ThisTree[+T <: Untyped] = Typed[T] def forwardTo: Tree[T] = expr } /** name = arg, in a parameter list */ - case class NamedArg[-T >: Untyped] private[ast] (name: Name, arg: Tree[T])(implicit @constructorOnly src: SourceFile) + case class NamedArg[+T <: Untyped] private[ast] (name: Name, arg: Tree[T])(implicit 
@constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] = NamedArg[T] + type ThisTree[+T <: Untyped] = NamedArg[T] } /** name = arg, outside a parameter list */ - case class Assign[-T >: Untyped] private[ast] (lhs: Tree[T], rhs: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Assign[+T <: Untyped] private[ast] (lhs: Tree[T], rhs: Tree[T])(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = Assign[T] + type ThisTree[+T <: Untyped] = Assign[T] } /** { stats; expr } */ - case class Block[-T >: Untyped] private[ast] (stats: List[Tree[T]], expr: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Block[+T <: Untyped] private[ast] (stats: List[Tree[T]], expr: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] = Block[T] + type ThisTree[+T <: Untyped] = Block[T] override def isType: Boolean = expr.isType override def isTerm: Boolean = !isType // this will classify empty trees as terms, which is necessary } /** if cond then thenp else elsep */ - case class If[-T >: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])(implicit @constructorOnly src: SourceFile) + case class If[+T <: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = If[T] + type ThisTree[+T <: Untyped] = If[T] def isInline = false } - class InlineIf[-T >: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])(implicit @constructorOnly src: SourceFile) + class InlineIf[+T <: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])(implicit @constructorOnly src: SourceFile) extends If(cond, thenp, elsep) { override def isInline = true override def toString = s"InlineIf($cond, $thenp, $elsep)" @@ -590,33 +588,33 @@ object Trees { * of the closure is a function type, otherwise it is the type * given in 
`tpt`, which must be a SAM type. */ - case class Closure[-T >: Untyped] private[ast] (env: List[Tree[T]], meth: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Closure[+T <: Untyped] private[ast] (env: List[Tree[T]], meth: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = Closure[T] + type ThisTree[+T <: Untyped] = Closure[T] } /** selector match { cases } */ - case class Match[-T >: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) + case class Match[+T <: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = Match[T] + type ThisTree[+T <: Untyped] = Match[T] def isInline = false } - class InlineMatch[-T >: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) + class InlineMatch[+T <: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) extends Match(selector, cases) { override def isInline = true override def toString = s"InlineMatch($selector, $cases)" } /** case pat if guard => body */ - case class CaseDef[-T >: Untyped] private[ast] (pat: Tree[T], guard: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) + case class CaseDef[+T <: Untyped] private[ast] (pat: Tree[T], guard: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] = CaseDef[T] + type ThisTree[+T <: Untyped] = CaseDef[T] } /** label[tpt]: { expr } */ - case class Labeled[-T >: Untyped] private[ast] (bind: Bind[T], expr: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Labeled[+T <: Untyped] private[ast] (bind: Bind[T], expr: Tree[T])(implicit @constructorOnly src: SourceFile) extends NameTree[T] { - type ThisTree[-T >: 
Untyped] = Labeled[T] + type ThisTree[+T <: Untyped] = Labeled[T] def name: Name = bind.name } @@ -625,33 +623,33 @@ object Trees { * After program transformations this is not necessarily the enclosing method, because * closures can intervene. */ - case class Return[-T >: Untyped] private[ast] (expr: Tree[T], from: Tree[T] = genericEmptyTree)(implicit @constructorOnly src: SourceFile) + case class Return[+T <: Untyped] private[ast] (expr: Tree[T], from: Tree[T] = genericEmptyTree)(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = Return[T] + type ThisTree[+T <: Untyped] = Return[T] } /** while (cond) { body } */ - case class WhileDo[-T >: Untyped] private[ast] (cond: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) + case class WhileDo[+T <: Untyped] private[ast] (cond: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = WhileDo[T] + type ThisTree[+T <: Untyped] = WhileDo[T] } /** try block catch cases finally finalizer */ - case class Try[-T >: Untyped] private[ast] (expr: Tree[T], cases: List[CaseDef[T]], finalizer: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Try[+T <: Untyped] private[ast] (expr: Tree[T], cases: List[CaseDef[T]], finalizer: Tree[T])(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = Try[T] + type ThisTree[+T <: Untyped] = Try[T] } /** Seq(elems) * @param tpt The element type of the sequence. 
*/ - case class SeqLiteral[-T >: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) + case class SeqLiteral[+T <: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] = SeqLiteral[T] + type ThisTree[+T <: Untyped] = SeqLiteral[T] } /** Array(elems) */ - class JavaSeqLiteral[-T >: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) + class JavaSeqLiteral[+T <: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends SeqLiteral(elems, elemtpt) { override def toString: String = s"JavaSeqLiteral($elems, $elemtpt)" } @@ -672,17 +670,17 @@ object Trees { * different context: `bindings` represent the arguments to the inlined * call, whereas `expansion` represents the body of the inlined function. */ - case class Inlined[-T >: Untyped] private[ast] (call: tpd.Tree, bindings: List[MemberDef[T]], expansion: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Inlined[+T <: Untyped] private[ast] (call: tpd.Tree, bindings: List[MemberDef[T]], expansion: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] = Inlined[T] + type ThisTree[+T <: Untyped] = Inlined[T] override def isTerm = expansion.isTerm override def isType = expansion.isType } /** A type tree that represents an existing or inferred type */ - case class TypeTree[-T >: Untyped]()(implicit @constructorOnly src: SourceFile) + case class TypeTree[+T <: Untyped]()(implicit @constructorOnly src: SourceFile) extends DenotingTree[T] with TypTree[T] { - type ThisTree[-T >: Untyped] = TypeTree[T] + type ThisTree[+T <: Untyped] = TypeTree[T] override def isEmpty: Boolean = !hasType override def toString: String = s"TypeTree${if (hasType) s"[$typeOpt]" else ""}" @@ -693,25 +691,25 @@ object Trees { * - as 
a (result-)type of an inferred ValDef or DefDef. * Every TypeVar is created as the type of one InferredTypeTree. */ - class InferredTypeTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends TypeTree[T] + class InferredTypeTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends TypeTree[T] /** ref.type */ - case class SingletonTypeTree[-T >: Untyped] private[ast] (ref: Tree[T])(implicit @constructorOnly src: SourceFile) + case class SingletonTypeTree[+T <: Untyped] private[ast] (ref: Tree[T])(implicit @constructorOnly src: SourceFile) extends DenotingTree[T] with TypTree[T] { - type ThisTree[-T >: Untyped] = SingletonTypeTree[T] + type ThisTree[+T <: Untyped] = SingletonTypeTree[T] } /** tpt { refinements } */ - case class RefinedTypeTree[-T >: Untyped] private[ast] (tpt: Tree[T], refinements: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class RefinedTypeTree[+T <: Untyped] private[ast] (tpt: Tree[T], refinements: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TypTree[T] { - type ThisTree[-T >: Untyped] = RefinedTypeTree[T] + type ThisTree[+T <: Untyped] = RefinedTypeTree[T] def forwardTo: Tree[T] = tpt } /** tpt[args] */ - case class AppliedTypeTree[-T >: Untyped] private[ast] (tpt: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class AppliedTypeTree[+T <: Untyped] private[ast] (tpt: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TypTree[T] { - type ThisTree[-T >: Untyped] = AppliedTypeTree[T] + type ThisTree[+T <: Untyped] = AppliedTypeTree[T] def forwardTo: Tree[T] = tpt } @@ -738,40 +736,40 @@ object Trees { * source code written by the user with the trees used by the compiler (for * example, to make "find all references" work in the IDE). 
*/ - case class LambdaTypeTree[-T >: Untyped] private[ast] (tparams: List[TypeDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) + case class LambdaTypeTree[+T <: Untyped] private[ast] (tparams: List[TypeDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) extends TypTree[T] { - type ThisTree[-T >: Untyped] = LambdaTypeTree[T] + type ThisTree[+T <: Untyped] = LambdaTypeTree[T] } - case class TermLambdaTypeTree[-T >: Untyped] private[ast] (params: List[ValDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) + case class TermLambdaTypeTree[+T <: Untyped] private[ast] (params: List[ValDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) extends TypTree[T] { - type ThisTree[-T >: Untyped] = TermLambdaTypeTree[T] + type ThisTree[+T <: Untyped] = TermLambdaTypeTree[T] } /** [bound] selector match { cases } */ - case class MatchTypeTree[-T >: Untyped] private[ast] (bound: Tree[T], selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) + case class MatchTypeTree[+T <: Untyped] private[ast] (bound: Tree[T], selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) extends TypTree[T] { - type ThisTree[-T >: Untyped] = MatchTypeTree[T] + type ThisTree[+T <: Untyped] = MatchTypeTree[T] } /** => T */ - case class ByNameTypeTree[-T >: Untyped] private[ast] (result: Tree[T])(implicit @constructorOnly src: SourceFile) + case class ByNameTypeTree[+T <: Untyped] private[ast] (result: Tree[T])(implicit @constructorOnly src: SourceFile) extends TypTree[T] { - type ThisTree[-T >: Untyped] = ByNameTypeTree[T] + type ThisTree[+T <: Untyped] = ByNameTypeTree[T] } /** >: lo <: hi * >: lo <: hi = alias for RHS of bounded opaque type */ - case class TypeBoundsTree[-T >: Untyped] private[ast] (lo: Tree[T], hi: Tree[T], alias: Tree[T])(implicit @constructorOnly src: SourceFile) + case class TypeBoundsTree[+T <: Untyped] private[ast] (lo: Tree[T], hi: Tree[T], alias: 
Tree[T])(implicit @constructorOnly src: SourceFile) extends TypTree[T] { - type ThisTree[-T >: Untyped] = TypeBoundsTree[T] + type ThisTree[+T <: Untyped] = TypeBoundsTree[T] } /** name @ body */ - case class Bind[-T >: Untyped] private[ast] (name: Name, body: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Bind[+T <: Untyped] private[ast] (name: Name, body: Tree[T])(implicit @constructorOnly src: SourceFile) extends NamedDefTree[T] with PatternTree[T] { - type ThisTree[-T >: Untyped] = Bind[T] + type ThisTree[+T <: Untyped] = Bind[T] override def isType: Boolean = name.isTypeName override def isTerm: Boolean = name.isTermName @@ -780,9 +778,9 @@ object Trees { } /** tree_1 | ... | tree_n */ - case class Alternative[-T >: Untyped] private[ast] (trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class Alternative[+T <: Untyped] private[ast] (trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends PatternTree[T] { - type ThisTree[-T >: Untyped] = Alternative[T] + type ThisTree[+T <: Untyped] = Alternative[T] } /** The typed translation of `extractor(patterns)` in a pattern. 
The translation has the following @@ -799,26 +797,26 @@ object Trees { * val result = fun(sel)(implicits) * if (result.isDefined) "match patterns against result" */ - case class UnApply[-T >: Untyped] private[ast] (fun: Tree[T], implicits: List[Tree[T]], patterns: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class UnApply[+T <: Untyped] private[ast] (fun: Tree[T], implicits: List[Tree[T]], patterns: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with PatternTree[T] { - type ThisTree[-T >: Untyped] = UnApply[T] + type ThisTree[+T <: Untyped] = UnApply[T] def forwardTo = fun } /** mods val name: tpt = rhs */ - case class ValDef[-T >: Untyped] private[ast] (name: TermName, tpt: Tree[T], private var preRhs: LazyTree[T @uncheckedVariance])(implicit @constructorOnly src: SourceFile) + case class ValDef[+T <: Untyped] private[ast] (name: TermName, tpt: Tree[T], private var preRhs: LazyTree[T])(implicit @constructorOnly src: SourceFile) extends ValOrDefDef[T], ValOrTypeDef[T] { - type ThisTree[-T >: Untyped] = ValDef[T] + type ThisTree[+T <: Untyped] = ValDef[T] assert(isEmpty || (tpt ne genericEmptyTree)) def unforced: LazyTree[T] = preRhs protected def force(x: Tree[T @uncheckedVariance]): Unit = preRhs = x } /** mods def name[tparams](vparams_1)...(vparams_n): tpt = rhs */ - case class DefDef[-T >: Untyped] private[ast] (name: TermName, - paramss: List[ParamClause[T]], tpt: Tree[T], private var preRhs: LazyTree[T @uncheckedVariance])(implicit @constructorOnly src: SourceFile) + case class DefDef[+T <: Untyped] private[ast] (name: TermName, + paramss: List[ParamClause[T]], tpt: Tree[T], private var preRhs: LazyTree[T])(implicit @constructorOnly src: SourceFile) extends ValOrDefDef[T] { - type ThisTree[-T >: Untyped] = DefDef[T] + type ThisTree[+T <: Untyped] = DefDef[T] assert(tpt ne genericEmptyTree) def unforced: LazyTree[T] = preRhs protected def force(x: Tree[T @uncheckedVariance]): Unit = preRhs = x @@ -842,9 
+840,9 @@ object Trees { * mods type name >: lo <: hi, if rhs = TypeBoundsTree(lo, hi) or * mods type name >: lo <: hi = rhs if rhs = TypeBoundsTree(lo, hi, alias) and opaque in mods */ - case class TypeDef[-T >: Untyped] private[ast] (name: TypeName, rhs: Tree[T])(implicit @constructorOnly src: SourceFile) + case class TypeDef[+T <: Untyped] private[ast] (name: TypeName, rhs: Tree[T])(implicit @constructorOnly src: SourceFile) extends MemberDef[T], ValOrTypeDef[T] { - type ThisTree[-T >: Untyped] = TypeDef[T] + type ThisTree[+T <: Untyped] = TypeDef[T] /** Is this a definition of a class? */ def isClassDef: Boolean = rhs.isInstanceOf[Template[?]] @@ -857,9 +855,9 @@ object Trees { * if this is of class untpd.DerivingTemplate. * Typed templates only have parents. */ - case class Template[-T >: Untyped] private[ast] (constr: DefDef[T], parentsOrDerived: List[Tree[T]], self: ValDef[T], private var preBody: LazyTreeList[T @uncheckedVariance])(implicit @constructorOnly src: SourceFile) + case class Template[+T <: Untyped] private[ast] (constr: DefDef[T], parentsOrDerived: List[Tree[T]], self: ValDef[T], private var preBody: LazyTreeList[T])(implicit @constructorOnly src: SourceFile) extends DefTree[T] with WithLazyField[List[Tree[T]]] { - type ThisTree[-T >: Untyped] = Template[T] + type ThisTree[+T <: Untyped] = Template[T] def unforcedBody: LazyTreeList[T] = unforced def unforced: LazyTreeList[T] = preBody protected def force(x: List[Tree[T @uncheckedVariance]]): Unit = preBody = x @@ -870,9 +868,9 @@ object Trees { } - abstract class ImportOrExport[-T >: Untyped](implicit @constructorOnly src: SourceFile) + abstract class ImportOrExport[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends DenotingTree[T] { - type ThisTree[-T >: Untyped] <: ImportOrExport[T] + type ThisTree[+T <: Untyped] <: ImportOrExport[T] val expr: Tree[T] val selectors: List[untpd.ImportSelector] } @@ -881,36 +879,36 @@ object Trees { * where a selector is either an untyped `Ident`, 
`name` or * an untyped thicket consisting of `name` and `rename`. */ - case class Import[-T >: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) + case class Import[+T <: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) extends ImportOrExport[T] { - type ThisTree[-T >: Untyped] = Import[T] + type ThisTree[+T <: Untyped] = Import[T] } /** export expr.selectors * where a selector is either an untyped `Ident`, `name` or * an untyped thicket consisting of `name` and `rename`. */ - case class Export[-T >: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) + case class Export[+T <: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) extends ImportOrExport[T] { - type ThisTree[-T >: Untyped] = Export[T] + type ThisTree[+T <: Untyped] = Export[T] } /** package pid { stats } */ - case class PackageDef[-T >: Untyped] private[ast] (pid: RefTree[T], stats: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class PackageDef[+T <: Untyped] private[ast] (pid: RefTree[T], stats: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with WithEndMarker[T] { - type ThisTree[-T >: Untyped] = PackageDef[T] + type ThisTree[+T <: Untyped] = PackageDef[T] def forwardTo: RefTree[T] = pid protected def srcName(using Context): Name = pid.name } /** arg @annot */ - case class Annotated[-T >: Untyped] private[ast] (arg: Tree[T], annot: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Annotated[+T <: Untyped] private[ast] (arg: Tree[T], annot: Tree[T])(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] { - type ThisTree[-T >: Untyped] = Annotated[T] + type ThisTree[+T <: Untyped] = Annotated[T] def forwardTo: Tree[T] = arg } - trait WithoutTypeOrPos[-T >: 
Untyped] extends Tree[T] { + trait WithoutTypeOrPos[+T <: Untyped] extends Tree[T] { override def withTypeUnchecked(tpe: Type): ThisTree[Type] = this.asInstanceOf[ThisTree[Type]] override def span: Span = NoSpan override def span_=(span: Span): Unit = {} @@ -921,17 +919,17 @@ object Trees { * The contained trees will be integrated when transformed with * a `transform(List[Tree])` call. */ - case class Thicket[-T >: Untyped](trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class Thicket[+T <: Untyped](trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends Tree[T] with WithoutTypeOrPos[T] { myTpe = NoType.asInstanceOf[T] - type ThisTree[-T >: Untyped] = Thicket[T] + type ThisTree[+T <: Untyped] = Thicket[T] - def mapElems(op: Tree[T] => Tree[T] @uncheckedVariance): Thicket[T] = { + def mapElems[U >: T <: Untyped](op: Tree[T] => Tree[U]): Thicket[U] = { val newTrees = trees.mapConserve(op) if (trees eq newTrees) this else - Thicket[T](newTrees)(source).asInstanceOf[this.type] + Thicket[U](newTrees)(source).asInstanceOf[this.type] } override def foreachInThicket(op: Tree[T] => Unit): Unit = @@ -950,12 +948,12 @@ object Trees { mapElems(_.withSpan(span)).asInstanceOf[this.type] } - class EmptyTree[T >: Untyped] extends Thicket(Nil)(NoSource) { + class EmptyTree[T <: Untyped] extends Thicket(Nil)(NoSource) { // assert(uniqueId != 1492) override def withSpan(span: Span) = throw AssertionError("Cannot change span of EmptyTree") } - class EmptyValDef[T >: Untyped] extends ValDef[T]( + class EmptyValDef[T <: Untyped] extends ValDef[T]( nme.WILDCARD, genericEmptyTree[T], genericEmptyTree[T])(NoSource) with WithoutTypeOrPos[T] { myTpe = NoType.asInstanceOf[T] setMods(untpd.Modifiers(PrivateLocal)) @@ -966,8 +964,8 @@ object Trees { @sharable val theEmptyTree = new EmptyTree[Type]() @sharable val theEmptyValDef = new EmptyValDef[Type]() - def genericEmptyValDef[T >: Untyped]: ValDef[T] = theEmptyValDef.asInstanceOf[ValDef[T]] - def 
genericEmptyTree[T >: Untyped]: Thicket[T] = theEmptyTree.asInstanceOf[Thicket[T]] + def genericEmptyValDef[T <: Untyped]: ValDef[T] = theEmptyValDef.asInstanceOf[ValDef[T]] + def genericEmptyTree[T <: Untyped]: Thicket[T] = theEmptyTree.asInstanceOf[Thicket[T]] /** Tree that replaces a level 1 splices in pickled (level 0) quotes. * It is only used when picking quotes (will never be in a TASTy file). @@ -978,13 +976,13 @@ object Trees { * @param content Lambda that computes the content of the hole. This tree is empty when in a quote pickle. * @param tpt Type of the hole */ - case class Hole[-T >: Untyped](isTermHole: Boolean, idx: Int, args: List[Tree[T]], content: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] <: Hole[T] + case class Hole[+T <: Untyped](isTermHole: Boolean, idx: Int, args: List[Tree[T]], content: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { + type ThisTree[+T <: Untyped] <: Hole[T] override def isTerm: Boolean = isTermHole override def isType: Boolean = !isTermHole } - def flatten[T >: Untyped](trees: List[Tree[T]]): List[Tree[T]] = { + def flatten[T <: Untyped](trees: List[Tree[T]]): List[Tree[T]] = { def recur(buf: ListBuffer[Tree[T]] | Null, remaining: List[Tree[T]]): ListBuffer[Tree[T]] | Null = remaining match { case Thicket(elems) :: remaining1 => @@ -1037,7 +1035,7 @@ object Trees { // ----- Generic Tree Instances, inherited from `tpt` and `untpd`. - abstract class Instance[T >: Untyped <: Type] { inst => + abstract class Instance[T <: Untyped] { inst => type Tree = Trees.Tree[T] type TypTree = Trees.TypTree[T] @@ -1372,7 +1370,7 @@ object Trees { * innermost enclosing call for which the inlined version is currently * processed. 
*/ - protected def inlineContext(call: Tree)(using Context): Context = ctx + protected def inlineContext(call: tpd.Tree)(using Context): Context = ctx /** The context to use when mapping or accumulating over a tree */ def localCtx(tree: Tree)(using Context): Context diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index 6f3f134f9342..f72cafd4205d 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -42,7 +42,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { /** mods object name impl */ case class ModuleDef(name: TermName, impl: Template)(implicit @constructorOnly src: SourceFile) extends MemberDef { - type ThisTree[-T >: Untyped] <: Trees.NameTree[T] with Trees.MemberDef[T] with ModuleDef + type ThisTree[+T <: Untyped] <: Trees.NameTree[T] with Trees.MemberDef[T] with ModuleDef def withName(name: Name)(using Context): ModuleDef = cpy.ModuleDef(this)(name.toTermName, impl) } @@ -145,7 +145,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case Floating } - /** {x1, ..., xN} T (only relevant under -Ycc) */ + /** {x1, ..., xN} T (only relevant under captureChecking) */ case class CapturingTypeTree(refs: List[Tree], parent: Tree)(implicit @constructorOnly src: SourceFile) extends TypTree /** Short-lived usage in typer, does not need copy/transform/fold infrastructure */ @@ -217,7 +217,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case class Infix()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Infix) - /** Used under -Ycc to mark impure function types `A => B` in `FunctionWithMods` */ + /** Used under pureFunctions to mark impure function types `A => B` in `FunctionWithMods` */ case class Impure()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Impure) } @@ -492,6 +492,9 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def 
scalaAny(implicit src: SourceFile): Select = scalaDot(tpnme.Any) def javaDotLangDot(name: Name)(implicit src: SourceFile): Select = Select(Select(Ident(nme.java), nme.lang), name) + def captureRoot(using Context): Select = + Select(scalaDot(nme.caps), nme.CAPTURE_ROOT) + def makeConstructor(tparams: List[TypeDef], vparamss: List[List[ValDef]], rhs: Tree = EmptyTree)(using Context): DefDef = DefDef(nme.CONSTRUCTOR, joinParams(tparams, vparamss), TypeTree(), rhs) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureAnnotation.scala b/compiler/src/dotty/tools/dotc/cc/CaptureAnnotation.scala index 0fd96fe2462c..fd89159e2076 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureAnnotation.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureAnnotation.scala @@ -39,8 +39,7 @@ case class CaptureAnnotation(refs: CaptureSet, boxed: Boolean)(cls: Symbol) exte override def symbol(using Context) = cls - override def derivedAnnotation(tree: Tree)(using Context): Annotation = - unsupported(i"derivedAnnotation(Tree), $tree, $refs") + override def derivedAnnotation(tree: Tree)(using Context): Annotation = this def derivedAnnotation(refs: CaptureSet, boxed: Boolean)(using Context): Annotation = if (this.refs eq refs) && (this.boxed == boxed) then this diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 0ebf7c1c01e9..0ede1825e611 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -9,6 +9,7 @@ import Decorators.*, NameOps.* import config.Printers.capt import util.Property.Key import tpd.* +import config.Feature private val Captures: Key[CaptureSet] = Key() private val BoxedType: Key[BoxedTypeCache] = Key() @@ -40,6 +41,22 @@ extension (tree: Tree) tree.putAttachment(Captures, refs) refs + /** Under pureFunctions, add a @retainsByName(*)` annotation to the argument of + * a by name parameter type, turning the latter into an impure by name parameter 
type. + */ + def adaptByNameArgUnderPureFuns(using Context): Tree = + if Feature.pureFunsEnabledSomewhere then + val rbn = defn.RetainsByNameAnnot + Annotated(tree, + New(rbn.typeRef).select(rbn.primaryConstructor).appliedTo( + Typed( + SeqLiteral(ref(defn.captureRoot) :: Nil, TypeTree(defn.AnyType)), + TypeTree(defn.RepeatedParamType.appliedTo(defn.AnyType)) + ) + ) + ) + else tree + extension (tp: Type) /** @pre `tp` is a CapturingType */ @@ -96,6 +113,19 @@ extension (tp: Type) /** Is the boxedCaptureSet of this type nonempty? */ def isBoxedCapturing(using Context) = !tp.boxedCaptureSet.isAlwaysEmpty + /** If this type is a capturing type, the version with boxed statues as given by `boxed`. + * If it is a TermRef of a capturing type, and the box status flips, widen to a capturing + * type that captures the TermRef. + */ + def forceBoxStatus(boxed: Boolean)(using Context): Type = tp.widenDealias match + case tp @ CapturingType(parent, refs) if tp.isBoxed != boxed => + val refs1 = tp match + case ref: CaptureRef if ref.isTracked => ref.singletonCaptureSet + case _ => refs + CapturingType(parent, refs1, boxed) + case _ => + tp + /** Map capturing type to their parents. Capturing types accessible * via dealising are also stripped. 
*/ @@ -107,11 +137,11 @@ extension (tp: Type) case _ => tp - /** Under -Ycc, map regular function type to impure function type + /** Under pureFunctions, map regular function type to impure function type */ - def adaptFunctionTypeUnderCC(using Context): Type = tp match + def adaptFunctionTypeUnderPureFuns(using Context): Type = tp match case AppliedType(fn, args) - if ctx.settings.Ycc.value && defn.isFunctionClass(fn.typeSymbol) => + if Feature.pureFunsEnabledSomewhere && defn.isFunctionClass(fn.typeSymbol) => val fname = fn.typeSymbol.name defn.FunctionType( fname.functionArity, @@ -121,8 +151,64 @@ extension (tp: Type) case _ => tp + /** Under pureFunctions, add a @retainsByName(*)` annotation to the argument of + * a by name parameter type, turning the latter into an impure by name parameter type. + */ + def adaptByNameArgUnderPureFuns(using Context): Type = + if Feature.pureFunsEnabledSomewhere then + AnnotatedType(tp, + CaptureAnnotation(CaptureSet.universal, boxed = false)(defn.RetainsByNameAnnot)) + else + tp + + def isCapturingType(using Context): Boolean = + tp match + case CapturingType(_, _) => true + case _ => false + + /** Is type known to be always pure by its class structure, + * so that adding a capture set to it would not make sense? 
+ */ + def isAlwaysPure(using Context): Boolean = tp.dealias match + case tp: (TypeRef | AppliedType) => + val sym = tp.typeSymbol + if sym.isClass then sym.isPureClass + else tp.superType.isAlwaysPure + case CapturingType(parent, refs) => + parent.isAlwaysPure || refs.isAlwaysEmpty + case tp: TypeProxy => + tp.superType.isAlwaysPure + case tp: AndType => + tp.tp1.isAlwaysPure || tp.tp2.isAlwaysPure + case tp: OrType => + tp.tp1.isAlwaysPure && tp.tp2.isAlwaysPure + case _ => + false + +extension (cls: ClassSymbol) + + def pureBaseClass(using Context): Option[Symbol] = + cls.baseClasses.find(bc => + defn.pureBaseClasses.contains(bc) + || { + val selfType = bc.givenSelfType + selfType.exists && selfType.captureSet.isAlwaysEmpty + }) + extension (sym: Symbol) + /** A class is pure if: + * - one its base types has an explicitly declared self type with an empty capture set + * - or it is a value class + * - or it is an exception + * - or it is one of Nothing, Null, or String + */ + def isPureClass(using Context): Boolean = sym match + case cls: ClassSymbol => + cls.pureBaseClass.isDefined || defn.pureSimpleClasses.contains(cls) + case _ => + false + /** Does this symbol allow results carrying the universal capability? * Currently this is true only for function type applies (since their * results are unboxed) and `erasedValue` since this function is magic in @@ -150,6 +236,8 @@ extension (sym: Symbol) case _ => false containsEnclTypeParam(sym.info.finalResultType) && !sym.allowsRootCapture + && sym != defn.Caps_unsafeBox + && sym != defn.Caps_unsafeUnbox extension (tp: AnnotatedType) /** Is this a boxed capturing type? 
*/ diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index fb726a73c486..6bf6d7770d8b 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -271,7 +271,7 @@ sealed abstract class CaptureSet extends Showable: map(Substituters.SubstParamsMap(tl, to)) /** Invoke handler if this set has (or later aquires) the root capability `*` */ - def disallowRootCapability(handler: () => Unit)(using Context): this.type = + def disallowRootCapability(handler: () => Context ?=> Unit)(using Context): this.type = if isUniversal then handler() this @@ -373,7 +373,7 @@ object CaptureSet: def isAlwaysEmpty = false /** A handler to be invoked if the root reference `*` is added to this set */ - var addRootHandler: () => Unit = () => () + var rootAddedHandler: () => Context ?=> Unit = () => () var description: String = "" @@ -404,7 +404,7 @@ object CaptureSet: def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = if !isConst && recordElemsState() then elems ++= newElems - if isUniversal then addRootHandler() + if isUniversal then rootAddedHandler() // assert(id != 2 || elems.size != 2, this) (CompareResult.OK /: deps) { (r, dep) => r.andAlso(dep.tryInclude(newElems, this)) @@ -421,8 +421,8 @@ object CaptureSet: else CompareResult.fail(this) - override def disallowRootCapability(handler: () => Unit)(using Context): this.type = - addRootHandler = handler + override def disallowRootCapability(handler: () => Context ?=> Unit)(using Context): this.type = + rootAddedHandler = handler super.disallowRootCapability(handler) private var computingApprox = false @@ -523,7 +523,7 @@ object CaptureSet: private def mapIsIdempotent = tm.isInstanceOf[IdempotentCaptRefMap] - assert(ccAllowUnsoundMaps || mapIsIdempotent) + assert(ccAllowUnsoundMaps || mapIsIdempotent, tm.getClass) private def whereCreated(using Context): String = if stack 
== null then "" diff --git a/compiler/src/dotty/tools/dotc/cc/CapturingType.scala b/compiler/src/dotty/tools/dotc/cc/CapturingType.scala index 05e813793a63..e9862f1f20b8 100644 --- a/compiler/src/dotty/tools/dotc/cc/CapturingType.scala +++ b/compiler/src/dotty/tools/dotc/cc/CapturingType.scala @@ -41,7 +41,10 @@ object CapturingType: * returned separately by CaptureOps.isBoxed. */ def unapply(tp: AnnotatedType)(using Context): Option[(Type, CaptureSet)] = - if ctx.phase == Phases.checkCapturesPhase && tp.annot.symbol == defn.RetainsAnnot then + if ctx.phase == Phases.checkCapturesPhase + && tp.annot.symbol == defn.RetainsAnnot + && !ctx.mode.is(Mode.IgnoreCaptures) + then EventuallyCapturingType.unapply(tp) else None diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index fe22f9f49e13..899914e872c8 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -5,12 +5,13 @@ package cc import core.* import Phases.*, DenotTransformers.*, SymDenotations.* import Contexts.*, Names.*, Flags.*, Symbols.*, Decorators.* -import Types.*, StdNames.* +import Types.*, StdNames.*, Denotations.* import config.Printers.{capt, recheckr} -import config.Config +import config.{Config, Feature} import ast.{tpd, untpd, Trees} import Trees.* -import typer.RefChecks.{checkAllOverrides, checkParents} +import typer.RefChecks.{checkAllOverrides, checkSelfAgainstParents} +import typer.Checking.{checkBounds, checkAppliedTypesIn} import util.{SimpleIdentitySet, EqHashMap, SrcPos} import transform.SymUtils.* import transform.{Recheck, PreRecheck} @@ -18,6 +19,7 @@ import Recheck.* import scala.collection.mutable import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap} import StdNames.nme +import NameKinds.DefaultGetterName import reporting.trace /** The capture checker */ @@ -26,7 +28,7 @@ object CheckCaptures: class Pre extends PreRecheck, 
SymTransformer: - override def isEnabled(using Context) = ctx.settings.Ycc.value + override def isEnabled(using Context) = true /** Reset `private` flags of parameter accessors so that we can refine them * in Setup if they have non-empty capture sets. Special handling of some @@ -42,12 +44,20 @@ object CheckCaptures: end Pre /** A class describing environments. - * @param owner the current owner - * @param captured the caputure set containing all references to tracked free variables outside of boxes - * @param isBoxed true if the environment is inside a box (in which case references are not counted) - * @param outer0 the next enclosing environment + * @param owner the current owner + * @param nestedInOwner true if the environment is a temporary one nested in the owner's environment, + * and does not have a different actual owner symbol (this happens when doing box adaptation). + * @param captured the caputure set containing all references to tracked free variables outside of boxes + * @param isBoxed true if the environment is inside a box (in which case references are not counted) + * @param outer0 the next enclosing environment */ - case class Env(owner: Symbol, captured: CaptureSet, isBoxed: Boolean, outer0: Env | Null): + case class Env( + owner: Symbol, + nestedInOwner: Boolean, + captured: CaptureSet, + isBoxed: Boolean, + outer0: Env | Null + ): def outer = outer0.nn def isOutermost = outer0 == null @@ -81,7 +91,7 @@ object CheckCaptures: elem.tpe match case ref: CaptureRef => if !ref.canBeTracked then - report.error(em"$elem cannot be tracked since it is not a parameter or a local variable", elem.srcPos) + report.error(em"$elem cannot be tracked since it is not a parameter or local value", elem.srcPos) case tpe => report.error(em"$elem: $tpe is not a legal element of a capture set", elem.srcPos) @@ -125,13 +135,14 @@ class CheckCaptures extends Recheck, SymTransformer: import CheckCaptures.* def phaseName: String = "cc" - override def isEnabled(using 
Context) = ctx.settings.Ycc.value + override def isEnabled(using Context) = true def newRechecker()(using Context) = CaptureChecker(ctx) override def run(using Context): Unit = - checkOverrides.traverse(ctx.compilationUnit.tpdTree) - super.run + if Feature.ccEnabled then + checkOverrides.traverse(ctx.compilationUnit.tpdTree) + super.run override def transformSym(sym: SymDenotation)(using Context): SymDenotation = if Synthetics.needsTransform(sym) then Synthetics.transformFromCC(sym) @@ -140,7 +151,7 @@ class CheckCaptures extends Recheck, SymTransformer: /** Check overrides again, taking capture sets into account. * TODO: Can we avoid doing overrides checks twice? * We need to do them here since only at this phase CaptureTypes are relevant - * But maybe we can then elide the check during the RefChecks phase if -Ycc is set? + * But maybe we can then elide the check during the RefChecks phase under captureChecking? */ def checkOverrides = new TreeTraverser: def traverse(t: Tree)(using Context) = @@ -204,7 +215,7 @@ class CheckCaptures extends Recheck, SymTransformer: report.error(i"$header included in allowed capture set ${res.blocking}", pos) /** The current environment */ - private var curEnv: Env = Env(NoSymbol, CaptureSet.empty, isBoxed = false, null) + private var curEnv: Env = Env(NoSymbol, nestedInOwner = false, CaptureSet.empty, isBoxed = false, null) private val myCapturedVars: util.EqHashMap[Symbol, CaptureSet] = EqHashMap() @@ -249,8 +260,12 @@ class CheckCaptures extends Recheck, SymTransformer: if !cs.isAlwaysEmpty then forallOuterEnvsUpTo(ctx.owner.topLevelClass) { env => val included = cs.filter { - case ref: TermRef => env.owner.isProperlyContainedIn(ref.symbol.owner) - case ref: ThisType => env.owner.isProperlyContainedIn(ref.cls) + case ref: TermRef => + (env.nestedInOwner || env.owner != ref.symbol.owner) + && env.owner.isContainedIn(ref.symbol.owner) + case ref: ThisType => + (env.nestedInOwner || env.owner != ref.cls) + && 
env.owner.isContainedIn(ref.cls) case _ => false } capt.println(i"Include call capture $included in ${env.owner}") @@ -276,16 +291,34 @@ class CheckCaptures extends Recheck, SymTransformer: * outcome of a `mightSubcapture` test. It picks `{f}` if this might subcapture Cr * and Cr otherwise. */ - override def recheckSelection(tree: Select, qualType: Type, name: Name)(using Context) = { - val selType = super.recheckSelection(tree, qualType, name) + override def recheckSelection(tree: Select, qualType: Type, name: Name, pt: Type)(using Context) = { + def disambiguate(denot: Denotation): Denotation = denot match + case MultiDenotation(denot1, denot2) => + // This case can arise when we try to merge multiple types that have different + // capture sets on some part. For instance an asSeenFrom might produce + // a bi-mapped capture set arising from a substition. Applying the same substitution + // to the same type twice will nevertheless produce different capture setsw which can + // lead to a failure in disambiguation since neither alternative is better than the + // other in a frozen constraint. An example test case is disambiguate-select.scala. + // We address the problem by disambiguating while ignoring all capture sets as a fallback. 
+ withMode(Mode.IgnoreCaptures) { + disambiguate(denot1).meet(disambiguate(denot2), qualType) + } + case _ => denot + + val selType = recheckSelection(tree, qualType, name, disambiguate) val selCs = selType.widen.captureSet if selCs.isAlwaysEmpty || selType.widen.isBoxedCapturing || qualType.isBoxedCapturing then selType else val qualCs = qualType.captureSet capt.println(i"intersect $qualType, ${selType.widen}, $qualCs, $selCs in $tree") - if qualCs.mightSubcapture(selCs) then + if qualCs.mightSubcapture(selCs) + && !selCs.mightSubcapture(qualCs) + && !pt.stripCapturing.isInstanceOf[SingletonType] + then selType.widen.stripCapturing.capturing(qualCs) + .showing(i"alternate type for select $tree: $selType --> $result, $qualCs / $selCs", capt) else selType }//.showing(i"recheck sel $tree, $qualType = $result") @@ -302,23 +335,41 @@ class CheckCaptures extends Recheck, SymTransformer: * and Cr otherwise. */ override def recheckApply(tree: Apply, pt: Type)(using Context): Type = - includeCallCaptures(tree.symbol, tree.srcPos) - super.recheckApply(tree, pt) match - case appType @ CapturingType(appType1, refs) => - tree.fun match - case Select(qual, _) - if !tree.fun.symbol.isConstructor - && !qual.tpe.isBoxedCapturing - && !tree.args.exists(_.tpe.isBoxedCapturing) - && qual.tpe.captureSet.mightSubcapture(refs) - && tree.args.forall(_.tpe.captureSet.mightSubcapture(refs)) - => - val callCaptures = tree.args.foldLeft(qual.tpe.captureSet)((cs, arg) => - cs ++ arg.tpe.captureSet) - appType.derivedCapturingType(appType1, callCaptures) - .showing(i"narrow $tree: $appType, refs = $refs, qual = ${qual.tpe.captureSet} --> $result", capt) - case _ => appType - case appType => appType + val meth = tree.fun.symbol + includeCallCaptures(meth, tree.srcPos) + def mapArgUsing(f: Type => Type) = + val arg :: Nil = tree.args: @unchecked + val argType0 = f(recheckStart(arg, pt)) + val argType = super.recheckFinish(argType0, arg, pt) + super.recheckFinish(argType, tree, pt) + + if meth == 
defn.Caps_unsafeBox then + mapArgUsing(_.forceBoxStatus(true)) + else if meth == defn.Caps_unsafeUnbox then + mapArgUsing(_.forceBoxStatus(false)) + else if meth == defn.Caps_unsafeBoxFunArg then + mapArgUsing { + case defn.FunctionOf(paramtpe :: Nil, restpe, isContectual, isErased) => + defn.FunctionOf(paramtpe.forceBoxStatus(true) :: Nil, restpe, isContectual, isErased) + } + else + super.recheckApply(tree, pt) match + case appType @ CapturingType(appType1, refs) => + tree.fun match + case Select(qual, _) + if !tree.fun.symbol.isConstructor + && !qual.tpe.isBoxedCapturing + && !tree.args.exists(_.tpe.isBoxedCapturing) + && qual.tpe.captureSet.mightSubcapture(refs) + && tree.args.forall(_.tpe.captureSet.mightSubcapture(refs)) + => + val callCaptures = tree.args.foldLeft(qual.tpe.captureSet)((cs, arg) => + cs ++ arg.tpe.captureSet) + appType.derivedCapturingType(appType1, callCaptures) + .showing(i"narrow $tree: $appType, refs = $refs, qual = ${qual.tpe.captureSet} --> $result", capt) + case _ => appType + case appType => appType + end recheckApply /** Handle an application of method `sym` with type `mt` to arguments of types `argTypes`. * This means: @@ -392,7 +443,8 @@ class CheckCaptures extends Recheck, SymTransformer: block match case closureDef(mdef) => pt.dealias match - case defn.FunctionOf(ptformals, _, _, _) if ptformals.forall(_.captureSet.isAlwaysEmpty) => + case defn.FunctionOf(ptformals, _, _, _) + if ptformals.nonEmpty && ptformals.forall(_.captureSet.isAlwaysEmpty) => // Redo setup of the anonymous function so that formal parameters don't // get capture sets. This is important to avoid false widenings to `*` // when taking the base type of the actual closures's dependent function @@ -402,9 +454,10 @@ class CheckCaptures extends Recheck, SymTransformer: // First, undo the previous setup which installed a completer for `meth`. 
atPhase(preRecheckPhase.prev)(meth.denot.copySymDenotation()) .installAfter(preRecheckPhase) + // Next, update all parameter symbols to match expected formals meth.paramSymss.head.lazyZip(ptformals).foreach { (psym, pformal) => - psym.copySymDenotation(info = pformal).installAfter(preRecheckPhase) + psym.updateInfoBetween(preRecheckPhase, thisPhase, pformal.mapExprType) } // Next, update types of parameter ValDefs mdef.paramss.head.lazyZip(ptformals).foreach { (param, pformal) => @@ -412,13 +465,13 @@ class CheckCaptures extends Recheck, SymTransformer: tpt.rememberTypeAlways(pformal) } // Next, install a new completer reflecting the new parameters for the anonymous method + val mt = meth.info.asInstanceOf[MethodType] val completer = new LazyType: def complete(denot: SymDenotation)(using Context) = - denot.info = MethodType(ptformals, mdef.tpt.knownType) + denot.info = mt.companion(ptformals, mdef.tpt.knownType) .showing(i"simplify info of $meth to $result", capt) recheckDef(mdef, meth) - meth.copySymDenotation(info = completer, initFlags = meth.flags &~ Touched) - .installAfter(preRecheckPhase) + meth.updateInfoBetween(preRecheckPhase, thisPhase, completer) case _ => case _ => super.recheckBlock(block, pt) @@ -439,7 +492,7 @@ class CheckCaptures extends Recheck, SymTransformer: if !Synthetics.isExcluded(sym) then val saved = curEnv val localSet = capturedVars(sym) - if !localSet.isAlwaysEmpty then curEnv = Env(sym, localSet, isBoxed = false, curEnv) + if !localSet.isAlwaysEmpty then curEnv = Env(sym, nestedInOwner = false, localSet, isBoxed = false, curEnv) try super.recheckDefDef(tree, sym) finally interpolateVarsIn(tree.tpt) @@ -448,19 +501,25 @@ class CheckCaptures extends Recheck, SymTransformer: /** Class-specific capture set relations: * 1. The capture set of a class includes the capture sets of its parents. * 2. The capture set of the self type of a class includes the capture set of the class. - * 3. 
The capture set of the self type of a class includes the capture set of every class parameter. + * 3. The capture set of the self type of a class includes the capture set of every class parameter, + * unless the parameter is marked @constructorOnly. */ override def recheckClassDef(tree: TypeDef, impl: Template, cls: ClassSymbol)(using Context): Type = val saved = curEnv val localSet = capturedVars(cls) for parent <- impl.parents do // (1) checkSubset(capturedVars(parent.tpe.classSymbol), localSet, parent.srcPos) - if !localSet.isAlwaysEmpty then curEnv = Env(cls, localSet, isBoxed = false, curEnv) + if !localSet.isAlwaysEmpty then curEnv = Env(cls, nestedInOwner = false, localSet, isBoxed = false, curEnv) try val thisSet = cls.classInfo.selfType.captureSet.withDescription(i"of the self type of $cls") checkSubset(localSet, thisSet, tree.srcPos) // (2) for param <- cls.paramGetters do - checkSubset(param.termRef.captureSet, thisSet, param.srcPos) // (3) + if !param.hasAnnotation(defn.ConstructorOnlyAnnot) then + checkSubset(param.termRef.captureSet, thisSet, param.srcPos) // (3) + for pureBase <- cls.pureBaseClass do + checkSubset(thisSet, + CaptureSet.empty.withDescription(i"of pure base class $pureBase"), + tree.srcPos) super.recheckClassDef(tree, impl, cls) finally curEnv = saved @@ -495,14 +554,20 @@ class CheckCaptures extends Recheck, SymTransformer: recheckFinish(result, arg, pt) */ - /** If expected type `pt` is boxed, don't propagate free variables. + /** If expected type `pt` is boxed and the tree is a function or a reference, + * don't propagate free variables. * Otherwise, if the result type is boxed, simulate an unboxing by * adding all references in the boxed capture set to the current environment. 
*/ override def recheck(tree: Tree, pt: Type = WildcardType)(using Context): Type = if tree.isTerm && pt.isBoxedCapturing then val saved = curEnv - curEnv = Env(curEnv.owner, CaptureSet.Var(), isBoxed = true, curEnv) + + tree match + case _: RefTree | closureDef(_) => + curEnv = Env(curEnv.owner, nestedInOwner = false, CaptureSet.Var(), isBoxed = true, curEnv) + case _ => + try super.recheck(tree, pt) finally curEnv = saved else @@ -523,8 +588,6 @@ class CheckCaptures extends Recheck, SymTransformer: tpe case _: Try => tpe - case _: ValDef if tree.symbol.is(Mutable) => - tree.symbol.info case _ => NoType def checkNotUniversal(tp: Type): Unit = tp.widenDealias match @@ -593,31 +656,128 @@ class CheckCaptures extends Recheck, SymTransformer: /** Adapt function type `actual`, which is `aargs -> ares` (possibly with dependencies) * to `expected` type. + * It returns the adapted type along with the additionally captured variable + * during adaptation. * @param reconstruct how to rebuild the adapted function type */ def adaptFun(actual: Type, aargs: List[Type], ares: Type, expected: Type, - covariant: Boolean, - reconstruct: (List[Type], Type) => Type): Type = - val (eargs, eres) = expected.dealias match - case defn.FunctionOf(eargs, eres, _, _) => (eargs, eres) - case _ => (aargs.map(_ => WildcardType), WildcardType) - val aargs1 = aargs.zipWithConserve(eargs)(adapt(_, _, !covariant)) - val ares1 = adapt(ares, eres, covariant) - if (ares1 eq ares) && (aargs1 eq aargs) then actual - else reconstruct(aargs1, ares1) - - def adapt(actual: Type, expected: Type, covariant: Boolean): Type = actual.dealias match - case actual @ CapturingType(parent, refs) => - val parent1 = adapt(parent, expected, covariant) - if actual.isBoxed != expected.isBoxedCapturing then + covariant: Boolean, boxed: Boolean, + reconstruct: (List[Type], Type) => Type): (Type, CaptureSet) = + val saved = curEnv + curEnv = Env(curEnv.owner, nestedInOwner = true, CaptureSet.Var(), isBoxed = false, if boxed 
then null else curEnv) + + try + val (eargs, eres) = expected.dealias.stripCapturing match + case defn.FunctionOf(eargs, eres, _, _) => (eargs, eres) + case expected: MethodType => (expected.paramInfos, expected.resType) + case expected @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionType(expected) => (rinfo.paramInfos, rinfo.resType) + case _ => (aargs.map(_ => WildcardType), WildcardType) + val aargs1 = aargs.zipWithConserve(eargs) { (aarg, earg) => adapt(aarg, earg, !covariant) } + val ares1 = adapt(ares, eres, covariant) + + val resTp = + if (ares1 eq ares) && (aargs1 eq aargs) then actual + else reconstruct(aargs1, ares1) + + (resTp, curEnv.captured) + finally + curEnv = saved + + /** Adapt type function type `actual` to the expected type. + * @see [[adaptFun]] + */ + def adaptTypeFun( + actual: Type, ares: Type, expected: Type, + covariant: Boolean, boxed: Boolean, + reconstruct: Type => Type): (Type, CaptureSet) = + val saved = curEnv + curEnv = Env(curEnv.owner, nestedInOwner = true, CaptureSet.Var(), isBoxed = false, if boxed then null else curEnv) + + try + val eres = expected.dealias.stripCapturing match + case RefinedType(_, _, rinfo: PolyType) => rinfo.resType + case expected: PolyType => expected.resType + case _ => WildcardType + + val ares1 = adapt(ares, eres, covariant) + + val resTp = + if ares1 eq ares then actual + else reconstruct(ares1) + + (resTp, curEnv.captured) + finally + curEnv = saved + end adaptTypeFun + + def adaptInfo(actual: Type, expected: Type, covariant: Boolean): String = + val arrow = if covariant then "~~>" else "<~~" + i"adapting $actual $arrow $expected" + + /** Destruct a capturing type `tp` to a tuple (cs, tp0, boxed), + * where `tp0` is not a capturing type. + * + * If `tp` is a nested capturing type, the return tuple always represents + * the innermost capturing type. The outer capture annotations can be + * reconstructed with the returned function. 
+ */ + def destructCapturingType(tp: Type, reconstruct: Type => Type = x => x): ((Type, CaptureSet, Boolean), Type => Type) = + tp.dealias match + case tp @ CapturingType(parent, cs) => + if parent.dealias.isCapturingType then + destructCapturingType(parent, res => reconstruct(tp.derivedCapturingType(res, cs))) + else + ((parent, cs, tp.isBoxed), reconstruct) + case actual => + ((actual, CaptureSet(), false), reconstruct) + + def adapt(actual: Type, expected: Type, covariant: Boolean): Type = trace(adaptInfo(actual, expected, covariant), recheckr, show = true) { + if expected.isInstanceOf[WildcardType] then actual + else + val ((parent, cs, actualIsBoxed), recon) = destructCapturingType(actual) + + val needsAdaptation = actualIsBoxed != expected.isBoxedCapturing + val insertBox = needsAdaptation && covariant != actualIsBoxed + + val (parent1, cs1) = parent match { + case actual @ AppliedType(tycon, args) if defn.isNonRefinedFunction(actual) => + val (parent1, leaked) = adaptFun(parent, args.init, args.last, expected, covariant, insertBox, + (aargs1, ares1) => actual.derivedAppliedType(tycon, aargs1 :+ ares1)) + (parent1, leaked ++ cs) + case actual @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionType(actual) => + // TODO Find a way to combine handling of generic and dependent function types (here and elsewhere) + val (parent1, leaked) = adaptFun(parent, rinfo.paramInfos, rinfo.resType, expected, covariant, insertBox, + (aargs1, ares1) => + rinfo.derivedLambdaType(paramInfos = aargs1, resType = ares1) + .toFunctionType(isJava = false, alwaysDependent = true)) + (parent1, leaked ++ cs) + case actual: MethodType => + val (parent1, leaked) = adaptFun(parent, actual.paramInfos, actual.resType, expected, covariant, insertBox, + (aargs1, ares1) => + actual.derivedLambdaType(paramInfos = aargs1, resType = ares1)) + (parent1, leaked ++ cs) + case actual @ RefinedType(p, nme, rinfo: PolyType) if defn.isFunctionOrPolyType(actual) => + val (parent1, leaked) = 
adaptTypeFun(parent, rinfo.resType, expected, covariant, insertBox, + ares1 => + val rinfo1 = rinfo.derivedLambdaType(rinfo.paramNames, rinfo.paramInfos, ares1) + val actual1 = actual.derivedRefinedType(p, nme, rinfo1) + actual1 + ) + (parent1, leaked ++ cs) + case _ => + (parent, cs) + } + + if needsAdaptation then val criticalSet = // the set which is not allowed to have `*` - if covariant then refs // can't box with `*` + if covariant then cs1 // can't box with `*` else expected.captureSet // can't unbox with `*` - if criticalSet.isUniversal then + if criticalSet.isUniversal && expected.isValueType then // We can't box/unbox the universal capability. Leave `actual` as it is // so we get an error in checkConforms. This tends to give better error // messages than disallowing the root capability in `criticalSet`. - capt.println(i"cannot box/unbox $actual vs $expected") + if ctx.settings.YccDebug.value then + println(i"cannot box/unbox $actual vs $expected") actual else // Disallow future addition of `*` to `criticalSet`. 
@@ -627,20 +787,12 @@ class CheckCaptures extends Recheck, SymTransformer: |since one of their capture sets contains the root capability `*`""", pos) } - if covariant == actual.isBoxed then markFree(refs, pos) - CapturingType(parent1, refs, boxed = !actual.isBoxed) + if !insertBox then // unboxing + markFree(criticalSet, pos) + recon(CapturingType(parent1, cs1, !actualIsBoxed)) else - actual.derivedCapturingType(parent1, refs) - case actual @ AppliedType(tycon, args) if defn.isNonRefinedFunction(actual) => - adaptFun(actual, args.init, args.last, expected, covariant, - (aargs1, ares1) => actual.derivedAppliedType(tycon, aargs1 :+ ares1)) - case actual @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionType(actual) => - // TODO Find a way to combine handling of generic and dependent function types (here and elsewhere) - adaptFun(actual, rinfo.paramInfos, rinfo.resType, expected, covariant, - (aargs1, ares1) => - rinfo.derivedLambdaType(paramInfos = aargs1, resType = ares1) - .toFunctionType(isJava = false, alwaysDependent = true)) - case _ => actual + recon(CapturingType(parent1, cs1, actualIsBoxed)) + } var actualw = actual.widenDealias actual match @@ -661,6 +813,7 @@ class CheckCaptures extends Recheck, SymTransformer: override def checkUnit(unit: CompilationUnit)(using Context): Unit = Setup(preRecheckPhase, thisPhase, recheckDef) .traverse(ctx.compilationUnit.tpdTree) + //println(i"SETUP:\n${Recheck.addRecheckedTypes.transform(ctx.compilationUnit.tpdTree)}") withCaptureSetsExplained { super.checkUnit(unit) checkSelfTypes(unit.tpdTree) @@ -697,13 +850,21 @@ class CheckCaptures extends Recheck, SymTransformer: cls => !parentTrees(cls).exists(ptree => parentTrees.contains(ptree.tpe.classSymbol)) } assert(roots.nonEmpty) - for root <- roots do - checkParents(root, parentTrees(root)) + for case root: ClassSymbol <- roots do + checkSelfAgainstParents(root, root.baseClasses) val selfType = root.asClass.classInfo.selfType interpolator(startingVariance = 
-1).traverse(selfType) if !root.isEffectivelySealed then + def matchesExplicitRefsInBaseClass(refs: CaptureSet, cls: ClassSymbol): Boolean = + cls.baseClasses.tail.exists { psym => + val selfType = psym.asClass.givenSelfType + selfType.exists && selfType.captureSet.elems == refs.elems + } selfType match - case CapturingType(_, refs: CaptureSet.Var) if !refs.isUniversal => + case CapturingType(_, refs: CaptureSet.Var) + if !refs.isUniversal && !matchesExplicitRefsInBaseClass(refs, root) => + // Forbid inferred self types unless they are already implied by an explicit + // self type in a parent. report.error( i"""$root needs an explicitly declared self type since its |inferred self type $selfType @@ -719,6 +880,7 @@ class CheckCaptures extends Recheck, SymTransformer: * - Check that externally visible `val`s or `def`s have empty capture sets. If not, * suggest an explicit type. This is so that separate compilation (where external * symbols have empty capture sets) gives the same results as joint compilation. + * - Check that arguments of TypeApplys and AppliedTypes conform to their bounds. */ def postCheck(unit: tpd.Tree)(using Context): Unit = unit.foreachSubTree { @@ -737,15 +899,23 @@ class CheckCaptures extends Recheck, SymTransformer: val isLocal = sym.owner.ownersIterator.exists(_.isTerm) || sym.accessBoundary(defn.RootClass).isContainedIn(sym.topLevelClass) - - // The following classes of definitions need explicit capture types ... - if !isLocal // ... since external capture types are not inferred - || sym.owner.is(Trait) // ... since we do OverridingPairs checking before capture inference - || sym.allOverriddenSymbols.nonEmpty // ... 
since we do override checking before capture inference - then + def canUseInferred = // If canUseInferred is false, all capturing types in the type of `sym` need to be given explicitly + sym.is(Private) // private symbols can always have inferred types + || sym.name.is(DefaultGetterName) // default getters are exempted since otherwise it would be + // too annoying. This is a hole since a default getter's result type + // might leak into a type variable. + || // non-local symbols cannot have inferred types since external capture types are not inferred + isLocal // local symbols still need explicit types if + && !sym.owner.is(Trait) // they are defined in a trait, since we do OverridingPairs checking before capture inference + def isNotPureThis(ref: CaptureRef) = ref match { + case ref: ThisType => !ref.cls.isPureClass + case _ => true + } + if !canUseInferred then val inferred = t.tpt.knownType def checkPure(tp: Type) = tp match - case CapturingType(_, refs) if !refs.elems.isEmpty => + case CapturingType(_, refs) + if !refs.elems.filter(isNotPureThis).isEmpty => val resultStr = if t.isInstanceOf[DefDef] then " result" else "" report.error( em"""Non-local $sym cannot have an inferred$resultStr type @@ -754,8 +924,27 @@ class CheckCaptures extends Recheck, SymTransformer: |The type needs to be declared explicitly.""", t.srcPos) case _ => inferred.foreachPart(checkPure, StopAt.Static) + case t @ TypeApply(fun, args) => + fun.knownType.widen match + case tl: PolyType => + val normArgs = args.lazyZip(tl.paramInfos).map { (arg, bounds) => + arg.withType(arg.knownType.forceBoxStatus( + bounds.hi.isBoxedCapturing | bounds.lo.isBoxedCapturing)) + } + checkBounds(normArgs, tl) + case _ => case _ => } - + if !ctx.reporter.errorsReported then + // We don't report errors here if previous errors were reported, because other + // errors often result in bad applied types, but flagging these bad types gives + often worse error messages than the original errors. 
+ val checkApplied = new TreeTraverser: + def traverse(t: Tree)(using Context) = t match + case tree: InferredTypeTree => + case tree: New => + case tree: TypeTree => checkAppliedTypesIn(tree.withKnownType) + case _ => traverseChildren(t) + checkApplied.traverse(unit) end CaptureChecker end CheckCaptures diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index a3e88699e424..95f2e71437a8 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -98,7 +98,10 @@ extends tpd.TreeTraverser: def addCaptureRefinements(tp: Type): Type = tp match case _: TypeRef | _: AppliedType if tp.typeParams.isEmpty => tp.typeSymbol match - case cls: ClassSymbol if !defn.isFunctionClass(cls) => + case cls: ClassSymbol + if !defn.isFunctionClass(cls) && !cls.is(JavaDefined) => + // We assume that Java classes can refer to capturing Scala types only indirectly, + // using type parameters. Hence, no need to refine them. cls.paramGetters.foldLeft(tp) { (core, getter) => if getter.termRef.isTracked then val getterType = tp.memberInfo(getter).strippedDealias @@ -117,14 +120,14 @@ extends tpd.TreeTraverser: case tp: (TypeRef | AppliedType) => val sym = tp.typeSymbol if sym.isClass then - tp.typeSymbol == defn.AnyClass + sym == defn.AnyClass // we assume Any is a shorthand of {*} Any, so if Any is an upper // bound, the type is taken to be impure. else superTypeIsImpure(tp.superType) case tp: (RefinedOrRecType | MatchType) => superTypeIsImpure(tp.underlying) case tp: AndType => - superTypeIsImpure(tp.tp1) || canHaveInferredCapture(tp.tp2) + superTypeIsImpure(tp.tp1) || needsVariable(tp.tp2) case tp: OrType => superTypeIsImpure(tp.tp1) && superTypeIsImpure(tp.tp2) case _ => @@ -132,23 +135,26 @@ extends tpd.TreeTraverser: }.showing(i"super type is impure $tp = $result", capt) /** Should a capture set variable be added on type `tp`? 
*/ - def canHaveInferredCapture(tp: Type): Boolean = { + def needsVariable(tp: Type): Boolean = { tp.typeParams.isEmpty && tp.match case tp: (TypeRef | AppliedType) => val tp1 = tp.dealias - if tp1 ne tp then canHaveInferredCapture(tp1) + if tp1 ne tp then needsVariable(tp1) else val sym = tp1.typeSymbol - if sym.isClass then !sym.isValueClass && sym != defn.AnyClass + if sym.isClass then + !sym.isPureClass && sym != defn.AnyClass else superTypeIsImpure(tp1) case tp: (RefinedOrRecType | MatchType) => - canHaveInferredCapture(tp.underlying) + needsVariable(tp.underlying) case tp: AndType => - canHaveInferredCapture(tp.tp1) && canHaveInferredCapture(tp.tp2) + needsVariable(tp.tp1) && needsVariable(tp.tp2) case tp: OrType => - canHaveInferredCapture(tp.tp1) || canHaveInferredCapture(tp.tp2) - case CapturingType(_, refs) => - refs.isConst && !refs.isUniversal + needsVariable(tp.tp1) || needsVariable(tp.tp2) + case CapturingType(parent, refs) => + needsVariable(parent) + && refs.isConst // if refs is a variable, no need to add another + && !refs.isUniversal // if refs is {*}, an added variable would not change anything case _ => false }.showing(i"can have inferred capture $tp = $result", capt) @@ -181,7 +187,7 @@ extends tpd.TreeTraverser: CapturingType(OrType(parent1, tp2, tp.isSoft), refs1, tp1.isBoxed) case tp @ OrType(tp1, tp2 @ CapturingType(parent2, refs2)) => CapturingType(OrType(tp1, parent2, tp.isSoft), refs2, tp2.isBoxed) - case _ if canHaveInferredCapture(tp) => + case _ if needsVariable(tp) => val cs = tp.dealias match case CapturingType(_, refs) => CaptureSet.Var(refs.elems) case _ => CaptureSet.Var() @@ -331,11 +337,12 @@ extends tpd.TreeTraverser: else expandAbbreviations(tp1) /** Transform type of type tree, and remember the transformed type as the type the tree */ - private def transformTT(tree: TypeTree, boxed: Boolean)(using Context): Unit = - tree.rememberType( - if tree.isInstanceOf[InferredTypeTree] - then transformInferredType(tree.tpe, boxed) - 
else transformExplicitType(tree.tpe, boxed)) + private def transformTT(tree: TypeTree, boxed: Boolean, exact: Boolean)(using Context): Unit = + if !tree.hasRememberedType then + tree.rememberType( + if tree.isInstanceOf[InferredTypeTree] && !exact + then transformInferredType(tree.tpe, boxed) + else transformExplicitType(tree.tpe, boxed)) /** Substitute parameter symbols in `from` to paramRefs in corresponding * method or poly types `to`. We use a single BiTypeMap to do everything. @@ -376,20 +383,32 @@ extends tpd.TreeTraverser: def traverse(tree: Tree)(using Context): Unit = tree match - case tree: DefDef if isExcluded(tree.symbol) => - return - case tree @ ValDef(_, tpt: TypeTree, _) if tree.symbol.is(Mutable) => - transformTT(tpt, boxed = true) // types of mutable variables are boxed + case tree: DefDef => + if isExcluded(tree.symbol) then + return + tree.tpt match + case tpt: TypeTree if tree.symbol.allOverriddenSymbols.hasNext => + tree.paramss.foreach(traverse) + transformTT(tpt, boxed = false, exact = true) + traverse(tree.rhs) + //println(i"TYPE of ${tree.symbol.showLocated} = ${tpt.knownType}") + case _ => + traverseChildren(tree) + case tree @ ValDef(_, tpt: TypeTree, _) => + transformTT(tpt, + boxed = tree.symbol.is(Mutable), // types of mutable variables are boxed + exact = tree.symbol.allOverriddenSymbols.hasNext // types of symbols that override a parent don't get a capture set + ) traverse(tree.rhs) case tree @ TypeApply(fn, args) => traverse(fn) for case arg: TypeTree <- args do - transformTT(arg, boxed = true) // type arguments in type applications are boxed + transformTT(arg, boxed = true, exact = false) // type arguments in type applications are boxed case _ => traverseChildren(tree) tree match case tree: TypeTree => - transformTT(tree, boxed = false) // other types are not boxed + transformTT(tree, boxed = false, exact = false) // other types are not boxed case tree: ValOrDefDef => val sym = tree.symbol diff --git 
a/compiler/src/dotty/tools/dotc/cc/Synthetics.scala b/compiler/src/dotty/tools/dotc/cc/Synthetics.scala index e8f7fd502baa..dacbd27e0f35 100644 --- a/compiler/src/dotty/tools/dotc/cc/Synthetics.scala +++ b/compiler/src/dotty/tools/dotc/cc/Synthetics.scala @@ -31,10 +31,12 @@ object Synthetics: * The types of these symbols are transformed in a special way without * looking at the definitions's RHS */ - def needsTransform(sym: SymDenotation)(using Context): Boolean = - isSyntheticCopyMethod(sym) - || isSyntheticCompanionMethod(sym, nme.apply, nme.unapply) - || isSyntheticCopyDefaultGetterMethod(sym) + def needsTransform(symd: SymDenotation)(using Context): Boolean = + isSyntheticCopyMethod(symd) + || isSyntheticCompanionMethod(symd, nme.apply, nme.unapply) + || isSyntheticCopyDefaultGetterMethod(symd) + || (symd.symbol eq defn.Object_eq) + || (symd.symbol eq defn.Object_ne) /** Method is excluded from regular capture checking. * Excluded are synthetic class members @@ -141,13 +143,16 @@ object Synthetics: /** Drop added capture information from the type of an `unapply` */ private def dropUnapplyCaptures(info: Type)(using Context): Type = info match case info: MethodType => - val CapturingType(oldParamInfo, _) :: Nil = info.paramInfos: @unchecked - def oldResult(tp: Type): Type = tp match - case tp: MethodOrPoly => - tp.derivedLambdaType(resType = oldResult(tp.resType)) - case CapturingType(tp, _) => - tp - info.derivedLambdaType(paramInfos = oldParamInfo :: Nil, resType = oldResult(info.resType)) + info.paramInfos match + case CapturingType(oldParamInfo, _) :: Nil => + def oldResult(tp: Type): Type = tp match + case tp: MethodOrPoly => + tp.derivedLambdaType(resType = oldResult(tp.resType)) + case CapturingType(tp, _) => + tp + info.derivedLambdaType(paramInfos = oldParamInfo :: Nil, resType = oldResult(info.resType)) + case _ => + info case info: PolyType => info.derivedLambdaType(resType = dropUnapplyCaptures(info.resType)) @@ -163,7 +168,9 @@ object Synthetics: 
sym.copySymDenotation(info = addUnapplyCaptures(sym.info)) case nme.apply | nme.copy => sym.copySymDenotation(info = addCaptureDeps(sym.info)) - + case n if n == nme.eq || n == nme.ne => + sym.copySymDenotation(info = + MethodType(defn.ObjectType.capturing(CaptureSet.universal) :: Nil, defn.BooleanType)) /** If `sym` refers to a synthetic apply, unapply, copy, or copy default getter method * of a case class, transform it back to what it was before the CC phase. @@ -176,5 +183,7 @@ object Synthetics: sym.copySymDenotation(info = dropUnapplyCaptures(sym.info)) case nme.apply | nme.copy => sym.copySymDenotation(info = dropCaptureDeps(sym.info)) + case n if n == nme.eq || n == nme.ne => + sym.copySymDenotation(info = defn.methOfAnyRef(defn.BooleanType)) end Synthetics \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/config/Config.scala b/compiler/src/dotty/tools/dotc/config/Config.scala index 1b0fea9184d1..cbd50429492e 100644 --- a/compiler/src/dotty/tools/dotc/config/Config.scala +++ b/compiler/src/dotty/tools/dotc/config/Config.scala @@ -22,6 +22,11 @@ object Config { */ inline val checkConstraintsNonCyclic = false + /** Check that reverse dependencies in constraints are correct and complete. + * Can also be enabled using -Ycheck-constraint-deps. + */ + inline val checkConstraintDeps = false + /** Check that each constraint resulting from a subtype test * is satisfiable. Also check that a type variable instantiation * satisfies its constraints. @@ -184,6 +189,9 @@ object Config { /** If set, prints a trace of all symbol completions */ inline val showCompletions = false + /** If set, show variable/variable reverse dependencies when printing constraints. */ + inline val showConstraintDeps = true + /** If set, method results that are context functions are flattened by adding * the parameters of the context function results to the methods themselves. * This is an optimization that reduces closure allocations. 
@@ -240,7 +248,7 @@ object Config { */ inline val printCaptureSetsAsPrefix = true - /** If true, allow mappping capture set variables under -Ycc with maps that are neither + /** If true, allow mapping capture set variables under captureChecking with maps that are neither * bijective nor idempotent. We currently do now know how to do this correctly in all * cases, though. */ diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index 4a87f5b4a537..e7117f542384 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -28,6 +28,10 @@ object Feature: val symbolLiterals = deprecated("symbolLiterals") val fewerBraces = experimental("fewerBraces") val saferExceptions = experimental("saferExceptions") + val pureFunctions = experimental("pureFunctions") + val captureChecking = experimental("captureChecking") + + val globalOnlyImports: Set[TermName] = Set(pureFunctions, captureChecking) /** Is `feature` enabled by by a command-line setting? The enabling setting is * @@ -75,6 +79,28 @@ object Feature: def scala2ExperimentalMacroEnabled(using Context) = enabled(scala2macros) + /** Is pureFunctions enabled for this compilation unit? */ + def pureFunsEnabled(using Context) = + enabledBySetting(pureFunctions) + || ctx.compilationUnit.knowsPureFuns + || ccEnabled + + /** Is captureChecking enabled for this compilation unit? */ + def ccEnabled(using Context) = + enabledBySetting(captureChecking) + || ctx.compilationUnit.needsCaptureChecking + + /** Is pureFunctions enabled for any of the currently compiled compilation units? */ + def pureFunsEnabledSomewhere(using Context) = + enabledBySetting(pureFunctions) + || ctx.run != null && ctx.run.nn.pureFunsImportEncountered + || ccEnabledSomewhere + + /** Is captureChecking enabled for any of the currently compiled compilation units? 
*/ + def ccEnabledSomewhere(using Context) = + enabledBySetting(captureChecking) + || ctx.run != null && ctx.run.nn.ccImportEncountered + def sourceVersionSetting(using Context): SourceVersion = SourceVersion.valueOf(ctx.settings.source.value) @@ -83,7 +109,11 @@ object Feature: case Some(v) => v case none => sourceVersionSetting - def migrateTo3(using Context): Boolean = sourceVersion == `3.0-migration` + def migrateTo3(using Context): Boolean = + sourceVersion == `3.0-migration` + + def fewerBracesEnabled(using Context) = + sourceVersion.isAtLeast(`3.3`) || enabled(fewerBraces) /** If current source migrates to `version`, issue given warning message * and return `true`, otherwise return `false`. @@ -121,4 +151,21 @@ object Feature: def isExperimentalEnabled(using Context): Boolean = Properties.experimental && !ctx.settings.YnoExperimental.value + /** Handle language import `import language..` if it is one + * of the global imports `pureFunctions` or `captureChecking`. In this case + * make the compilation unit's and current run's fields accordingly. 
+ * @return true iff import that was handled + */ + def handleGlobalLanguageImport(prefix: TermName, imported: Name)(using Context): Boolean = + val fullFeatureName = QualifiedName(prefix, imported.asTermName) + if fullFeatureName == pureFunctions then + ctx.compilationUnit.knowsPureFuns = true + if ctx.run != null then ctx.run.nn.pureFunsImportEncountered = true + true + else if fullFeatureName == captureChecking then + ctx.compilationUnit.needsCaptureChecking = true + if ctx.run != null then ctx.run.nn.ccImportEncountered = true + true + else + false end Feature diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 9e34f8d726b5..f7743dddda4e 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -17,7 +17,7 @@ class ScalaSettings extends SettingGroup with AllScalaSettings object ScalaSettings: // Keep synchronized with `classfileVersion` in `BCodeIdiomatic` private val minTargetVersion = 8 - private val maxTargetVersion = 19 + private val maxTargetVersion = 20 def supportedTargetVersions: List[String] = (minTargetVersion to maxTargetVersion).toList.map(_.toString) @@ -64,7 +64,6 @@ trait AllScalaSettings extends CommonScalaSettings, PluginSettings, VerboseSetti val oldSyntax: Setting[Boolean] = BooleanSetting("-old-syntax", "Require `(...)` around conditions.") val indent: Setting[Boolean] = BooleanSetting("-indent", "Together with -rewrite, remove {...} syntax when possible due to significant indentation.") val noindent: Setting[Boolean] = BooleanSetting("-no-indent", "Require classical {...} syntax, indentation is not significant.", aliases = List("-noindent")) - val YindentColons: Setting[Boolean] = BooleanSetting("-Yindent-colons", "(disabled: use -language:experimental.fewerBraces instead)") /* Decompiler settings */ val printTasty: Setting[Boolean] = BooleanSetting("-print-tasty", "Prints the raw 
tasty.", aliases = List("--print-tasty")) @@ -149,6 +148,8 @@ private sealed trait VerboseSettings: val Vprofile: Setting[Boolean] = BooleanSetting("-Vprofile", "Show metrics about sources and internal representations to estimate compile-time complexity.") val VprofileSortedBy = ChoiceSetting("-Vprofile-sorted-by", "key", "Show metrics about sources and internal representations sorted by given column name", List("name", "path", "lines", "tokens", "tasty", "complexity"), "") val VprofileDetails = IntSetting("-Vprofile-details", "Show metrics about sources and internal representations of the most complex methods", 0) + val VreplMaxPrintElements: Setting[Int] = IntSetting("-Vrepl-max-print-elements", "Number of elements to be printed before output is truncated.", 1000) + val VreplMaxPrintCharacters: Setting[Int] = IntSetting("-Vrepl-max-print-characters", "Number of characters to be printed before output is truncated.", 50000) /** -W "Warnings" settings */ @@ -307,6 +308,7 @@ private sealed trait YSettings: val YforceSbtPhases: Setting[Boolean] = BooleanSetting("-Yforce-sbt-phases", "Run the phases used by sbt for incremental compilation (ExtractDependencies and ExtractAPI) even if the compiler is ran outside of sbt, for debugging.") val YdumpSbtInc: Setting[Boolean] = BooleanSetting("-Ydump-sbt-inc", "For every compiled foo.scala, output the API representation and dependencies used for sbt incremental compilation in foo.inc, implies -Yforce-sbt-phases.") val YcheckAllPatmat: Setting[Boolean] = BooleanSetting("-Ycheck-all-patmat", "Check exhaustivity and redundancy of all pattern matching (used for testing the algorithm).") + val YcheckConstraintDeps: Setting[Boolean] = BooleanSetting("-Ycheck-constraint-deps", "Check dependency tracking in constraints (used for testing the algorithm).") val YretainTrees: Setting[Boolean] = BooleanSetting("-Yretain-trees", "Retain trees for top-level classes, accessible from ClassSymbol#tree") val YshowTreeIds: Setting[Boolean] = 
BooleanSetting("-Yshow-tree-ids", "Uniquely tag all tree nodes in debugging output.") val YfromTastyIgnoreList: Setting[List[String]] = MultiStringSetting("-Yfrom-tasty-ignore-list", "file", "List of `tasty` files in jar files that will not be loaded when using -from-tasty") @@ -326,9 +328,9 @@ private sealed trait YSettings: val YcheckInit: Setting[Boolean] = BooleanSetting("-Ysafe-init", "Ensure safe initialization of objects") val YrequireTargetName: Setting[Boolean] = BooleanSetting("-Yrequire-targetName", "Warn if an operator is defined without a @targetName annotation") val YrecheckTest: Setting[Boolean] = BooleanSetting("-Yrecheck-test", "Run basic rechecking (internal test only)") - val Ycc: Setting[Boolean] = BooleanSetting("-Ycc", "Check captured references (warning: extremely experimental and unstable)") - val YccDebug: Setting[Boolean] = BooleanSetting("-Ycc-debug", "Used in conjunction with -Ycc, debug info for captured references") - val YccNoAbbrev: Setting[Boolean] = BooleanSetting("-Ycc-no-abbrev", "Used in conjunction with -Ycc, suppress type abbreviations") + val YccDebug: Setting[Boolean] = BooleanSetting("-Ycc-debug", "Used in conjunction with captureChecking language import, debug info for captured references") + val YccNoAbbrev: Setting[Boolean] = BooleanSetting("-Ycc-no-abbrev", "Used in conjunction with captureChecking language import, suppress type abbreviations") + val YlightweightLazyVals: Setting[Boolean] = BooleanSetting("-Ylightweight-lazy-vals", "Use experimental lightweight implementation of lazy vals") /** Area-specific debug output */ val YexplainLowlevel: Setting[Boolean] = BooleanSetting("-Yexplain-lowlevel", "When explaining type errors, show types at a lower level.") diff --git a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala index 545e2f2d9b42..4b9b1b247856 100644 --- a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala +++ 
b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala @@ -8,6 +8,7 @@ import util.Property enum SourceVersion: case `3.0-migration`, `3.0`, `3.1` // Note: do not add `3.1-migration` here, 3.1 is the same language as 3.0. case `3.2-migration`, `3.2` + case `3.3-migration`, `3.3` case `future-migration`, `future` val isMigrating: Boolean = toString.endsWith("-migration") @@ -18,7 +19,7 @@ enum SourceVersion: def isAtLeast(v: SourceVersion) = stable.ordinal >= v.ordinal object SourceVersion extends Property.Key[SourceVersion]: - def defaultSourceVersion = `3.2` + def defaultSourceVersion = `3.3` /** language versions that may appear in a language import, are deprecated, but not removed from the standard library. */ val illegalSourceVersionNames = List("3.1-migration").map(_.toTermName) diff --git a/compiler/src/dotty/tools/dotc/core/Annotations.scala b/compiler/src/dotty/tools/dotc/core/Annotations.scala index 05210ec60811..aa8ead280bbf 100644 --- a/compiler/src/dotty/tools/dotc/core/Annotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Annotations.scala @@ -1,4 +1,5 @@ -package dotty.tools.dotc +package dotty.tools +package dotc package core import Symbols._, Types._, Contexts._, Constants._ @@ -19,6 +20,8 @@ object Annotations { def symbol(using Context): Symbol = annotClass(tree) + def hasSymbol(sym: Symbol)(using Context) = symbol == sym + def matches(cls: Symbol)(using Context): Boolean = symbol.derivesFrom(cls) def appliesToModule: Boolean = true // for now; see remark in SymDenotations @@ -58,7 +61,7 @@ object Annotations { if tm.isRange(x) then x else val tp1 = tm(tree.tpe) - foldOver(if tp1 =:= tree.tpe then x else tp1, tree) + foldOver(if tp1 frozen_=:= tree.tpe then x else tp1, tree) val diff = findDiff(NoType, args) if tm.isRange(diff) then EmptyAnnotation else if diff.exists then derivedAnnotation(tm.mapOver(tree)) @@ -69,7 +72,7 @@ object Annotations { val args = arguments if args.isEmpty then false else tree.existsSubTree { - case id: Ident => 
id.tpe match + case id: Ident => id.tpe.stripped match case TermParamRef(tl1, _) => tl eq tl1 case _ => false case _ => false @@ -126,6 +129,11 @@ object Annotations { override def isEvaluated: Boolean = myTree.isInstanceOf[Tree @unchecked] } + class DeferredSymAndTree(symFn: Context ?=> Symbol, treeFn: Context ?=> Tree) + extends LazyAnnotation: + protected var mySym: Symbol | (Context ?=> Symbol) | Null = ctx ?=> symFn(using ctx) + protected var myTree: Tree | (Context ?=> Tree) | Null = ctx ?=> treeFn(using ctx) + /** An annotation indicating the body of a right-hand side, * typically of an inline method. Treated specially in * pickling/unpickling and TypeTreeMaps @@ -192,18 +200,15 @@ object Annotations { apply(New(atp, args)) /** Create an annotation where the tree is computed lazily. */ - def deferred(sym: Symbol)(treeFn: Context ?=> Tree)(using Context): Annotation = + def deferred(sym: Symbol)(treeFn: Context ?=> Tree): Annotation = new LazyAnnotation { protected var myTree: Tree | (Context ?=> Tree) | Null = ctx ?=> treeFn(using ctx) protected var mySym: Symbol | (Context ?=> Symbol) | Null = sym } /** Create an annotation where the symbol and the tree are computed lazily. 
*/ - def deferredSymAndTree(symFn: Context ?=> Symbol)(treeFn: Context ?=> Tree)(using Context): Annotation = - new LazyAnnotation { - protected var mySym: Symbol | (Context ?=> Symbol) | Null = ctx ?=> symFn(using ctx) - protected var myTree: Tree | (Context ?=> Tree) | Null = ctx ?=> treeFn(using ctx) - } + def deferredSymAndTree(symFn: Context ?=> Symbol)(treeFn: Context ?=> Tree): Annotation = + DeferredSymAndTree(symFn, treeFn) /** Extractor for child annotations */ object Child { diff --git a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala index 4b441d512dec..a61701eee2d7 100644 --- a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala +++ b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala @@ -149,7 +149,7 @@ class CheckRealizable(using Context) { */ private def boundsRealizability(tp: Type) = { - val memberProblems = withMode(Mode.CheckBounds) { + val memberProblems = withMode(Mode.CheckBoundsOrSelfType) { for { mbr <- tp.nonClassTypeMembers if !(mbr.info.loBound <:< mbr.info.hiBound) @@ -157,7 +157,7 @@ class CheckRealizable(using Context) { yield new HasProblemBounds(mbr.name, mbr.info) } - val refinementProblems = withMode(Mode.CheckBounds) { + val refinementProblems = withMode(Mode.CheckBoundsOrSelfType) { for { name <- refinedNames(tp) if (name.isTypeName) diff --git a/compiler/src/dotty/tools/dotc/core/Constraint.scala b/compiler/src/dotty/tools/dotc/core/Constraint.scala index 07b6e71cdcc9..fb87aed77c41 100644 --- a/compiler/src/dotty/tools/dotc/core/Constraint.scala +++ b/compiler/src/dotty/tools/dotc/core/Constraint.scala @@ -4,6 +4,7 @@ package core import Types._, Contexts._ import printing.Showable +import util.{SimpleIdentitySet, SimpleIdentityMap} /** Constraint over undetermined type parameters. Constraints are built * over values of the following types: @@ -128,7 +129,7 @@ abstract class Constraint extends Showable { /** Is `tv` marked as hard in the constraint? 
*/ def isHard(tv: TypeVar): Boolean - + /** The same as this constraint, but with `tv` marked as hard. */ def withHard(tv: TypeVar)(using Context): This @@ -165,15 +166,32 @@ abstract class Constraint extends Showable { */ def hasConflictingTypeVarsFor(tl: TypeLambda, that: Constraint): Boolean - /** Check that no constrained parameter contains itself as a bound */ - def checkNonCyclic()(using Context): this.type - /** Does `param` occur at the toplevel in `tp` ? * Toplevel means: the type itself or a factor in some * combination of `&` or `|` types. */ def occursAtToplevel(param: TypeParamRef, tp: Type)(using Context): Boolean + /** A string that shows the reverse dependencies maintained by this constraint + * (coDeps and contraDeps for OrderingConstraints). + */ + def depsToString(using Context): String + + /** Does the constraint restricted to variables outside `except` depend on `tv` + * in the given direction `co`? + * @param `co` If true, test whether the constraint would change if the variable is made larger + * otherwise, test whether the constraint would change if the variable is made smaller. + */ + def dependsOn(tv: TypeVar, except: TypeVars, co: Boolean)(using Context): Boolean + + /** Depending on Config settings: + * - Under `checkConstraintsNonCyclic`, check that no constrained + * parameter contains itself as a bound. + * - Under `checkConstraintDeps`, check that reverse dependencies in + * constraints are correct and complete. 
+ */ + def checkWellFormed()(using Context): this.type + /** Check that constraint only refers to TypeParamRefs bound by itself */ def checkClosed()(using Context): Unit diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index 1dfa04822766..4ed01a5fbe0d 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -58,6 +58,12 @@ trait ConstraintHandling { */ protected var comparedTypeLambdas: Set[TypeLambda] = Set.empty + /** Used for match type reduction: If false, we don't recognize an abstract type + * to be a subtype of any of its base classes. This is in place only at the + * toplevel; it is turned on again when we add parts of the scrutinee to the constraint. + */ + protected var canWidenAbstract: Boolean = true + protected var myNecessaryConstraintsOnly = false /** When collecting the constraints needed for a particular subtyping * judgment to be true, we sometimes need to approximate the constraint @@ -550,6 +556,13 @@ trait ConstraintHandling { inst end approximation + private def isTransparent(tp: Type, traitOnly: Boolean)(using Context): Boolean = tp match + case AndType(tp1, tp2) => + isTransparent(tp1, traitOnly) && isTransparent(tp2, traitOnly) + case _ => + val cls = tp.underlyingClassRef(refinementOK = false).typeSymbol + cls.isTransparentClass && (!traitOnly || cls.is(Trait)) + /** If `tp` is an intersection such that some operands are transparent trait instances + * and others are not, replace as many transparent trait instances as possible with Any + * as long as the result is still a subtype of `bound`. 
But fall back to the @@ -562,18 +575,17 @@ trait ConstraintHandling { var dropped: List[Type] = List() // the types dropped so far, last one on top def dropOneTransparentTrait(tp: Type): Type = - val tpd = tp.dealias - if tpd.typeSymbol.isTransparentTrait && !tpd.isLambdaSub && !kept.contains(tpd) then - dropped = tpd :: dropped + if isTransparent(tp, traitOnly = true) && !kept.contains(tp) then + dropped = tp :: dropped defn.AnyType - else tpd match + else tp match case AndType(tp1, tp2) => val tp1w = dropOneTransparentTrait(tp1) if tp1w ne tp1 then tp1w & tp2 else val tp2w = dropOneTransparentTrait(tp2) if tp2w ne tp2 then tp1 & tp2w - else tpd + else tp case _ => tp @@ -648,7 +660,16 @@ trait ConstraintHandling { val wideInst = if isSingleton(bound) then inst - else dropTransparentTraits(widenIrreducible(widenOr(widenSingle(inst))), bound) + else + val widenedFromSingle = widenSingle(inst) + val widenedFromUnion = widenOr(widenedFromSingle) + val widened = + if (widenedFromUnion ne widenedFromSingle) && isTransparent(widenedFromUnion, traitOnly = false) then + widenedFromSingle + else + dropTransparentTraits(widenedFromUnion, bound) + widenIrreducible(widened) + wideInst match case wideInst: TypeRef if wideInst.symbol.is(Module) => TermRef(wideInst.prefix, wideInst.symbol.sourceModule) @@ -839,13 +860,17 @@ trait ConstraintHandling { //checkPropagated(s"adding $description")(true) // DEBUG in case following fails checkPropagated(s"added $description") { addConstraintInvocations += 1 + val saved = canWidenAbstract + canWidenAbstract = true try bound match case bound: TypeParamRef if constraint contains bound => addParamBound(bound) case _ => val pbound = avoidLambdaParams(bound) kindCompatible(param, pbound) && addBoundTransitively(param, pbound, !fromBelow) - finally addConstraintInvocations -= 1 + finally + canWidenAbstract = saved + addConstraintInvocations -= 1 } end addConstraint diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala 
b/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala index c1b23888b491..d2b1246a8149 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala @@ -12,9 +12,12 @@ trait ConstraintRunInfo { self: Run => maxSize = size maxConstraint = c } - def printMaxConstraint()(using Context): Unit = { - val printer = if (ctx.settings.YdetailedStats.value) default else typr - if (maxSize > 0) printer.println(s"max constraint = ${maxConstraint.nn.show}") - } + def printMaxConstraint()(using Context): Unit = + if maxSize > 0 then + val printer = if ctx.settings.YdetailedStats.value then default else typr + printer.println(s"max constraint size: $maxSize") + try printer.println(s"max constraint = ${maxConstraint.nn.show}") + catch case ex: StackOverflowError => printer.println("max constraint cannot be printed due to stack overflow") + protected def reset(): Unit = maxConstraint = null } diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index 919598c41d6e..a6c1a24ebf96 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -156,9 +156,9 @@ object Contexts { final def owner: Symbol = _owner /** The current tree */ - private var _tree: Tree[? >: Untyped]= _ - protected def tree_=(tree: Tree[? >: Untyped]): Unit = _tree = tree - final def tree: Tree[? >: Untyped] = _tree + private var _tree: Tree[?]= _ + protected def tree_=(tree: Tree[?]): Unit = _tree = tree + final def tree: Tree[?] = _tree /** The current scope */ private var _scope: Scope = _ @@ -469,7 +469,7 @@ object Contexts { } /** The context of expression `expr` seen as a member of a statement sequence */ - def exprContext(stat: Tree[? 
>: Untyped], exprOwner: Symbol): Context = + def exprContext(stat: Tree[?], exprOwner: Symbol): Context = if (exprOwner == this.owner) this else if (untpd.isSuperConstrCall(stat) && this.owner.isClass) superCallContext else fresh.setOwner(exprOwner) @@ -592,7 +592,7 @@ object Contexts { assert(owner != NoSymbol) this.owner = owner this - def setTree(tree: Tree[? >: Untyped]): this.type = + def setTree(tree: Tree[?]): this.type = util.Stats.record("Context.setTree") this.tree = tree this @@ -814,7 +814,7 @@ object Contexts { .updated(notNullInfosLoc, Nil) .updated(compilationUnitLoc, NoCompilationUnit) searchHistory = new SearchRoot - gadt = EmptyGadtConstraint + gadt = GadtConstraint.empty } @sharable object NoContext extends Context((null: ContextBase | Null).uncheckedNN) { diff --git a/compiler/src/dotty/tools/dotc/core/Decorators.scala b/compiler/src/dotty/tools/dotc/core/Decorators.scala index 59440d1cb965..54faf9a41177 100644 --- a/compiler/src/dotty/tools/dotc/core/Decorators.scala +++ b/compiler/src/dotty/tools/dotc/core/Decorators.scala @@ -9,6 +9,7 @@ import scala.util.control.NonFatal import Contexts._, Names._, Phases._, Symbols._ import printing.{ Printer, Showable }, printing.Formatting._, printing.Texts._ import transform.MegaPhase +import reporting.{Message, NoExplanation} /** This object provides useful implicit decorators for types defined elsewhere */ object Decorators { @@ -57,6 +58,9 @@ object Decorators { padding + s.replace("\n", "\n" + padding) end extension + extension (str: => String) + def toMessage: Message = reporting.NoExplanation(str) + /** Implements a findSymbol method on iterators of Symbols that * works like find but avoids Option, replacing None with NoSymbol. */ @@ -270,6 +274,9 @@ object Decorators { s"[cannot display due to $msg, raw string = $x]" case _ => String.valueOf(x).nn + /** Returns the simple class name of `x`. 
*/ + def className: String = getClass.getSimpleName.nn + extension [T](x: T) def assertingErrorsReported(using Context): T = { assert(ctx.reporter.errorsReported) diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 83d945352321..b43857b7d28c 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -14,6 +14,7 @@ import typer.ImportInfo.RootRef import Comments.CommentsContext import Comments.Comment import util.Spans.NoSpan +import config.Feature import Symbols.requiredModuleRef import cc.{CapturingType, CaptureSet, EventuallyCapturingType} @@ -469,7 +470,6 @@ class Definitions { @tu lazy val andType: TypeSymbol = enterBinaryAlias(tpnme.AND, AndType(_, _)) @tu lazy val orType: TypeSymbol = enterBinaryAlias(tpnme.OR, OrType(_, _, soft = false)) - @tu lazy val captureRoot: TermSymbol = enterPermanentSymbol(nme.CAPTURE_ROOT, AnyType).asTerm /** Method representing a throw */ @tu lazy val throwMethod: TermSymbol = enterMethod(OpsPackageClass, nme.THROWkw, @@ -943,26 +943,32 @@ class Definitions { @tu lazy val RuntimeTuplesModule: Symbol = requiredModule("scala.runtime.Tuples") @tu lazy val RuntimeTuplesModuleClass: Symbol = RuntimeTuplesModule.moduleClass - lazy val RuntimeTuples_consIterator: Symbol = RuntimeTuplesModule.requiredMethod("consIterator") - lazy val RuntimeTuples_concatIterator: Symbol = RuntimeTuplesModule.requiredMethod("concatIterator") - lazy val RuntimeTuples_apply: Symbol = RuntimeTuplesModule.requiredMethod("apply") - lazy val RuntimeTuples_cons: Symbol = RuntimeTuplesModule.requiredMethod("cons") - lazy val RuntimeTuples_size: Symbol = RuntimeTuplesModule.requiredMethod("size") - lazy val RuntimeTuples_tail: Symbol = RuntimeTuplesModule.requiredMethod("tail") - lazy val RuntimeTuples_concat: Symbol = RuntimeTuplesModule.requiredMethod("concat") - lazy val RuntimeTuples_toArray: Symbol = 
RuntimeTuplesModule.requiredMethod("toArray") - lazy val RuntimeTuples_productToArray: Symbol = RuntimeTuplesModule.requiredMethod("productToArray") - lazy val RuntimeTuples_isInstanceOfTuple: Symbol = RuntimeTuplesModule.requiredMethod("isInstanceOfTuple") - lazy val RuntimeTuples_isInstanceOfEmptyTuple: Symbol = RuntimeTuplesModule.requiredMethod("isInstanceOfEmptyTuple") - lazy val RuntimeTuples_isInstanceOfNonEmptyTuple: Symbol = RuntimeTuplesModule.requiredMethod("isInstanceOfNonEmptyTuple") + @tu lazy val RuntimeTuples_consIterator: Symbol = RuntimeTuplesModule.requiredMethod("consIterator") + @tu lazy val RuntimeTuples_concatIterator: Symbol = RuntimeTuplesModule.requiredMethod("concatIterator") + @tu lazy val RuntimeTuples_apply: Symbol = RuntimeTuplesModule.requiredMethod("apply") + @tu lazy val RuntimeTuples_cons: Symbol = RuntimeTuplesModule.requiredMethod("cons") + @tu lazy val RuntimeTuples_size: Symbol = RuntimeTuplesModule.requiredMethod("size") + @tu lazy val RuntimeTuples_tail: Symbol = RuntimeTuplesModule.requiredMethod("tail") + @tu lazy val RuntimeTuples_concat: Symbol = RuntimeTuplesModule.requiredMethod("concat") + @tu lazy val RuntimeTuples_toArray: Symbol = RuntimeTuplesModule.requiredMethod("toArray") + @tu lazy val RuntimeTuples_productToArray: Symbol = RuntimeTuplesModule.requiredMethod("productToArray") + @tu lazy val RuntimeTuples_isInstanceOfTuple: Symbol = RuntimeTuplesModule.requiredMethod("isInstanceOfTuple") + @tu lazy val RuntimeTuples_isInstanceOfEmptyTuple: Symbol = RuntimeTuplesModule.requiredMethod("isInstanceOfEmptyTuple") + @tu lazy val RuntimeTuples_isInstanceOfNonEmptyTuple: Symbol = RuntimeTuplesModule.requiredMethod("isInstanceOfNonEmptyTuple") @tu lazy val TupledFunctionTypeRef: TypeRef = requiredClassRef("scala.util.TupledFunction") def TupledFunctionClass(using Context): ClassSymbol = TupledFunctionTypeRef.symbol.asClass def RuntimeTupleFunctionsModule(using Context): Symbol = 
requiredModule("scala.runtime.TupledFunctions") + @tu lazy val CapsModule: Symbol = requiredModule("scala.caps") + @tu lazy val captureRoot: TermSymbol = CapsModule.requiredValue("*") + @tu lazy val CapsUnsafeModule: Symbol = requiredModule("scala.caps.unsafe") + @tu lazy val Caps_unsafeBox: Symbol = CapsUnsafeModule.requiredMethod("unsafeBox") + @tu lazy val Caps_unsafeUnbox: Symbol = CapsUnsafeModule.requiredMethod("unsafeUnbox") + @tu lazy val Caps_unsafeBoxFunArg: Symbol = CapsUnsafeModule.requiredMethod("unsafeBoxFunArg") + // Annotation base classes @tu lazy val AnnotationClass: ClassSymbol = requiredClass("scala.annotation.Annotation") - @tu lazy val ClassfileAnnotationClass: ClassSymbol = requiredClass("scala.annotation.ClassfileAnnotation") @tu lazy val StaticAnnotationClass: ClassSymbol = requiredClass("scala.annotation.StaticAnnotation") @tu lazy val RefiningAnnotationClass: ClassSymbol = requiredClass("scala.annotation.RefiningAnnotation") @@ -972,7 +978,6 @@ class Definitions { @tu lazy val BooleanBeanPropertyAnnot: ClassSymbol = requiredClass("scala.beans.BooleanBeanProperty") @tu lazy val BodyAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Body") @tu lazy val CapabilityAnnot: ClassSymbol = requiredClass("scala.annotation.capability") - @tu lazy val CaptureCheckedAnnot: ClassSymbol = requiredClass("scala.annotation.internal.CaptureChecked") @tu lazy val ChildAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Child") @tu lazy val ContextResultCountAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ContextResultCount") @tu lazy val ProvisionalSuperClassAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ProvisionalSuperClass") @@ -984,6 +989,7 @@ class Definitions { @tu lazy val ErasedParamAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ErasedParam") @tu lazy val InvariantBetweenAnnot: ClassSymbol = requiredClass("scala.annotation.internal.InvariantBetween") @tu lazy val MainAnnot: ClassSymbol 
= requiredClass("scala.main") + @tu lazy val MappedAlternativeAnnot: ClassSymbol = requiredClass("scala.annotation.internal.MappedAlternative") @tu lazy val MigrationAnnot: ClassSymbol = requiredClass("scala.annotation.migration") @tu lazy val NowarnAnnot: ClassSymbol = requiredClass("scala.annotation.nowarn") @tu lazy val TransparentTraitAnnot: ClassSymbol = requiredClass("scala.annotation.transparentTrait") @@ -1007,6 +1013,7 @@ class Definitions { @tu lazy val UncheckedStableAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedStable") @tu lazy val UncheckedVarianceAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedVariance") @tu lazy val VolatileAnnot: ClassSymbol = requiredClass("scala.volatile") + @tu lazy val WithPureFunsAnnot: ClassSymbol = requiredClass("scala.annotation.internal.WithPureFuns") @tu lazy val FieldMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.field") @tu lazy val GetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.getter") @tu lazy val ParamMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.param") @@ -1154,7 +1161,7 @@ class Definitions { /** Extractor for context function types representing by-name parameters, of the form * `() ?=> T`. - * Under -Ycc, this becomes `() ?-> T` or `{r1, ..., rN} () ?-> T`. + * Under purefunctions, this becomes `() ?-> T` or `{r1, ..., rN} () ?-> T`. */ object ByNameFunction: def apply(tp: Type)(using Context): Type = tp match @@ -1341,6 +1348,15 @@ class Definitions { @tu lazy val untestableClasses: Set[Symbol] = Set(NothingClass, NullClass, SingletonClass) + /** Base classes that are assumed to be pure for the purposes of capture checking. + * Every class inheriting from a pure baseclass is pure. 
+ */ + @tu lazy val pureBaseClasses = Set(defn.AnyValClass, defn.ThrowableClass) + + /** Non-inheritable lasses that are assumed to be pure for the purposes of capture checking, + */ + @tu lazy val pureSimpleClasses = Set(StringClass, NothingClass, NullClass) + @tu lazy val AbstractFunctionType: Array[TypeRef] = mkArityArray("scala.runtime.AbstractFunction", MaxImplementedFunctionArity, 0).asInstanceOf[Array[TypeRef]] val AbstractFunctionClassPerRun: PerRun[Array[Symbol]] = new PerRun(AbstractFunctionType.map(_.symbol.asClass)) def AbstractFunctionClass(n: Int)(using Context): Symbol = AbstractFunctionClassPerRun()(using ctx)(n) @@ -1821,20 +1837,53 @@ class Definitions { def isInfix(sym: Symbol)(using Context): Boolean = (sym eq Object_eq) || (sym eq Object_ne) - @tu lazy val assumedTransparentTraits = - Set[Symbol](ComparableClass, ProductClass, SerializableClass, - // add these for now, until we had a chance to retrofit 2.13 stdlib - // we should do a more through sweep through it then. - requiredClass("scala.collection.SortedOps"), - requiredClass("scala.collection.StrictOptimizedSortedSetOps"), - requiredClass("scala.collection.generic.DefaultSerializable"), - requiredClass("scala.collection.generic.IsIterable"), - requiredClass("scala.collection.generic.IsIterableOnce"), - requiredClass("scala.collection.generic.IsMap"), - requiredClass("scala.collection.generic.IsSeq"), - requiredClass("scala.collection.generic.Subtractable"), - requiredClass("scala.collection.immutable.StrictOptimizedSeqOps") - ) + @tu lazy val assumedTransparentNames: Map[Name, Set[Symbol]] = + // add these for now, until we had a chance to retrofit 2.13 stdlib + // we should do a more through sweep through it then. 
+ val strs = Map( + "Any" -> Set("scala"), + "AnyVal" -> Set("scala"), + "Matchable" -> Set("scala"), + "Product" -> Set("scala"), + "Object" -> Set("java.lang"), + "Comparable" -> Set("java.lang"), + "Serializable" -> Set("java.io"), + "BitSetOps" -> Set("scala.collection"), + "IndexedSeqOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "IterableOnceOps" -> Set("scala.collection"), + "IterableOps" -> Set("scala.collection"), + "LinearSeqOps" -> Set("scala.collection", "scala.collection.immutable"), + "MapOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "SeqOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "SetOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "SortedMapOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "SortedOps" -> Set("scala.collection"), + "SortedSetOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "StrictOptimizedIterableOps" -> Set("scala.collection"), + "StrictOptimizedLinearSeqOps" -> Set("scala.collection"), + "StrictOptimizedMapOps" -> Set("scala.collection", "scala.collection.immutable"), + "StrictOptimizedSeqOps" -> Set("scala.collection", "scala.collection.immutable"), + "StrictOptimizedSetOps" -> Set("scala.collection", "scala.collection.immutable"), + "StrictOptimizedSortedMapOps" -> Set("scala.collection", "scala.collection.immutable"), + "StrictOptimizedSortedSetOps" -> Set("scala.collection", "scala.collection.immutable"), + "ArrayDequeOps" -> Set("scala.collection.mutable"), + "DefaultSerializable" -> Set("scala.collection.generic"), + "IsIterable" -> Set("scala.collection.generic"), + "IsIterableLowPriority" -> Set("scala.collection.generic"), + "IsIterableOnce" -> Set("scala.collection.generic"), + "IsIterableOnceLowPriority" -> Set("scala.collection.generic"), + "IsMap" -> 
Set("scala.collection.generic"), + "IsSeq" -> Set("scala.collection.generic")) + strs.map { case (simple, pkgs) => ( + simple.toTypeName, + pkgs.map(pkg => staticRef(pkg.toTermName, isPackage = true).symbol.moduleClass) + ) + } + + def isAssumedTransparent(sym: Symbol): Boolean = + assumedTransparentNames.get(sym.name) match + case Some(pkgs) => pkgs.contains(sym.owner) + case none => false // ----- primitive value class machinery ------------------------------------------ @@ -1977,14 +2026,19 @@ class Definitions { this.initCtx = ctx if (!isInitialized) { // force initialization of every symbol that is synthesized or hijacked by the compiler - val forced = syntheticCoreClasses ++ syntheticCoreMethods ++ ScalaValueClasses() - ++ (JavaEnumClass :: (if ctx.settings.Ycc.value then captureRoot :: Nil else Nil)) - + val forced = + syntheticCoreClasses ++ syntheticCoreMethods ++ ScalaValueClasses() :+ JavaEnumClass isInitialized = true } addSyntheticSymbolsComments } + /** Definitions used in Lazy Vals implementation */ + val LazyValsModuleName = "scala.runtime.LazyVals" + @tu lazy val LazyValsModule = requiredModule(LazyValsModuleName) + @tu lazy val LazyValsWaitingState = requiredClass(s"$LazyValsModuleName.Waiting") + @tu lazy val LazyValsControlState = requiredClass(s"$LazyValsModuleName.LazyValControlState") + def addSyntheticSymbolsComments(using Context): Unit = def add(sym: Symbol, doc: String) = ctx.docCtx.foreach(_.addDocstring(sym, Some(Comment(NoSpan, doc)))) diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index 72428d02f5d3..f23dce020f10 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -350,14 +350,14 @@ object Flags { /** Symbol is a method which should be marked ACC_SYNCHRONIZED */ val (_, Synchronized @ _, _) = newFlags(36, "") - /** Symbol is a Java-style varargs method */ - val (_, JavaVarargs @ _, _) = newFlags(37, "") + /** 
Symbol is a Java-style varargs method / a Java annotation */ + val (_, JavaVarargs @ _, JavaAnnotation @ _) = newFlags(37, "", "") /** Symbol is a Java default method */ val (_, DefaultMethod @ _, _) = newFlags(38, "") /** Symbol is a transparent inline method or trait */ - val (Transparent @ _, _, _) = newFlags(39, "transparent") + val (Transparent @ _, _, TransparentType @ _) = newFlags(39, "transparent") /** Symbol is an enum class or enum case (if used with case) */ val (Enum @ _, EnumVal @ _, _) = newFlags(40, "enum") @@ -477,7 +477,7 @@ object Flags { */ val AfterLoadFlags: FlagSet = commonFlags( FromStartFlags, AccessFlags, Final, AccessorOrSealed, - Abstract, LazyOrTrait, SelfName, JavaDefined, Transparent) + Abstract, LazyOrTrait, SelfName, JavaDefined, JavaAnnotation, Transparent) /** A value that's unstable unless complemented with a Stable flag */ val UnstableValueFlags: FlagSet = Mutable | Method @@ -571,6 +571,7 @@ object Flags { val GivenOrImplicit: FlagSet = Given | Implicit val GivenOrImplicitVal: FlagSet = GivenOrImplicit.toTermFlags val GivenMethod: FlagSet = Given | Method + val LazyGiven: FlagSet = Given | Lazy val InlineOrProxy: FlagSet = Inline | InlineProxy // An inline method or inline argument proxy */ val InlineMethod: FlagSet = Inline | Method val InlineParam: FlagSet = Inline | Param @@ -608,5 +609,4 @@ object Flags { val SyntheticParam: FlagSet = Synthetic | Param val SyntheticTermParam: FlagSet = Synthetic | TermParam val SyntheticTypeParam: FlagSet = Synthetic | TypeParam - val TransparentTrait: FlagSet = Trait | Transparent } diff --git a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala index d8e1c5276ab6..53fc58595472 100644 --- a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala @@ -10,76 +10,25 @@ import util.{SimpleIdentitySet, SimpleIdentityMap} import collection.mutable import printing._ -import 
scala.annotation.internal.sharable +object GadtConstraint: + def apply(): GadtConstraint = empty + def empty: GadtConstraint = + new ProperGadtConstraint(OrderingConstraint.empty, SimpleIdentityMap.empty, SimpleIdentityMap.empty, false) /** Represents GADT constraints currently in scope */ -sealed abstract class GadtConstraint extends Showable { - /** Immediate bounds of `sym`. Does not contain lower/upper symbols (see [[fullBounds]]). */ - def bounds(sym: Symbol)(using Context): TypeBounds | Null - - /** Full bounds of `sym`, including TypeRefs to other lower/upper symbols. - * - * @note this performs subtype checks between ordered symbols. - * Using this in isSubType can lead to infinite recursion. Consider `bounds` instead. - */ - def fullBounds(sym: Symbol)(using Context): TypeBounds | Null - - /** Is `sym1` ordered to be less than `sym2`? */ - def isLess(sym1: Symbol, sym2: Symbol)(using Context): Boolean - - /** Add symbols to constraint, correctly handling inter-dependencies. - * - * @see [[ConstraintHandling.addToConstraint]] - */ - def addToConstraint(syms: List[Symbol])(using Context): Boolean - def addToConstraint(sym: Symbol)(using Context): Boolean = addToConstraint(sym :: Nil) - - /** Further constrain a symbol already present in the constraint. */ - def addBound(sym: Symbol, bound: Type, isUpper: Boolean)(using Context): Boolean - - /** Is the symbol registered in the constraint? - * - * @note this is true even if the symbol is constrained to be equal to another type, unlike [[Constraint.contains]]. 
- */ - def contains(sym: Symbol)(using Context): Boolean - - /** GADT constraint narrows bounds of at least one variable */ - def isNarrowing: Boolean - - /** See [[ConstraintHandling.approximation]] */ - def approximation(sym: Symbol, fromBelow: Boolean, maxLevel: Int = Int.MaxValue)(using Context): Type - - def symbols: List[Symbol] - - def fresh: GadtConstraint - - /** Restore the state from other [[GadtConstraint]], probably copied using [[fresh]] */ - def restore(other: GadtConstraint): Unit - - def debugBoundsDescription(using Context): String -} - -final class ProperGadtConstraint private( +sealed trait GadtConstraint ( private var myConstraint: Constraint, private var mapping: SimpleIdentityMap[Symbol, TypeVar], private var reverseMapping: SimpleIdentityMap[TypeParamRef, Symbol], private var wasConstrained: Boolean -) extends GadtConstraint with ConstraintHandling { - import dotty.tools.dotc.config.Printers.{gadts, gadtsConstr} +) extends Showable { + this: ConstraintHandling => - def this() = this( - myConstraint = new OrderingConstraint(SimpleIdentityMap.empty, SimpleIdentityMap.empty, SimpleIdentityMap.empty, SimpleIdentitySet.empty), - mapping = SimpleIdentityMap.empty, - reverseMapping = SimpleIdentityMap.empty, - wasConstrained = false - ) + import dotty.tools.dotc.config.Printers.{gadts, gadtsConstr} /** Exposes ConstraintHandling.subsumes */ def subsumes(left: GadtConstraint, right: GadtConstraint, pre: GadtConstraint)(using Context): Boolean = { - def extractConstraint(g: GadtConstraint) = g match { - case s: ProperGadtConstraint => s.constraint - case EmptyGadtConstraint => OrderingConstraint.empty - } + def extractConstraint(g: GadtConstraint) = g.constraint subsumes(extractConstraint(left), extractConstraint(right), extractConstraint(pre)) } @@ -88,7 +37,12 @@ final class ProperGadtConstraint private( // the case where they're valid, so no approximating is needed. 
rawBound - override def addToConstraint(params: List[Symbol])(using Context): Boolean = { + /** Add symbols to constraint, correctly handling inter-dependencies. + * + * @see [[ConstraintHandling.addToConstraint]] + */ + def addToConstraint(sym: Symbol)(using Context): Boolean = addToConstraint(sym :: Nil) + def addToConstraint(params: List[Symbol])(using Context): Boolean = { import NameKinds.DepParamName val poly1 = PolyType(params.map { sym => DepParamName.fresh(sym.name.toTypeName) })( @@ -134,10 +88,11 @@ final class ProperGadtConstraint private( // The replaced symbols are picked up here. addToConstraint(poly1, tvars) - .showing(i"added to constraint: [$poly1] $params%, %\n$debugBoundsDescription", gadts) + .showing(i"added to constraint: [$poly1] $params%, % gadt = $this", gadts) } - override def addBound(sym: Symbol, bound: Type, isUpper: Boolean)(using Context): Boolean = { + /** Further constrain a symbol already present in the constraint. */ + def addBound(sym: Symbol, bound: Type, isUpper: Boolean)(using Context): Boolean = { @annotation.tailrec def stripInternalTypeVar(tp: Type): Type = tp match { case tv: TypeVar => val inst = constraint.instType(tv) @@ -178,10 +133,16 @@ final class ProperGadtConstraint private( result } - override def isLess(sym1: Symbol, sym2: Symbol)(using Context): Boolean = + /** Is `sym1` ordered to be less than `sym2`? */ + def isLess(sym1: Symbol, sym2: Symbol)(using Context): Boolean = constraint.isLess(tvarOrError(sym1).origin, tvarOrError(sym2).origin) - override def fullBounds(sym: Symbol)(using Context): TypeBounds | Null = + /** Full bounds of `sym`, including TypeRefs to other lower/upper symbols. + * + * @note this performs subtype checks between ordered symbols. + * Using this in isSubType can lead to infinite recursion. Consider `bounds` instead. 
+ */ + def fullBounds(sym: Symbol)(using Context): TypeBounds | Null = mapping(sym) match { case null => null // TODO: Improve flow typing so that ascription becomes redundant @@ -190,7 +151,8 @@ final class ProperGadtConstraint private( // .ensuring(containsNoInternalTypes(_)) } - override def bounds(sym: Symbol)(using Context): TypeBounds | Null = + /** Immediate bounds of `sym`. Does not contain lower/upper symbols (see [[fullBounds]]). */ + def bounds(sym: Symbol)(using Context): TypeBounds | Null = mapping(sym) match { case null => null // TODO: Improve flow typing so that ascription becomes redundant @@ -201,11 +163,17 @@ final class ProperGadtConstraint private( //.ensuring(containsNoInternalTypes(_)) } - override def contains(sym: Symbol)(using Context): Boolean = mapping(sym) != null + /** Is the symbol registered in the constraint? + * + * @note this is true even if the symbol is constrained to be equal to another type, unlike [[Constraint.contains]]. + */ + def contains(sym: Symbol)(using Context): Boolean = mapping(sym) != null + /** GADT constraint narrows bounds of at least one variable */ def isNarrowing: Boolean = wasConstrained - override def approximation(sym: Symbol, fromBelow: Boolean, maxLevel: Int)(using Context): Type = { + /** See [[ConstraintHandling.approximation]] */ + def approximation(sym: Symbol, fromBelow: Boolean, maxLevel: Int = Int.MaxValue)(using Context): Type = { val res = approximation(tvarOrError(sym).origin, fromBelow, maxLevel) match case tpr: TypeParamRef => @@ -219,23 +187,16 @@ final class ProperGadtConstraint private( res } - override def symbols: List[Symbol] = mapping.keys - - override def fresh: GadtConstraint = new ProperGadtConstraint( - myConstraint, - mapping, - reverseMapping, - wasConstrained - ) - - def restore(other: GadtConstraint): Unit = other match { - case other: ProperGadtConstraint => - this.myConstraint = other.myConstraint - this.mapping = other.mapping - this.reverseMapping = other.reverseMapping - 
this.wasConstrained = other.wasConstrained - case _ => ; - } + def symbols: List[Symbol] = mapping.keys + + def fresh: GadtConstraint = new ProperGadtConstraint(myConstraint, mapping, reverseMapping, wasConstrained) + + /** Restore the state from other [[GadtConstraint]], probably copied using [[fresh]] */ + def restore(other: GadtConstraint): Unit = + this.myConstraint = other.myConstraint + this.mapping = other.mapping + this.reverseMapping = other.reverseMapping + this.wasConstrained = other.wasConstrained // ---- Protected/internal ----------------------------------------------- @@ -291,41 +252,15 @@ final class ProperGadtConstraint private( override def constr = gadtsConstr - override def toText(printer: Printer): Texts.Text = constraint.toText(printer) + override def toText(printer: Printer): Texts.Text = printer.toText(this) - override def debugBoundsDescription(using Context): String = { - val sb = new mutable.StringBuilder - sb ++= constraint.show - sb += '\n' - mapping.foreachBinding { case (sym, _) => - sb ++= i"$sym: ${fullBounds(sym)}\n" - } - sb.result - } + /** Provides more information than toText, by showing the underlying Constraint details. 
*/ + def debugBoundsDescription(using Context): String = i"$this\n$constraint" } -@sharable object EmptyGadtConstraint extends GadtConstraint { - override def bounds(sym: Symbol)(using Context): TypeBounds | Null = null - override def fullBounds(sym: Symbol)(using Context): TypeBounds | Null = null - - override def isLess(sym1: Symbol, sym2: Symbol)(using Context): Boolean = unsupported("EmptyGadtConstraint.isLess") - - override def isNarrowing: Boolean = false - - override def contains(sym: Symbol)(using Context) = false - - override def addToConstraint(params: List[Symbol])(using Context): Boolean = unsupported("EmptyGadtConstraint.addToConstraint") - override def addBound(sym: Symbol, bound: Type, isUpper: Boolean)(using Context): Boolean = unsupported("EmptyGadtConstraint.addBound") - - override def approximation(sym: Symbol, fromBelow: Boolean, maxLevel: Int)(using Context): Type = unsupported("EmptyGadtConstraint.approximation") - - override def symbols: List[Symbol] = Nil - - override def fresh = new ProperGadtConstraint - override def restore(other: GadtConstraint): Unit = - assert(!other.isNarrowing, "cannot restore a non-empty GADTMap") - - override def debugBoundsDescription(using Context): String = "EmptyGadtConstraint" - - override def toText(printer: Printer): Texts.Text = "EmptyGadtConstraint" -} +private class ProperGadtConstraint ( + myConstraint: Constraint, + mapping: SimpleIdentityMap[Symbol, TypeVar], + reverseMapping: SimpleIdentityMap[TypeParamRef, Symbol], + wasConstrained: Boolean, +) extends ConstraintHandling with GadtConstraint(myConstraint, mapping, reverseMapping, wasConstrained) diff --git a/compiler/src/dotty/tools/dotc/core/Mode.scala b/compiler/src/dotty/tools/dotc/core/Mode.scala index d141cf7032ee..40a45b9f4678 100644 --- a/compiler/src/dotty/tools/dotc/core/Mode.scala +++ b/compiler/src/dotty/tools/dotc/core/Mode.scala @@ -70,14 +70,26 @@ object Mode { /** We are currently unpickling Scala2 info */ val Scala2Unpickling: Mode = 
newMode(13, "Scala2Unpickling") - /** We are currently checking bounds to be non-empty, so we should not - * do any widening when computing members of refined types. + /** Signifies one of two possible situations: + * 1. We are currently checking bounds to be non-empty, so we should not + * do any widening when computing members of refined types. + * 2. We are currently checking self type conformance, so we should not + * ignore capture sets added to otherwise pure classes (only needed + * for capture checking). */ - val CheckBounds: Mode = newMode(14, "CheckBounds") + val CheckBoundsOrSelfType: Mode = newMode(14, "CheckBoundsOrSelfType") /** Use Scala2 scheme for overloading and implicit resolution */ val OldOverloadingResolution: Mode = newMode(15, "OldOverloadingResolution") + /** Treat CapturingTypes as plain AnnotatedTypes even in phase CheckCaptures. + * Reuses the value of OldOverloadingResolution to save Mode bits. + * This is OK since OldOverloadingResolution only affects implicit search, which + * is done during phases Typer and Inlinig, and IgnoreCaptures only has an + * effect during phase CheckCaptures. + */ + val IgnoreCaptures = OldOverloadingResolution + /** Allow hk applications of type lambdas to wildcard arguments; * used for checking that such applications do not normally arise */ diff --git a/compiler/src/dotty/tools/dotc/core/NameOps.scala b/compiler/src/dotty/tools/dotc/core/NameOps.scala index 8ec095036b35..7c1073852681 100644 --- a/compiler/src/dotty/tools/dotc/core/NameOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NameOps.scala @@ -124,10 +124,9 @@ object NameOps { * it is also called from the backend. 
*/ def stripModuleClassSuffix: N = likeSpacedN { - val semName = name.toTermName match { - case name: SimpleName if name.endsWith("$") => name.unmangleClassName + val semName = name.toTermName match + case name: SimpleName if name.endsWith(str.MODULE_SUFFIX) && name.lastPart != MODULE_SUFFIX => name.unmangleClassName case _ => name - } semName.exclude(ModuleClassName) } @@ -209,14 +208,13 @@ object NameOps { if str == mustHave then found = true idx + str.length else idx - val start = if ctx.settings.Ycc.value then skip(0, "Impure") else 0 - skip(skip(start, "Erased"), "Context") == suffixStart + skip(skip(skip(0, "Impure"), "Erased"), "Context") == suffixStart && found } /** Same as `funArity`, except that it returns -1 if the prefix * is not one of a (possibly empty) concatenation of a subset of - * "Impure" (only under -Ycc), "Erased" and "Context" (in that order). + * "Impure" (only under pureFunctions), "Erased" and "Context" (in that order). */ private def checkedFunArity(suffixStart: Int)(using Context): Int = if isFunctionPrefix(suffixStart) then funArity(suffixStart) else -1 diff --git a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala index 1341fac7d735..ac6cb78f9e91 100644 --- a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala @@ -16,27 +16,34 @@ import cc.{CapturingType, derivedCapturingType} object OrderingConstraint { - type ArrayValuedMap[T] = SimpleIdentityMap[TypeLambda, Array[T]] + /** If true, use reverse dependencies in `replace` to avoid checking the bounds + * of all parameters in the constraint. This can speed things up, but there are some + * rare corner cases where reverse dependencies miss a parameter. 
Specifically, + * if a constraint contains a free reference to TypeParam P and afterwards the + * same P is added as a bound variable to the constraint, a backwards link would + * then become necessary at this point but is missing. This causes two CB projects + * to fail when reverse dependencies are checked (parboiled2 and perspective). + * In these rare cases `replace` could behave differently when optimized. However, + * no deviation was found in the two projects. It is not clear what the "right" + * behavior of `replace` should be in these cases. Normally, PolyTypes added + * to constraints are supposed to be fresh, so that would mean that the behavior + * with optimizeReplace = true would be correct. But the previous behavior without + * reverse dependency checking corresponds to `optimizeReplace = false`. This behavior + * makes sense if we assume that the added polytype was simply added too late, so we + * want to establish the link between newly bound variable and pre-existing reference. 
+ */ + private final val optimizeReplace = true + + private type ArrayValuedMap[T] = SimpleIdentityMap[TypeLambda, Array[T]] /** The type of `OrderingConstraint#boundsMap` */ - type ParamBounds = ArrayValuedMap[Type] + private type ParamBounds = ArrayValuedMap[Type] /** The type of `OrderingConstraint#lowerMap`, `OrderingConstraint#upperMap` */ - type ParamOrdering = ArrayValuedMap[List[TypeParamRef]] - - /** A new constraint with given maps and given set of hard typevars */ - private def newConstraint( - boundsMap: ParamBounds, lowerMap: ParamOrdering, upperMap: ParamOrdering, - hardVars: TypeVars)(using Context) : OrderingConstraint = - if boundsMap.isEmpty && lowerMap.isEmpty && upperMap.isEmpty then - empty - else - val result = new OrderingConstraint(boundsMap, lowerMap, upperMap, hardVars) - if ctx.run != null then ctx.run.nn.recordConstraintSize(result, result.boundsMap.size) - result + private type ParamOrdering = ArrayValuedMap[List[TypeParamRef]] /** A lens for updating a single entry array in one of the three constraint maps */ - abstract class ConstraintLens[T <: AnyRef: ClassTag] { + private abstract class ConstraintLens[T <: AnyRef: ClassTag] { def entries(c: OrderingConstraint, poly: TypeLambda): Array[T] | Null def updateEntries(c: OrderingConstraint, poly: TypeLambda, entries: Array[T])(using Context): OrderingConstraint def initial: T @@ -47,7 +54,7 @@ object OrderingConstraint { } /** The `current` constraint but with the entry for `param` updated to `entry`. - * `current` is used linearly. If it is different from `prev` it is + * `current` is used linearly. If it is different from `prev` then `current` is * known to be dead after the call. Hence it is OK to update destructively * parts of `current` which are not shared by `prev`. 
*/ @@ -89,27 +96,27 @@ object OrderingConstraint { map(prev, current, param.binder, param.paramNum, f) } - val boundsLens: ConstraintLens[Type] = new ConstraintLens[Type] { + private val boundsLens: ConstraintLens[Type] = new ConstraintLens[Type] { def entries(c: OrderingConstraint, poly: TypeLambda): Array[Type] | Null = c.boundsMap(poly) def updateEntries(c: OrderingConstraint, poly: TypeLambda, entries: Array[Type])(using Context): OrderingConstraint = - newConstraint(c.boundsMap.updated(poly, entries), c.lowerMap, c.upperMap, c.hardVars) + c.newConstraint(boundsMap = c.boundsMap.updated(poly, entries)) def initial = NoType } - val lowerLens: ConstraintLens[List[TypeParamRef]] = new ConstraintLens[List[TypeParamRef]] { + private val lowerLens: ConstraintLens[List[TypeParamRef]] = new ConstraintLens[List[TypeParamRef]] { def entries(c: OrderingConstraint, poly: TypeLambda): Array[List[TypeParamRef]] | Null = c.lowerMap(poly) def updateEntries(c: OrderingConstraint, poly: TypeLambda, entries: Array[List[TypeParamRef]])(using Context): OrderingConstraint = - newConstraint(c.boundsMap, c.lowerMap.updated(poly, entries), c.upperMap, c.hardVars) + c.newConstraint(lowerMap = c.lowerMap.updated(poly, entries)) def initial = Nil } - val upperLens: ConstraintLens[List[TypeParamRef]] = new ConstraintLens[List[TypeParamRef]] { + private val upperLens: ConstraintLens[List[TypeParamRef]] = new ConstraintLens[List[TypeParamRef]] { def entries(c: OrderingConstraint, poly: TypeLambda): Array[List[TypeParamRef]] | Null = c.upperMap(poly) def updateEntries(c: OrderingConstraint, poly: TypeLambda, entries: Array[List[TypeParamRef]])(using Context): OrderingConstraint = - newConstraint(c.boundsMap, c.lowerMap, c.upperMap.updated(poly, entries), c.hardVars) + c.newConstraint(upperMap = c.upperMap.updated(poly, entries)) def initial = Nil } @@ -143,11 +150,27 @@ class OrderingConstraint(private val boundsMap: ParamBounds, private val lowerMap : ParamOrdering, private val upperMap : 
ParamOrdering, private val hardVars : TypeVars) extends Constraint { + thisConstraint => import UnificationDirection.* type This = OrderingConstraint + /** A new constraint with given maps and given set of hard typevars */ + def newConstraint( // !!! Dotty problem: Making newConstraint `private` causes -Ytest-pickler failure. + boundsMap: ParamBounds = this.boundsMap, + lowerMap: ParamOrdering = this.lowerMap, + upperMap: ParamOrdering = this.upperMap, + hardVars: TypeVars = this.hardVars)(using Context) : OrderingConstraint = + if boundsMap.isEmpty && lowerMap.isEmpty && upperMap.isEmpty then + empty + else + val result = new OrderingConstraint(boundsMap, lowerMap, upperMap, hardVars) + if ctx.run != null then ctx.run.nn.recordConstraintSize(result, result.boundsMap.size) + result.coDeps = this.coDeps + result.contraDeps = this.contraDeps + result + // ----------- Basic indices -------------------------------------------------- /** The number of type parameters in the given entry array */ @@ -217,6 +240,189 @@ class OrderingConstraint(private val boundsMap: ParamBounds, if tvar == null then NoType else tvar +// ------------- Type parameter dependencies ---------------------------------------- + + private type ReverseDeps = SimpleIdentityMap[TypeParamRef, SimpleIdentitySet[TypeParamRef]] + + /** A map that associates type parameters of this constraint with all other type + * parameters that refer to them in their bounds covariantly, such that, if the + * type parameter is instantiated to a larger type, the constraint would be narrowed + * (i.e. solution set changes other than simply being made larger). + */ + private var coDeps: ReverseDeps = SimpleIdentityMap.empty + + /** A map that associates type parameters of this constraint with all other type + * parameters that refer to them in their bounds covariantly, such that, if the + * type parameter is instantiated to a smaller type, the constraint would be narrowed. + * (i.e. 
solution set changes other than simply being made larger). + */ + private var contraDeps: ReverseDeps = SimpleIdentityMap.empty + + /** Null-safe indexing */ + extension (deps: ReverseDeps) def at(param: TypeParamRef): SimpleIdentitySet[TypeParamRef] = + val result = deps(param) + if null == result // swapped operand order important since `==` is overloaded in `SimpleIdentitySet` + then SimpleIdentitySet.empty + else result + + override def dependsOn(tv: TypeVar, except: TypeVars, co: Boolean)(using Context): Boolean = + def origin(tv: TypeVar) = + assert(!instType(tv).exists) + tv.origin + val param = origin(tv) + val excluded = except.map(origin) + val qualifies: TypeParamRef => Boolean = !excluded.contains(_) + def test(deps: ReverseDeps, lens: ConstraintLens[List[TypeParamRef]]) = + deps.at(param).exists(qualifies) + || lens(this, tv.origin.binder, tv.origin.paramNum).exists(qualifies) + if co then test(coDeps, upperLens) else test(contraDeps, lowerLens) + + /** Modify traversals in two respects: + * - when encountering an application C[Ts], where C is a type variable or parameter + * that has an instantiation in this constraint, assume the type parameters of + * the instantiation instead of the type parameters of C when traversing the + * arguments Ts. That can make a difference for the variance in which an argument + * is traversed. Example constraint: + * + * constrained types: C[X], A + * A >: C[B] + * C := Option + * + * Here, B is traversed with variance +1 instead of 0. Test case: pos/t3152.scala + * + * - When typing a prefix, don't avoid negative variances. This matters only for the + * corner case where a parameter is instantiated to Nothing (see comment in + * TypeAccumulator#applyToPrefix). When determining instantiation directions in + * interpolations (which is what dependency variances are for), it can be ignored.
+ */ + private trait ConstraintAwareTraversal[T] extends TypeAccumulator[T]: + + override def tyconTypeParams(tp: AppliedType)(using Context): List[ParamInfo] = + def tparams(tycon: Type): List[ParamInfo] = tycon match + case tycon: TypeVar if !tycon.inst.exists => tparams(tycon.origin) + case tycon: TypeParamRef => + entry(tycon) match + case _: TypeBounds => tp.tyconTypeParams + case tycon1 if tycon1.typeParams.nonEmpty => tycon1.typeParams + case _ => tp.tyconTypeParams + case _ => tp.tyconTypeParams + tparams(tp.tycon) + + override def applyToPrefix(x: T, tp: NamedType): T = + this(x, tp.prefix) + end ConstraintAwareTraversal + + private class Adjuster(srcParam: TypeParamRef)(using Context) + extends TypeTraverser, ConstraintAwareTraversal[Unit]: + + var add: Boolean = compiletime.uninitialized + val seen = util.HashSet[LazyRef]() + + def update(deps: ReverseDeps, referenced: TypeParamRef): ReverseDeps = + val prev = deps.at(referenced) + val newSet = if add then prev + srcParam else prev - srcParam + if newSet.isEmpty then deps.remove(referenced) + else deps.updated(referenced, newSet) + + def traverse(t: Type) = t match + case param: TypeParamRef => + entry(param) match + case _: TypeBounds => + if variance >= 0 then coDeps = update(coDeps, param) + if variance <= 0 then contraDeps = update(contraDeps, param) + case tp => + traverse(tp) + case tp: LazyRef => + if !seen.contains(tp) then + seen += tp + traverse(tp.ref) + case _ => traverseChildren(t) + end Adjuster + + /** Adjust dependencies to account for the delta of previous entry `prevEntry` + * and the new bound `entry` for the type parameter `srcParam`. 
*/ + def adjustDeps(entry: Type | Null, prevEntry: Type | Null, srcParam: TypeParamRef)(using Context): this.type = + val adjuster = new Adjuster(srcParam) + + /** Adjust reverse dependencies of all type parameters referenced by `bound` + * @param isLower `bound` is a lower bound + * @param add if true, add referenced variables to dependencies, otherwise drop them. + */ + def adjustReferenced(bound: Type, isLower: Boolean, add: Boolean) = + adjuster.variance = if isLower then 1 else -1 + adjuster.add = add + adjuster.seen.clear() + adjuster.traverse(bound) + + /** Use an optimized strategy to adjust dependencies to account for the delta + * of previous bound `prevBound` and new bound `bound`: If `prevBound` is some + * and/or prefix of `bound`, and `baseCase` is true, just add the new parts of `bound`. + * @param isLower `bound` and `prevBound` are lower bounds + * @return true iff the delta strategy succeeded, false if it failed in which case + * the constraint is left unchanged. + */ + def adjustDelta(bound: Type, prevBound: Type, isLower: Boolean, baseCase: => Boolean): Boolean = + if bound eq prevBound then + baseCase + else bound match + case bound: AndOrType => + adjustDelta(bound.tp1, prevBound, isLower, baseCase) && { + adjustReferenced(bound.tp2, isLower, add = true) + true + } + case _ => false + + /** Add or remove dependencies referenced in `bounds`.
+ * @param add if true, dependencies are added, otherwise they are removed + */ + def adjustBounds(bounds: TypeBounds, add: Boolean) = + adjustReferenced(bounds.lo, isLower = true, add) + adjustReferenced(bounds.hi, isLower = false, add) + + entry match + case entry @ TypeBounds(lo, hi) => + prevEntry match + case prevEntry @ TypeBounds(plo, phi) => + if !adjustDelta(lo, plo, isLower = true, + adjustDelta(hi, phi, isLower = false, true)) + then + adjustBounds(prevEntry, add = false) + adjustBounds(entry, add = true) + case _ => + adjustBounds(entry, add = true) + case _ => + prevEntry match + case prevEntry: TypeBounds => + adjustBounds(prevEntry, add = false) + case _ => + dropDeps(srcParam) // srcParam is instantiated, so its dependencies can be dropped + this + end adjustDeps + + /** Adjust dependencies to account for adding or dropping all `entries` associated + * with `poly`. + * @param add if true, entries is added, otherwise it is dropped + */ + def adjustDeps(poly: TypeLambda, entries: Array[Type], add: Boolean)(using Context): this.type = + for n <- 0 until paramCount(entries) do + if add + then adjustDeps(entries(n), NoType, poly.paramRefs(n)) + else adjustDeps(NoType, entries(n), poly.paramRefs(n)) + this + + /** Remove all reverse dependencies of `param` */ + def dropDeps(param: TypeParamRef)(using Context): Unit = + coDeps = coDeps.remove(param) + contraDeps = contraDeps.remove(param) + + /** A string representing the two dependency maps */ + def depsToString(using Context): String = + def depsStr(deps: ReverseDeps): String = + def depStr(param: TypeParamRef) = i"$param --> ${deps.at(param).toList}%, %" + if deps.isEmpty then "" else i"\n ${deps.toList.map((k, v) => depStr(k))}%\n %" + i" co-deps:${depsStr(coDeps)}\n contra-deps:${depsStr(contraDeps)}\n" + // ---------- Adding TypeLambdas -------------------------------------------------- /** The bound type `tp` without constrained parameters which are clearly
OrderingConstraint(private val boundsMap: ParamBounds, val entries1 = new Array[Type](nparams * 2) poly.paramInfos.copyToArray(entries1, 0) tvars.copyToArray(entries1, nparams) - newConstraint(boundsMap.updated(poly, entries1), lowerMap, upperMap, hardVars).init(poly) + newConstraint(boundsMap = this.boundsMap.updated(poly, entries1)) + .init(poly) } /** Split dependent parameters off the bounds for parameters in `poly`. @@ -298,13 +505,14 @@ class OrderingConstraint(private val boundsMap: ParamBounds, val param = poly.paramRefs(i) val bounds = dropWildcards(nonParamBounds(param)) val stripped = stripParams(bounds, todos, isUpper = true) - current = updateEntry(current, param, stripped) + current = boundsLens.update(this, current, param, stripped) while todos.nonEmpty do current = todos.head(current, param) todos.dropInPlace(1) i += 1 } - current.checkNonCyclic() + current.adjustDeps(poly, current.boundsMap(poly).nn, add = true) + .checkWellFormed() } // ---------- Updates ------------------------------------------------------------ @@ -426,10 +634,12 @@ class OrderingConstraint(private val boundsMap: ParamBounds, case _ => Nil - private def updateEntry(current: This, param: TypeParamRef, tp: Type)(using Context): This = { - if Config.checkNoWildcardsInConstraint then assert(!tp.containsWildcardTypes) - var current1 = boundsLens.update(this, current, param, tp) - tp match { + private def updateEntry(current: This, param: TypeParamRef, newEntry: Type)(using Context): This = { + if Config.checkNoWildcardsInConstraint then assert(!newEntry.containsWildcardTypes) + val oldEntry = current.entry(param) + var current1 = boundsLens.update(this, current, param, newEntry) + .adjustDeps(newEntry, oldEntry, param) + newEntry match { case TypeBounds(lo, hi) => for p <- dependentParams(lo, isUpper = false) do current1 = order(current1, p, param) @@ -442,10 +652,10 @@ class OrderingConstraint(private val boundsMap: ParamBounds, /** The public version of `updateEntry`. 
Guarantees that there are no cycles */ def updateEntry(param: TypeParamRef, tp: Type)(using Context): This = - updateEntry(this, param, ensureNonCyclic(param, tp)).checkNonCyclic() + updateEntry(this, param, ensureNonCyclic(param, tp)).checkWellFormed() def addLess(param1: TypeParamRef, param2: TypeParamRef, direction: UnificationDirection)(using Context): This = - order(this, param1, param2, direction).checkNonCyclic() + order(this, param1, param2, direction).checkWellFormed() // ---------- Replacements and Removals ------------------------------------- @@ -455,24 +665,81 @@ class OrderingConstraint(private val boundsMap: ParamBounds, */ def replace(param: TypeParamRef, tp: Type)(using Context): OrderingConstraint = val replacement = tp.dealiasKeepAnnots.stripTypeVar - if param == replacement then this.checkNonCyclic() + if param == replacement then this.checkWellFormed() else assert(replacement.isValueTypeOrLambda) - var current = - if isRemovable(param.binder) then remove(param.binder) - else updateEntry(this, param, replacement) - - def removeParam(ps: List[TypeParamRef]) = ps.filterConserve(param ne _) - def replaceParam(tp: Type, atPoly: TypeLambda, atIdx: Int): Type = - current.ensureNonCyclic(atPoly.paramRefs(atIdx), tp.substParam(param, replacement)) - - current.foreachParam { (p, i) => - current = boundsLens.map(this, current, p, i, replaceParam(_, p, i)) - current = lowerLens.map(this, current, p, i, removeParam) - current = upperLens.map(this, current, p, i, removeParam) - } - current.checkNonCyclic() + val replacedTypeVar = typeVarOfParam(param) + //println(i"replace $param with $replacement in $this") + + def mapReplacedTypeVarTo(to: Type) = new TypeMap: + override def apply(t: Type): Type = + if (t eq replacedTypeVar) && t.exists then to else mapOver(t) + + var current = this + + def removeParamFrom(ps: List[TypeParamRef]) = + ps.filterConserve(param ne _) + + for lo <- lower(param) do + current = upperLens.map(this, current, lo, removeParamFrom) + 
for hi <- upper(param) do + current = lowerLens.map(this, current, hi, removeParamFrom) + + def replaceParamIn(other: TypeParamRef) = + val oldEntry = current.entry(other) + val newEntry = current.ensureNonCyclic(other, oldEntry.substParam(param, replacement)) + current = boundsLens.update(this, current, other, newEntry) + var oldDepEntry = oldEntry + var newDepEntry = newEntry + replacedTypeVar match + case tvar: TypeVar => + if tvar.inst.exists // `isInstantiated` would use ctx.typerState.constraint rather than the current constraint + then + // If the type variable has been instantiated, we need to forget about + // the instantiation for old dependencies. + // I.e. to find out what the old entry was, we should not follow + // the newly instantiated type variable but assume the type variable's origin `param`. + // An example where this happens is if `replace` is called from TypeVar's `instantiateWith`. + oldDepEntry = mapReplacedTypeVarTo(param)(oldDepEntry) + else + // If the type variable has not been instantiated, we need to replace references to it + // in the new entry by `replacement`. Otherwise we would get stuck in an uninstantiated + // type variable. + // An example where this happens is if `replace` is called from unify. + newDepEntry = mapReplacedTypeVarTo(replacement)(newDepEntry) + case _ => + if oldDepEntry ne newDepEntry then + if current eq this then + // We can end up here if oldEntry eq newEntry, so possibly no new constraint + // was created, but oldDepEntry ne newDepEntry. In that case we must make + // sure we have a new constraint before updating dependencies.
+ current = newConstraint() + current.adjustDeps(newDepEntry, oldDepEntry, other) + end replaceParamIn + + if optimizeReplace then + val co = current.coDeps.at(param) + val contra = current.contraDeps.at(param) + current.foreachParam { (p, i) => + val other = p.paramRefs(i) + entry(other) match + case _: TypeBounds => + if co.contains(other) || contra.contains(other) then + replaceParamIn(other) + case _ => replaceParamIn(other) + } + else + current.foreachParam { (p, i) => + val other = p.paramRefs(i) + if other != param then replaceParamIn(other) + } + + current = + if isRemovable(param.binder) then current.remove(param.binder) + else updateEntry(current, param, replacement) + current.dropDeps(param) + current.checkWellFormed() end replace def remove(pt: TypeLambda)(using Context): This = { @@ -485,7 +752,8 @@ class OrderingConstraint(private val boundsMap: ParamBounds, } val hardVars1 = pt.paramRefs.foldLeft(hardVars)((hvs, param) => hvs - typeVarOfParam(param)) newConstraint(boundsMap.remove(pt), removeFromOrdering(lowerMap), removeFromOrdering(upperMap), hardVars1) - .checkNonCyclic() + .adjustDeps(pt, boundsMap(pt).nn, add = false) + .checkWellFormed() } def isRemovable(pt: TypeLambda): Boolean = { @@ -511,7 +779,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, def swapKey[T](m: ArrayValuedMap[T]) = val info = m(from) if info == null then m else m.remove(from).updated(to, info) - var current = newConstraint(swapKey(boundsMap), swapKey(lowerMap), swapKey(upperMap), hardVars) + var current = newConstraint(swapKey(boundsMap), swapKey(lowerMap), swapKey(upperMap)) def subst[T <: Type](x: T): T = x.subst(from, to).asInstanceOf[T] current.foreachParam {(p, i) => current = boundsLens.map(this, current, p, i, subst) @@ -519,12 +787,12 @@ class OrderingConstraint(private val boundsMap: ParamBounds, current = upperLens.map(this, current, p, i, _.map(subst)) } constr.println(i"renamed $this to $current") - current.checkNonCyclic() + 
current.checkWellFormed() def isHard(tv: TypeVar) = hardVars.contains(tv) def withHard(tv: TypeVar)(using Context) = - newConstraint(boundsMap, lowerMap, upperMap, hardVars + tv) + newConstraint(hardVars = this.hardVars + tv) def instType(tvar: TypeVar): Type = entry(tvar.origin) match case _: TypeBounds => NoType @@ -551,6 +819,26 @@ class OrderingConstraint(private val boundsMap: ParamBounds, assert(tvar.origin == param, i"mismatch $tvar, $param") case _ => + def occursAtToplevel(param: TypeParamRef, inst: Type)(using Context): Boolean = + def occurs(tp: Type)(using Context): Boolean = tp match + case tp: AndOrType => + occurs(tp.tp1) || occurs(tp.tp2) + case tp: TypeParamRef => + (tp eq param) || entry(tp).match + case NoType => false + case TypeBounds(lo, hi) => (lo eq hi) && occurs(lo) + case inst => occurs(inst) + case tp: TypeVar => + occurs(tp.underlying) + case TypeBounds(lo, hi) => + occurs(lo) || occurs(hi) + case _ => + val tp1 = tp.dealias + (tp1 ne tp) && occurs(tp1) + + occurs(inst) + end occursAtToplevel + // ---------- Exploration -------------------------------------------------------- def domainLambdas: List[TypeLambda] = boundsMap.keys @@ -603,7 +891,57 @@ class OrderingConstraint(private val boundsMap: ParamBounds, // ---------- Checking ----------------------------------------------- - def checkNonCyclic()(using Context): this.type = + def checkWellFormed()(using Context): this.type = + + /** Check that each dependency A -> B in coDeps and contraDeps corresponds to + * a reference to A at the right variance in the entry of B. 
+ */ + def checkBackward(deps: ReverseDeps, depsName: String, v: Int)(using Context): Unit = + deps.foreachBinding { (param, params) => + for srcParam <- params do + assert(contains(srcParam) && occursAtVariance(param, v, in = entry(srcParam)), + i"wrong $depsName backwards reference $param -> $srcParam in $thisConstraint") + } + + /** A type traverser that checks that all references bound in the constraint + * are accounted for in coDeps and/or contraDeps. + */ + def checkForward(srcParam: TypeParamRef)(using Context) = + new TypeTraverser with ConstraintAwareTraversal[Unit]: + val seen = util.HashSet[LazyRef]() + def traverse(t: Type): Unit = t match + case param: TypeParamRef if param ne srcParam => + def check(deps: ReverseDeps, directDeps: List[TypeParamRef], depsName: String) = + assert(deps.at(param).contains(srcParam) || directDeps.contains(srcParam), + i"missing $depsName backwards reference $param -> $srcParam in $thisConstraint") + entry(param) match + case _: TypeBounds => + if variance >= 0 then check(contraDeps, upper(param), "contra") + if variance <= 0 then check(coDeps, lower(param), "co") + case tp => + traverse(tp) + case tp: LazyRef => + if !seen.contains(tp) then + seen += tp + traverse(tp.ref) + case _ => traverseChildren(t) + + /** Does `param` occur at variance `v` or else at variance 0 in entry `in`? 
*/ + def occursAtVariance(param: TypeParamRef, v: Int, in: Type)(using Context): Boolean = + val test = new TypeAccumulator[Boolean] with ConstraintAwareTraversal[Boolean]: + def apply(x: Boolean, t: Type): Boolean = + if x then true + else t match + case t: TypeParamRef => + entry(t) match + case _: TypeBounds => + t == param && (variance == 0 || variance == v) + case e => + apply(x, e) + case _ => + foldOver(x, t) + test(false, in) + if Config.checkConstraintsNonCyclic then domainParams.foreach { param => val inst = entry(param) @@ -612,28 +950,13 @@ class OrderingConstraint(private val boundsMap: ParamBounds, assert(!occursAtToplevel(param, inst), s"cyclic bound for $param: ${inst.show} in ${this.show}") } - this - - def occursAtToplevel(param: TypeParamRef, inst: Type)(using Context): Boolean = - - def occurs(tp: Type)(using Context): Boolean = tp match - case tp: AndOrType => - occurs(tp.tp1) || occurs(tp.tp2) - case tp: TypeParamRef => - (tp eq param) || entry(tp).match - case NoType => false - case TypeBounds(lo, hi) => (lo eq hi) && occurs(lo) - case inst => occurs(inst) - case tp: TypeVar => - occurs(tp.underlying) - case TypeBounds(lo, hi) => - occurs(lo) || occurs(hi) - case _ => - val tp1 = tp.dealias - (tp1 ne tp) && occurs(tp1) + if Config.checkConstraintDeps || ctx.settings.YcheckConstraintDeps.value then + checkBackward(coDeps, "co", -1) + checkBackward(contraDeps, "contra", +1) + domainParams.foreach(p => if contains(p) then checkForward(p).traverse(entry(p))) - occurs(inst) - end occursAtToplevel + this + end checkWellFormed override def checkClosed()(using Context): Unit = @@ -663,13 +986,16 @@ class OrderingConstraint(private val boundsMap: ParamBounds, val constrainedText = " constrained types = " + domainLambdas.mkString("\n") val boundsText = - " bounds = " + { + "\n bounds = " + { val assocs = for (param <- domainParams) yield s"${param.binder.paramNames(param.paramNum)}: ${entryText(entry(param))}" assocs.mkString("\n") } - constrainedText 
+ "\n" + boundsText + val depsText = + "\n coDeps = " + coDeps + + "\n contraDeps = " + contraDeps + constrainedText + boundsText + depsText } } diff --git a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala index c5f126580df5..ff9a5cd0aed7 100644 --- a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala +++ b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala @@ -261,7 +261,7 @@ trait PatternTypeConstrainer { self: TypeComparer => val assumeInvariantRefinement = migrateTo3 || forceInvariantRefinement || refinementIsInvariant(patternTp) - trace(i"constraining simple pattern type $tp >:< $pt", gadts, res => s"$res\ngadt = ${ctx.gadt.debugBoundsDescription}") { + trace(i"constraining simple pattern type $tp >:< $pt", gadts, (res: Boolean) => i"$res gadt = ${ctx.gadt}") { (tp, pt) match { case (AppliedType(tyconS, argsS), AppliedType(tyconP, argsP)) => val saved = state.nn.constraint diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala index b4a2dcac1b85..205554e418ed 100644 --- a/compiler/src/dotty/tools/dotc/core/Phases.scala +++ b/compiler/src/dotty/tools/dotc/core/Phases.scala @@ -197,6 +197,14 @@ object Phases { config.println(s"nextDenotTransformerId = ${nextDenotTransformerId.toList}") } + /** Unlink `phase` from Denot transformer chain. This means that + * any denotation transformer defined by the phase will not be executed. 
+ */ + def unlinkPhaseAsDenotTransformer(phase: Phase)(using Context) = + for i <- 0 until nextDenotTransformerId.length do + if nextDenotTransformerId(i) == phase.id then + nextDenotTransformerId(i) = nextDenotTransformerId(phase.id + 1) + private var myParserPhase: Phase = _ private var myTyperPhase: Phase = _ private var myPostTyperPhase: Phase = _ diff --git a/compiler/src/dotty/tools/dotc/core/Scopes.scala b/compiler/src/dotty/tools/dotc/core/Scopes.scala index 863ae4fa6b7f..99076b422358 100644 --- a/compiler/src/dotty/tools/dotc/core/Scopes.scala +++ b/compiler/src/dotty/tools/dotc/core/Scopes.scala @@ -467,7 +467,7 @@ object Scopes { override def size: Int = 0 override def nestingLevel: Int = 0 override def toList(using Context): List[Symbol] = Nil - override def cloneScope(using Context): MutableScope = unsupported("cloneScope") + override def cloneScope(using Context): MutableScope = newScope(nestingLevel) override def lookupEntry(name: Name)(using Context): ScopeEntry | Null = null override def lookupNextEntry(entry: ScopeEntry)(using Context): ScopeEntry | Null = null } diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index bff957721b23..50c96191143c 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -243,7 +243,6 @@ object StdNames { final val ToString: N = "ToString" final val Xor: N = "^" - final val ClassfileAnnotation: N = "ClassfileAnnotation" final val ClassManifest: N = "ClassManifest" final val Enum: N = "Enum" final val Group: N = "Group" @@ -425,6 +424,8 @@ object StdNames { val bytes: N = "bytes" val canEqual_ : N = "canEqual" val canEqualAny : N = "canEqualAny" + val caps: N = "caps" + val captureChecking: N = "captureChecking" val checkInitialized: N = "checkInitialized" val classOf: N = "classOf" val classType: N = "classType" @@ -445,6 +446,7 @@ object StdNames { val derived: N = "derived" val derives: N 
= "derives" val doubleHash: N = "doubleHash" + val dotty: N = "dotty" val drop: N = "drop" val dynamics: N = "dynamics" val elem: N = "elem" diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 66a1e44622b8..a4f1bf3c5e80 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -768,7 +768,7 @@ object SymDenotations { /** Is this a getter? */ final def isGetter(using Context): Boolean = - this.is(Accessor) && !originalName.isSetterName && !originalName.isScala2LocalSuffix + this.is(Accessor) && !originalName.isSetterName && !(originalName.isScala2LocalSuffix && symbol.owner.is(Scala2x)) /** Is this a setter? */ final def isSetter(using Context): Boolean = @@ -808,7 +808,7 @@ object SymDenotations { /** Is this a Scala or Java annotation ? */ def isAnnotation(using Context): Boolean = - isClass && derivesFrom(defn.AnnotationClass) + isClass && (derivesFrom(defn.AnnotationClass) || is(JavaAnnotation)) /** Is this symbol a class that extends `java.io.Serializable` ? */ def isSerializable(using Context): Boolean = @@ -1151,9 +1151,9 @@ object SymDenotations { final def isEffectivelySealed(using Context): Boolean = isOneOf(FinalOrSealed) || isClass && !isOneOf(EffectivelyOpenFlags) - final def isTransparentTrait(using Context): Boolean = - isAllOf(TransparentTrait) - || defn.assumedTransparentTraits.contains(symbol) + final def isTransparentClass(using Context): Boolean = + is(TransparentType) + || defn.isAssumedTransparent(symbol) || isClass && hasAnnotation(defn.TransparentTraitAnnot) /** The class containing this denotation which has the given effective name. 
*/ diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index 73fbcca6f6ed..775062c26b0c 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -348,6 +348,27 @@ object Symbols { def paramVariance(using Context): Variance = denot.variance def paramRef(using Context): TypeRef = denot.typeRef + /** Copy a symbol, overriding selective fields. + * Note that `coord` and `associatedFile` will be set from the fields in `owner`, not + * the fields in `sym`. */ + def copy(using Context)( + owner: Symbol = this.owner, + name: ThisName = name, + flags: FlagSet = this.flags, + info: Type = this.info, + privateWithin: Symbol = this.privateWithin, + coord: Coord = NoCoord, // Can be `= owner.coord` once we bootstrap + associatedFile: AbstractFile | Null = null // Can be `= owner.associatedFile` once we bootstrap + ): Symbol = { + val coord1 = if (coord == NoCoord) owner.coord else coord + val associatedFile1 = if (associatedFile == null) owner.associatedFile else associatedFile + + if isClass then + newClassSymbol(owner, name.asTypeName, flags, _ => info, privateWithin, coord1, associatedFile1) + else + newSymbol(owner, name, flags, info, privateWithin, coord1) + } + // -------- Printing -------------------------------------------------------- /** The prefix string to be used when displaying this symbol without denotation */ @@ -469,30 +490,6 @@ object Symbols { NoDenotation // force it in order to set `denot` field of NoSymbol - extension [N <: Name](sym: Symbol { type ThisName = N })(using Context) { - /** Copy a symbol, overriding selective fields. - * Note that `coord` and `associatedFile` will be set from the fields in `owner`, not - * the fields in `sym`. 
- */ - def copy( - owner: Symbol = sym.owner, - name: N = sym.name, - flags: FlagSet = sym.flags, - info: Type = sym.info, - privateWithin: Symbol = sym.privateWithin, - coord: Coord = NoCoord, // Can be `= owner.coord` once we bootstrap - associatedFile: AbstractFile | Null = null // Can be `= owner.associatedFile` once we bootstrap - ): Symbol = { - val coord1 = if (coord == NoCoord) owner.coord else coord - val associatedFile1 = if (associatedFile == null) owner.associatedFile else associatedFile - - if (sym.isClass) - newClassSymbol(owner, name.asTypeName, flags, _ => info, privateWithin, coord1, associatedFile1) - else - newSymbol(owner, name, flags, info, privateWithin, coord1) - } - } - /** Makes all denotation operations available on symbols */ implicit def toDenot(sym: Symbol)(using Context): SymDenotation = sym.denot diff --git a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala index 26132c7c0205..81f822811456 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala @@ -204,6 +204,12 @@ class TypeApplications(val self: Type) extends AnyVal { } } + /** Substitute in `self` the type parameters of `tycon` by some other types. 
*/ + final def substTypeParams(tycon: Type, to: List[Type])(using Context): Type = + (tycon.typeParams: @unchecked) match + case LambdaParam(lam, _) :: _ => self.substParams(lam, to) + case params: List[Symbol @unchecked] => self.subst(params, to) + /** If `self` is a higher-kinded type, its type parameters, otherwise Nil */ final def hkTypeParams(using Context): List[TypeParamInfo] = if (isLambdaSub) typeParams else Nil @@ -346,7 +352,7 @@ class TypeApplications(val self: Type) extends AnyVal { } if ((dealiased eq stripped) || followAlias) try - val instantiated = dealiased.instantiate(args) + val instantiated = dealiased.instantiate(args.mapConserve(_.boxedUnlessFun(self))) if (followAlias) instantiated.normalized else instantiated catch case ex: IndexOutOfBoundsException => diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 78fbea352bf3..658bf4122aa4 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -23,7 +23,7 @@ import typer.ProtoTypes.constrained import typer.Applications.productSelectorTypes import reporting.trace import annotation.constructorOnly -import cc.{CapturingType, derivedCapturingType, CaptureSet, stripCapturing, isBoxedCapturing, boxed, boxedUnlessFun, boxedIfTypeParam} +import cc.{CapturingType, derivedCapturingType, CaptureSet, stripCapturing, isBoxedCapturing, boxed, boxedUnlessFun, boxedIfTypeParam, isAlwaysPure} /** Provides methods to compare types. 
*/ @@ -60,8 +60,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling /** Indicates whether the subtype check used GADT bounds */ private var GADTused: Boolean = false - protected var canWidenAbstract: Boolean = true - private var myInstance: TypeComparer = this def currentInstance: TypeComparer = myInstance @@ -311,8 +309,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling thirdTryNamed(tp2) else ( (tp1.name eq tp2.name) - && tp1.isMemberRef - && tp2.isMemberRef + && tp2.isPrefixDependentMemberRef && isSubPrefix(tp1.prefix, tp2.prefix) && tp1.signature == tp2.signature && !(sym1.isClass && sym2.isClass) // class types don't subtype each other @@ -523,7 +520,9 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling res case CapturingType(parent1, refs1) => - if subCaptures(refs1, tp2.captureSet, frozenConstraint).isOK && sameBoxed(tp1, tp2, refs1) + if tp2.isAny then true + else if subCaptures(refs1, tp2.captureSet, frozenConstraint).isOK && sameBoxed(tp1, tp2, refs1) + || !ctx.mode.is(Mode.CheckBoundsOrSelfType) && tp1.isAlwaysPure then recur(parent1, tp2) else thirdTry case tp1: AnnotatedType if !tp1.isRefining => @@ -634,6 +633,13 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case (info1: MethodType, info2: MethodType) => matchingMethodParams(info1, info2, precise = false) && isSubInfo(info1.resultType, info2.resultType.subst(info2, info1)) + case (info1 @ CapturingType(parent1, refs1), info2: Type) => + subCaptures(refs1, info2.captureSet, frozenConstraint).isOK && sameBoxed(info1, info2, refs1) + && isSubInfo(parent1, info2) + case (info1: Type, CapturingType(parent2, refs2)) => + val refs1 = info1.captureSet + (refs1.isAlwaysEmpty || subCaptures(refs1, refs2, frozenConstraint).isOK) && sameBoxed(info1, info2, refs1) + && isSubInfo(info1, parent2) case _ => isSubType(info1, info2) @@ -820,7 +826,11 @@ class 
TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling if refs1.isAlwaysEmpty then recur(tp1, parent2) else subCaptures(refs1, refs2, frozenConstraint).isOK && sameBoxed(tp1, tp2, refs1) - && recur(tp1.widen.stripCapturing, parent2) + && (recur(tp1.widen.stripCapturing, parent2) + || tp1.isInstanceOf[SingletonType] && recur(tp1, parent2) + // this alternative is needed in case the right hand side is a + // capturing type that contains the lhs as an alternative of a union type. + ) catch case ex: AssertionError => println(i"assertion failed while compare captured $tp1 <:< $tp2") throw ex @@ -1067,12 +1077,16 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * * - k := args.length * - d := otherArgs.length - k + * - T_0, ..., T_k-1 fresh type parameters + * - bodyArgs := otherArgs.take(d), T_0, ..., T_k-1 * - * `adaptedTycon` will be: + * Then, * - * [T_0, ..., T_k-1] =>> otherTycon[otherArgs(0), ..., otherArgs(d-1), T_0, ..., T_k-1] + * adaptedTycon := [T_0, ..., T_k-1] =>> otherTycon[bodyArgs] * - * where `T_n` has the same bounds as `otherTycon.typeParams(d+n)` + * where the bounds of `T_i` are set based on the bounds of `otherTycon.typeParams(d+i)` + * after substituting type parameter references by the corresponding argument + * in `bodyArgs` (see `adaptedBounds` in the implementation). 
* * Historical note: this strategy is known in Scala as "partial unification" * (even though the type constructor variable isn't actually unified but only @@ -1097,11 +1111,18 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling variancesConform(remainingTparams, tparams) && { val adaptedTycon = if d > 0 then + val initialArgs = otherArgs.take(d) + /** The arguments passed to `otherTycon` in the body of `tl` */ + def bodyArgs(tl: HKTypeLambda) = initialArgs ++ tl.paramRefs + /** The bounds of the type parameters of `tl` */ + def adaptedBounds(tl: HKTypeLambda) = + val bodyArgsComputed = bodyArgs(tl) + remainingTparams.map(_.paramInfo) + .mapconserve(_.substTypeParams(otherTycon, bodyArgsComputed).bounds) + HKTypeLambda(remainingTparams.map(_.paramName))( - tl => remainingTparams.map(remainingTparam => - tl.integrate(remainingTparams, remainingTparam.paramInfo).bounds), - tl => otherTycon.appliedTo( - otherArgs.take(d) ++ tl.paramRefs)) + adaptedBounds, + tl => otherTycon.appliedTo(bodyArgs(tl))) else otherTycon (assumedTrue(tycon) || directionalIsSubType(tycon, adaptedTycon)) && @@ -1831,11 +1852,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling val preGadt = ctx.gadt.fresh def allSubsumes(leftGadt: GadtConstraint, rightGadt: GadtConstraint, left: Constraint, right: Constraint): Boolean = - subsumes(left, right, preConstraint) && preGadt.match - case preGadt: ProperGadtConstraint => - preGadt.subsumes(leftGadt, rightGadt, preGadt) - case _ => - true + subsumes(left, right, preConstraint) && preGadt.subsumes(leftGadt, rightGadt, preGadt) if op1 then val op1Constraint = constraint @@ -1955,6 +1972,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling val info1 = m.info.widenExpr isSubInfo(info1, tp2.refinedInfo.widenExpr, m.symbol.info.orElse(info1)) || matchAbstractTypeMember(m.info) + || (tp1.isStable && isSubType(TermRef(tp1, m.symbol), tp2.refinedInfo)) 
tp1.member(name) match // inlined hasAltWith for performance case mbr: SingleDenotation => qualifies(mbr) @@ -3007,12 +3025,19 @@ object TypeComparer { } object TrackingTypeComparer: - enum MatchResult: + import printing.*, Texts.* + enum MatchResult extends Showable: case Reduced(tp: Type) case Disjoint case Stuck case NoInstance(fails: List[(Name, TypeBounds)]) + def toText(p: Printer): Text = this match + case Reduced(tp) => "Reduced(" ~ p.toText(tp) ~ ")" + case Disjoint => "Disjoint" + case Stuck => "Stuck" + case NoInstance(fails) => "NoInstance(" ~ Text(fails.map(p.toText(_) ~ p.toText(_)), ", ") ~ ")" + class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { import TrackingTypeComparer.* @@ -3076,7 +3101,7 @@ class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { } /** Match a single case. */ - def matchCase(cas: Type): MatchResult = trace(i"match case $cas vs $scrut", matchTypes) { + def matchCase(cas: Type): MatchResult = trace(i"$scrut match ${MatchTypeTrace.caseText(cas)}", matchTypes, show = true) { val cas1 = cas match { case cas: HKTypeLambda => caseLambda = constrained(cas) @@ -3220,7 +3245,7 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { } override def gadtAddBound(sym: Symbol, b: Type, isUpper: Boolean): Boolean = - traceIndented(s"add GADT constraint ${show(sym)} ${if isUpper then "<:" else ">:"} ${show(b)} $frozenNotice, GADT constraint = ${show(ctx.gadt.debugBoundsDescription)}") { + traceIndented(s"add GADT constraint ${show(sym)} ${if isUpper then "<:" else ">:"} ${show(b)} $frozenNotice, GADT constraint = ${show(ctx.gadt)}") { super.gadtAddBound(sym, b, isUpper) } diff --git a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala index 5816e1254873..a3b594eb0f09 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala @@ -18,13 +18,14 @@ class 
TypeError(msg: String) extends Exception(msg) { def this() = this("") final def toMessage(using Context): Message = withMode(Mode.Printing)(produceMessage) - def produceMessage(using Context): Message = super.getMessage.nn + def produceMessage(using Context): Message = super.getMessage.nn.toMessage override def getMessage: String = super.getMessage.nn } class MalformedType(pre: Type, denot: Denotation, absMembers: Set[Name]) extends TypeError { override def produceMessage(using Context): Message = i"malformed type: $pre is not a legal prefix for $denot because it contains abstract type member${if (absMembers.size == 1) "" else "s"} ${absMembers.mkString(", ")}" + .toMessage } class MissingType(pre: Type, name: Name) extends TypeError { @@ -38,6 +39,7 @@ class MissingType(pre: Type, name: Name) extends TypeError { if (ctx.debug) printStackTrace() i"""cannot resolve reference to type $pre.$name |the classfile defining the type might be missing from the classpath${otherReason(pre)}""" + .toMessage } } diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index dfdd66734cf9..9363b27b4dde 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -45,7 +45,7 @@ object TypeOps: val widenedAsf = new AsSeenFromMap(pre.info, cls) val ret = widenedAsf.apply(tp) - if (!widenedAsf.approximated) + if widenedAsf.approxCount == 0 then return ret Stats.record("asSeenFrom skolem prefix required") @@ -57,8 +57,14 @@ object TypeOps: /** The TypeMap handling the asSeenFrom */ class AsSeenFromMap(pre: Type, cls: Symbol)(using Context) extends ApproximatingTypeMap, IdempotentCaptRefMap { - /** Set to true when the result of `apply` was approximated to avoid an unstable prefix. */ - var approximated: Boolean = false + + /** The number of range approximations in invariant or contravariant positions + * performed by this TypeMap. 
+ * - Incremented each time we produce a range. + * - Decremented each time we drop a prefix range by forwarding to a type alias + * or singleton type. + */ + private[TypeOps] var approxCount: Int = 0 def apply(tp: Type): Type = { @@ -76,17 +82,8 @@ object TypeOps: case _ => if (thiscls.derivesFrom(cls) && pre.baseType(thiscls).exists) if (variance <= 0 && !isLegalPrefix(pre)) - if (variance < 0) { - approximated = true - defn.NothingType - } - else - // Don't set the `approximated` flag yet: if this is a prefix - // of a path, we might be able to dealias the path instead - // (this is handled in `ApproximatingTypeMap`). If dealiasing - // is not possible, then `expandBounds` will end up being - // called which we override to set the `approximated` flag. - range(defn.NothingType, pre) + approxCount += 1 + range(defn.NothingType, pre) else pre else if (pre.termSymbol.is(Package) && !thiscls.is(Package)) toPrefix(pre.select(nme.PACKAGE), cls, thiscls) @@ -119,10 +116,10 @@ object TypeOps: // derived infos have already been subjected to asSeenFrom, hence to need to apply the map again. 
tp - override protected def expandBounds(tp: TypeBounds): Type = { - approximated = true - super.expandBounds(tp) - } + override protected def useAlternate(tp: Type): Type = + assert(approxCount > 0) + approxCount -= 1 + tp } def isLegalPrefix(pre: Type)(using Context): Boolean = @@ -189,7 +186,7 @@ object TypeOps: if (normed.exists) normed else mapOver case tp: MethodicType => // See documentation of `Types#simplified` - val addTypeVars = new TypeMap: + val addTypeVars = new TypeMap with IdempotentCaptRefMap: val constraint = ctx.typerState.constraint def apply(t: Type): Type = t match case t: TypeParamRef => constraint.typeVarOfParam(t).orElse(t) @@ -228,16 +225,18 @@ object TypeOps: */ def orDominator(tp: Type)(using Context): Type = { - /** a faster version of cs1 intersect cs2 that treats bottom types correctly */ + /** a faster version of cs1 intersect cs2 */ def intersect(cs1: List[ClassSymbol], cs2: List[ClassSymbol]): List[ClassSymbol] = - if cs1.head == defn.NothingClass then cs2 - else if cs2.head == defn.NothingClass then cs1 - else if cs1.head == defn.NullClass && !ctx.explicitNulls && cs2.head.derivesFrom(defn.ObjectClass) then cs2 - else if cs2.head == defn.NullClass && !ctx.explicitNulls && cs1.head.derivesFrom(defn.ObjectClass) then cs1 - else - val cs2AsSet = new util.HashSet[ClassSymbol](128) - cs2.foreach(cs2AsSet += _) - cs1.filter(cs2AsSet.contains) + val cs2AsSet = BaseClassSet(cs2) + cs1.filter(cs2AsSet.contains) + + /** a version of Type#baseClasses that treats bottom types correctly */ + def orBaseClasses(tp: Type): List[ClassSymbol] = tp.stripTypeVar match + case OrType(tp1, tp2) => + if tp1.isBottomType && (tp1 frozen_<:< tp2) then orBaseClasses(tp2) + else if tp2.isBottomType && (tp2 frozen_<:< tp1) then orBaseClasses(tp1) + else intersect(orBaseClasses(tp1), orBaseClasses(tp2)) + case _ => tp.baseClasses /** The minimal set of classes in `cs` which derive all other classes in `cs` */ def dominators(cs: List[ClassSymbol], accu: 
List[ClassSymbol]): List[ClassSymbol] = (cs: @unchecked) match { @@ -372,7 +371,7 @@ object TypeOps: } // Step 3: Intersect base classes of both sides - val commonBaseClasses = tp.mapReduceOr(_.baseClasses)(intersect) + val commonBaseClasses = orBaseClasses(tp) val doms = dominators(commonBaseClasses, Nil) def baseTp(cls: ClassSymbol): Type = tp.baseType(cls).mapReduceOr(identity)(mergeRefinedOrApplied) @@ -534,6 +533,18 @@ object TypeOps: val sym = tp.symbol forbidden.contains(sym) + /** We need to split the set into upper and lower approximations + * only if it contains a local element. The idea here is that at the + * time we perform an `avoid` all local elements are already accounted for + * and no further elements will be added afterwards. So we can just keep + * the set as it is. See comment by @linyxus on #16261. + */ + override def needsRangeIfInvariant(refs: CaptureSet): Boolean = + refs.elems.exists { + case ref: TermRef => toAvoid(ref) + case _ => false + } + override def apply(tp: Type): Type = tp match case tp: TypeVar if mapCtx.typerState.constraint.contains(tp) => val lo = TypeComparer.instanceType( @@ -604,7 +615,7 @@ object TypeOps: boundss: List[TypeBounds], instantiate: (Type, List[Type]) => Type, app: Type)( - using Context): List[BoundsViolation] = withMode(Mode.CheckBounds) { + using Context): List[BoundsViolation] = withMode(Mode.CheckBoundsOrSelfType) { val argTypes = args.tpes /** Replace all wildcards in `tps` with `#` where `` is the @@ -773,20 +784,15 @@ object TypeOps: tref case tp: TypeRef if !tp.symbol.isClass => - def lo = LazyRef.of(apply(tp.underlying.loBound)) - def hi = LazyRef.of(apply(tp.underlying.hiBound)) val lookup = boundTypeParams.lookup(tp) if lookup != null then lookup else - val tv = newTypeVar(TypeBounds(lo, hi)) + val TypeBounds(lo, hi) = tp.underlying.bounds + val tv = newTypeVar(TypeBounds(defn.NothingType, hi.topType)) boundTypeParams(tp) = tv - // Force lazy ref eagerly using current context - // Otherwise, the 
lazy ref will be forced with a unknown context, - // which causes a problem in tests/patmat/i3645e.scala - lo.ref - hi.ref + assert(tv <:< apply(hi)) + apply(lo) <:< tv // no assert, since bounds might conflict tv - end if case tp @ AppliedType(tycon: TypeRef, _) if !tycon.dealias.typeSymbol.isClass && !tp.isMatchAlias => @@ -869,6 +875,10 @@ object TypeOps: } def instantiate(): Type = { + // if there's a change in variance in type parameters (between subtype tp1 and supertype tp2) + // then we don't want to maximise the type variables in the wrong direction. + // For instance 15967, A[-Z] and B[Y] extends A[Y], we don't want to maximise Y to Any + maximizeType(protoTp1.baseType(tp2.classSymbol), NoSpan) maximizeType(protoTp1, NoSpan) wildApprox(protoTp1) } diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 0df3fa368d5a..29a2496ab2a7 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -745,16 +745,6 @@ object Types { // which means that we always defensively copy the type in the future. This second // measure is necessary because findMember calls might be cached, so do not // necessarily appear in nested order. - // Without the defensive copy, Typer.scala fails to compile at the line - // - // untpd.rename(lhsCore, setterName).withType(setterType), WildcardType) - // - // because the subtype check - // - // ThisTree[Untyped]#ThisTree[Typed] <: Tree[Typed] - // - // fails (in fact it thinks the underlying type of the LHS is `Tree[Untyped]`.) - // // Without the `openedTwice` trick, Typer.scala fails to Ycheck // at phase resolveSuper. 
val rt = @@ -775,11 +765,11 @@ object Types { val rinfo = tp.refinedInfo if (name.isTypeName && !pinfo.isInstanceOf[ClassInfo]) { // simplified case that runs more efficiently val jointInfo = - if rinfo.isInstanceOf[TypeAlias] && !ctx.mode.is(Mode.CheckBounds) then + if rinfo.isInstanceOf[TypeAlias] && !ctx.mode.is(Mode.CheckBoundsOrSelfType) then // In normal situations, the only way to "improve" on rinfo is to return an empty type bounds // So, we do not lose anything essential in "widening" to rinfo. // We need to compute the precise info only when checking for empty bounds - // which is communicated by the CheckBounds mode. + // which is communicated by the CheckBoundsOrSelfType mode. rinfo else if ctx.base.pendingMemberSearches.contains(name) then pinfo safe_& rinfo @@ -1287,11 +1277,14 @@ object Types { * then the top-level union isn't widened. This is needed so that type inference can infer nullable types. */ def widenUnion(using Context): Type = widen match - case tp @ OrNull(tp1): OrType => - // Don't widen `T|Null`, since otherwise we wouldn't be able to infer nullable unions. - val tp1Widen = tp1.widenUnionWithoutNull - if (tp1Widen.isRef(defn.AnyClass)) tp1Widen - else tp.derivedOrType(tp1Widen, defn.NullType) + case tp: OrType => tp match + case OrNull(tp1) => + // Don't widen `T|Null`, since otherwise we wouldn't be able to infer nullable unions. 
+ val tp1Widen = tp1.widenUnionWithoutNull + if (tp1Widen.isRef(defn.AnyClass)) tp1Widen + else tp.derivedOrType(tp1Widen, defn.NullType) + case _ => + tp.widenUnionWithoutNull case tp => tp.widenUnionWithoutNull @@ -1872,7 +1865,10 @@ object Types { def dropRepeatedAnnot(using Context): Type = dropAnnot(defn.RepeatedAnnot) def annotatedToRepeated(using Context): Type = this match { - case tp @ ExprType(tp1) => tp.derivedExprType(tp1.annotatedToRepeated) + case tp @ ExprType(tp1) => + tp.derivedExprType(tp1.annotatedToRepeated) + case self @ AnnotatedType(tp, annot) if annot matches defn.RetainsByNameAnnot => + self.derivedAnnotatedType(tp.annotatedToRepeated, annot) case AnnotatedType(tp, annot) if annot matches defn.RepeatedAnnot => val typeSym = tp.typeSymbol.asClass assert(typeSym == defn.SeqClass || typeSym == defn.ArrayClass) @@ -2345,7 +2341,8 @@ object Types { lastDenotation match { case lastd0: SingleDenotation => val lastd = lastd0.skipRemoved - if (lastd.validFor.runId == ctx.runId && (checkedPeriod != Nowhere)) finish(lastd.current) + if lastd.validFor.runId == ctx.runId && checkedPeriod != Nowhere then + finish(lastd.current) else lastd match { case lastd: SymDenotation => if (stillValid(lastd) && (checkedPeriod != Nowhere)) finish(lastd.current) @@ -2450,6 +2447,8 @@ object Types { } private def checkDenot()(using Context) = {} + //if name.toString == "getConstructor" then + // println(i"set denot of $this to ${denot.info}, ${denot.getClass}, ${Phases.phaseOf(denot.validFor.lastPhaseId)} at ${ctx.phase}") private def checkSymAssign(sym: Symbol)(using Context) = { def selfTypeOf(sym: Symbol) = @@ -2496,8 +2495,10 @@ object Types { symd.maybeOwner.membersNeedAsSeenFrom(prefix) && !symd.is(NonMember) || prefix.isInstanceOf[Types.ThisType] && symd.is(Opaque) // see pos/i11277.scala for a test where this matters - /** Is this a reference to a class or object member? 
*/ - def isMemberRef(using Context): Boolean = designator match { + /** Is this a reference to a class or object member with an info that might depend + * on the prefix? + */ + def isPrefixDependentMemberRef(using Context): Boolean = designator match { case sym: Symbol => infoDependsOnPrefix(sym, prefix) case _ => true } @@ -2785,7 +2786,7 @@ object Types { ((prefix eq NoPrefix) || symbol.is(ParamAccessor) && (prefix eq symbol.owner.thisType) || isRootCapability - ) && !symbol.is(Method) + ) && !symbol.isOneOf(UnstableValueFlags) override def isRootCapability(using Context): Boolean = name == nme.CAPTURE_ROOT && symbol == defn.captureRoot @@ -3418,25 +3419,29 @@ object Types { private var myAtoms: Atoms = _ private var myWidened: Type = _ + private def computeAtoms()(using Context): Atoms = + if tp1.hasClassSymbol(defn.NothingClass) then tp2.atoms + else if tp2.hasClassSymbol(defn.NothingClass) then tp1.atoms + else tp1.atoms | tp2.atoms + + private def computeWidenSingletons()(using Context): Type = + val tp1w = tp1.widenSingletons + val tp2w = tp2.widenSingletons + if ((tp1 eq tp1w) && (tp2 eq tp2w)) this else TypeComparer.lub(tp1w, tp2w, isSoft = isSoft) + private def ensureAtomsComputed()(using Context): Unit = - if atomsRunId != ctx.runId then - myAtoms = - if tp1.hasClassSymbol(defn.NothingClass) then tp2.atoms - else if tp2.hasClassSymbol(defn.NothingClass) then tp1.atoms - else tp1.atoms | tp2.atoms - val tp1w = tp1.widenSingletons - val tp2w = tp2.widenSingletons - myWidened = if ((tp1 eq tp1w) && (tp2 eq tp2w)) this else TypeComparer.lub(tp1w, tp2w, isSoft = isSoft) + if atomsRunId != ctx.runId && !isProvisional then + myAtoms = computeAtoms() + myWidened = computeWidenSingletons() atomsRunId = ctx.runId override def atoms(using Context): Atoms = ensureAtomsComputed() - myAtoms + if isProvisional then computeAtoms() else myAtoms - override def widenSingletons(using Context): Type = { + override def widenSingletons(using Context): Type = 
ensureAtomsComputed() - myWidened - } + if isProvisional then computeWidenSingletons() else myWidened def derivedOrType(tp1: Type, tp2: Type, soft: Boolean = isSoft)(using Context): Type = if ((tp1 eq this.tp1) && (tp2 eq this.tp2) && soft == isSoft) this @@ -3598,10 +3603,14 @@ object Types { /** The type `[tparams := paramRefs] tp`, where `tparams` can be * either a list of type parameter symbols or a list of lambda parameters + * + * @pre If `tparams` is a list of lambda parameters, then it must be the + * full, in-order list of type parameters of some type constructor, as + * can be obtained using `TypeApplications#typeParams`. */ def integrate(tparams: List[ParamInfo], tp: Type)(using Context): Type = (tparams: @unchecked) match { - case LambdaParam(lam, _) :: _ => tp.subst(lam, this) + case LambdaParam(lam, _) :: _ => tp.subst(lam, this) // This is where the precondition is necessary. case params: List[Symbol @unchecked] => tp.subst(params, paramRefs) } @@ -4229,7 +4238,7 @@ object Types { final val Unknown: DependencyStatus = 0 // not yet computed final val NoDeps: DependencyStatus = 1 // no dependent parameters found final val FalseDeps: DependencyStatus = 2 // all dependent parameters are prefixes of non-depended alias types - final val CaptureDeps: DependencyStatus = 3 // dependencies in capture sets under -Ycc, otherwise only false dependencoes + final val CaptureDeps: DependencyStatus = 3 // dependencies in capture sets under captureChecking, otherwise only false dependencoes final val TrueDeps: DependencyStatus = 4 // some truly dependent parameters exist final val StatusMask: DependencyStatus = 7 // the bits indicating actual dependency status final val Provisional: DependencyStatus = 8 // set if dependency status can still change due to type variable instantiations @@ -5290,16 +5299,18 @@ object Types { val et = new PreviousErrorType ctx.base.errorTypeMsg(et) = m et + def apply(s: => String)(using Context): ErrorType = + apply(s.toMessage) end 
ErrorType class PreviousErrorType extends ErrorType: def msg(using Context): Message = ctx.base.errorTypeMsg.get(this) match case Some(m) => m - case None => "error message from previous run no longer available" + case None => "error message from previous run no longer available".toMessage object UnspecifiedErrorType extends ErrorType { - override def msg(using Context): Message = "unspecified error" + override def msg(using Context): Message = "unspecified error".toMessage } /* Type used to track Select nodes that could not resolve a member and their qualifier is a scala.Dynamic. */ @@ -5486,6 +5497,14 @@ object Types { stop == StopAt.Static && tp.currentSymbol.isStatic && isStaticPrefix(tp.prefix) || stop == StopAt.Package && tp.currentSymbol.is(Package) } + + /** The type parameters of the constructor of this applied type. + * Overridden in OrderingConstraint's ConstraintAwareTraversal to take account + * of instantiations in the constraint that are not yet propagated to the + * instance types of type variables. + */ + protected def tyconTypeParams(tp: AppliedType)(using Context): List[ParamInfo] = + tp.tyconTypeParams end VariantTraversal /** A supertrait for some typemaps that are bijections. Used for capture checking. @@ -5593,17 +5612,11 @@ object Types { case tp: NamedType => if stopBecauseStaticOrLocal(tp) then tp else - val prefix1 = atVariance(variance max 0)(this(tp.prefix)) - // A prefix is never contravariant. Even if say `p.A` is used in a contravariant - // context, we cannot assume contravariance for `p` because `p`'s lower - // bound might not have a binding for `A` (e.g. the lower bound could be `Nothing`). - // By contrast, covariance does translate to the prefix, since we have that - // if `p <: q` then `p.A <: q.A`, and well-formedness requires that `A` is a member - // of `p`'s upper bound. 
+ val prefix1 = atVariance(variance max 0)(this(tp.prefix)) // see comment of TypeAccumulator's applyToPrefix derivedSelect(tp, prefix1) case tp: AppliedType => - derivedAppliedType(tp, this(tp.tycon), mapArgs(tp.args, tp.tyconTypeParams)) + derivedAppliedType(tp, this(tp.tycon), mapArgs(tp.args, tyconTypeParams(tp))) case tp: LambdaType => mapOverLambda(tp) @@ -5766,6 +5779,13 @@ object Types { private var expandingBounds: Boolean = false + /** Use an alterate type `tp` that replaces a range. This can happen if the + * prefix of a Select is a range and the selected symbol is an alias type + * or a value with a singleton type. In both cases we can forget the prefix + * and use the symbol's type. + */ + protected def useAlternate(tp: Type): Type = reapply(tp) + /** Whether it is currently expanding bounds * * It is used to avoid following LazyRef in F-Bounds @@ -5789,7 +5809,7 @@ object Types { case TypeAlias(alias) => // if H#T = U, then for any x in L..H, x.T =:= U, // hence we can replace with U under all variances - reapply(alias.rewrapAnnots(tp1)) + useAlternate(alias.rewrapAnnots(tp1)) case bounds: TypeBounds => // If H#T = ? >: S <: U, then for any x in L..H, S <: x.T <: U, // hence we can replace with S..U under all variances @@ -5797,7 +5817,7 @@ object Types { case info: SingletonType => // if H#x: y.type, then for any x in L..H, x.type =:= y.type, // hence we can replace with y.type under all variances - reapply(info) + useAlternate(info) case _ => NoType } @@ -5813,10 +5833,10 @@ object Types { case arg @ TypeRef(pre, _) if pre.isArgPrefixOf(arg.symbol) => arg.info match { case argInfo: TypeBounds => expandBounds(argInfo) - case argInfo => reapply(arg) + case argInfo => useAlternate(arg) } case arg: TypeBounds => expandBounds(arg) - case arg => reapply(arg) + case arg => useAlternate(arg) } /** Derived selection. 
@@ -5923,7 +5943,7 @@ object Types { case nil => true } - if (distributeArgs(args, tp.tyconTypeParams)) + if (distributeArgs(args, tyconTypeParams(tp))) range(tp.derivedAppliedType(tycon, loBuf.toList), tp.derivedAppliedType(tycon, hiBuf.toList)) else if tycon.isLambdaSub || args.exists(isRangeOfNonTermTypes) then @@ -6007,8 +6027,11 @@ object Types { tp.derivedLambdaType(tp.paramNames, formals, restpe) } + /** Overridden in TypeOps.avoid */ + protected def needsRangeIfInvariant(refs: CaptureSet): Boolean = true + override def mapCapturingType(tp: Type, parent: Type, refs: CaptureSet, v: Int): Type = - if v == 0 then + if v == 0 && needsRangeIfInvariant(refs) then range(mapCapturingType(tp, parent, refs, -1), mapCapturingType(tp, parent, refs, 1)) else super.mapCapturingType(tp, parent, refs, v) @@ -6019,14 +6042,10 @@ object Types { /** A range of possible types between lower bound `lo` and upper bound `hi`. * Only used internally in `ApproximatingTypeMap`. */ - case class Range(lo: Type, hi: Type) extends UncachedGroundType { + case class Range(lo: Type, hi: Type) extends UncachedGroundType: assert(!lo.isInstanceOf[Range]) assert(!hi.isInstanceOf[Range]) - override def toText(printer: Printer): Text = - lo.toText(printer) ~ ".." ~ hi.toText(printer) - } - /** Approximate wildcards by their bounds */ class AvoidWildcardsMap(using Context) extends ApproximatingTypeMap: protected def mapWild(t: WildcardType) = @@ -6045,8 +6064,17 @@ object Types { protected def applyToAnnot(x: T, annot: Annotation): T = x // don't go into annotations - protected final def applyToPrefix(x: T, tp: NamedType): T = - atVariance(variance max 0)(this(x, tp.prefix)) // see remark on NamedType case in TypeMap + /** A prefix is never contravariant. Even if say `p.A` is used in a contravariant + * context, we cannot assume contravariance for `p` because `p`'s lower + * bound might not have a binding for `A`, since the lower bound could be `Nothing`. 
+ * By contrast, covariance does translate to the prefix, since we have that + * if `p <: q` then `p.A <: q.A`, and well-formedness requires that `A` is a member + * of `p`'s upper bound. + * Overridden in OrderingConstraint's ConstraintAwareTraversal, where a + * more relaxed scheme is used. + */ + protected def applyToPrefix(x: T, tp: NamedType): T = + atVariance(variance max 0)(this(x, tp.prefix)) def foldOver(x: T, tp: Type): T = { record(s"foldOver $getClass") @@ -6069,7 +6097,7 @@ object Types { } foldArgs(acc, tparams.tail, args.tail) } - foldArgs(this(x, tycon), tp.tyconTypeParams, args) + foldArgs(this(x, tycon), tyconTypeParams(tp), args) case _: BoundType | _: ThisType => x diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala index 3b05ee351b86..4aa60d973264 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala @@ -346,6 +346,7 @@ object ClassfileConstants { case JAVA_ACC_ENUM => Enum case JAVA_ACC_ABSTRACT => if (isClass) Abstract else Deferred case JAVA_ACC_INTERFACE => PureInterfaceCreationFlags | JavaDefined + case JAVA_ACC_ANNOTATION => JavaAnnotation case _ => EmptyFlags } @@ -353,18 +354,16 @@ object ClassfileConstants { if (jflag == 0) base else base | translateFlag(jflag) private def translateFlags(jflags: Int, baseFlags: FlagSet): FlagSet = { - val nflags = - if ((jflags & JAVA_ACC_ANNOTATION) == 0) jflags - else jflags & ~(JAVA_ACC_ABSTRACT | JAVA_ACC_INTERFACE) // annotations are neither abstract nor interfaces var res: FlagSet = baseFlags | JavaDefined - res = addFlag(res, nflags & JAVA_ACC_PRIVATE) - res = addFlag(res, nflags & JAVA_ACC_PROTECTED) - res = addFlag(res, nflags & JAVA_ACC_FINAL) - res = addFlag(res, nflags & JAVA_ACC_SYNTHETIC) - res = addFlag(res, nflags & JAVA_ACC_STATIC) - res = addFlag(res, nflags & JAVA_ACC_ENUM) - res 
= addFlag(res, nflags & JAVA_ACC_ABSTRACT) - res = addFlag(res, nflags & JAVA_ACC_INTERFACE) + res = addFlag(res, jflags & JAVA_ACC_PRIVATE) + res = addFlag(res, jflags & JAVA_ACC_PROTECTED) + res = addFlag(res, jflags & JAVA_ACC_FINAL) + res = addFlag(res, jflags & JAVA_ACC_SYNTHETIC) + res = addFlag(res, jflags & JAVA_ACC_STATIC) + res = addFlag(res, jflags & JAVA_ACC_ENUM) + res = addFlag(res, jflags & JAVA_ACC_ABSTRACT) + res = addFlag(res, jflags & JAVA_ACC_INTERFACE) + res = addFlag(res, jflags & JAVA_ACC_ANNOTATION) res } diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala index 0b5fda49d63c..33a1e1dd6e73 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala @@ -165,11 +165,7 @@ class ClassfileParser( * Updates the read pointer of 'in'. */ def parseParents: List[Type] = { val superType = - if (isAnnotation) { - in.nextChar - defn.AnnotationClass.typeRef - } - else if (classRoot.symbol == defn.ComparableClass || + if (classRoot.symbol == defn.ComparableClass || classRoot.symbol == defn.JavaCloneableClass || classRoot.symbol == defn.JavaSerializableClass) { // Treat these interfaces as universal traits @@ -186,7 +182,6 @@ class ClassfileParser( // Consequently, no best implicit for the "Integral" evidence parameter of "range" // is found. Previously, this worked because of weak conformance, which has been dropped. 
- if (isAnnotation) ifaces = defn.ClassfileAnnotationClass.typeRef :: ifaces superType :: ifaces } @@ -275,6 +270,9 @@ class ClassfileParser( def complete(denot: SymDenotation)(using Context): Unit = { val sym = denot.symbol val isEnum = (jflags & JAVA_ACC_ENUM) != 0 + val isNative = (jflags & JAVA_ACC_NATIVE) != 0 + val isTransient = (jflags & JAVA_ACC_TRANSIENT) != 0 + val isVolatile = (jflags & JAVA_ACC_VOLATILE) != 0 val isConstructor = name eq nme.CONSTRUCTOR /** Strip leading outer param from constructor and trailing access tag for @@ -313,6 +311,12 @@ class ClassfileParser( val isVarargs = denot.is(Flags.Method) && (jflags & JAVA_ACC_VARARGS) != 0 denot.info = sigToType(sig, isVarargs = isVarargs) if (isConstructor) normalizeConstructorParams() + if isNative then + attrCompleter.annotations ::= Annotation.deferredSymAndTree(defn.NativeAnnot)(New(defn.NativeAnnot.typeRef, Nil)) + if isTransient then + attrCompleter.annotations ::= Annotation.deferredSymAndTree(defn.TransientAnnot)(New(defn.TransientAnnot.typeRef, Nil)) + if isVolatile then + attrCompleter.annotations ::= Annotation.deferredSymAndTree(defn.VolatileAnnot)(New(defn.VolatileAnnot.typeRef, Nil)) denot.info = translateTempPoly(attrCompleter.complete(denot.info, isVarargs)) if (isConstructor) normalizeConstructorInfo() @@ -836,7 +840,7 @@ class ClassfileParser( class AnnotConstructorCompleter(classInfo: TempClassInfoType) extends LazyType { def complete(denot: SymDenotation)(using Context): Unit = { - val attrs = classInfo.decls.toList.filter(sym => sym.isTerm && sym != denot.symbol) + val attrs = classInfo.decls.toList.filter(sym => sym.isTerm && sym != denot.symbol && sym.name != nme.CONSTRUCTOR) val paramNames = attrs.map(_.name.asTermName) val paramTypes = attrs.map(_.info.resultType) denot.info = MethodType(paramNames, paramTypes, classRoot.typeRef) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 
475a258e8330..34c22439a932 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -451,6 +451,7 @@ class TreePickler(pickler: TastyPickler) { withLength { pickleTree(qual); if (!mix.isEmpty) { + // mixinType being a TypeRef when mix is non-empty is enforced by TreeChecker#checkSuper val SuperType(_, mixinType: TypeRef) = tree.tpe: @unchecked pickleTree(mix.withType(mixinType)) } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 6887937ed6fe..617a2c55a7ad 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -32,7 +32,7 @@ import ast.{Trees, tpd, untpd} import Trees._ import Decorators._ import transform.SymUtils._ -import cc.adaptFunctionTypeUnderCC +import cc.{adaptFunctionTypeUnderPureFuns, adaptByNameArgUnderPureFuns} import dotty.tools.tasty.{TastyBuffer, TastyReader} import TastyBuffer._ @@ -87,8 +87,8 @@ class TreeUnpickler(reader: TastyReader, /** The root owner tree. See `OwnerTree` class definition. Set by `enterTopLevel`. */ private var ownerTree: OwnerTree = _ - /** Was unpickled class compiled with -Ycc? */ - private var wasCaptureChecked: Boolean = false + /** Was unpickled class compiled with pureFunctions? 
*/ + private var knowsPureFuns: Boolean = false private def registerSym(addr: Addr, sym: Symbol) = symAtAddr(addr) = sym @@ -455,7 +455,8 @@ class TreeUnpickler(reader: TastyReader, val ref = readAddr() typeAtAddr.getOrElseUpdate(ref, forkAt(ref).readType()) case BYNAMEtype => - ExprType(readType()) + val arg = readType() + ExprType(if knowsPureFuns then arg else arg.adaptByNameArgUnderPureFuns) case _ => ConstantType(readConstant(tag)) } @@ -489,11 +490,11 @@ class TreeUnpickler(reader: TastyReader, def readTermRef()(using Context): TermRef = readType().asInstanceOf[TermRef] - /** Under -Ycc, map all function types to impure function types, - * unless the unpickled class was also compiled with -Ycc. + /** Under pureFunctions, map all function types to impure function types, + * unless the unpickled class was also compiled with pureFunctions. */ private def postProcessFunction(tp: Type)(using Context): Type = - if wasCaptureChecked then tp else tp.adaptFunctionTypeUnderCC + if knowsPureFuns then tp else tp.adaptFunctionTypeUnderPureFuns // ------ Reading definitions ----------------------------------------------------- @@ -624,7 +625,7 @@ class TreeUnpickler(reader: TastyReader, else newSymbol(ctx.owner, name, flags, completer, privateWithin, coord) } - val annots = annotFns.map(_(sym.owner)) + val annots = annotFns.map(_(sym.owner)) sym.annotations = annots if sym.isOpaqueAlias then sym.setFlag(Deferred) val isScala2MacroDefinedInScala3 = flags.is(Macro, butNot = Inline) && flags.is(Erased) @@ -642,8 +643,8 @@ class TreeUnpickler(reader: TastyReader, } registerSym(start, sym) if (isClass) { - if sym.owner.is(Package) && annots.exists(_.symbol == defn.CaptureCheckedAnnot) then - wasCaptureChecked = true + if sym.owner.is(Package) && annots.exists(_.hasSymbol(defn.WithPureFunsAnnot)) then + knowsPureFuns = true sym.completer.withDecls(newScope) forkAt(templateStart).indexTemplateParams()(using localContext(sym)) } @@ -737,7 +738,15 @@ class TreeUnpickler(reader: 
TastyReader, val tp = readType() val lazyAnnotTree = readLaterWithOwner(end, _.readTerm()) owner => - Annotation.deferredSymAndTree(tp.typeSymbol)(lazyAnnotTree(owner).complete) + new DeferredSymAndTree(tp.typeSymbol, lazyAnnotTree(owner).complete): + // Only force computation of symbol if it has the right name. This added + // amount of laziness is sometimes necessary to avoid cycles. Test case pos/i15980. + override def hasSymbol(sym: Symbol)(using Context) = tp match + case tp: TypeRef => + tp.designator match + case name: Name => name == sym.name && tp.symbol == sym + case _ => tp.symbol == sym + case _ => this.symbol == sym /** Create symbols for the definitions in the statement sequence between * current address and `end`. @@ -1170,7 +1179,8 @@ class TreeUnpickler(reader: TastyReader, case SINGLETONtpt => SingletonTypeTree(readTerm()) case BYNAMEtpt => - ByNameTypeTree(readTpt()) + val arg = readTpt() + ByNameTypeTree(if knowsPureFuns then arg else arg.adaptByNameArgUnderPureFuns) case NAMEDARG => NamedArg(readName(), readTerm()) case _ => diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index 333cd9fa9ec3..561b1eac2391 100644 --- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -20,6 +20,7 @@ import printing.Texts._ import printing.Printer import io.AbstractFile import util.common._ +import util.NoSourcePosition import typer.Checking.checkNonCyclic import typer.Nullables._ import transform.SymUtils._ @@ -32,7 +33,7 @@ import scala.collection.mutable import scala.collection.mutable.ListBuffer import scala.annotation.switch import reporting._ -import cc.adaptFunctionTypeUnderCC +import cc.{adaptFunctionTypeUnderPureFuns, adaptByNameArgUnderPureFuns} object Scala2Unpickler { @@ -744,7 +745,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot:
ClassDenotation, moduleClas val anyTypes = boundSyms map (_ => defn.AnyType) val boundBounds = boundSyms map (_.info.bounds.hi) val tp2 = tp1.subst(boundSyms, boundBounds).subst(boundSyms, anyTypes) - report.warning(FailureToEliminateExistential(tp, tp1, tp2, boundSyms, classRoot.symbol)) + report.warning(FailureToEliminateExistential(tp, tp1, tp2, boundSyms, classRoot.symbol), NoSourcePosition) tp2 } else tp1 @@ -816,7 +817,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas } val tycon = select(pre, sym) val args = until(end, () => readTypeRef()) - if (sym == defn.ByNameParamClass2x) ExprType(args.head) + if (sym == defn.ByNameParamClass2x) ExprType(args.head.adaptByNameArgUnderPureFuns) else if (ctx.settings.scalajs.value && args.length == 2 && sym.owner == JSDefinitions.jsdefn.ScalaJSJSPackageClass && sym == JSDefinitions.jsdefn.PseudoUnionClass) { // Treat Scala.js pseudo-unions as real unions, this requires a @@ -825,7 +826,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas } else if args.nonEmpty then tycon.safeAppliedTo(EtaExpandIfHK(sym.typeParams, args.map(translateTempPoly))) - .adaptFunctionTypeUnderCC + .adaptFunctionTypeUnderPureFuns else if (sym.typeParams.nonEmpty) tycon.EtaExpand(sym.typeParams) else tycon case TYPEBOUNDStpe => diff --git a/compiler/src/dotty/tools/dotc/coverage/Coverage.scala b/compiler/src/dotty/tools/dotc/coverage/Coverage.scala index 8ae249c1f5a3..e41bfcd5d09a 100644 --- a/compiler/src/dotty/tools/dotc/coverage/Coverage.scala +++ b/compiler/src/dotty/tools/dotc/coverage/Coverage.scala @@ -13,7 +13,6 @@ class Coverage: /** A statement that can be invoked, and thus counted as "covered" by code coverage tools. 
*/ case class Statement( - source: String, location: Location, id: Int, start: Int, diff --git a/compiler/src/dotty/tools/dotc/coverage/Location.scala b/compiler/src/dotty/tools/dotc/coverage/Location.scala index faf1e97d0c01..c565c2bb1116 100644 --- a/compiler/src/dotty/tools/dotc/coverage/Location.scala +++ b/compiler/src/dotty/tools/dotc/coverage/Location.scala @@ -5,6 +5,7 @@ import ast.tpd._ import dotty.tools.dotc.core.Contexts.Context import dotty.tools.dotc.core.Flags.* import java.nio.file.Path +import dotty.tools.dotc.util.SourceFile /** Information about the location of a coverable piece of code. * @@ -12,7 +13,7 @@ import java.nio.file.Path * @param className name of the closest enclosing class * @param fullClassName fully qualified name of the closest enclosing class * @param classType "type" of the closest enclosing class: Class, Trait or Object - * @param method name of the closest enclosing method + * @param method name of the closest enclosing method * @param sourcePath absolute path of the source file */ final case class Location( @@ -20,17 +21,19 @@ final case class Location( className: String, fullClassName: String, classType: String, - method: String, + methodName: String, sourcePath: Path ) object Location: /** Extracts the location info of a Tree. 
*/ - def apply(tree: Tree)(using ctx: Context): Location = + def apply(tree: Tree, source: SourceFile)(using ctx: Context): Location = - val enclosingClass = ctx.owner.denot.enclosingClass - val packageName = ctx.owner.denot.enclosingPackageClass.name.toSimpleName.toString + val ownerDenot = ctx.owner.denot + val enclosingClass = ownerDenot.enclosingClass + val packageName = ownerDenot.enclosingPackageClass.fullName.toSimpleName.toString val className = enclosingClass.name.toSimpleName.toString + val methodName = ownerDenot.enclosingMethod.name.toSimpleName.toString val classType: String = if enclosingClass.is(Trait) then "Trait" @@ -42,6 +45,6 @@ object Location: className, s"$packageName.$className", classType, - ctx.owner.denot.enclosingMethod.name.toSimpleName.toString(), - ctx.source.file.absolute.jpath + methodName, + source.file.absolute.jpath ) diff --git a/compiler/src/dotty/tools/dotc/coverage/Serializer.scala b/compiler/src/dotty/tools/dotc/coverage/Serializer.scala index 23ab73f6d42e..26efa8934e00 100644 --- a/compiler/src/dotty/tools/dotc/coverage/Serializer.scala +++ b/compiler/src/dotty/tools/dotc/coverage/Serializer.scala @@ -4,6 +4,7 @@ package coverage import java.nio.file.{Path, Paths, Files} import java.io.Writer import scala.language.unsafeNulls +import scala.collection.mutable.StringBuilder /** * Serializes scoverage data. 
@@ -62,21 +63,21 @@ object Serializer: def writeStatement(stmt: Statement, writer: Writer): Unit = // Note: we write 0 for the count because we have not measured the actual coverage at this point writer.write(s"""${stmt.id} - |${getRelativePath(stmt.location.sourcePath)} - |${stmt.location.packageName} - |${stmt.location.className} + |${getRelativePath(stmt.location.sourcePath).escaped} + |${stmt.location.packageName.escaped} + |${stmt.location.className.escaped} |${stmt.location.classType} - |${stmt.location.fullClassName} - |${stmt.location.method} + |${stmt.location.fullClassName.escaped} + |${stmt.location.methodName.escaped} |${stmt.start} |${stmt.end} |${stmt.line} - |${stmt.symbolName} + |${stmt.symbolName.escaped} |${stmt.treeName} |${stmt.branch} |0 |${stmt.ignored} - |${stmt.desc} + |${stmt.desc.escaped} |\f |""".stripMargin) @@ -84,3 +85,27 @@ object Serializer: coverage.statements.toSeq .sortBy(_.id) .foreach(stmt => writeStatement(stmt, writer)) + + /** Makes a String suitable for output in the coverage statement data as a single line. 
+ * Escaped characters: '\\' (backslash), '\n', '\r', '\f' + */ + extension (str: String) def escaped: String = + val builder = StringBuilder(str.length) + var i = 0 + while + i < str.length + do + str.charAt(i) match + case '\\' => + builder ++= "\\\\" + case '\n' => + builder ++= "\\n" + case '\r' => + builder ++= "\\r" + case '\f' => + builder ++= "\\f" + case c => + builder += c + i += 1 + end while + builder.result() diff --git a/compiler/src/dotty/tools/dotc/inlines/InlineReducer.scala b/compiler/src/dotty/tools/dotc/inlines/InlineReducer.scala index debf51872d5a..460d0a61c252 100644 --- a/compiler/src/dotty/tools/dotc/inlines/InlineReducer.scala +++ b/compiler/src/dotty/tools/dotc/inlines/InlineReducer.scala @@ -269,12 +269,21 @@ class InlineReducer(inliner: Inliner)(using Context): } } - // Extractors contain Bind nodes in type parameter lists, the tree looks like this: + // Extractors can contain Bind nodes in type parameter lists, + // for that case tree looks like this: // UnApply[t @ t](pats)(implicits): T[t] // Test case is pos/inline-caseclass.scala. 
+ // Alternatively, for explicitly specified type binds in type annotations like in + // case A(B): A[t] + // the tree will look like this: + // Unapply[t](pats)(implicits) : T[t @ t] + // and the binds will be found in the type tree instead + // Test case is pos-macros/i15971 + val tptBinds = getBinds(Set.empty[TypeSymbol], tpt) val binds: Set[TypeSymbol] = pat match { - case UnApply(TypeApply(_, tpts), _, _) => getBinds(Set.empty[TypeSymbol], tpts) - case _ => getBinds(Set.empty[TypeSymbol], tpt) + case UnApply(TypeApply(_, tpts), _, _) => + getBinds(Set.empty[TypeSymbol], tpts) ++ tptBinds + case _ => tptBinds } val extractBindVariance = new TypeAccumulator[TypeBindsMap] { diff --git a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala index d1a88406fe45..1806cdfc909b 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala @@ -85,7 +85,10 @@ object Inlines: if (tree.symbol == defn.CompiletimeTesting_typeChecks) return Intrinsics.typeChecks(tree) if (tree.symbol == defn.CompiletimeTesting_typeCheckErrors) return Intrinsics.typeCheckErrors(tree) - CrossVersionChecks.checkExperimentalRef(tree.symbol, tree.srcPos) + if ctx.isAfterTyper then + // During typer we wait with cross version checks until PostTyper, in order + // not to provoke cyclic references. See i16116 for a test case. + CrossVersionChecks.checkExperimentalRef(tree.symbol, tree.srcPos) if tree.symbol.isConstructor then return tree // error already reported for the inline constructor definition @@ -155,7 +158,7 @@ object Inlines: tree, i"""|Maximal number of $reason (${setting.value}) exceeded, |Maybe this is caused by a recursive inline method? 
- |You can use ${setting.name} to change the limit.""", + |You can use ${setting.name} to change the limit.""".toMessage, (tree :: enclosingInlineds).last.srcPos ) if ctx.base.stopInlining && enclosingInlineds.isEmpty then diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala index 4611554a01a3..183845fcf3ec 100644 --- a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala @@ -822,7 +822,7 @@ object JavaParsers { val iface = atSpan(start, nameOffset) { TypeDef( name, - makeTemplate(parents, body, tparams, false)).withMods(mods | Flags.Trait | Flags.JavaInterface | Flags.Abstract) + makeTemplate(parents, body, tparams, false)).withMods(mods | Flags.JavaInterface) } addCompanionObject(statics, iface) } @@ -858,10 +858,9 @@ object JavaParsers { } (statics.toList, members.toList) } - def annotationParents: List[Select] = List( - scalaAnnotationDot(tpnme.Annotation), - Select(javaLangDot(nme.annotation), tpnme.Annotation), - scalaAnnotationDot(tpnme.ClassfileAnnotation) + def annotationParents: List[Tree] = List( + javaLangObject(), + Select(javaLangDot(nme.annotation), tpnme.Annotation) ) def annotationDecl(start: Offset, mods: Modifiers): List[Tree] = { accept(AT) @@ -877,7 +876,7 @@ object JavaParsers { List(constructorParams), TypeTree(), EmptyTree).withMods(Modifiers(Flags.JavaDefined)) val templ = makeTemplate(annotationParents, constr :: body, List(), true) val annot = atSpan(start, nameOffset) { - TypeDef(name, templ).withMods(mods | Flags.Abstract) + TypeDef(name, templ).withMods(mods | Flags.JavaInterface | Flags.JavaAnnotation) } addCompanionObject(statics, annot) } diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 309dd8a20aba..a198cccc85cc 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ 
b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -15,7 +15,7 @@ import core._ import Flags._ import Contexts._ import Names._ -import NameKinds.WildcardParamName +import NameKinds.{WildcardParamName, QualifiedName} import NameOps._ import ast.{Positioned, Trees} import ast.Trees._ @@ -30,7 +30,7 @@ import scala.annotation.tailrec import rewrites.Rewrites.{patch, overlapsPatch} import reporting._ import config.Feature -import config.Feature.{sourceVersion, migrateTo3} +import config.Feature.{sourceVersion, migrateTo3, globalOnlyImports} import config.SourceVersion._ import config.SourceVersion @@ -142,7 +142,12 @@ object Parsers { val length = if offset == in.offset && in.name != null then in.name.show.length else 0 syntaxError(msg, Span(offset, offset + length)) lastErrorOffset = in.offset - end if + + def syntaxError(msg: => String, offset: Int): Unit = + syntaxError(msg.toMessage, offset) + + def syntaxError(msg: => String): Unit = + syntaxError(msg, in.offset) /** Unconditionally issue an error at given span, without * updating lastErrorOffset. @@ -150,8 +155,11 @@ object Parsers { def syntaxError(msg: Message, span: Span): Unit = report.error(msg, source.atSpan(span)) + def syntaxError(msg: => String, span: Span): Unit = + syntaxError(msg.toMessage, span) + def unimplementedExpr(using Context): Select = - Select(Select(rootDot(nme.scala), nme.Predef), nme.???) + Select(scalaDot(nme.Predef), nme.???) 
} trait OutlineParserCommon extends ParserCommon { @@ -188,7 +196,7 @@ object Parsers { def isIdent = in.isIdent def isIdent(name: Name) = in.isIdent(name) - def isPureArrow(name: Name): Boolean = ctx.settings.Ycc.value && isIdent(name) + def isPureArrow(name: Name): Boolean = isIdent(name) && Feature.pureFunsEnabled def isPureArrow: Boolean = isPureArrow(nme.PUREARROW) || isPureArrow(nme.PURECTXARROW) def isErased = isIdent(nme.erased) && in.erasedEnabled def isSimpleLiteral = @@ -259,9 +267,6 @@ object Parsers { in.skip() lastErrorOffset = in.offset - def warning(msg: Message, sourcePos: SourcePosition): Unit = - report.warning(msg, sourcePos) - def warning(msg: Message, offset: Int = in.offset): Unit = report.warning(msg, source.atSpan(Span(offset))) @@ -283,6 +288,9 @@ object Parsers { syntaxError(msg, offset) skip() + def syntaxErrorOrIncomplete(msg: => String): Unit = + syntaxErrorOrIncomplete(msg.toMessage, in.offset) + def syntaxErrorOrIncomplete(msg: Message, span: Span): Unit = if in.token == EOF then incompleteInputError(msg) @@ -350,7 +358,7 @@ object Parsers { val statFollows = mustStartStatTokens.contains(found) syntaxError( if noPrevStat then IllegalStartOfStatement(what, isModifier, statFollows) - else i"end of $what expected but ${showToken(found)} found") + else i"end of $what expected but ${showToken(found)} found".toMessage) if mustStartStatTokens.contains(found) then false // it's a statement that might be legal in an outer context else @@ -610,11 +618,11 @@ object Parsers { if in.isNewLine && !(nextIndentWidth < startIndentWidth) then warning( if startIndentWidth <= nextIndentWidth then - i"""Line is indented too far to the right, or a `{` is missing before: + i"""Line is indented too far to the right, or a `{` is missing before: | - |${t.tryToShow}""" + |${t.tryToShow}""".toMessage else - in.spaceTabMismatchMsg(startIndentWidth, nextIndentWidth), + in.spaceTabMismatchMsg(startIndentWidth, nextIndentWidth).toMessage, in.next.offset ) t @@ 
-627,7 +635,7 @@ object Parsers { if in.isNewLine then val nextIndentWidth = in.indentWidth(in.next.offset) if in.currentRegion.indentWidth < nextIndentWidth then - warning(i"Line is indented too far to the right, or a `{` or `:` is missing", in.next.offset) + warning(i"Line is indented too far to the right, or a `{` or `:` is missing".toMessage, in.next.offset) /* -------- REWRITES ----------------------------------------------------------- */ @@ -770,7 +778,7 @@ object Parsers { } }) canRewrite &= (in.isAfterLineEnd || statCtdTokens.contains(in.token)) // test (5) - if (canRewrite && (!underColonSyntax || in.fewerBracesEnabled)) { + if canRewrite && (!underColonSyntax || Feature.fewerBracesEnabled) then val openingPatchStr = if !colonRequired then "" else if testChar(startOpening - 1, Chars.isOperatorPart(_)) then " :" @@ -778,7 +786,6 @@ object Parsers { val (startClosing, endClosing) = closingElimRegion() patch(source, Span(startOpening, endOpening), openingPatchStr) patch(source, Span(startClosing, endClosing), "") - } t } @@ -960,11 +967,11 @@ object Parsers { isArrowIndent() else false - /** Under -Ycc: is the following token sequuence a capture set `{ref1, ..., refN}` - * followed by a token that can start a type? + /** Under captureChecking language import: is the following token sequence a + * capture set `{ref1, ..., refN}` followed by a token that can start a type? 
*/ def followingIsCaptureSet(): Boolean = - ctx.settings.Ycc.value && { + Feature.ccEnabled && { val lookahead = in.LookaheadScanner() def followingIsTypeStart() = lookahead.nextToken() @@ -1017,7 +1024,7 @@ object Parsers { * body */ def isColonLambda = - in.fewerBracesEnabled && in.token == COLONfollow && followingIsLambdaAfterColon() + Feature.fewerBracesEnabled && in.token == COLONfollow && followingIsLambdaAfterColon() /** operand { infixop operand | MatchClause } [postfixop], * @@ -1436,9 +1443,12 @@ object Parsers { /** CaptureRef ::= ident | `this` */ def captureRef(): Tree = - if in.token == THIS then simpleRef() else termIdent() + if in.token == THIS then simpleRef() + else termIdent() match + case Ident(nme.CAPTURE_ROOT) => captureRoot + case id => id - /** CaptureSet ::= `{` CaptureRef {`,` CaptureRef} `}` -- under -Ycc + /** CaptureSet ::= `{` CaptureRef {`,` CaptureRef} `}` -- under captureChecking */ def captureSet(): List[Tree] = inBraces { if in.token == RBRACE then Nil else commaSeparated(captureRef) @@ -1449,12 +1459,12 @@ object Parsers { * | FunParamClause ‘=>>’ Type * | MatchType * | InfixType - * | CaptureSet Type -- under -Ycc + * | CaptureSet Type -- under captureChecking * FunType ::= (MonoFunType | PolyFunType) * MonoFunType ::= FunTypeArgs (‘=>’ | ‘?=>’) Type - * | (‘->’ | ‘?->’ ) Type -- under -Ycc + * | (‘->’ | ‘?->’ ) Type -- under pureFunctions * PolyFunType ::= HKTypeParamClause '=>' Type - * | HKTypeParamClause ‘->’ Type -- under -Ycc + * | HKTypeParamClause ‘->’ Type -- under pureFunctions * FunTypeArgs ::= InfixType * | `(' [ [ ‘[using]’ ‘['erased'] FunArgType {`,' FunArgType } ] `)' * | '(' [ ‘[using]’ ‘['erased'] TypedFunParam {',' TypedFunParam } ')' @@ -1474,8 +1484,9 @@ object Parsers { if !imods.flags.isEmpty || params.isEmpty then syntaxError(em"illegal parameter list for type lambda", start) token = ARROW - else if ctx.settings.Ycc.value then - // `=>` means impure function under -Ycc whereas `->` is a regular function. 
+ else if Feature.pureFunsEnabled then + // `=>` means impure function under pureFunctions or captureChecking + // language imports, whereas `->` is then a regular function. imods |= Impure if token == CTXARROW then @@ -1732,7 +1743,7 @@ object Parsers { Ident(tpnme.USCOREkw).withSpan(Span(start, in.lastOffset, start)) else if sourceVersion.isAtLeast(future) then - deprecationWarning(em"`_` is deprecated for wildcard arguments of types: use `?` instead") + deprecationWarning(em"`_` is deprecated for wildcard arguments of types: use `?` instead".toMessage) patch(source, Span(in.offset, in.offset + 1), "?") val start = in.skipToken() typeBounds().withSpan(Span(start, in.lastOffset, start)) @@ -1879,7 +1890,7 @@ object Parsers { if in.token == ARROW || isPureArrow(nme.PUREARROW) then val isImpure = in.token == ARROW val tp = atSpan(in.skipToken()) { ByNameTypeTree(core()) } - if isImpure && ctx.settings.Ycc.value then ImpureByNameTypeTree(tp) else tp + if isImpure && Feature.pureFunsEnabled then ImpureByNameTypeTree(tp) else tp else if in.token == LBRACE && followingIsCaptureSet() then val start = in.offset val cs = captureSet() @@ -2171,10 +2182,11 @@ object Parsers { else Literal(Constant(())) // finally without an expression } else { - if (handler.isEmpty) warning( - EmptyCatchAndFinallyBlock(body), - source.atSpan(Span(tryOffset, endOffset(body))) - ) + if handler.isEmpty then + report.warning( + EmptyCatchAndFinallyBlock(body), + source.atSpan(Span(tryOffset, endOffset(body))) + ) EmptyTree } ParsedTry(body, handler, finalizer) @@ -2357,7 +2369,7 @@ object Parsers { /** PostfixExpr ::= InfixExpr [id [nl]] * InfixExpr ::= PrefixExpr * | InfixExpr id [nl] InfixExpr - * | InfixExpr id `:` IndentedExpr + * | InfixExpr id ColonArgument * | InfixExpr MatchClause */ def postfixExpr(location: Location = Location.ElseWhere): Tree = @@ -2401,10 +2413,11 @@ object Parsers { * | SimpleExpr `.` MatchClause * | SimpleExpr (TypeArgs | NamedTypeArgs) * | SimpleExpr1 
ArgumentExprs - * | SimpleExpr1 `:` ColonArgument -- under language.experimental.fewerBraces - * ColonArgument ::= indent (CaseClauses | Block) outdent - * | FunParams (‘=>’ | ‘?=>’) ColonArgBody - * | HkTypeParamClause ‘=>’ ColonArgBody + * | SimpleExpr1 ColonArgument + * ColonArgument ::= colon [LambdaStart] + * indent (CaseClauses | Block) outdent + * LambdaStart ::= FunParams (‘=>’ | ‘?=>’) + * | HkTypeParamClause ‘=>’ * ColonArgBody ::= indent (CaseClauses | Block) outdent * Quoted ::= ‘'’ ‘{’ Block ‘}’ * | ‘'’ ‘[’ Type ‘]’ @@ -2768,7 +2781,7 @@ object Parsers { warning(i"""Misleading indentation: this expression forms part of the preceding catch case. |If this is intended, it should be indented for clarity. |Otherwise, if the handler is intended to be empty, use a multi-line catch with - |an indented case.""") + |an indented case.""".toMessage) expr() else block() }) @@ -2809,11 +2822,14 @@ object Parsers { if (isIdent(nme.raw.BAR)) { in.nextToken(); pattern1(location) :: patternAlts(location) } else Nil - /** Pattern1 ::= Pattern2 [Ascription] + /** Pattern1 ::= PatVar Ascription + * | [‘-’] integerLiteral Ascription + * | [‘-’] floatingPointLiteral Ascription + * | Pattern2 */ def pattern1(location: Location = Location.InPattern): Tree = val p = pattern2() - if in.isColon then + if (isVarPattern(p) || p.isInstanceOf[Number]) && in.isColon then in.nextToken() ascription(p, location) else p @@ -2989,7 +3005,8 @@ object Parsers { inBrackets { if in.token == THIS then if sourceVersion.isAtLeast(future) then - deprecationWarning("The [this] qualifier will be deprecated in the future; it should be dropped.") + deprecationWarning( + "The [this] qualifier will be deprecated in the future; it should be dropped.".toMessage) in.nextToken() mods | Local else mods.withPrivateWithin(ident().toTypeName) @@ -3294,25 +3311,25 @@ object Parsers { languageImport(tree) match case Some(prefix) => in.languageImportContext = in.languageImportContext.importContext(imp, NoSymbol) - 
for - case ImportSelector(id @ Ident(imported), EmptyTree, _) <- selectors - if allSourceVersionNames.contains(imported) - do - if !outermost then - syntaxError(i"source version import is only allowed at the toplevel", id.span) - else if ctx.compilationUnit.sourceVersion.isDefined then - syntaxError(i"duplicate source version import", id.span) - else if illegalSourceVersionNames.contains(imported) then - val candidate = - val nonMigration = imported.toString.replace("-migration", "") - validSourceVersionNames.find(_.show == nonMigration) - val baseMsg = i"`$imported` is not a valid source version" - val msg = candidate match - case Some(member) => i"$baseMsg, did you mean language.`$member`?" - case _ => baseMsg - syntaxError(msg, id.span) - else - ctx.compilationUnit.sourceVersion = Some(SourceVersion.valueOf(imported.toString)) + for case ImportSelector(id @ Ident(imported), EmptyTree, _) <- selectors do + if Feature.handleGlobalLanguageImport(prefix, imported) && !outermost then + syntaxError(i"this language import is only allowed at the toplevel", id.span) + if allSourceVersionNames.contains(imported) && prefix.isEmpty then + if !outermost then + syntaxError(i"source version import is only allowed at the toplevel", id.span) + else if ctx.compilationUnit.sourceVersion.isDefined then + syntaxError(i"duplicate source version import", id.span) + else if illegalSourceVersionNames.contains(imported) then + val candidate = + val nonMigration = imported.toString.replace("-migration", "") + validSourceVersionNames.find(_.show == nonMigration) + val baseMsg = i"`$imported` is not a valid source version" + val msg = candidate match + case Some(member) => i"$baseMsg, did you mean language.`$member`?" 
+ case _ => baseMsg + syntaxError(msg, id.span) + else + ctx.compilationUnit.sourceVersion = Some(SourceVersion.valueOf(imported.toString)) case None => imp @@ -3471,7 +3488,8 @@ object Parsers { if sourceVersion.isAtLeast(future) then deprecationWarning( em"""`= _` has been deprecated; use `= uninitialized` instead. - |`uninitialized` can be imported with `scala.compiletime.uninitialized`.""", rhsOffset) + |`uninitialized` can be imported with `scala.compiletime.uninitialized`.""".toMessage, + rhsOffset) placeholderParams = placeholderParams.tail atSpan(rhs0.span) { Ident(nme.WILDCARD) } case rhs0 => rhs0 @@ -3792,7 +3810,7 @@ object Parsers { if !(name.isEmpty && noParams) then acceptColon() val parents = if isSimpleLiteral then rejectWildcardType(annotType()) :: Nil - else constrApp() :: withConstrApps() + else refinedTypeRest(constrApp()) :: withConstrApps() val parentsIsType = parents.length == 1 && parents.head.isType if in.token == EQUALS && parentsIsType then accept(EQUALS) diff --git a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala index 082112d800d9..0540ef27a4d3 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala @@ -17,7 +17,7 @@ import scala.collection.mutable import scala.collection.immutable.SortedMap import rewrites.Rewrites.patch import config.Feature -import config.Feature.migrateTo3 +import config.Feature.{migrateTo3, fewerBracesEnabled} import config.SourceVersion.`3.0` import reporting.{NoProfile, Profile} @@ -112,7 +112,7 @@ object Scanners { /** signal an error where the input ended in the middle of a token */ def incompleteInputError(msg: String): Unit = { - report.incompleteInputError(msg, sourcePos()) + report.incompleteInputError(msg.toMessage, sourcePos()) token = EOF errOffset = offset } @@ -202,25 +202,6 @@ object Scanners { def featureEnabled(name: TermName) = Feature.enabled(name)(using 
languageImportContext) def erasedEnabled = featureEnabled(Feature.erasedDefinitions) - private inline val fewerBracesByDefault = false - // turn on to study impact on codebase if `fewerBraces` was the default - - private var fewerBracesEnabledCache = false - private var fewerBracesEnabledCtx: Context = NoContext - - def fewerBracesEnabled = - if fewerBracesEnabledCtx ne myLanguageImportContext then - fewerBracesEnabledCache = - featureEnabled(Feature.fewerBraces) - || fewerBracesByDefault && indentSyntax && !migrateTo3 - // ensure that fewer braces is not the default for 3.0-migration since - // { x: T => - // expr - // } - // would be ambiguous - fewerBracesEnabledCtx = myLanguageImportContext - fewerBracesEnabledCache - private var postfixOpsEnabledCache = false private var postfixOpsEnabledCtx: Context = NoContext diff --git a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala index 13bcfcb511df..7d27b3ca82b9 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala @@ -247,7 +247,7 @@ object Tokens extends TokensCommon { final val modifierTokensOrCase: TokenSet = modifierTokens | BitSet(CASE) - final val modifierFollowers = modifierTokensOrCase | defIntroTokens + final val modifierFollowers = modifierTokens | defIntroTokens /** Is token only legal as start of statement (eof also included)? 
*/ final val mustStartStatTokens: TokenSet = defIntroTokens | modifierTokens | BitSet(IMPORT, EXPORT, PACKAGE) diff --git a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala index 591042961dbb..3d9f5fb7ad6d 100644 --- a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala @@ -13,6 +13,7 @@ import Parsers._ import util.Spans._ import core._ import Constants._ +import Decorators.toMessage import util.SourceFile import Utility._ @@ -379,7 +380,7 @@ object MarkupParsers { ts(0) } }, - msg => parser.incompleteInputError(msg) + msg => parser.incompleteInputError(msg.toMessage) ) /** @see xmlPattern. resynchronizes after successful parse diff --git a/compiler/src/dotty/tools/dotc/printing/Formatting.scala b/compiler/src/dotty/tools/dotc/printing/Formatting.scala index 348390d9c7e2..f85845517d8c 100644 --- a/compiler/src/dotty/tools/dotc/printing/Formatting.scala +++ b/compiler/src/dotty/tools/dotc/printing/Formatting.scala @@ -58,9 +58,9 @@ object Formatting { def show(x: Seq[X]) = new CtxShow: def run(using Context) = x.map(show1) - given [A: Show, B: Show]: Show[(A, B)] with - def show(x: (A, B)) = new CtxShow: - def run(using Context) = (show1(x._1), show1(x._2)) + given [H: Show, T <: Tuple: Show]: Show[H *: T] with + def show(x: H *: T) = new CtxShow: + def run(using Context) = show1(x.head) *: Show[T].show(x.tail).ctxShow.asInstanceOf[Tuple] given [X: Show]: Show[X | Null] with def show(x: X | Null) = if x == null then "null" else Show[X].show(x.nn) diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index d62b7afef707..f0479f818c9f 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -14,7 +14,7 @@ import Variances.varianceSign import util.SourcePosition 
import scala.util.control.NonFatal import scala.annotation.switch -import config.Config +import config.{Config, Feature} import cc.{CapturingType, EventuallyCapturingType, CaptureSet, isBoxed} class PlainPrinter(_ctx: Context) extends Printer { @@ -111,8 +111,14 @@ class PlainPrinter(_ctx: Context) extends Printer { protected def refinementNameString(tp: RefinedType): String = nameString(tp.refinedName) /** String representation of a refinement */ - protected def toTextRefinement(rt: RefinedType): Closed = - (refinementNameString(rt) ~ toTextRHS(rt.refinedInfo)).close + protected def toTextRefinement(rt: RefinedType): Text = + val keyword = rt.refinedInfo match { + case _: ExprType | _: MethodOrPoly => "def " + case _: TypeBounds => "type " + case _: TypeProxy => "val " + case _ => "" + } + (keyword ~ refinementNameString(rt) ~ toTextRHS(rt.refinedInfo)).close protected def argText(arg: Type): Text = homogenizeArg(arg) match { case arg: TypeBounds => "?" ~ toText(arg) @@ -242,7 +248,7 @@ class PlainPrinter(_ctx: Context) extends Printer { else toText(CapturingType(ExprType(parent), refs)) case ExprType(restp) => changePrec(GlobalPrec) { - (if ctx.settings.Ycc.value then "-> " else "=> ") ~ toText(restp) + (if Feature.pureFunsEnabled then "-> " else "=> ") ~ toText(restp) } case tp: HKTypeLambda => changePrec(GlobalPrec) { @@ -258,8 +264,9 @@ class PlainPrinter(_ctx: Context) extends Printer { if annot.symbol == defn.InlineParamAnnot || annot.symbol == defn.ErasedParamAnnot then toText(tpe) else toTextLocal(tpe) ~ " " ~ toText(annot) case tp: TypeVar => + def toTextCaret(tp: Type) = if printDebug then toTextLocal(tp) ~ Str("^") else toText(tp) if (tp.isInstantiated) - toTextLocal(tp.instanceOpt) ~ (Str("^") provided printDebug) + toTextCaret(tp.instanceOpt) else { val constr = ctx.typerState.constraint val bounds = @@ -267,7 +274,7 @@ class PlainPrinter(_ctx: Context) extends Printer { withMode(Mode.Printing)(TypeComparer.fullBounds(tp.origin)) else TypeBounds.empty 
- if (bounds.isTypeAlias) toText(bounds.lo) ~ (Str("^") provided printDebug) + if (bounds.isTypeAlias) toTextCaret(bounds.lo) else if (ctx.settings.YshowVarBounds.value) "(" ~ toText(tp.origin) ~ "?" ~ toText(bounds) ~ ")" else toText(tp.origin) } @@ -278,6 +285,8 @@ class PlainPrinter(_ctx: Context) extends Printer { case ex: Throwable => Str("...") } "LazyRef(" ~ refTxt ~ ")" + case Range(lo, hi) => + toText(lo) ~ ".." ~ toText(hi) case _ => tp.fallbackToText(this) } @@ -376,6 +385,7 @@ class PlainPrinter(_ctx: Context) extends Printer { def toTextCaptureRef(tp: Type): Text = homogenize(tp) match + case tp: TermRef if tp.symbol == defn.captureRoot => Str("*") case tp: SingletonType => toTextRef(tp) case _ => toText(tp) @@ -606,7 +616,7 @@ class PlainPrinter(_ctx: Context) extends Printer { def toText(sc: Scope): Text = ("Scope{" ~ dclsText(sc.toList) ~ "}").close - def toText[T >: Untyped](tree: Tree[T]): Text = { + def toText[T <: Untyped](tree: Tree[T]): Text = { def toTextElem(elem: Any): Text = elem match { case elem: Showable => elem.toText(this) case elem: List[?] 
=> "List(" ~ Text(elem map toTextElem, ",") ~ ")" @@ -688,11 +698,18 @@ class PlainPrinter(_ctx: Context) extends Printer { Text(ups.map(toText), ", ") Text(deps, "\n") } + val depsText = if Config.showConstraintDeps then c.depsToString else "" //Printer.debugPrintUnique = false - Text.lines(List(uninstVarsText, constrainedText, boundsText, orderingText)) + Text.lines(List(uninstVarsText, constrainedText, boundsText, orderingText, depsText)) finally ctx.typerState.constraint = savedConstraint + def toText(g: GadtConstraint): Text = + val deps = for sym <- g.symbols yield + val bound = g.fullBounds(sym).nn + (typeText(toText(sym.typeRef)) ~ toText(bound)).close + ("GadtConstraint(" ~ Text(deps, ", ") ~ ")").close + def plain: PlainPrinter = this protected def keywordStr(text: String): String = coloredStr(text, SyntaxHighlighting.KeywordColor) diff --git a/compiler/src/dotty/tools/dotc/printing/Printer.scala b/compiler/src/dotty/tools/dotc/printing/Printer.scala index b883b6be805b..326630844dde 100644 --- a/compiler/src/dotty/tools/dotc/printing/Printer.scala +++ b/compiler/src/dotty/tools/dotc/printing/Printer.scala @@ -31,7 +31,7 @@ abstract class Printer { * ### `atPrec` vs `changePrec` * * This is to be used when changing precedence inside some sort of parentheses: - * for instance, to print T[A]` use + * for instance, to print `T[A]` use * `toText(T) ~ '[' ~ atPrec(GlobalPrec) { toText(A) } ~ ']'`. * * If the presence of the parentheses depends on precedence, inserting them manually is most certainly a bug. @@ -60,8 +60,7 @@ abstract class Printer { * A op B op' C parses as (A op B) op' C if op and op' are left-associative, and as * A op (B op' C) if they're right-associative, so we need respectively * ```scala - * val isType = ??? // is this a term or type operator? 
- * val prec = parsing.precedence(op, isType) + * val prec = parsing.precedence(op) * // either: * changePrec(prec) { toText(a) ~ op ~ atPrec(prec + 1) { toText(b) } } // for left-associative op and op' * // or: @@ -149,7 +148,7 @@ abstract class Printer { def toText(sc: Scope): Text /** Textual representation of tree */ - def toText[T >: Untyped](tree: Tree[T]): Text + def toText[T <: Untyped](tree: Tree[T]): Text /** Textual representation of source position */ def toText(pos: SourcePosition): Text @@ -163,6 +162,9 @@ abstract class Printer { /** Textual representation of a constraint */ def toText(c: OrderingConstraint): Text + /** Textual representation of a GADT constraint */ + def toText(c: GadtConstraint): Text + /** Render element within highest precedence */ def toTextLocal(elem: Showable): Text = atPrec(DotPrec) { elem.toText(this) } diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 619bfafeb775..62e1cd5baec8 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -24,7 +24,7 @@ import NameKinds.{WildcardParamName, DefaultGetterName} import util.Chars.isOperatorPart import transform.TypeUtils._ import transform.SymUtils._ -import config.Config +import config.{Config, Feature} import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.ast.untpd.{MemberDef, Modifiers, PackageDef, RefTree, Template, TypeDef, ValOrDefDef} @@ -40,7 +40,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { override def printerContext: Context = myCtx - def withEnclosingDef(enclDef: Tree[? 
>: Untyped])(op: => Text): Text = { + def withEnclosingDef(enclDef: Tree[?])(op: => Text): Text = { val savedCtx = myCtx if (enclDef.hasType && enclDef.symbol.exists) myCtx = ctx.withOwner(enclDef.symbol) @@ -58,6 +58,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { try op finally myCtx = savedCtx } + inline def inContextBracket(inline op: Text): Text = + val savedCtx = myCtx + try op finally myCtx = savedCtx + def withoutPos(op: => Text): Text = { val savedPrintPos = printPos printPos = false @@ -221,7 +225,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { if tycon.isRepeatedParam then toTextLocal(args.head) ~ "*" else if defn.isFunctionSymbol(tsym) then toTextFunction(args, tsym.name.isContextFunction, tsym.name.isErasedFunction, - isPure = ctx.settings.Ycc.value && !tsym.name.isImpureFunction) + isPure = Feature.pureFunsEnabled && !tsym.name.isImpureFunction) else if isInfixType(tp) then val l :: r :: Nil = args: @unchecked val opName = tyconName(tycon) @@ -248,7 +252,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { toText(tycon) case tp: RefinedType if defn.isFunctionOrPolyType(tp) && !printDebug => toTextMethodAsFunction(tp.refinedInfo, - isPure = ctx.settings.Ycc.value && !tp.typeSymbol.name.isImpureFunction) + isPure = Feature.pureFunsEnabled && !tp.typeSymbol.name.isImpureFunction) case tp: TypeRef => if (tp.symbol.isAnonymousClass && !showUniqueIds) toText(tp.info) @@ -272,6 +276,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case tp: LazyRef if !printDebug => try toText(tp.ref) catch case ex: Throwable => "..." 
+ case AnySelectionProto => + "a type that can be selected or applied" case tp: SelectionProto => "?{ " ~ toText(tp.name) ~ (Str(" ") provided !tp.name.toSimpleName.last.isLetterOrDigit) ~ @@ -302,13 +308,15 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { protected def exprToText(tp: ExprType): Text = "=> " ~ toText(tp.resType) - protected def blockToText[T >: Untyped](block: Block[T]): Text = + protected def blockToText[T <: Untyped](block: Block[T]): Text = blockText(block.stats :+ block.expr) - protected def blockText[T >: Untyped](trees: List[Tree[T]]): Text = - ("{" ~ toText(trees, "\n") ~ "}").close + protected def blockText[T <: Untyped](trees: List[Tree[T]]): Text = + inContextBracket { + ("{" ~ toText(trees, "\n") ~ "}").close + } - protected def typeApplyText[T >: Untyped](tree: TypeApply[T]): Text = { + protected def typeApplyText[T <: Untyped](tree: TypeApply[T]): Text = { val funText = toTextLocal(tree.fun) tree.fun match { case Select(New(tpt), nme.CONSTRUCTOR) if tpt.typeOpt.dealias.isInstanceOf[AppliedType] => @@ -318,7 +326,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } } - protected def toTextCore[T >: Untyped](tree: Tree[T]): Text = { + protected def toTextCore[T <: Untyped](tree: Tree[T]): Text = { import untpd._ def isLocalThis(tree: Tree) = tree.typeOpt match { @@ -554,7 +562,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { (" <: " ~ toText(bound) provided !bound.isEmpty) } case ByNameTypeTree(tpt) => - (if ctx.settings.Ycc.value then "-> " else "=> ") + (if Feature.pureFunsEnabled then "-> " else "=> ") ~ toTextLocal(tpt) case TypeBoundsTree(lo, hi, alias) => if (lo eq hi) && alias.isEmpty then optText(lo)(" = " ~ _) @@ -596,7 +604,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { typeDefText(tparamsTxt, optText(rhs)(" = " ~ _)) } recur(rhs, "", true) - case Import(expr, selectors) => + case tree @ Import(expr, selectors) => + myCtx = 
myCtx.importContext(tree, tree.symbol) keywordText("import ") ~ importText(expr, selectors) case Export(expr, selectors) => keywordText("export ") ~ importText(expr, selectors) @@ -616,7 +625,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { try changePrec(GlobalPrec)(toText(captureSet) ~ " " ~ toText(arg)) catch case ex: IllegalCaptureRef => toTextAnnot if annot.symbol.maybeOwner == defn.RetainsAnnot - && ctx.settings.Ycc.value && Config.printCaptureSetsAsPrefix && !printDebug + && Feature.ccEnabled && Config.printCaptureSetsAsPrefix && !printDebug then toTextRetainsAnnot else toTextAnnot case EmptyTree => @@ -662,7 +671,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { ~ ")" } val isPure = - ctx.settings.Ycc.value + Feature.pureFunsEnabled && tree.match case tree: FunctionWithMods => !tree.mods.is(Impure) case _ => true @@ -730,7 +739,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } } - override def toText[T >: Untyped](tree: Tree[T]): Text = controlled { + override def toText[T <: Untyped](tree: Tree[T]): Text = controlled { import untpd._ var txt = toTextCore(tree) @@ -817,7 +826,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { protected def dropAnnotForModText(sym: Symbol): Boolean = sym == defn.BodyAnnot - protected def optAscription[T >: Untyped](tpt: Tree[T]): Text = optText(tpt)(": " ~ _) + protected def optAscription[T <: Untyped](tpt: Tree[T]): Text = optText(tpt)(": " ~ _) private def idText(tree: untpd.Tree): Text = (if showUniqueIds && tree.hasType && tree.symbol.exists then s"#${tree.symbol.id}" else "") ~ @@ -833,7 +842,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { private def useSymbol(tree: untpd.Tree) = tree.hasType && tree.symbol.exists && ctx.settings.YprintSyms.value - protected def nameIdText[T >: Untyped](tree: NameTree[T]): Text = + protected def nameIdText[T <: Untyped](tree: NameTree[T]): Text = if (tree.hasType && 
tree.symbol.exists) { val str = nameString(tree.symbol) tree match { @@ -847,13 +856,13 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { private def toTextOwner(tree: Tree[?]) = "[owner = " ~ tree.symbol.maybeOwner.show ~ "]" provided ctx.settings.YprintDebugOwners.value - protected def dclTextOr[T >: Untyped](tree: Tree[T])(treeText: => Text): Text = + protected def dclTextOr[T <: Untyped](tree: Tree[T])(treeText: => Text): Text = toTextOwner(tree) ~ { if (useSymbol(tree)) annotsText(tree.symbol) ~~ dclText(tree.symbol) else treeText } - def paramsText[T>: Untyped](params: ParamClause[T]): Text = (params: @unchecked) match + def paramsText[T <: Untyped](params: ParamClause[T]): Text = (params: @unchecked) match case Nil => "()" case untpd.ValDefs(vparams @ (vparam :: _)) => @@ -863,10 +872,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case untpd.TypeDefs(tparams) => "[" ~ toText(tparams, ", ") ~ "]" - def addParamssText[T >: Untyped](leading: Text, paramss: List[ParamClause[T]]): Text = + def addParamssText[T <: Untyped](leading: Text, paramss: List[ParamClause[T]]): Text = paramss.foldLeft(leading)((txt, params) => txt ~ paramsText(params)) - protected def valDefToText[T >: Untyped](tree: ValDef[T]): Text = { + protected def valDefToText[T <: Untyped](tree: ValDef[T]): Text = { dclTextOr(tree) { modText(tree.mods, tree.symbol, keywordStr(if (tree.mods.is(Mutable)) "var" else "val"), isType = false) ~~ valDefText(nameIdText(tree)) ~ optAscription(tree.tpt) ~ @@ -874,7 +883,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } } - protected def defDefToText[T >: Untyped](tree: DefDef[T]): Text = { + protected def defDefToText[T <: Untyped](tree: DefDef[T]): Text = { import untpd._ dclTextOr(tree) { val defKeyword = modText(tree.mods, tree.symbol, keywordStr("def"), isType = false) @@ -963,7 +972,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } else impl.body - val bodyText = " {" 
~~ selfText ~ toTextGlobal(primaryConstrs ::: body, "\n") ~ "}" + val bodyText = inContextBracket( + " {" ~~ selfText ~ toTextGlobal(primaryConstrs ::: body, "\n") ~ "}") prefix ~ keywordText(" extends").provided(!ofNew && impl.parents.nonEmpty) ~~ parentsText ~ @@ -979,14 +989,14 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { ) } - protected def toTextPackageId[T >: Untyped](pid: Tree[T]): Text = - if (homogenizedView && pid.hasType) toTextLocal(pid.tpe.asInstanceOf[Showable]) + protected def toTextPackageId[T <: Untyped](pid: Tree[T]): Text = + if (homogenizedView && pid.hasType) toTextLocal(pid.typeOpt) else toTextLocal(pid) protected def packageDefText(tree: PackageDef): Text = { val statsText = tree.stats match { case (pdef: PackageDef) :: Nil => toText(pdef) - case _ => toTextGlobal(tree.stats, "\n") + case _ => inContextBracket(toTextGlobal(tree.stats, "\n")) } val bodyText = if (currentPrecedence == TopLevelPrec) "\n" ~ statsText else " {" ~ statsText ~ "}" @@ -1034,10 +1044,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def optText(name: Name)(encl: Text => Text): Text = if (name.isEmpty) "" else encl(toText(name)) - def optText[T >: Untyped](tree: Tree[T])(encl: Text => Text): Text = + def optText[T <: Untyped](tree: Tree[T])(encl: Text => Text): Text = if (tree.isEmpty) "" else encl(toText(tree)) - def optText[T >: Untyped](tree: List[Tree[T]])(encl: Text => Text): Text = + def optText[T <: Untyped](tree: List[Tree[T]])(encl: Text => Text): Text = if (tree.exists(!_.isEmpty)) encl(blockText(tree)) else "" override protected def treatAsTypeParam(sym: Symbol): Boolean = sym.is(TypeParam) diff --git a/compiler/src/dotty/tools/dotc/printing/Texts.scala b/compiler/src/dotty/tools/dotc/printing/Texts.scala index 17f86e766869..411fa74ebffa 100644 --- a/compiler/src/dotty/tools/dotc/printing/Texts.scala +++ b/compiler/src/dotty/tools/dotc/printing/Texts.scala @@ -15,12 +15,17 @@ object Texts { case Vertical(relems) => 
relems.isEmpty } + // Str Ver Clo Flu + // isVertical F T F F + // isClosed F T T F + // isFluid F F T T + // isSplittable F F F T def isVertical: Boolean = isInstanceOf[Vertical] def isClosed: Boolean = isVertical || isInstanceOf[Closed] def isFluid: Boolean = isInstanceOf[Fluid] def isSplittable: Boolean = isFluid && !isClosed - def close: Closed = new Closed(relems) + def close: Text = if isSplittable then Closed(relems) else this def remaining(width: Int): Int = this match { case Str(s, _) => @@ -53,7 +58,7 @@ object Texts { } private def appendIndented(that: Text)(width: Int): Text = - Vertical(that.layout(width - indentMargin).indented :: this.relems) + Fluid(that.layout(width - indentMargin).indented :: this.relems) private def append(width: Int)(that: Text): Text = if (this.isEmpty) that.layout(width) @@ -113,7 +118,7 @@ object Texts { sb.append("|") } } - sb.append(s) + sb.append(s.replaceAll("[ ]+$", "")) case _ => var follow = false for (elem <- relems.reverse) { @@ -138,7 +143,13 @@ object Texts { def ~ (that: Text): Text = if (this.isEmpty) that else if (that.isEmpty) this - else Fluid(that :: this :: Nil) + else this match + case Fluid(relems1) if !isClosed => that match + case Fluid(relems2) if !that.isClosed => Fluid(relems2 ++ relems1) + case _ => Fluid(that +: relems1) + case _ => that match + case Fluid(relems2) if !that.isClosed => Fluid(relems2 :+ this) + case _ => Fluid(that :: this :: Nil) def ~~ (that: Text): Text = if (this.isEmpty) that @@ -161,9 +172,9 @@ object Texts { def apply(xs: Traversable[Text], sep: String = " "): Text = if (sep == "\n") lines(xs) else { - val ys = xs filterNot (_.isEmpty) + val ys = xs.filterNot(_.isEmpty) if (ys.isEmpty) Str("") - else ys reduce (_ ~ sep ~ _) + else ys.reduceRight((a, b) => (a ~ sep).close ~ b) } /** The given texts `xs`, each on a separate line */ @@ -176,12 +187,16 @@ object Texts { case class Str(s: String, lineRange: LineRange = EmptyLineRange) extends Text { override def relems: List[Text] 
= List(this) + override def toString = this match + case Str(s, EmptyLineRange) => s"Str($s)" + case Str(s, lineRange) => s"Str($s, $lineRange)" } case class Vertical(relems: List[Text]) extends Text case class Fluid(relems: List[Text]) extends Text - class Closed(relems: List[Text]) extends Fluid(relems) + class Closed(relems: List[Text]) extends Fluid(relems): + override def productPrefix = "Closed" implicit def stringToText(s: String): Text = Str(s) diff --git a/compiler/src/dotty/tools/dotc/profile/ExtendedThreadMxBean.java b/compiler/src/dotty/tools/dotc/profile/ExtendedThreadMxBean.java index 68ae4f148cfd..60f44db16add 100644 --- a/compiler/src/dotty/tools/dotc/profile/ExtendedThreadMxBean.java +++ b/compiler/src/dotty/tools/dotc/profile/ExtendedThreadMxBean.java @@ -248,13 +248,14 @@ public SunThreadMxBean(ThreadMXBean underlying) { super(underlying); this.real = underlying; try { - getThreadUserTimeMethod = real.getClass().getMethod("getThreadUserTime", long[].class); - isThreadAllocatedMemoryEnabledMethod = real.getClass().getMethod("isThreadAllocatedMemoryEnabled"); - setThreadAllocatedMemoryEnabledMethod = real.getClass().getMethod("setThreadAllocatedMemoryEnabled", Boolean.TYPE); - getThreadAllocatedBytesMethod1 = real.getClass().getMethod("getThreadAllocatedBytes", Long.TYPE); - getThreadAllocatedBytesMethod2 = real.getClass().getMethod("getThreadAllocatedBytes", long[].class); - isThreadAllocatedMemorySupportedMethod = real.getClass().getMethod("isThreadAllocatedMemorySupported"); - getThreadCpuTimeMethod = real.getClass().getMethod("getThreadCpuTime", long[].class); + Class cls = Class.forName("com.sun.management.ThreadMXBean"); + getThreadUserTimeMethod = cls.getMethod("getThreadUserTime", long[].class); + isThreadAllocatedMemoryEnabledMethod = cls.getMethod("isThreadAllocatedMemoryEnabled"); + setThreadAllocatedMemoryEnabledMethod = cls.getMethod("setThreadAllocatedMemoryEnabled", Boolean.TYPE); + getThreadAllocatedBytesMethod1 = 
cls.getMethod("getThreadAllocatedBytes", Long.TYPE); + getThreadAllocatedBytesMethod2 = cls.getMethod("getThreadAllocatedBytes", long[].class); + isThreadAllocatedMemorySupportedMethod = cls.getMethod("isThreadAllocatedMemorySupported"); + getThreadCpuTimeMethod = cls.getMethod("getThreadCpuTime", long[].class); getThreadUserTimeMethod.setAccessible(true); isThreadAllocatedMemoryEnabledMethod.setAccessible(true); diff --git a/compiler/src/dotty/tools/dotc/profile/Profiler.scala b/compiler/src/dotty/tools/dotc/profile/Profiler.scala index 0283fb904476..25c53903c10b 100644 --- a/compiler/src/dotty/tools/dotc/profile/Profiler.scala +++ b/compiler/src/dotty/tools/dotc/profile/Profiler.scala @@ -103,6 +103,7 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) private val mainThread = Thread.currentThread() + @nowarn("cat=deprecation") private[profile] def snapThread(idleTimeNanos: Long): ProfileSnap = { import RealProfiler._ val current = Thread.currentThread() @@ -245,6 +246,7 @@ class StreamProfileReporter(out:PrintWriter) extends ProfileReporter { reportCommon(EventType.BACKGROUND, profiler, threadRange) override def reportForeground(profiler: RealProfiler, threadRange: ProfileRange): Unit = reportCommon(EventType.MAIN, profiler, threadRange) + @nowarn("cat=deprecation") private def reportCommon(tpe:EventType, profiler: RealProfiler, threadRange: ProfileRange): Unit = out.println(s"$tpe,${threadRange.start.snapTimeNanos},${threadRange.end.snapTimeNanos},${profiler.id},${threadRange.phase.id},${threadRange.phase.phaseName.replace(',', ' ')},${threadRange.purpose},${threadRange.taskCount},${threadRange.thread.getId},${threadRange.thread.getName},${threadRange.runNs},${threadRange.idleNs},${threadRange.cpuNs},${threadRange.userNs},${threadRange.allocatedBytes},${threadRange.end.heapBytes} ") diff --git a/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala b/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala new file mode 100644 
index 000000000000..5a9490c3723e --- /dev/null +++ b/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala @@ -0,0 +1,370 @@ +package dotty.tools.dotc +package quoted + +import scala.language.unsafeNulls + +import scala.collection.mutable +import scala.reflect.ClassTag + +import java.io.{PrintWriter, StringWriter} +import java.lang.reflect.{InvocationTargetException, Method => JLRMethod} + +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.ast.TreeMapWithImplicits +import dotty.tools.dotc.core.Annotations._ +import dotty.tools.dotc.core.Constants._ +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Denotations.staticRef +import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.NameKinds.FlatName +import dotty.tools.dotc.core.Names._ +import dotty.tools.dotc.core.StagingContext._ +import dotty.tools.dotc.core.StdNames._ +import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.TypeErasure +import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.quoted._ +import dotty.tools.dotc.transform.TreeMapWithStages._ +import dotty.tools.dotc.typer.ImportInfo.withRootImports +import dotty.tools.dotc.util.SrcPos +import dotty.tools.repl.AbstractFileClassLoader + +/** Tree interpreter for metaprogramming constructs */ +abstract class Interpreter(pos: SrcPos, classLoader: ClassLoader)(using Context): + import Interpreter._ + import tpd._ + + type Env = Map[Symbol, Object] + + /** Returns the result of interpreting the code in the tree. + * Return Some of the result or None if the result type is not consistent with the expected type. + * Throws a StopInterpretation if the tree could not be interpreted or a runtime exception ocurred. 
+ */ + final def interpret[T](tree: Tree)(implicit ct: ClassTag[T]): Option[T] = + interpretTree(tree)(Map.empty) match { + case obj: T => Some(obj) + case obj => + // TODO upgrade to a full type tag check or something similar + report.error(s"Interpreted tree returned a result of an unexpected type. Expected ${ct.runtimeClass} but was ${obj.getClass}", pos) + None + } + + /** Returns the result of interpreting the code in the tree. + * Throws a StopInterpretation if the tree could not be interpreted or a runtime exception ocurred. + */ + protected def interpretTree(tree: Tree)(implicit env: Env): Object = tree match { + case Literal(Constant(value)) => + interpretLiteral(value) + + case tree: Ident if tree.symbol.is(Inline, butNot = Method) => + tree.tpe.widenTermRefExpr match + case ConstantType(c) => c.value.asInstanceOf[Object] + case _ => throw new StopInterpretation(em"${tree.symbol} could not be inlined", tree.srcPos) + + // TODO disallow interpreted method calls as arguments + case Call(fn, args) => + if (fn.symbol.isConstructor && fn.symbol.owner.owner.is(Package)) + interpretNew(fn.symbol, args.flatten.map(interpretTree)) + else if (fn.symbol.is(Module)) + interpretModuleAccess(fn.symbol) + else if (fn.symbol.is(Method) && fn.symbol.isStatic) { + val staticMethodCall = interpretedStaticMethodCall(fn.symbol.owner, fn.symbol) + staticMethodCall(interpretArgs(args, fn.symbol.info)) + } + else if fn.symbol.isStatic then + assert(args.isEmpty) + interpretedStaticFieldAccess(fn.symbol) + else if (fn.qualifier.symbol.is(Module) && fn.qualifier.symbol.isStatic) + if (fn.name == nme.asInstanceOfPM) + interpretModuleAccess(fn.qualifier.symbol) + else { + val staticMethodCall = interpretedStaticMethodCall(fn.qualifier.symbol.moduleClass, fn.symbol) + staticMethodCall(interpretArgs(args, fn.symbol.info)) + } + else if (env.contains(fn.symbol)) + env(fn.symbol) + else if (tree.symbol.is(InlineProxy)) + interpretTree(tree.symbol.defTree.asInstanceOf[ValOrDefDef].rhs) + 
 else + unexpectedTree(tree) + + case closureDef((ddef @ DefDef(_, ValDefs(arg :: Nil) :: Nil, _, _))) => + (obj: AnyRef) => interpretTree(ddef.rhs)(using env.updated(arg.symbol, obj)) + + // Interpret `foo(j = x, i = y)` which is expanded to + // `val j$1 = x; val i$1 = y; foo(i = i$1, j = j$1)` + case Block(stats, expr) => interpretBlock(stats, expr) + case NamedArg(_, arg) => interpretTree(arg) + + case Inlined(_, bindings, expansion) => interpretBlock(bindings, expansion) + + case Typed(expr, _) => + interpretTree(expr) + + case SeqLiteral(elems, _) => + interpretVarargs(elems.map(e => interpretTree(e))) + + case _ => + unexpectedTree(tree) + } + + private def interpretArgs(argss: List[List[Tree]], fnType: Type)(using Env): List[Object] = { + def interpretArgsGroup(args: List[Tree], argTypes: List[Type]): List[Object] = + assert(args.size == argTypes.size) + val view = + for (arg, info) <- args.lazyZip(argTypes) yield + info match + case _: ExprType => () => interpretTree(arg) // by-name argument + case _ => interpretTree(arg) // by-value argument + view.toList + + fnType.dealias match + case fnType: MethodType if fnType.isErasedMethod => interpretArgs(argss, fnType.resType) + case fnType: MethodType => + val argTypes = fnType.paramInfos + assert(argss.head.size == argTypes.size) + interpretArgsGroup(argss.head, argTypes) ::: interpretArgs(argss.tail, fnType.resType) + case fnType: AppliedType if defn.isContextFunctionType(fnType) => + val argTypes :+ resType = fnType.args: @unchecked + interpretArgsGroup(argss.head, argTypes) ::: interpretArgs(argss.tail, resType) + case fnType: PolyType => interpretArgs(argss, fnType.resType) + case fnType: ExprType => interpretArgs(argss, fnType.resType) + case _ => + assert(argss.isEmpty) + Nil + } + + private def interpretBlock(stats: List[Tree], expr: Tree)(implicit env: Env) = { + var unexpected: Option[Object] = None + val newEnv = stats.foldLeft(env)((accEnv, stat) => stat match { + case stat: ValDef => + 
accEnv.updated(stat.symbol, interpretTree(stat.rhs)(accEnv)) + case stat => + if (unexpected.isEmpty) + unexpected = Some(unexpectedTree(stat)) + accEnv + }) + unexpected.getOrElse(interpretTree(expr)(newEnv)) + } + + private def interpretLiteral(value: Any)(implicit env: Env): Object = + value.asInstanceOf[Object] + + private def interpretVarargs(args: List[Object])(implicit env: Env): Object = + args.toSeq + + private def interpretedStaticMethodCall(moduleClass: Symbol, fn: Symbol)(implicit env: Env): List[Object] => Object = { + val (inst, clazz) = + try + if (moduleClass.name.startsWith(str.REPL_SESSION_LINE)) + (null, loadReplLineClass(moduleClass)) + else { + val inst = loadModule(moduleClass) + (inst, inst.getClass) + } + catch + case MissingClassDefinedInCurrentRun(sym) if ctx.compilationUnit.isSuspendable => + if (ctx.settings.XprintSuspension.value) + report.echo(i"suspension triggered by a dependency on $sym", pos) + ctx.compilationUnit.suspend() // this throws a SuspendException + + val name = fn.name.asTermName + val method = getMethod(clazz, name, paramsSig(fn)) + (args: List[Object]) => stopIfRuntimeException(method.invoke(inst, args: _*), method) + } + + private def interpretedStaticFieldAccess(sym: Symbol)(implicit env: Env): Object = { + val clazz = loadClass(sym.owner.fullName.toString) + val field = clazz.getField(sym.name.toString) + field.get(null) + } + + private def interpretModuleAccess(fn: Symbol)(implicit env: Env): Object = + loadModule(fn.moduleClass) + + private def interpretNew(fn: Symbol, args: => List[Object])(implicit env: Env): Object = { + val clazz = loadClass(fn.owner.fullName.toString) + val constr = clazz.getConstructor(paramsSig(fn): _*) + constr.newInstance(args: _*).asInstanceOf[Object] + } + + private def unexpectedTree(tree: Tree)(implicit env: Env): Object = + throw new StopInterpretation("Unexpected tree could not be interpreted: " + tree, tree.srcPos) + + private def loadModule(sym: Symbol): Object = + if 
(sym.owner.is(Package)) { + // is top level object + val moduleClass = loadClass(sym.fullName.toString) + moduleClass.getField(str.MODULE_INSTANCE_FIELD).get(null) + } + else { + // nested object in an object + val className = { + val pack = sym.topLevelClass.owner + if (pack == defn.RootPackage || pack == defn.EmptyPackageClass) sym.flatName.toString + else pack.showFullName + "." + sym.flatName + } + val clazz = loadClass(className) + clazz.getConstructor().newInstance().asInstanceOf[Object] + } + + private def loadReplLineClass(moduleClass: Symbol)(implicit env: Env): Class[?] = { + val lineClassloader = new AbstractFileClassLoader(ctx.settings.outputDir.value, classLoader) + lineClassloader.loadClass(moduleClass.name.firstPart.toString) + } + + private def loadClass(name: String): Class[?] = + try classLoader.loadClass(name) + catch { + case _: ClassNotFoundException if ctx.compilationUnit.isSuspendable => + if (ctx.settings.XprintSuspension.value) + report.echo(i"suspension triggered by a dependency on $name", pos) + ctx.compilationUnit.suspend() + case MissingClassDefinedInCurrentRun(sym) if ctx.compilationUnit.isSuspendable => + if (ctx.settings.XprintSuspension.value) + report.echo(i"suspension triggered by a dependency on $sym", pos) + ctx.compilationUnit.suspend() // this throws a SuspendException + } + + private def getMethod(clazz: Class[?], name: Name, paramClasses: List[Class[?]]): JLRMethod = + try clazz.getMethod(name.toString, paramClasses: _*) + catch { + case _: NoSuchMethodException => + val msg = em"Could not find method ${clazz.getCanonicalName}.$name with parameters ($paramClasses%, %)" + throw new StopInterpretation(msg, pos) + case MissingClassDefinedInCurrentRun(sym) if ctx.compilationUnit.isSuspendable => + if (ctx.settings.XprintSuspension.value) + report.echo(i"suspension triggered by a dependency on $sym", pos) + ctx.compilationUnit.suspend() // this throws a SuspendException + } + + private def stopIfRuntimeException[T](thunk: => T, 
method: JLRMethod): T = + try thunk + catch { + case ex: RuntimeException => + val sw = new StringWriter() + sw.write("A runtime exception occurred while executing macro expansion\n") + sw.write(ex.getMessage) + sw.write("\n") + ex.printStackTrace(new PrintWriter(sw)) + sw.write("\n") + throw new StopInterpretation(sw.toString, pos) + case ex: InvocationTargetException => + ex.getTargetException match { + case ex: scala.quoted.runtime.StopMacroExpansion => + throw ex + case MissingClassDefinedInCurrentRun(sym) if ctx.compilationUnit.isSuspendable => + if (ctx.settings.XprintSuspension.value) + report.echo(i"suspension triggered by a dependency on $sym", pos) + ctx.compilationUnit.suspend() // this throws a SuspendException + case targetException => + val sw = new StringWriter() + sw.write("Exception occurred while executing macro expansion.\n") + if (!ctx.settings.Ydebug.value) { + val end = targetException.getStackTrace.lastIndexWhere { x => + x.getClassName == method.getDeclaringClass.getCanonicalName && x.getMethodName == method.getName + } + val shortStackTrace = targetException.getStackTrace.take(end + 1) + targetException.setStackTrace(shortStackTrace) + } + targetException.printStackTrace(new PrintWriter(sw)) + sw.write("\n") + throw new StopInterpretation(sw.toString, pos) + } + } + + private object MissingClassDefinedInCurrentRun { + def unapply(targetException: NoClassDefFoundError)(using Context): Option[Symbol] = { + val className = targetException.getMessage + if (className eq null) None + else { + val sym = staticRef(className.toTypeName).symbol + if (sym.isDefinedInCurrentRun) Some(sym) else None + } + } + } + + /** List of classes of the parameters of the signature of `sym` */ + private def paramsSig(sym: Symbol): List[Class[?]] = { + def paramClass(param: Type): Class[?] 
= { + def arrayDepth(tpe: Type, depth: Int): (Type, Int) = tpe match { + case JavaArrayType(elemType) => arrayDepth(elemType, depth + 1) + case _ => (tpe, depth) + } + def javaArraySig(tpe: Type): String = { + val (elemType, depth) = arrayDepth(tpe, 0) + val sym = elemType.classSymbol + val suffix = + if (sym == defn.BooleanClass) "Z" + else if (sym == defn.ByteClass) "B" + else if (sym == defn.ShortClass) "S" + else if (sym == defn.IntClass) "I" + else if (sym == defn.LongClass) "J" + else if (sym == defn.FloatClass) "F" + else if (sym == defn.DoubleClass) "D" + else if (sym == defn.CharClass) "C" + else "L" + javaSig(elemType) + ";" + ("[" * depth) + suffix + } + def javaSig(tpe: Type): String = tpe match { + case tpe: JavaArrayType => javaArraySig(tpe) + case _ => + // Take the flatten name of the class and the full package name + val pack = tpe.classSymbol.topLevelClass.owner + val packageName = if (pack == defn.EmptyPackageClass) "" else s"${pack.fullName}." + packageName + tpe.classSymbol.fullNameSeparated(FlatName).toString + } + + val sym = param.classSymbol + if (sym == defn.BooleanClass) classOf[Boolean] + else if (sym == defn.ByteClass) classOf[Byte] + else if (sym == defn.CharClass) classOf[Char] + else if (sym == defn.ShortClass) classOf[Short] + else if (sym == defn.IntClass) classOf[Int] + else if (sym == defn.LongClass) classOf[Long] + else if (sym == defn.FloatClass) classOf[Float] + else if (sym == defn.DoubleClass) classOf[Double] + else java.lang.Class.forName(javaSig(param), false, classLoader) + } + def getExtraParams(tp: Type): List[Type] = tp.widenDealias match { + case tp: AppliedType if defn.isContextFunctionType(tp) => + // Call context function type direct method + tp.args.init.map(arg => TypeErasure.erasure(arg)) ::: getExtraParams(tp.args.last) + case _ => Nil + } + val extraParams = getExtraParams(sym.info.finalResultType) + val allParams = TypeErasure.erasure(sym.info) match { + case meth: MethodType => meth.paramInfos ::: 
 extraParams + case _ => extraParams + } + allParams.map(paramClass) + } +end Interpreter + +object Interpreter: + /** Exception that stops interpretation if some issue is found */ + class StopInterpretation(val msg: String, val pos: SrcPos) extends Exception + + object Call: + import tpd._ + /** Matches an expression that is either a field access or an application + * It returns a TermRef containing field accessed or a method reference and the arguments passed to it. + */ + def unapply(arg: Tree)(using Context): Option[(RefTree, List[List[Tree]])] = + Call0.unapply(arg).map((fn, args) => (fn, args.reverse)) + + private object Call0 { + def unapply(arg: Tree)(using Context): Option[(RefTree, List[List[Tree]])] = arg match { + case Select(Call0(fn, args), nme.apply) if defn.isContextFunctionType(fn.tpe.widenDealias.finalResultType) => + Some((fn, args)) + case fn: Ident => Some((tpd.desugarIdent(fn).withSpan(fn.span), Nil)) + case fn: Select => Some((fn, Nil)) + case Apply(f @ Call0(fn, args1), args2) => + if (f.tpe.widenDealias.isErasedMethod) Some((fn, args1)) + else Some((fn, args2 :: args1)) + case TypeApply(Call0(fn, args), _) => Some((fn, args)) + case _ => None + } + } + end Call diff --git a/compiler/src/dotty/tools/dotc/report.scala b/compiler/src/dotty/tools/dotc/report.scala index 5addb11f1a3c..00399ecbfd0a 100644 --- a/compiler/src/dotty/tools/dotc/report.scala +++ b/compiler/src/dotty/tools/dotc/report.scala @@ -18,23 +18,35 @@ object report: if ctx.settings.verbose.value then echo(msg, pos) def echo(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit = - ctx.reporter.report(new Info(msg, pos.sourcePos)) + ctx.reporter.report(new Info(msg.toMessage, pos.sourcePos)) private def issueWarning(warning: Warning)(using Context): Unit = ctx.reporter.report(warning) - def deprecationWarning(msg: Message, pos: SrcPos = NoSourcePosition)(using Context): Unit = + def deprecationWarning(msg: Message, pos: SrcPos)(using Context): Unit = 
issueWarning(new DeprecationWarning(msg, pos.sourcePos)) - def migrationWarning(msg: Message, pos: SrcPos = NoSourcePosition)(using Context): Unit = + def deprecationWarning(msg: => String, pos: SrcPos)(using Context): Unit = + deprecationWarning(msg.toMessage, pos) + + def migrationWarning(msg: Message, pos: SrcPos)(using Context): Unit = issueWarning(new MigrationWarning(msg, pos.sourcePos)) - def uncheckedWarning(msg: Message, pos: SrcPos = NoSourcePosition)(using Context): Unit = + def migrationWarning(msg: => String, pos: SrcPos)(using Context): Unit = + migrationWarning(msg.toMessage, pos) + + def uncheckedWarning(msg: Message, pos: SrcPos)(using Context): Unit = issueWarning(new UncheckedWarning(msg, pos.sourcePos)) - def featureWarning(msg: Message, pos: SrcPos = NoSourcePosition)(using Context): Unit = + def uncheckedWarning(msg: => String, pos: SrcPos)(using Context): Unit = + uncheckedWarning(msg.toMessage, pos) + + def featureWarning(msg: Message, pos: SrcPos)(using Context): Unit = issueWarning(new FeatureWarning(msg, pos.sourcePos)) + def featureWarning(msg: => String, pos: SrcPos)(using Context): Unit = + featureWarning(msg.toMessage, pos) + def featureWarning(feature: String, featureDescription: => String, featureUseSite: Symbol, required: Boolean, pos: SrcPos)(using Context): Unit = { val req = if (required) "needs to" else "should" @@ -52,30 +64,43 @@ object report: |by adding the import clause 'import $fqname' |or by setting the compiler option -language:$feature.$explain""".stripMargin if (required) error(msg, pos) - else issueWarning(new FeatureWarning(msg, pos.sourcePos)) + else issueWarning(new FeatureWarning(msg.toMessage, pos.sourcePos)) } - def warning(msg: Message, pos: SrcPos = NoSourcePosition)(using Context): Unit = + def warning(msg: Message, pos: SrcPos)(using Context): Unit = issueWarning(new Warning(msg, addInlineds(pos))) - def error(msg: Message, pos: SrcPos = NoSourcePosition, sticky: Boolean = false)(using Context): Unit = + 
def warning(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit = + warning(msg.toMessage, pos) + + def error(msg: Message, pos: SrcPos)(using Context): Unit = val fullPos = addInlineds(pos) - ctx.reporter.report(if (sticky) new StickyError(msg, fullPos) else new Error(msg, fullPos)) + ctx.reporter.report(new Error(msg, fullPos)) if ctx.settings.YdebugError.value then Thread.dumpStack() + def error(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit = + error(msg.toMessage, pos) + def error(ex: TypeError, pos: SrcPos)(using Context): Unit = - error(ex.toMessage, pos, sticky = true) - if ctx.settings.YdebugTypeError.value then ex.printStackTrace() + val fullPos = addInlineds(pos) + ctx.reporter.report(new StickyError(ex.toMessage, fullPos)) + if ctx.settings.YdebugError.value then Thread.dumpStack() - def errorOrMigrationWarning(msg: Message, pos: SrcPos = NoSourcePosition, from: SourceVersion)(using Context): Unit = + def errorOrMigrationWarning(msg: Message, pos: SrcPos, from: SourceVersion)(using Context): Unit = if sourceVersion.isAtLeast(from) then if sourceVersion.isMigrating && sourceVersion.ordinal <= from.ordinal then migrationWarning(msg, pos) else error(msg, pos) - def gradualErrorOrMigrationWarning(msg: Message, pos: SrcPos = NoSourcePosition, warnFrom: SourceVersion, errorFrom: SourceVersion)(using Context): Unit = + def errorOrMigrationWarning(msg: => String, pos: SrcPos, from: SourceVersion)(using Context): Unit = + errorOrMigrationWarning(msg.toMessage, pos, from) + + def gradualErrorOrMigrationWarning(msg: Message, pos: SrcPos, warnFrom: SourceVersion, errorFrom: SourceVersion)(using Context): Unit = if sourceVersion.isAtLeast(errorFrom) then errorOrMigrationWarning(msg, pos, errorFrom) else if sourceVersion.isAtLeast(warnFrom) then warning(msg, pos) + def gradualErrorOrMigrationWarning(msg: => String, pos: SrcPos, warnFrom: SourceVersion, errorFrom: SourceVersion)(using Context): Unit = + 
gradualErrorOrMigrationWarning(msg.toMessage, pos, warnFrom, errorFrom) + def restrictionError(msg: Message, pos: SrcPos = NoSourcePosition)(using Context): Unit = error(msg.mapMsg("Implementation restriction: " + _), pos) diff --git a/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala b/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala index dec13a4f5925..a92da7821fab 100644 --- a/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala +++ b/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala @@ -11,6 +11,7 @@ import dotty.tools.dotc.util.SourcePosition import java.util.Optional import scala.util.chaining._ +import core.Decorators.toMessage object Diagnostic: @@ -23,7 +24,8 @@ object Diagnostic: class Error( msg: Message, pos: SourcePosition - ) extends Diagnostic(msg, pos, ERROR) + ) extends Diagnostic(msg, pos, ERROR): + def this(str: => String, pos: SourcePosition) = this(str.toMessage, pos) /** A sticky error is an error that should not be hidden by backtracking and * trying some alternative path. 
 Typically, errors issued after catching @@ -46,7 +48,8 @@ object Diagnostic: class Info( msg: Message, pos: SourcePosition - ) extends Diagnostic(msg, pos, INFO) + ) extends Diagnostic(msg, pos, INFO): + def this(str: => String, pos: SourcePosition) = this(str.toMessage, pos) abstract class ConditionalWarning( msg: Message, diff --git a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala index 7b22eb77e90e..d9140a6309b8 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala @@ -67,7 +67,7 @@ enum ErrorMessageID(val isActive: Boolean = true) extends java.lang.Enum[ErrorMe case AmbiguousOverloadID // errorNumber: 51 case ReassignmentToValID // errorNumber: 52 case TypeDoesNotTakeParametersID // errorNumber: 53 - case ParameterizedTypeLacksArgumentsID // errorNumber: 54 + case ParameterizedTypeLacksArgumentsID extends ErrorMessageID(isActive = false) // errorNumber: 54 case VarValParametersMayNotBeCallByNameID // errorNumber: 55 case MissingTypeParameterForID // errorNumber: 56 case DoesNotConformToBoundID // errorNumber: 57 @@ -184,7 +184,8 @@ enum ErrorMessageID(val isActive: Boolean = true) extends java.lang.Enum[ErrorMe case ImplicitSearchTooLargeID // errorNumber: 168 case TargetNameOnTopLevelClassID // errorNumber: 169 case NotClassTypeID // errorNumber 170 - + case MissingArgumentID // errorNumber 171 + def errorNumber = ordinal - 1 object ErrorMessageID: diff --git a/compiler/src/dotty/tools/dotc/reporting/Message.scala b/compiler/src/dotty/tools/dotc/reporting/Message.scala index 77e1336a990c..9e397d606491 100644 --- a/compiler/src/dotty/tools/dotc/reporting/Message.scala +++ b/compiler/src/dotty/tools/dotc/reporting/Message.scala @@ -13,12 +13,6 @@ object Message { val nonSensicalStartTag: String = "" val nonSensicalEndTag: String = "" - /** This implicit conversion provides a fallback for error messages 
that have - * not yet been ported to the new scheme. Comment out this `implicit def` to - * see where old errors still exist - */ - implicit def toNoExplanation(str: => String): Message = NoExplanation(str) - def rewriteNotice(what: String, version: SourceVersion | Null = null, options: String = "")(using Context): String = if !ctx.mode.is(Mode.Interactive) then val sourceStr = if version != null then i"-source $version" else "" diff --git a/compiler/src/dotty/tools/dotc/reporting/Profile.scala b/compiler/src/dotty/tools/dotc/reporting/Profile.scala index 2cd67c87f386..6e2f83364dbe 100644 --- a/compiler/src/dotty/tools/dotc/reporting/Profile.scala +++ b/compiler/src/dotty/tools/dotc/reporting/Profile.scala @@ -103,7 +103,7 @@ class ActiveProfile(details: Int) extends Profile: name, info.lineCount, info.tokenCount, Profile.chunks(info.tastySize), s"${"%6.2f".format(complexity)} $explanation", path)) - def safeMax(xs: Array[Int]) = xs.max.max(10).min(50) + def safeMax(xs: Array[Int]) = if xs.isEmpty then 10 else xs.max.max(10).min(50) def printAndAggregateSourceInfos(): Profile.Info = val sourceNameWidth = safeMax(units.map(_.source.file.name.length)) diff --git a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala index 0d5acaef4960..497e77ae4a7c 100644 --- a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala @@ -14,6 +14,7 @@ import dotty.tools.dotc.util.NoSourcePosition import java.io.{BufferedReader, PrintWriter} import scala.annotation.internal.sharable import scala.collection.mutable +import core.Decorators.toMessage object Reporter { /** Convert a SimpleReporter into a real Reporter */ @@ -218,7 +219,7 @@ abstract class Reporter extends interfaces.ReporterResult { for (settingName, count) <- unreportedWarnings do val were = if count == 1 then "was" else "were" val msg = s"there $were ${countString(count, settingName.tail + " warning")}; 
re-run with $settingName for details" - report(Warning(msg, NoSourcePosition)) + report(Warning(msg.toMessage, NoSourcePosition)) /** Print the summary of warnings and errors */ def printSummary()(using Context): Unit = { diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index a3af4c1b2582..2197ea63a1c2 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -1390,7 +1390,7 @@ import cc.CaptureSet.IdentityCaptRefMap |""".stripMargin } - class TypeDoesNotTakeParameters(tpe: Type, params: List[Trees.Tree[Trees.Untyped]])(using Context) + class TypeDoesNotTakeParameters(tpe: Type, params: List[untpd.Tree])(using Context) extends TypeMsg(TypeDoesNotTakeParametersID) { private def fboundsAddendum = if tpe.typeSymbol.isAllOf(Provisional | TypeParam) then @@ -1406,15 +1406,6 @@ import cc.CaptureSet.IdentityCaptRefMap |""" } - class ParameterizedTypeLacksArguments(psym: Symbol)(using Context) - extends TypeMsg(ParameterizedTypeLacksArgumentsID) { - def msg = em"Parameterized $psym lacks argument list" - def explain = - em"""The $psym is declared with non-implicit parameters, you may not leave - |out the parameter list when extending it. 
- |""" - } - class VarValParametersMayNotBeCallByName(name: TermName, mutable: Boolean)(using Context) extends SyntaxMsg(VarValParametersMayNotBeCallByNameID) { def varOrVal = if (mutable) em"${hl("var")}" else em"${hl("val")}" @@ -1445,6 +1436,13 @@ import cc.CaptureSet.IdentityCaptRefMap def explain = em"A fully applied type is expected but $tpe takes $numParams $parameters" } + class MissingArgument(pname: Name, methString: String)(using Context) + extends TypeMsg(MissingArgumentID): + def msg = + if pname.firstPart contains '$' then s"not enough arguments for $methString" + else s"missing argument for parameter $pname of $methString" + def explain = "" + class DoesNotConformToBound(tpe: Type, which: String, bound: Type)(using Context) extends TypeMismatchMsg( if which == "lower" then bound else tpe, @@ -2535,3 +2533,4 @@ import cc.CaptureSet.IdentityCaptRefMap extends TypeMsg(NotClassTypeID), ShowMatchTrace(tp): def msg = ex"$tp is not a class type" def explain = "" + diff --git a/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala b/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala index 9c85d2f5bd1d..96e88e5c68ae 100644 --- a/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala +++ b/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala @@ -8,6 +8,9 @@ import collection.mutable import scala.annotation.tailrec import dotty.tools.dotc.reporting.Reporter +import java.io.OutputStreamWriter +import java.nio.charset.StandardCharsets.UTF_8 + /** Handles rewriting of Scala2 files to Dotty */ object Rewrites { private class PatchedFiles extends mutable.HashMap[SourceFile, Patches] @@ -54,13 +57,11 @@ object Rewrites { ds } - def writeBack(): Unit = { + def writeBack(): Unit = val chars = apply(source.underlying.content) - val bytes = new String(chars).getBytes - val out = source.file.output - out.write(bytes) - out.close() - } + val osw = OutputStreamWriter(source.file.output, UTF_8) + try osw.write(chars, 0, chars.length) + finally osw.close() } /** If 
-rewrite is set, record a patch that replaces the range diff --git a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala index 8b388f4d5b62..071efb1fb91c 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala @@ -81,7 +81,9 @@ class ExtractSemanticDB extends Phase: private def excludeDef(sym: Symbol)(using Context): Boolean = !sym.exists || sym.isLocalDummy - || sym.is(Synthetic) + // basically do not register synthetic symbols, except anonymous class + // `new Foo { ... }` + || (sym.is(Synthetic) && !sym.isAnonymousClass) || sym.isSetter || sym.isOldStyleImplicitConversion(forImplicitClassOnly = true) || sym.owner.isGivenInstanceSummoner @@ -178,7 +180,7 @@ class ExtractSemanticDB extends Phase: if !excludeChildren(tree.symbol) then traverseChildren(tree) } - if !excludeDef(tree.symbol) && tree.span.hasLength then + if !excludeDef(tree.symbol) && (tree.span.hasLength || tree.symbol.isAnonymousClass) then registerDefinition(tree.symbol, tree.nameSpan, symbolKinds(tree), tree.source) val privateWithin = tree.symbol.privateWithin if privateWithin.exists then @@ -355,7 +357,7 @@ class ExtractSemanticDB extends Phase: else Span(span.start) - if namePresentInSource(sym, span, treeSource) then + if namePresentInSource(sym, span, treeSource) || sym.isAnonymousClass then registerOccurrence(sname, finalSpan, SymbolOccurrence.Role.DEFINITION, treeSource) if !sym.is(Package) then registerSymbol(sym, symkinds) diff --git a/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala b/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala index a46ff4f323bf..e157b52fe260 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala @@ -43,7 +43,6 @@ object Scala3: if content.lift(span.end - 1).exists(_ == '`') then (span.start + 1, span.end - 1) else 
(span.start, span.end) - // println(s"${start}, $end") val nameInSource = content.slice(start, end).mkString // for secondary constructors `this` desig match @@ -222,6 +221,12 @@ object Scala3: case NameKinds.AnyNumberedName(nme.EMPTY, _) => true case _ => false } + + def isDynamic(using Context): Boolean = + name == nme.applyDynamic || + name == nme.selectDynamic || + name == nme.updateDynamic || + name == nme.applyDynamicNamed end NameOps given SymbolOps: AnyRef with diff --git a/compiler/src/dotty/tools/dotc/semanticdb/SyntheticsExtractor.scala b/compiler/src/dotty/tools/dotc/semanticdb/SyntheticsExtractor.scala index c5bd6cc7863b..b2f26e3e992f 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/SyntheticsExtractor.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/SyntheticsExtractor.scala @@ -4,6 +4,7 @@ import dotty.tools.dotc.ast.tpd._ import dotty.tools.dotc.core.Contexts._ import dotty.tools.dotc.core.Flags._ import dotty.tools.dotc.core.StdNames.nme +import dotty.tools.dotc.core.NameKinds import dotty.tools.dotc.{semanticdb => s} @@ -26,8 +27,21 @@ class SyntheticsExtractor: tree match case tree: TypeApply if tree.span.isSynthetic && - tree.args.forall(arg => !arg.symbol.is(Scala2x)) && - !tree.span.isZeroExtent => + tree.args.forall(arg => !arg.symbol.isDefinedInSource) && + !tree.span.isZeroExtent && + (tree.fun match { + // for `Bar[Int]` of `class Foo extends Bar[Int]` + // we'll have `TypeTree(Select(New(AppliedTypeTree(...))), List(Int))` + // in this case, don't register `*[Int]` to synthetics as we already have `[Int]` in source. + case Select(New(AppliedTypeTree(_, _)), _) => false + + // for `new SomeJavaClass[Int]()` + // there will be a synthesized default getter + // in addition to the source derived one. 
+ case Select(_, name) if name.is(NameKinds.DefaultGetterName) => false + case Select(fun, _) if fun.symbol.name.isDynamic => false + case _ => true + }) => visited.add(tree) val fnTree = tree.fun match // Something like `List.apply[Int](1,2,3)` diff --git a/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala b/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala index 5908bce97994..14362260d032 100644 --- a/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala +++ b/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala @@ -80,6 +80,8 @@ abstract class AccessProxies { val sym = newSymbol(owner, name, Synthetic | Method, info, coord = accessed.span).entered if accessed.is(Private) then sym.setFlag(Final) else if sym.allOverriddenSymbols.exists(!_.is(Deferred)) then sym.setFlag(Override) + if accessed.hasAnnotation(defn.ExperimentalAnnot) then + sym.addAnnotation(defn.ExperimentalAnnot) sym } diff --git a/compiler/src/dotty/tools/dotc/transform/Erasure.scala b/compiler/src/dotty/tools/dotc/transform/Erasure.scala index 9b8ba4504eda..c797c9fd92c1 100644 --- a/compiler/src/dotty/tools/dotc/transform/Erasure.scala +++ b/compiler/src/dotty/tools/dotc/transform/Erasure.scala @@ -386,7 +386,7 @@ object Erasure { case _: FunProto | AnyFunctionProto => tree case _ => tree.tpe.widen match case mt: MethodType if tree.isTerm => - assert(mt.paramInfos.isEmpty)//, i"bad adapt for $tree: $mt") + assert(mt.paramInfos.isEmpty, i"bad adapt for $tree: $mt") adaptToType(tree.appliedToNone, pt) case tpw => if (pt.isInstanceOf[ProtoType] || tree.tpe <:< pt) @@ -614,7 +614,7 @@ object Erasure { * are handled separately by [[typedDefDef]], [[typedValDef]] and [[typedTyped]]. 
*/ override def typedTypeTree(tree: untpd.TypeTree, pt: Type)(using Context): TypeTree = - checkNotErasedClass(tree.withType(erasure(tree.tpe))) + checkNotErasedClass(tree.withType(erasure(tree.typeOpt))) /** This override is only needed to semi-erase type ascriptions */ override def typedTyped(tree: untpd.Typed, pt: Type)(using Context): Tree = diff --git a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala index ed3bfc7c0181..00074a6ea81a 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala @@ -72,9 +72,7 @@ class ExplicitOuter extends MiniPhase with InfoTransformer { thisPhase => override def transformTemplate(impl: Template)(using Context): Tree = { val cls = ctx.owner.asClass val isTrait = cls.is(Trait) - if (needsOuterIfReferenced(cls) && - !needsOuterAlways(cls) && - impl.existsSubTree(referencesOuter(cls, _))) + if needsOuterIfReferenced(cls) && !needsOuterAlways(cls) && referencesOuter(cls, impl) then ensureOuterAccessors(cls) val clsHasOuter = hasOuter(cls) @@ -255,55 +253,83 @@ object ExplicitOuter { /** Tree references an outer class of `cls` which is not a static owner. */ - def referencesOuter(cls: Symbol, tree: Tree)(using Context): Boolean = { - def isOuterSym(sym: Symbol) = - !sym.isStaticOwner && cls.isProperlyContainedIn(sym) - def isOuterRef(ref: Type): Boolean = ref match { - case ref: ThisType => - isOuterSym(ref.cls) - case ref: TermRef => - if (ref.prefix ne NoPrefix) - !ref.symbol.isStatic && isOuterRef(ref.prefix) - else ( - ref.symbol.isOneOf(HoistableFlags) && - // ref.symbol will be placed in enclosing class scope by LambdaLift, so it might need - // an outer path then. - isOuterSym(ref.symbol.owner.enclosingClass) - || - // If not hoistable, ref.symbol will get a proxy in immediately enclosing class. If this properly - // contains the current class, it needs an outer path. 
- // If the symbol is hoistable, it might have free variables for which the same - // reasoning applies. See pos/i1664.scala - ctx.owner.enclosingClass.owner.enclosingClass.isContainedIn(ref.symbol.owner) - ) - case _ => false - } - def hasOuterPrefix(tp: Type): Boolean = tp.stripped match { - case AppliedType(tycon, _) => hasOuterPrefix(tycon) - case TypeRef(prefix, _) => isOuterRef(prefix) - case _ => false - } - def containsOuterRefs(tp: Type): Boolean = tp match - case tp: SingletonType => isOuterRef(tp) - case tp: AndOrType => containsOuterRefs(tp.tp1) || containsOuterRefs(tp.tp2) - case _ => false - tree match { - case _: This | _: Ident => isOuterRef(tree.tpe) - case nw: New => - val newCls = nw.tpe.classSymbol - isOuterSym(newCls.owner.enclosingClass) || - hasOuterPrefix(nw.tpe) || - newCls.owner.isTerm && cls.isProperlyContainedIn(newCls) - // newCls might get proxies for free variables. If current class is - // properly contained in newCls, it needs an outer path to newCls access the - // proxies and forward them to the new instance. - case app: TypeApply if app.symbol.isTypeTest => - // Type tests of singletons translate to `eq` tests with references, which might require outer pointers - containsOuterRefs(app.args.head.tpe) - case _ => - false - } - } + def referencesOuter(cls: Symbol, tree: Tree)(using Context): Boolean = + + + val test = new TreeAccumulator[Boolean]: + private var inInline = false + + def isOuterSym(sym: Symbol) = + !sym.isStaticOwner && cls.isProperlyContainedIn(sym) + + def isOuterRef(ref: Type): Boolean = ref match + case ref: ThisType => + isOuterSym(ref.cls) + case ref: TermRef => + if (ref.prefix ne NoPrefix) + !ref.symbol.isStatic && isOuterRef(ref.prefix) + else ( + ref.symbol.isOneOf(HoistableFlags) && + // ref.symbol will be placed in enclosing class scope by LambdaLift, so it might need + // an outer path then. 
+ isOuterSym(ref.symbol.owner.enclosingClass) + || + // If not hoistable, ref.symbol will get a proxy in immediately enclosing class. If this properly + // contains the current class, it needs an outer path. + // If the symbol is hoistable, it might have free variables for which the same + // reasoning applies. See pos/i1664.scala + ctx.owner.enclosingClass.owner.enclosingClass.isContainedIn(ref.symbol.owner) + ) + case _ => false + + def hasOuterPrefix(tp: Type): Boolean = tp.stripped match + case AppliedType(tycon, _) => hasOuterPrefix(tycon) + case TypeRef(prefix, _) => isOuterRef(prefix) + case _ => false + + def containsOuterRefsAtTopLevel(tp: Type): Boolean = tp match + case tp: SingletonType => isOuterRef(tp) + case tp: AndOrType => containsOuterRefsAtTopLevel(tp.tp1) || containsOuterRefsAtTopLevel(tp.tp2) + case _ => false + + def containsOuterRefsAnywhere(tp: Type): Boolean = + tp.existsPart({ + case t: SingletonType => isOuterRef(t) + case _ => false + }, StopAt.Static) + + def containsOuterRefs(t: Tree): Boolean = t match + case _: This | _: Ident => isOuterRef(t.tpe) + case nw: New => + val newCls = nw.tpe.classSymbol + isOuterSym(newCls.owner.enclosingClass) || + hasOuterPrefix(nw.tpe) || + newCls.owner.isTerm && cls.isProperlyContainedIn(newCls) + // newCls might get proxies for free variables. If current class is + // properly contained in newCls, it needs an outer path to newCls access the + // proxies and forward them to the new instance. 
+ case app: TypeApply if app.symbol.isTypeTest => + // Type tests of singletons translate to `eq` tests with references, which might require outer pointers + containsOuterRefsAtTopLevel(app.args.head.tpe) + case t: TypeTree if inInline => + // Expansions of inline methods must be able to address outer types + containsOuterRefsAnywhere(t.tpe) + case _ => + false + + def apply(x: Boolean, t: Tree)(using Context) = + if x || containsOuterRefs(t) then true + else t match + case t: DefDef if t.symbol.isInlineMethod => + val saved = inInline + inInline = true + try foldOver(x, t) + finally inInline = saved + case _ => + foldOver(x, t) + + test(false, tree) + end referencesOuter private final val HoistableFlags = Method | Lazy | Module diff --git a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala index 6968eb271961..a7e0795ce195 100644 --- a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala +++ b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala @@ -17,6 +17,7 @@ import NameOps._ import NameKinds.OuterSelectName import StdNames._ import TypeUtils.isErasedValueType +import config.Feature object FirstTransform { val name: String = "firstTransform" @@ -102,7 +103,7 @@ class FirstTransform extends MiniPhase with InfoTransformer { thisPhase => } /** Eliminate self in Template - * Under -Ycc, we keep the self type `S` around in a type definition + * Under captureChecking, we keep the self type `S` around in a type definition * * private[this] type $this = S * @@ -110,7 +111,7 @@ class FirstTransform extends MiniPhase with InfoTransformer { thisPhase => */ override def transformTemplate(impl: Template)(using Context): Tree = impl.self match - case self: ValDef if !self.tpt.isEmpty && ctx.settings.Ycc.value => + case self: ValDef if !self.tpt.isEmpty && Feature.ccEnabled => val tsym = newSymbol(ctx.owner, tpnme.SELF, PrivateLocal, TypeAlias(self.tpt.tpe)) val tdef = 
untpd.cpy.TypeDef(self)(tpnme.SELF, self.tpt).withType(tsym.typeRef) cpy.Template(impl)(self = EmptyValDef, body = tdef :: impl.body) diff --git a/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala b/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala index edbfbd1552c4..9a36d65babe8 100644 --- a/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala +++ b/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala @@ -13,6 +13,7 @@ import collection.mutable import ast.Trees._ import core.NameKinds.SuperArgName import SymUtils._ +import core.Decorators.* object HoistSuperArgs { val name: String = "hoistSuperArgs" @@ -181,7 +182,9 @@ class HoistSuperArgs extends MiniPhase with IdentityDenotTransformer { thisPhase /** Hoist complex arguments in super call out of the class. */ def hoistSuperArgsFromCall(superCall: Tree, cdef: DefDef, lifted: mutable.ListBuffer[Symbol]): Tree = superCall match - case Block(defs, expr) => + case Block(defs, expr) if !expr.symbol.owner.is(Scala2x) => + // MO: The guard avoids the crash for #16351. + // It would be good to dig deeper, but I won't have the time myself to do it. cpy.Block(superCall)( stats = defs.mapconserve { case vdef: ValDef => diff --git a/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala b/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala index 1cc1340ad7c7..c69b342b9a01 100644 --- a/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala +++ b/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala @@ -14,15 +14,17 @@ import core.NameOps.isContextFunction import core.Types.* import coverage.* import typer.LiftCoverage -import util.SourcePosition +import util.{SourcePosition, SourceFile} import util.Spans.Span import localopt.StringInterpolatorOpt +import inlines.Inlines /** Implements code coverage by inserting calls to scala.runtime.coverage.Invoker * ("instruments" the source code). * The result can then be consumed by the Scoverage tool. 
*/ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: + import InstrumentCoverage.{InstrumentedParts, ExcludeMethodFlags} override def phaseName = InstrumentCoverage.name @@ -55,186 +57,335 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: Serializer.serialize(coverage, outputPath, ctx.settings.sourceroot.value) - override protected def newTransformer(using Context) = CoverageTransformer() + override protected def newTransformer(using Context) = + CoverageTransformer(ctx.settings.coverageOutputDir.value) /** Transforms trees to insert calls to Invoker.invoked to compute the coverage when the code is called */ - private class CoverageTransformer extends Transformer: + private class CoverageTransformer(outputPath: String) extends Transformer: + private val ConstOutputPath = Constant(outputPath) + + /** Generates the tree for: + * ``` + * Invoker.invoked(id, DIR) + * ``` + * where DIR is the _outputPath_ defined by the coverage settings. + */ + private def invokeCall(id: Int, span: Span)(using Context): Apply = + ref(defn.InvokedMethodRef).withSpan(span) + .appliedToArgs( + Literal(Constant(id)) :: Literal(ConstOutputPath) :: Nil + ).withSpan(span) + .asInstanceOf[Apply] + + /** + * Records information about a new coverable statement. Generates a unique id for it. 
+ * + * @param tree the tree to add to the coverage report + * @param pos the position to save in the report + * @param branch true if it's a branch (branches are considered differently by most coverage analysis tools) + * @param ctx the current context + * @return the statement's id + */ + private def recordStatement(tree: Tree, pos: SourcePosition, branch: Boolean)(using ctx: Context): Int = + val id = statementId + statementId += 1 + + val sourceFile = pos.source + val statement = Statement( + location = Location(tree, sourceFile), + id = id, + start = pos.start, + end = pos.end, + line = pos.line, + desc = sourceFile.content.slice(pos.start, pos.end).mkString, + symbolName = tree.symbol.name.toSimpleName.toString, + treeName = tree.getClass.getSimpleName.nn, + branch + ) + coverage.addStatement(statement) + id + + /** + * Adds a new statement to the current `Coverage` and creates a corresponding call + * to `Invoker.invoke` with its id, and the given position. + * + * Note that the entire tree won't be saved in the coverage analysis, only some + * data related to the tree is recorded (e.g. its type, its parent class, ...). + * + * @param tree the tree to add to the coverage report + * @param pos the position to save in the report + * @param branch true if it's a branch + * @return the tree corresponding to the call to `Invoker.invoke` + */ + private def createInvokeCall(tree: Tree, pos: SourcePosition, branch: Boolean = false)(using Context): Apply = + val statementId = recordStatement(tree, pos, branch) + val span = pos.span.toSynthetic + invokeCall(statementId, span) + + /** + * Tries to instrument an `Apply`. + * These "tryInstrument" methods are useful to tweak the generation of coverage instrumentation, + * in particular in `case TypeApply` in the [[transform]] method. 
+ * + * @param tree the tree to instrument + * @return instrumentation result, with the preparation statement, coverage call and tree separated + */ + private def tryInstrument(tree: Apply)(using Context): InstrumentedParts = + if canInstrumentApply(tree) then + // Create a call to Invoker.invoked(coverageDirectory, newStatementId) + val coverageCall = createInvokeCall(tree, tree.sourcePos) + + if needsLift(tree) then + // Transform args and fun, i.e. instrument them if needed (and if possible) + val app = cpy.Apply(tree)(transform(tree.fun), tree.args.map(transform)) + + // Lifts the arguments. Note that if only one argument needs to be lifted, we lift them all. + // Also, tree.fun can be lifted too. + // See LiftCoverage for the internal working of this lifting. + val liftedDefs = mutable.ListBuffer[Tree]() + val liftedApp = LiftCoverage.liftForCoverage(liftedDefs, app) + + InstrumentedParts(liftedDefs.toList, coverageCall, liftedApp) + else + // Instrument without lifting + val transformed = cpy.Apply(tree)(transform(tree.fun), transform(tree.args)) + InstrumentedParts.singleExpr(coverageCall, transformed) + else + // Transform recursively but don't instrument the tree itself + val transformed = cpy.Apply(tree)(transform(tree.fun), transform(tree.args)) + InstrumentedParts.notCovered(transformed) + + private def tryInstrument(tree: Ident)(using Context): InstrumentedParts = + val sym = tree.symbol + if canInstrumentParameterless(sym) then + // call to a local parameterless method f + val coverageCall = createInvokeCall(tree, tree.sourcePos) + InstrumentedParts.singleExpr(coverageCall, tree) + else + InstrumentedParts.notCovered(tree) + + private def tryInstrument(tree: Select)(using Context): InstrumentedParts = + val sym = tree.symbol + val transformed = cpy.Select(tree)(transform(tree.qualifier), tree.name) + if canInstrumentParameterless(sym) then + // call to a parameterless method + val coverageCall = createInvokeCall(tree, tree.sourcePos) + 
InstrumentedParts.singleExpr(coverageCall, transformed) + else + InstrumentedParts.notCovered(transformed) + + /** Generic tryInstrument */ + private def tryInstrument(tree: Tree)(using Context): InstrumentedParts = + tree match + case t: Apply => tryInstrument(t) + case t: Ident => tryInstrument(t) + case t: Select => tryInstrument(t) + case _ => InstrumentedParts.notCovered(transform(tree)) + + /** + * Transforms and instruments a branch if it's non-empty. + * If the tree is empty, return itself and don't instrument. + */ + private def transformBranch(tree: Tree)(using Context): Tree = + import dotty.tools.dotc.core.Decorators.{show,i} + if tree.isEmpty || tree.span.isSynthetic then + // - If t.isEmpty then `transform(t) == t` always hold, + // so we can avoid calling transform in that case. + // - If tree.span.isSynthetic then the branch has been generated + // by the frontend phases, so we don't want to instrument it. + tree + else + val transformed = transform(tree) + val coverageCall = createInvokeCall(tree, tree.sourcePos, branch = true) + InstrumentedParts.singleExprTree(coverageCall, transformed) + override def transform(tree: Tree)(using Context): Tree = inContext(transformCtx(tree)) { // necessary to position inlined code properly tree match // simple cases case tree: (Import | Export | Literal | This | Super | New) => tree case tree if tree.isEmpty || tree.isType => tree // empty Thicket, Ident (referring to a type), TypeTree, ... 
+ case tree if !tree.span.exists || tree.span.isZeroExtent => tree // no meaningful position // identifier case tree: Ident => - val sym = tree.symbol - if canInstrumentParameterless(sym) then - // call to a local parameterless method f - instrument(tree) - else - tree + tryInstrument(tree).toTree // branches case tree: If => cpy.If(tree)( cond = transform(tree.cond), - thenp = instrument(transform(tree.thenp), branch = true), - elsep = instrument(transform(tree.elsep), branch = true) + thenp = transformBranch(tree.thenp), + elsep = transformBranch(tree.elsep) ) case tree: Try => cpy.Try(tree)( - expr = instrument(transform(tree.expr), branch = true), - cases = instrumentCases(tree.cases), - finalizer = instrument(transform(tree.finalizer), branch = true) + expr = transformBranch(tree.expr), + cases = tree.cases.map(transformCaseDef), + finalizer = transformBranch(tree.finalizer) ) // f(args) case tree: Apply => - if canInstrumentApply(tree) then - if needsLift(tree) then - instrumentLifted(tree) - else - instrument(transformApply(tree)) - else - transformApply(tree) + tryInstrument(tree).toTree // (fun)[args] case TypeApply(fun, args) => - val tfun = transform(fun) - tfun match - case InstrumentCoverage.InstrumentedBlock(invokeCall, expr) => - // expr[T] shouldn't be transformed to - // {invoked(...), expr}[T] - // - // but to - // {invoked(...), expr[T]} - // - // This is especially important for trees like (expr[T])(args), - // for which the wrong transformation crashes the compiler. - // See tests/coverage/pos/PolymorphicExtensions.scala - Block( - invokeCall :: Nil, - cpy.TypeApply(tree)(expr, args) - ) - case _ => - cpy.TypeApply(tree)(tfun, args) + // Here is where `InstrumentedParts` becomes useful! + // We extract its components and act carefully. 
+ val InstrumentedParts(pre, coverageCall, expr) = tryInstrument(fun) - // a.b - case Select(qual, name) => - val transformed = cpy.Select(tree)(transform(qual), name) - val sym = tree.symbol - if canInstrumentParameterless(sym) then - // call to a parameterless method - instrument(transformed) + if coverageCall.isEmpty then + // `fun` cannot be instrumented, and `args` is a type so we keep this tree as it is + tree else - transformed + // expr[T] shouldn't be transformed to: + // {invoked(...), expr}[T] + // + // but to: + // {invoked(...), expr[T]} + // + // This is especially important for trees like (expr[T])(args), + // for which the wrong transformation crashes the compiler. + // See tests/coverage/pos/PolymorphicExtensions.scala + Block( + pre :+ coverageCall, + cpy.TypeApply(tree)(expr, args) + ) + + // a.b + case tree: Select => + tryInstrument(tree).toTree + + case tree: CaseDef => + transformCaseDef(tree) - case tree: CaseDef => instrumentCaseDef(tree) case tree: ValDef => // only transform the rhs val rhs = transform(tree.rhs) cpy.ValDef(tree)(rhs = rhs) case tree: DefDef => - if tree.symbol.isOneOf(Inline | Erased) then - // Inline and erased definitions will not be in the generated code and therefore do not need to be instrumented. - // Note that a retained inline method will have a `$retained` variant that will be instrumented. - tree - else - // Only transform the params (for the default values) and the rhs. - val paramss = transformParamss(tree.paramss) - val rhs = transform(tree.rhs) - val finalRhs = - if canInstrumentDefDef(tree) then - // Ensure that the rhs is always instrumented, if possible. - // This is useful because methods can be stored and called later, or called by reflection, - // and if the rhs is too simple to be instrumented (like `def f = this`), the method won't show up as covered. 
- instrumentBody(tree, rhs) - else - rhs - cpy.DefDef(tree)(tree.name, paramss, tree.tpt, finalRhs) - end if + transformDefDef(tree) + case tree: PackageDef => // only transform the statements of the package cpy.PackageDef(tree)(tree.pid, transform(tree.stats)) + case tree: Assign => // only transform the rhs cpy.Assign(tree)(tree.lhs, transform(tree.rhs)) + case tree: Return => + // only transform the expr, because `from` is a "pointer" + // to the enclosing method, not a tree to instrument. + cpy.Return(tree)(expr = transform(tree.expr), from = tree.from) + + case tree: Template => + // only transform: + // - the arguments of the `Apply` trees in the parents + // - the template body + cpy.Template(tree)( + transformSub(tree.constr), + transformTemplateParents(tree.parents)(using ctx.superCallContext), + tree.derived, + tree.self, + transformStats(tree.body, tree.symbol) + ) + + case tree: Inlined => + // Ideally, tree.call would provide precise information about the inlined call, + // and we would use this information for the coverage report. + // But PostTyper simplifies tree.call, so we can't report the actual method that was inlined. + // In any case, the subtrees need to be repositioned right now, otherwise the + // coverage statement will point to a potentially unreachable source file. + val dropped = Inlines.dropInlined(tree) // drop and reposition + transform(dropped) // transform the content of the Inlined + // For everything else just recurse and transform - // Special care for Templates: it's important to set the owner of the `stats`, like super.transform case _ => super.transform(tree) } - /** Lifts and instruments an application. - * Note that if only one arg needs to be lifted, we just lift everything (see LiftCoverage). + /** Transforms a `def lhs = rhs` and instruments its body (rhs). + * + * The rhs is always transformed recursively. 
+ * + * If possible, a coverage call is inserted at the beginning of the body + * (never outside of the DefDef tree). Therefore, this method always returns a `DefDef`. + * Thanks to this, it doesn't need to be wrapped in an`InstrumentedParts`. */ - private def instrumentLifted(tree: Apply)(using Context) = - // lifting - val buffer = mutable.ListBuffer[Tree]() - val liftedApply = LiftCoverage.liftForCoverage(buffer, tree) - - // instrumentation - val instrumentedArgs = buffer.toList.map(transform) - val instrumentedApply = instrument(liftedApply) - Block( - instrumentedArgs, - instrumentedApply - ) - - private inline def transformApply(tree: Apply)(using Context): Apply = - cpy.Apply(tree)(transform(tree.fun), transform(tree.args)) - - private inline def instrumentCases(cases: List[CaseDef])(using Context): List[CaseDef] = - cases.map(instrumentCaseDef) - - private def instrumentCaseDef(tree: CaseDef)(using Context): CaseDef = + private def transformDefDef(tree: DefDef)(using Context): DefDef = + val sym = tree.symbol + if sym.isOneOf(Inline | Erased) then + // Inline and erased definitions will not be in the generated code and therefore do not need to be instrumented. + // (Note that a retained inline method will have a `$retained` variant that will be instrumented.) + tree + else + // Only transform the params (for the default values) and the rhs, not the name and tpt. + val transformedParamss = transformParamss(tree.paramss) + val transformedRhs = + if !sym.isOneOf(Accessor | Artifact | Synthetic) && !tree.rhs.isEmpty then + // If the body can be instrumented, do it (i.e. insert a "coverage call" at the beginning) + // This is useful because methods can be stored and called later, or called by reflection, + // and if the rhs is too simple to be instrumented (like `def f = this`), + // the method won't show up as covered if we don't insert a call at its beginning. 
+ instrumentBody(tree, transform(tree.rhs)) + else + transform(tree.rhs) + + cpy.DefDef(tree)(tree.name, transformedParamss, tree.tpt, transformedRhs) + + /** Transforms a `case ...` and instruments the parts that can be. */ + private def transformCaseDef(tree: CaseDef)(using Context): CaseDef = val pat = tree.pat val guard = tree.guard + + // compute a span that makes sense for the user that will read the coverage results val friendlyEnd = if guard.span.exists then guard.span.end else pat.span.end val pos = tree.sourcePos.withSpan(tree.span.withEnd(friendlyEnd)) // user-friendly span - // ensure that the body is always instrumented by inserting a call to Invoker.invoked at its beginning - val instrumentedBody = instrument(transform(tree.body), pos, false) - cpy.CaseDef(tree)(tree.pat, transform(tree.guard), instrumentedBody) - /** Records information about a new coverable statement. Generates a unique id for it. - * @return the statement's id - */ - private def recordStatement(tree: Tree, pos: SourcePosition, branch: Boolean)(using ctx: Context): Int = - val id = statementId - statementId += 1 - val statement = Statement( - source = ctx.source.file.name, - location = Location(tree), - id = id, - start = pos.start, - end = pos.end, - line = pos.line, - desc = tree.source.content.slice(pos.start, pos.end).mkString, - symbolName = tree.symbol.name.toSimpleName.toString, - treeName = tree.getClass.getSimpleName.nn, - branch - ) - coverage.addStatement(statement) - id - - private inline def syntheticSpan(pos: SourcePosition): Span = pos.span.toSynthetic - - /** Shortcut for instrument(tree, tree.sourcePos, branch) */ - private inline def instrument(tree: Tree, branch: Boolean = false)(using Context): Tree = - instrument(tree, tree.sourcePos, branch) + // recursively transform the guard, but keep the pat + val transformedGuard = transform(guard) - /** Instruments a statement, if it has a position. 
*/ - private def instrument(tree: Tree, pos: SourcePosition, branch: Boolean)(using Context): Tree = - if pos.exists && !pos.span.isZeroExtent then - val statementId = recordStatement(tree, pos, branch) - insertInvokeCall(tree, pos, statementId) - else - tree - - /** Instruments the body of a DefDef. Handles corner cases. */ + // ensure that the body is always instrumented by inserting a call to Invoker.invoked at its beginning + val coverageCall = createInvokeCall(tree.body, pos) + val transformedBody = transform(tree.body) + val instrumentedBody = InstrumentedParts.singleExprTree(coverageCall, transformedBody) + + cpy.CaseDef(tree)(pat, transformedGuard, instrumentedBody) + + /** Transforms the parents of a Template. */ + private def transformTemplateParents(parents: List[Tree])(using Context): List[Tree] = + def transformParent(parent: Tree): Tree = parent match + case tree: Apply => + // only instrument the args, not the constructor call + cpy.Apply(tree)(tree.fun, tree.args.mapConserve(transform)) + case tree: TypeApply => + // args are types, instrument the fun with transformParent + cpy.TypeApply(tree)(transformParent(tree.fun), tree.args) + case other => + // should always be a TypeTree, nothing to instrument + other + + parents.mapConserve(transformParent) + + /** Instruments the body of a DefDef. Handles corner cases. + * Given a DefDef f like this: + * ``` + * def f(params) = rhs + * ``` + * + * It generally inserts a "coverage call" before rhs: + * ``` + * def f(params) = + * Invoker.invoked(id, DIR) + * rhs + * ``` + * + * But in some cases (e.g. closures), this would be invalid (see the comment below), + * and the call is inserted at another place. 
+ */ private def instrumentBody(parent: DefDef, body: Tree)(using Context): Tree = /* recurse on closures, so that we insert the call at the leaf: @@ -256,21 +407,8 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: val namePos = parent.namePos val pos = namePos.withSpan(namePos.span.withStart(parent.span.start)) // record info and insert call to Invoker.invoked - val statementId = recordStatement(parent, pos, false) - insertInvokeCall(body, pos, statementId) - - /** Returns the tree, prepended by a call to Invoker.invoked */ - private def insertInvokeCall(tree: Tree, pos: SourcePosition, statementId: Int)(using Context): Tree = - val callSpan = syntheticSpan(pos) - Block(invokeCall(statementId, callSpan) :: Nil, tree).withSpan(callSpan.union(tree.span)) - - /** Generates Invoker.invoked(id, DIR) */ - private def invokeCall(id: Int, span: Span)(using Context): Tree = - val outputPath = ctx.settings.coverageOutputDir.value - ref(defn.InvokedMethodRef).withSpan(span) - .appliedToArgs( - List(Literal(Constant(id)), Literal(Constant(outputPath))) - ).withSpan(span) + val coverageCall = createInvokeCall(parent, pos) + InstrumentedParts.singleExprTree(coverageCall, body) /** * Checks if the apply needs a lift in the coverage phase. @@ -307,19 +445,12 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: nestedApplyNeedsLift || !isUnliftableFun(fun) && !tree.args.isEmpty && !tree.args.forall(LiftCoverage.noLift) - /** Check if the body of a DefDef can be instrumented with instrumentBody. */ - private def canInstrumentDefDef(tree: DefDef)(using Context): Boolean = - // No need to force the instrumentation of synthetic definitions - // (it would work, but it looks better without). - !tree.symbol.isOneOf(Accessor | Synthetic | Artifact) && - !tree.rhs.isEmpty - /** Check if an Apply can be instrumented. Prevents this phase from generating incorrect code. 
*/ private def canInstrumentApply(tree: Apply)(using Context): Boolean = val sym = tree.symbol - !sym.isOneOf(Synthetic | Artifact) && // no need to instrument synthetic apply - !isCompilerIntrinsicMethod(sym) && - (tree.typeOpt match + !sym.isOneOf(ExcludeMethodFlags) + && !isCompilerIntrinsicMethod(sym) + && (tree.typeOpt match case AppliedType(tycon: NamedType, _) => /* If the last expression in a block is a context function, we'll try to summon its arguments at the current point, even if the expected type @@ -351,9 +482,10 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: * in post-erasure checking. */ private def canInstrumentParameterless(sym: Symbol)(using Context): Boolean = - sym.is(Method, butNot = Synthetic | Artifact) && - sym.info.isParameterless && - !isCompilerIntrinsicMethod(sym) + sym.is(Method, butNot = ExcludeMethodFlags) + && sym.info.isParameterless + && !isCompilerIntrinsicMethod(sym) + && !sym.info.typeSymbol.name.isContextFunction // exclude context functions like in canInstrumentApply /** Does sym refer to a "compiler intrinsic" method, which only exist during compilation, * like Any.isInstanceOf? @@ -370,15 +502,27 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: object InstrumentCoverage: val name: String = "instrumentCoverage" val description: String = "instrument code for coverage checking" - - /** Extractor object for trees produced by `insertInvokeCall`. */ - object InstrumentedBlock: - private def isInvokedCall(app: Apply)(using Context): Boolean = - app.span.isSynthetic && app.symbol == defn.InvokedMethodRef.symbol - - def unapply(t: Tree)(using Context): Option[(Apply, Tree)] = - t match - case Block((app: Apply) :: Nil, expr) if isInvokedCall(app) => - Some((app, expr)) - case _ => - None + val ExcludeMethodFlags: FlagSet = Synthetic | Artifact | Erased + + /** + * An instrumented Tree, in 3 parts. + * @param pre preparation code, e.g. lifted arguments. May be empty. 
+ * @param invokeCall call to Invoker.invoked(dir, id), or an empty tree. + * @param expr the instrumented expression, executed just after the invokeCall + */ + case class InstrumentedParts(pre: List[Tree], invokeCall: Apply | EmptyTree.type, expr: Tree): + require(pre.isEmpty || (pre.nonEmpty && !invokeCall.isEmpty), "if pre isn't empty then invokeCall shouldn't be empty") + + /** Turns this into an actual Tree. */ + def toTree(using Context): Tree = + if invokeCall.isEmpty then expr + else if pre.isEmpty then Block(invokeCall :: Nil, expr) + else Block(pre :+ invokeCall, expr) + + object InstrumentedParts: + def notCovered(expr: Tree) = InstrumentedParts(Nil, EmptyTree, expr) + def singleExpr(invokeCall: Apply, expr: Tree) = InstrumentedParts(Nil, invokeCall, expr) + + /** Shortcut for `singleExpr(call, expr).toTree` */ + def singleExprTree(invokeCall: Apply, expr: Tree)(using Context): Tree = + Block(invokeCall :: Nil, expr) diff --git a/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala b/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala index ad068b84c041..c95500d856be 100644 --- a/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala +++ b/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala @@ -65,7 +65,7 @@ class InterceptedMethods extends MiniPhase { override def transformApply(tree: Apply)(using Context): Tree = { lazy val qual = tree.fun match { case Select(qual, _) => qual - case ident @ Ident(_) => + case ident: Ident => ident.tpe match { case TermRef(prefix: TermRef, _) => tpd.ref(prefix) diff --git a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala index c32ea61cff2b..3b37ef130231 100644 --- a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala +++ b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala @@ -2,30 +2,33 @@ package dotty.tools.dotc package transform import java.util.IdentityHashMap - import ast.tpd import 
core.Annotations.Annotation import core.Constants.Constant -import core.Contexts._ -import core.Decorators._ +import core.Contexts.* +import core.Decorators.* import core.DenotTransformers.IdentityDenotTransformer -import core.Flags._ -import core.NameKinds.{LazyBitMapName, LazyLocalInitName, LazyLocalName, ExpandedName} +import core.Flags.* +import core.NameKinds.{ExpandedName, LazyBitMapName, LazyLocalInitName, LazyLocalName} import core.StdNames.nme -import core.Symbols._ -import core.Types._ +import core.Symbols.* +import core.Types.* import core.{Names, StdNames} +import dotty.tools.dotc.config.Feature import transform.MegaPhase.MiniPhase -import transform.SymUtils._ +import transform.SymUtils.* + import scala.collection.mutable class LazyVals extends MiniPhase with IdentityDenotTransformer { import LazyVals._ import tpd._ - /** this map contains mutable state of transformation: OffsetDefs to be appended to companion object definitions, - * and number of bits currently used */ - class OffsetInfo(var defs: List[Tree], var ord:Int) + /** + * The map contains the list of the offset trees. 
+ */ + class OffsetInfo(var defs: List[Tree], var ord: Int = 0) + private val appendOffsetDefs = mutable.Map.empty[Symbol, OffsetInfo] override def phaseName: String = LazyVals.name @@ -52,6 +55,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { else nullables.toList } + private def needsBoxing(tp: Type)(using Context): Boolean = tp.classSymbol.isPrimitiveValueClass override def prepareForUnit(tree: Tree)(using Context): Context = { if (lazyValNullables == null) @@ -62,7 +66,6 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { override def transformDefDef(tree: DefDef)(using Context): Tree = transformLazyVal(tree) - override def transformValDef(tree: ValDef)(using Context): Tree = transformLazyVal(tree) @@ -103,10 +106,9 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { /** Append offset fields to companion objects - */ + */ override def transformTemplate(template: Template)(using Context): Tree = { val cls = ctx.owner.asClass - appendOffsetDefs.get(cls) match { case None => template case Some(data) => @@ -115,7 +117,6 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { } } - private def addInFront(prefix: List[Tree], stats: List[Tree]) = stats match { case first :: rest if isSuperConstrCall(first) => first :: prefix ::: rest case _ => prefix ::: stats @@ -186,7 +187,6 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { Thicket(holderTree, initTree, accessor) } - override def transformStats(trees: List[tpd.Tree])(using Context): List[Tree] = { // backend requires field usage to be after field definition // need to bring containers to start of method @@ -274,6 +274,231 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { } } + /** + * Create a threadsafe lazy accessor and function that computes the field's value. `Evaluating` and + * `NullValue` are represented by `object`s and `Waiting` by a class that allows awaiting the completion + * of the evaluation. 
Note that since tail-recursive functions are transformed *before* lazy-vals, + * this implementation does involve explicit while loop. `PatternMatcher` is coming before `LazyVals`, + * therefore the pattern matching is implemented using if-s. + * + * ``` + * private @volatile var _x: AnyRef = null + * + * def x: A = + * val result = _x + * if result.isInstanceOf[A] then + * result // possible unboxing applied here + * else if result.eq(NullValue) then + * null // possible unboxing applied here + * else + * x_compute() // possible unboxing applied here + * + * private def x_compute(): AnyRef = + * while do + * val current: AnyRef = _x + * if current.eq(null) then + * if CAS(_x, null, Evaluating) then + * var resultNullable: AnyRef = null + * var result: AnyRef = null + * try + * resultNullable = rhs + * nullable = null // nulls out the nullable fields used only in initialization + * if resultNullable.eq(null) then + * result = NullValue + * else + * result = resultNullable + * finally + * if !CAS(_x, Evaluating, result) then + * val lock = _x.asInstanceOf[Waiting] + * CAS(_x, lock, result) + * lock.release() + * return resultNullable + * else + * if current.isInstanceOf[LazyValControlState] then + * if current.eq(Evaluating) then // To avoid creating Waiting instance + * CAS(current, current, new Waiting) + * else if current.isInstanceOf[Waiting] then + * current.asInstanceOf[Waiting].await() + * else return null + * else + * return current + * end while + * * ``` + * + * @param memberDef the transformed lazy field member definition + * @param claz the class containing this lazy val field + * @param target the target synthetic field + * @param offset the offset of the field in the storage allocation of the class + * @param thiz a reference to the transformed class + */ + def mkThreadSafeDef(memberDef: ValOrDefDef, + claz: ClassSymbol, + target: Symbol, + offset: Tree, + thiz: Tree)(using Context): (DefDef, DefDef) = { + val tp = 
memberDef.tpe.widenDealias.resultType.widenDealias + val waiting = ref(defn.LazyValsWaitingState) + val controlState = ref(defn.LazyValsControlState) + val evaluating = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.evaluating) + val nullValue = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.nullValue) + val objCasFlag = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.objCas) + val accessorMethodSymbol = memberDef.symbol.asTerm + val lazyInitMethodName = LazyLocalInitName.fresh(memberDef.name.asTermName) + val lazyInitMethodSymbol = newSymbol(claz, lazyInitMethodName, Synthetic | Method | Private, MethodType(Nil)(_ => Nil, _ => defn.ObjectType)) + + val rhs = memberDef.rhs + val rhsMappedOwner = rhs.changeOwnerAfter(memberDef.symbol, lazyInitMethodSymbol, this) + val valueSymbol = newSymbol(accessorMethodSymbol, lazyNme.result, Synthetic, defn.ObjectType) + + val immediateValueCondition = + if (defn.LazyValsControlState.isSubClass(tp.classSymbol)) then + ref(valueSymbol).select(defn.Any_!=).appliedTo(nullLiteral).select(nme.And).appliedTo(ref(valueSymbol) + .select(defn.Any_isInstanceOf).appliedToType(defn.LazyValsControlState.typeRef) + .select(nme.UNARY_!).appliedToNone) + else + ref(valueSymbol).select(defn.Any_isInstanceOf).appliedToType(tp) + + val accessorBody = + Block( + ValDef(valueSymbol, ref(target)) :: Nil, + If( // if _x != null && !_x.isInstanceOf[LazyValControlState] then + immediateValueCondition, + ref(valueSymbol).ensureConforms(tp), // then return _x.asInstanceOf[A] + If( + ref(valueSymbol).select(defn.Object_eq).appliedTo(nullValue), + nullLiteral.ensureConforms(tp), + ref(lazyInitMethodSymbol).ensureApplied.ensureConforms(tp) // else return x_compute() + ) + ) + ) + + val accessorDef = DefDef(accessorMethodSymbol, accessorBody) + + // if observed a null (uninitialized) value + val initialize = { + // var result: AnyRef + val resSymbNullable = newSymbol(lazyInitMethodSymbol, lazyNme.resultNullable, Synthetic | Mutable, 
defn.ObjectType) + val resSymb = newSymbol(lazyInitMethodSymbol, lazyNme.result, Synthetic | Mutable, defn.ObjectType) + // releasing block in finally + val lockRel = { + val lockSymb = newSymbol(lazyInitMethodSymbol, lazyNme.lock, Synthetic, waiting.typeOpt) + Block(ValDef(lockSymb, ref(target).cast(waiting.typeOpt)) + :: objCasFlag.appliedTo(thiz, offset, ref(lockSymb), ref(resSymb)) :: Nil, + ref(lockSymb).select(lazyNme.RLazyVals.waitingRelease).ensureApplied) + } + // finally block + val fin = If( + objCasFlag.appliedTo(thiz, offset, evaluating, ref(resSymb)).select(nme.UNARY_!).appliedToNone, + lockRel, + unitLiteral + ) + // entire try block + val evaluate = Try( + + Block( + (Assign(ref(resSymbNullable), if needsBoxing(tp) && rhsMappedOwner != EmptyTree then rhsMappedOwner.ensureConforms(defn.boxedType(tp)) else rhsMappedOwner) // try result = rhs + :: If( + ref(resSymbNullable).select(defn.Object_eq).appliedTo(nullLiteral), + Assign(ref(resSymb), nullValue), + Assign(ref(resSymb), ref(resSymbNullable)) + ) :: Nil) + ::: nullOut(nullableFor(accessorMethodSymbol)), + unitLiteral), + Nil, + fin + ) + // if CAS(_, null, Evaluating) + If( + objCasFlag.appliedTo(thiz, offset, nullLiteral, evaluating), + Block(ValDef(resSymb, nullLiteral) :: ValDef(resSymbNullable, nullLiteral) :: evaluate :: Nil, // var result: AnyRef = null + Return(ref(resSymbNullable), lazyInitMethodSymbol)), + unitLiteral + ).withType(defn.UnitType) + } + + val current = newSymbol(lazyInitMethodSymbol, lazyNme.current, Synthetic, defn.ObjectType) + val ifNotUninitialized = + If( + ref(current).select(defn.Any_isInstanceOf).appliedToTypeTree(controlState), + // if a control state + If( + ref(current).select(defn.Object_eq).appliedTo(evaluating), + // if is Evaluating then CAS(_, Evaluating, new Waiting) + Block( + objCasFlag.appliedTo(thiz, offset, ref(current), Select(New(waiting), StdNames.nme.CONSTRUCTOR).ensureApplied) :: Nil, + unitLiteral + ), + // if not Evaluating + If( + 
ref(current).select(defn.Any_isInstanceOf).appliedToTypeTree(waiting), + // if is waiting + ref(current).select(defn.Any_asInstanceOf).appliedToTypeTree(waiting).select(lazyNme.RLazyVals.waitingAwaitRelease, _.info.paramInfoss.exists(_.size == 0)).ensureApplied, + Return(nullLiteral, lazyInitMethodSymbol) + ) + ), + // if not a control state + Return(ref(current), lazyInitMethodSymbol) + ) + + val initBody = Block(ValDef(current, ref(target)) :: Nil, If(ref(current).select(defn.Object_eq).appliedTo(nullLiteral), initialize, ifNotUninitialized).withType(defn.UnitType)) + val initMainLoop = WhileDo(EmptyTree, initBody) // becomes: while (true) do { body } + val initMethodDef = DefDef(lazyInitMethodSymbol, initMainLoop) + (accessorDef, initMethodDef) + } + + def transformMemberDefThreadSafe(x: ValOrDefDef)(using Context): Thicket = { + assert(!(x.symbol is Mutable)) + if ctx.settings.YlightweightLazyVals.value then + transformMemberDefThreadSafeNew(x) + else + transformMemberDefThreadSafeLegacy(x) + } + + def transformMemberDefThreadSafeNew(x: ValOrDefDef)(using Context): Thicket = { + import dotty.tools.dotc.core.Types._ + import dotty.tools.dotc.core.Flags._ + + val claz = x.symbol.owner.asClass + val thizClass = Literal(Constant(claz.info)) + + def offsetName(id: Int) = s"${StdNames.nme.LAZY_FIELD_OFFSET}${if (x.symbol.owner.is(Module)) "_m_" else ""}$id".toTermName + val containerName = LazyLocalName.fresh(x.name.asTermName) + val containerSymbol = newSymbol(claz, containerName, x.symbol.flags &~ containerFlagsMask | containerFlags | Private, defn.ObjectType, coord = x.symbol.coord).enteredAfter(this) + containerSymbol.addAnnotation(Annotation(defn.VolatileAnnot)) // private @volatile var _x: AnyRef + containerSymbol.addAnnotations(x.symbol.annotations) // pass annotations from original definition + val stat = x.symbol.isStatic + if stat then + containerSymbol.setFlag(JavaStatic) + val getOffset = + if stat then + Select(ref(defn.LazyValsModule), 
lazyNme.RLazyVals.getStaticFieldOffset) + else + Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.getOffsetStatic) + val containerTree = ValDef(containerSymbol, nullLiteral) + + // create an offset for this lazy val + val offsetSymbol: TermSymbol = appendOffsetDefs.get(claz) match + case Some(info) => + newSymbol(claz, offsetName(info.defs.size), Synthetic, defn.LongType).enteredAfter(this) + case None => + newSymbol(claz, offsetName(0), Synthetic, defn.LongType).enteredAfter(this) + offsetSymbol.nn.addAnnotation(Annotation(defn.ScalaStaticAnnot)) + val fieldTree = thizClass.select(lazyNme.RLazyVals.getDeclaredField).appliedTo(Literal(Constant(containerName.mangledString))) + val offsetTree = ValDef(offsetSymbol.nn, getOffset.appliedTo(fieldTree)) + val offsetInfo = appendOffsetDefs.getOrElseUpdate(claz, new OffsetInfo(Nil)) + offsetInfo.defs = offsetTree :: offsetInfo.defs + val offset = ref(offsetSymbol.nn) + + val swapOver = + if stat then + tpd.clsOf(x.symbol.owner.typeRef) + else + This(claz) + + val (accessorDef, initMethodDef) = mkThreadSafeDef(x, claz, containerSymbol, offset, swapOver) + Thicket(containerTree, accessorDef, initMethodDef) + } + /** Create a threadsafe lazy accessor equivalent to such code * ``` * def methodSymbol(): Int = { @@ -305,7 +530,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { * } * ``` */ - def mkThreadSafeDef(methodSymbol: TermSymbol, + def mkThreadSafeDefLegacy(methodSymbol: TermSymbol, claz: ClassSymbol, ord: Int, target: Symbol, @@ -374,15 +599,12 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { DefDef(methodSymbol, loop) } - def transformMemberDefThreadSafe(x: ValOrDefDef)(using Context): Thicket = { - assert(!(x.symbol is Mutable)) - + def transformMemberDefThreadSafeLegacy(x: ValOrDefDef)(using Context): Thicket = { val tpe = x.tpe.widen.resultType.widen val claz = x.symbol.owner.asClass val thizClass = Literal(Constant(claz.info)) - val helperModule = 
requiredModule("scala.runtime.LazyVals") - val getOffset = Select(ref(helperModule), lazyNme.RLazyVals.getOffset) - val getOffsetStatic = Select(ref(helperModule), lazyNme.RLazyVals.getOffsetStatic) + val getOffset = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.getOffset) + val getOffsetStatic = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.getOffsetStatic) var offsetSymbol: TermSymbol | Null = null var flag: Tree = EmptyTree var ord = 0 @@ -425,17 +647,16 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { val containerName = LazyLocalName.fresh(x.name.asTermName) val containerSymbol = newSymbol(claz, containerName, x.symbol.flags &~ containerFlagsMask | containerFlags, tpe, coord = x.symbol.coord).enteredAfter(this) - val containerTree = ValDef(containerSymbol, defaultValue(tpe)) val offset = ref(offsetSymbol.nn) - val getFlag = Select(ref(helperModule), lazyNme.RLazyVals.get) - val setFlag = Select(ref(helperModule), lazyNme.RLazyVals.setFlag) - val wait = Select(ref(helperModule), lazyNme.RLazyVals.wait4Notification) - val state = Select(ref(helperModule), lazyNme.RLazyVals.state) - val cas = Select(ref(helperModule), lazyNme.RLazyVals.cas) + val getFlag = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.get) + val setFlag = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.setFlag) + val wait = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.wait4Notification) + val state = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.state) + val cas = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.cas) - val accessor = mkThreadSafeDef(x.symbol.asTerm, claz, ord, containerSymbol, x.rhs, tpe, offset, getFlag, state, cas, setFlag, wait) + val accessor = mkThreadSafeDefLegacy(x.symbol.asTerm, claz, ord, containerSymbol, x.rhs, tpe, offset, getFlag, state, cas, setFlag, wait) if (flag eq EmptyTree) Thicket(containerTree, accessor) else Thicket(containerTree, flag, accessor) @@ -445,26 +666,35 @@ class LazyVals extends MiniPhase with 
IdentityDenotTransformer { object LazyVals { val name: String = "lazyVals" val description: String = "expand lazy vals" - object lazyNme { import Names.TermName object RLazyVals { import scala.runtime.LazyVals.{Names => N} - val get: TermName = N.get.toTermName - val setFlag: TermName = N.setFlag.toTermName - val wait4Notification: TermName = N.wait4Notification.toTermName - val state: TermName = N.state.toTermName - val cas: TermName = N.cas.toTermName - val getOffset: TermName = N.getOffset.toTermName - val getOffsetStatic: TermName = "getOffsetStatic".toTermName - val getDeclaredField: TermName = "getDeclaredField".toTermName + val waitingAwaitRelease: TermName = "await".toTermName + val waitingRelease: TermName = "countDown".toTermName + val evaluating: TermName = "Evaluating".toTermName + val nullValue: TermName = "NullValue".toTermName + val objCas: TermName = "objCAS".toTermName + val get: TermName = N.get.toTermName + val setFlag: TermName = N.setFlag.toTermName + val wait4Notification: TermName = N.wait4Notification.toTermName + val state: TermName = N.state.toTermName + val cas: TermName = N.cas.toTermName + val getOffset: TermName = N.getOffset.toTermName + val getOffsetStatic: TermName = "getOffsetStatic".toTermName + val getStaticFieldOffset: TermName = "getStaticFieldOffset".toTermName + val getDeclaredField: TermName = "getDeclaredField".toTermName } val flag: TermName = "flag".toTermName val state: TermName = "state".toTermName val result: TermName = "result".toTermName + val resultNullable: TermName = "resultNullable".toTermName val value: TermName = "value".toTermName val initialized: TermName = "initialized".toTermName val initialize: TermName = "initialize".toTermName val retry: TermName = "retry".toTermName + val current: TermName = "current".toTermName + val lock: TermName = "lock".toTermName + val discard: TermName = "discard".toTermName } } diff --git a/compiler/src/dotty/tools/dotc/transform/Memoize.scala 
b/compiler/src/dotty/tools/dotc/transform/Memoize.scala index d20f3e1a8da4..6456066bfdb0 100644 --- a/compiler/src/dotty/tools/dotc/transform/Memoize.scala +++ b/compiler/src/dotty/tools/dotc/transform/Memoize.scala @@ -16,8 +16,12 @@ import Flags._ import Decorators._ import StdNames.nme +import sjs.JSSymUtils._ + import util.Store +import dotty.tools.backend.sjs.JSDefinitions.jsdefn + object Memoize { val name: String = "memoize" val description: String = "add private fields to getters and setters" @@ -142,14 +146,30 @@ class Memoize extends MiniPhase with IdentityDenotTransformer { thisPhase => } if sym.is(Accessor, butNot = NoFieldNeeded) then + /* Tests whether the semantics of Scala.js require a field for this symbol, irrespective of any + * optimization we think we can do. This is the case if one of the following is true: + * - it is a member of a JS type, since it needs to be visible as a JavaScript field + * - is is exported as static member of the companion class, since it needs to be visible as a JavaScript static field + * - it is exported to the top-level, since that can only be done as a true top-level variable, i.e., a field + */ + def sjsNeedsField: Boolean = + ctx.settings.scalajs.value && ( + sym.owner.isJSType + || sym.hasAnnotation(jsdefn.JSExportTopLevelAnnot) + || sym.hasAnnotation(jsdefn.JSExportStaticAnnot) + ) + def adaptToField(field: Symbol, tree: Tree): Tree = if (tree.isEmpty) tree else tree.ensureConforms(field.info.widen) def isErasableBottomField(field: Symbol, cls: Symbol): Boolean = - !field.isVolatile && ((cls eq defn.NothingClass) || (cls eq defn.NullClass) || (cls eq defn.BoxedUnitClass)) + !field.isVolatile + && ((cls eq defn.NothingClass) || (cls eq defn.NullClass) || (cls eq defn.BoxedUnitClass)) + && !sjsNeedsField if sym.isGetter then - val constantFinalVal = sym.isAllOf(Accessor | Final, butNot = Mutable) && tree.rhs.isInstanceOf[Literal] + val constantFinalVal = + sym.isAllOf(Accessor | Final, butNot = Mutable) && 
tree.rhs.isInstanceOf[Literal] && !sjsNeedsField if constantFinalVal then // constant final vals do not need to be transformed at all, and do not need a field tree diff --git a/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala b/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala index 94ea48e14efd..8c93ffb90232 100644 --- a/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala +++ b/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala @@ -30,7 +30,8 @@ import NameKinds.ParamAccessorName * The aim of this transformation is to avoid redundant parameter accessor fields. */ class ParamForwarding extends MiniPhase with IdentityDenotTransformer: - import ast.tpd._ + import ast.tpd.* + import ParamForwarding.inheritedAccessor private def thisPhase: ParamForwarding = this @@ -39,20 +40,6 @@ class ParamForwarding extends MiniPhase with IdentityDenotTransformer: override def description: String = ParamForwarding.description def transformIfParamAlias(mdef: ValOrDefDef)(using Context): Tree = - - def inheritedAccessor(sym: Symbol)(using Context): Symbol = - val candidate = sym.owner.asClass.superClass - .info.decl(sym.name).suchThat(_.is(ParamAccessor, butNot = Mutable)) - .symbol - if !candidate.is(Private) // candidate might be private and accessible if it is in an outer class - && candidate.isAccessibleFrom(currentClass.thisType, superAccess = true) - then - candidate - else if candidate.is(SuperParamAlias) then - inheritedAccessor(candidate) - else - NoSymbol - val sym = mdef.symbol.asTerm if sym.is(SuperParamAlias) then assert(sym.is(ParamAccessor, butNot = Mutable)) @@ -84,3 +71,17 @@ class ParamForwarding extends MiniPhase with IdentityDenotTransformer: object ParamForwarding: val name: String = "paramForwarding" val description: String = "add forwarders for aliases of superclass parameters" + + def inheritedAccessor(sym: Symbol)(using Context): Symbol = + val candidate = sym.owner.asClass.superClass + 
.info.decl(sym.name).suchThat(_.is(ParamAccessor, butNot = Mutable)) + .symbol + if !candidate.is(Private) // candidate might be private and accessible if it is in an outer class + && candidate.isAccessibleFrom(currentClass.thisType, superAccess = true) + then + candidate + else if candidate.is(SuperParamAlias) then + inheritedAccessor(candidate) + else + NoSymbol +end ParamForwarding \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index 3db751df4145..05aaa745bb18 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -14,6 +14,7 @@ import Decorators._ import Symbols._, SymUtils._, NameOps._ import ContextFunctionResults.annotateContextResults import config.Printers.typr +import config.Feature import util.SrcPos import reporting._ import NameKinds.WildcardParamName @@ -301,12 +302,14 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase checkNoConstructorProxy(tree) transformSelect(tree, Nil) case tree: Apply => - val methType = tree.fun.tpe.widen + val methType = tree.fun.tpe.widen.asInstanceOf[MethodType] val app = if (methType.isErasedMethod) tpd.cpy.Apply(tree)( tree.fun, tree.args.mapConserve(arg => + if methType.isResultDependent then + Checking.checkRealizable(arg.tpe, arg.srcPos, "erased argument") if (methType.isImplicitMethod && arg.span.isSynthetic) arg match case _: RefTree | _: Apply | _: TypeApply if arg.symbol.is(Erased) => @@ -359,6 +362,7 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase } case Inlined(call, bindings, expansion) if !call.isEmpty => val pos = call.sourcePos + CrossVersionChecks.checkExperimentalRef(call.symbol, pos) val callTrace = Inlines.inlineCallTrace(call.symbol, pos)(using ctx.withSource(pos.source)) cpy.Inlined(tree)(callTrace, transformSub(bindings), transform(expansion)(using 
inlineContext(call))) case templ: Template => @@ -396,8 +400,8 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase val reference = ctx.settings.sourceroot.value val relativePath = util.SourceFile.relativePath(ctx.compilationUnit.source, reference) sym.addAnnotation(Annotation.makeSourceFile(relativePath)) - if ctx.settings.Ycc.value && sym != defn.CaptureCheckedAnnot then - sym.addAnnotation(Annotation(defn.CaptureCheckedAnnot)) + if Feature.pureFunsEnabled && sym != defn.WithPureFunsAnnot then + sym.addAnnotation(Annotation(defn.WithPureFunsAnnot)) else if !sym.is(Param) && !sym.owner.isOneOf(AbstractOrTrait) then Checking.checkGoodBounds(tree.symbol) diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala index 36044e6bcb91..0ac9087a08c0 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -4,7 +4,7 @@ package transform import core.* import Symbols.*, Contexts.*, Types.*, ContextOps.*, Decorators.*, SymDenotations.* -import Flags.*, SymUtils.*, NameKinds.* +import Flags.*, SymUtils.*, NameKinds.*, Denotations.Denotation import ast.* import Names.Name import Phases.Phase @@ -21,9 +21,11 @@ import util.Property import StdNames.nme import reporting.trace import annotation.constructorOnly +import cc.CaptureSet.IdempotentCaptRefMap +import dotty.tools.dotc.core.Denotations.SingleDenotation object Recheck: - import tpd.Tree + import tpd.* /** A flag used to indicate that a ParamAccessor has been temporarily made not-private * Only used at the start of the Recheck phase, reset at its end. 
@@ -36,6 +38,13 @@ object Recheck: /** Attachment key for rechecked types of TypeTrees */ val RecheckedType = Property.Key[Type] + val addRecheckedTypes = new TreeMap: + override def transform(tree: Tree)(using Context): Tree = + val tree1 = super.transform(tree) + tree.getAttachment(RecheckedType) match + case Some(tpe) => tree1.withType(tpe) + case None => tree1 + extension (sym: Symbol) /** Update symbol's info to newInfo from prevPhase.next to lastPhase. @@ -63,7 +72,7 @@ object Recheck: val symd = sym.denot symd.validFor.firstPhaseId == phase.id + 1 && (sym.originDenotation ne symd) - extension (tree: Tree) + extension [T <: Tree](tree: T) /** Remember `tpe` as the type of `tree`, which might be different from the * type stored in the tree itself, unless a type was already remembered for `tree`. @@ -78,11 +87,27 @@ object Recheck: if tpe ne tree.tpe then tree.putAttachment(RecheckedType, tpe) /** The remembered type of the tree, or if none was installed, the original type */ - def knownType = + def knownType: Type = tree.attachmentOrElse(RecheckedType, tree.tpe) def hasRememberedType: Boolean = tree.hasAttachment(RecheckedType) + def withKnownType(using Context): T = tree.getAttachment(RecheckedType) match + case Some(tpe) => tree.withType(tpe).asInstanceOf[T] + case None => tree + + extension (tpe: Type) + + /** Map ExprType => T to () ?=> T (and analogously for pure versions). + * Even though this phase runs after ElimByName, ExprTypes can still occur + * as by-name arguments of applied types. See note in doc comment for + * ElimByName phase. Test case is bynamefun.scala. + */ + def mapExprType(using Context): Type = tpe match + case ExprType(rt) => defn.ByNameFunction(rt) + case _ => tpe + + /** A base class that runs a simplified typer pass over an already re-typed program. The pass * does not transform trees but returns instead the re-typed type of each tree as it is * traversed. The Recheck phase must be directly preceded by a phase of type PreRecheck. 
@@ -108,7 +133,9 @@ abstract class Recheck extends Phase, SymTransformer: else sym def run(using Context): Unit = - newRechecker().checkUnit(ctx.compilationUnit) + val rechecker = newRechecker() + rechecker.checkUnit(ctx.compilationUnit) + rechecker.reset() def newRechecker()(using Context): Rechecker @@ -128,8 +155,14 @@ abstract class Recheck extends Phase, SymTransformer: */ def keepType(tree: Tree): Boolean = keepAllTypes + private val prevSelDenots = util.HashMap[NamedType, Denotation]() + + def reset()(using Context): Unit = + for (ref, mbr) <- prevSelDenots.iterator do + ref.withDenot(mbr) + /** Constant-folded rechecked type `tp` of tree `tree` */ - private def constFold(tree: Tree, tp: Type)(using Context): Type = + protected def constFold(tree: Tree, tp: Type)(using Context): Type = val tree1 = tree.withType(tp) val tree2 = ConstFold(tree1) if tree2 ne tree1 then tree2.tpe else tp @@ -137,21 +170,51 @@ abstract class Recheck extends Phase, SymTransformer: def recheckIdent(tree: Ident)(using Context): Type = tree.tpe - def recheckSelect(tree: Select)(using Context): Type = + def recheckSelect(tree: Select, pt: Type)(using Context): Type = val Select(qual, name) = tree - recheckSelection(tree, recheck(qual).widenIfUnstable, name) + val proto = + if tree.symbol == defn.Any_asInstanceOf then WildcardType + else AnySelectionProto + recheckSelection(tree, recheck(qual, proto).widenIfUnstable, name, pt) + + /** When we select the `apply` of a function with type such as `(=> A) => B`, + * we need to convert the parameter type `=> A` to `() ?=> A`. See doc comment + * of `mapExprType`. 
+ */ + def normalizeByName(mbr: SingleDenotation)(using Context): SingleDenotation = mbr.info match + case mt: MethodType if mt.paramInfos.exists(_.isInstanceOf[ExprType]) => + mbr.derivedSingleDenotation(mbr.symbol, + mt.derivedLambdaType(paramInfos = mt.paramInfos.map(_.mapExprType))) + case _ => + mbr - /** Keep the symbol of the `select` but re-infer its type */ - def recheckSelection(tree: Select, qualType: Type, name: Name)(using Context) = + def recheckSelection(tree: Select, qualType: Type, name: Name, + sharpen: Denotation => Denotation)(using Context): Type = if name.is(OuterSelectName) then tree.tpe else //val pre = ta.maybeSkolemizePrefix(qualType, name) - val mbr = qualType.findMember(name, qualType, - excluded = if tree.symbol.is(Private) then EmptyFlags else Private - ).suchThat(tree.symbol == _) - constFold(tree, qualType.select(name, mbr)) + val mbr = normalizeByName( + sharpen( + qualType.findMember(name, qualType, + excluded = if tree.symbol.is(Private) then EmptyFlags else Private + )).suchThat(tree.symbol == _)) + val newType = tree.tpe match + case prevType: NamedType => + val prevDenot = prevType.denot + val newType = qualType.select(name, mbr) + if (newType eq prevType) && (mbr.info ne prevDenot.info) && !prevSelDenots.contains(prevType) then + prevSelDenots(prevType) = prevDenot + newType + case _ => + qualType.select(name, mbr) + constFold(tree, newType) //.showing(i"recheck select $qualType . 
$name : ${mbr.info} = $result") + + /** Keep the symbol of the `select` but re-infer its type */ + def recheckSelection(tree: Select, qualType: Type, name: Name, pt: Type)(using Context): Type = + recheckSelection(tree, qualType, name, sharpen = identity[Denotation]) + def recheckBind(tree: Bind, pt: Type)(using Context): Type = tree match case Bind(name, body) => recheck(body, pt) @@ -187,7 +250,7 @@ abstract class Recheck extends Phase, SymTransformer: * to FromJavaObject since it got lost in ElimRepeated */ private def mapJavaArgs(formals: List[Type])(using Context): List[Type] = - val tm = new TypeMap: + val tm = new TypeMap with IdempotentCaptRefMap: def apply(t: Type) = t match case t: TypeRef if t.symbol == defn.ObjectClass => defn.FromJavaObjectType case _ => mapOver(t) @@ -198,7 +261,8 @@ abstract class Recheck extends Phase, SymTransformer: mt.instantiate(argTypes) def recheckApply(tree: Apply, pt: Type)(using Context): Type = - recheck(tree.fun).widen match + val funtpe = recheck(tree.fun) + funtpe.widen match case fntpe: MethodType => assert(fntpe.paramInfos.hasSameLengthAs(tree.args)) val formals = @@ -206,7 +270,7 @@ abstract class Recheck extends Phase, SymTransformer: else fntpe.paramInfos def recheckArgs(args: List[Tree], formals: List[Type], prefs: List[ParamRef]): List[Type] = args match case arg :: args1 => - val argType = recheck(arg, formals.head) + val argType = recheck(arg, formals.head.mapExprType) val formals1 = if fntpe.isParamDependent then formals.tail.map(_.substParam(prefs.head, argType)) @@ -218,6 +282,8 @@ abstract class Recheck extends Phase, SymTransformer: val argTypes = recheckArgs(tree.args, formals, fntpe.paramRefs) constFold(tree, instantiate(fntpe, argTypes, tree.fun.symbol)) //.showing(i"typed app $tree : $fntpe with ${tree.args}%, % : $argTypes%, % = $result") + case tp => + assert(false, i"unexpected type of ${tree.fun}: $funtpe") def recheckTypeApply(tree: TypeApply, pt: Type)(using Context): Type = 
recheck(tree.fun).widen match @@ -248,7 +314,7 @@ abstract class Recheck extends Phase, SymTransformer: recheckBlock(tree.stats, tree.expr, pt) def recheckInlined(tree: Inlined, pt: Type)(using Context): Type = - recheckBlock(tree.bindings, tree.expansion, pt) + recheckBlock(tree.bindings, tree.expansion, pt)(using inlineContext(tree.call)) def recheckIf(tree: If, pt: Type)(using Context): Type = recheck(tree.cond, defn.BooleanType) @@ -283,7 +349,20 @@ abstract class Recheck extends Phase, SymTransformer: val rawType = recheck(tree.expr) val ownType = avoidMap(rawType) - checkConforms(ownType, tree.from.symbol.returnProto, tree) + + // The pattern matching translation, which runs before this phase + // sometimes instantiates return types with singleton type alternatives + // but the returned expression is widened. We compensate by widening the expected + // type as well. See also `widenSkolems` in `checkConformsExpr` which fixes + // a more general problem. It turns out that pattern matching returns + // are not checked by Ycheck, that's why these problems were allowed to slip + // through. 
+ def widened(tp: Type): Type = tp match + case tp: SingletonType => tp.widen + case tp: AndOrType => tp.derivedAndOrType(widened(tp.tp1), widened(tp.tp2)) + case tp @ AnnotatedType(tp1, ann) => tp.derivedAnnotatedType(widened(tp1), ann) + case _ => tp + checkConforms(ownType, widened(tree.from.symbol.returnProto), tree) defn.NothingType end recheckReturn @@ -344,7 +423,7 @@ abstract class Recheck extends Phase, SymTransformer: val sym = tree.symbol tree match case tree: Ident => recheckIdent(tree) - case tree: Select => recheckSelect(tree) + case tree: Select => recheckSelect(tree, pt) case tree: Bind => recheckBind(tree, pt) case tree: ValOrDefDef => if tree.isEmpty then NoType @@ -409,6 +488,27 @@ abstract class Recheck extends Phase, SymTransformer: throw ex } + /** Typing and previous transforms sometiems leaves skolem types in prefixes of + * NamedTypes in `expected` that do not match the `actual` Type. -Ycheck does + * not complain (need to find out why), but a full recheck does. We compensate + * by de-skolemizing everywhere in `expected` except when variance is negative. + * @return If `tp` contains SkolemTypes in covariant or invariant positions, + * the type where these SkolemTypes are mapped to their underlying type. + * Otherwise, `tp` itself + */ + def widenSkolems(tp: Type)(using Context): Type = + object widenSkolems extends TypeMap, IdempotentCaptRefMap: + var didWiden: Boolean = false + def apply(t: Type): Type = t match + case t: SkolemType if variance >= 0 => + didWiden = true + apply(t.underlying) + case t: LazyRef => t + case t @ AnnotatedType(t1, ann) => t.derivedAnnotatedType(apply(t1), ann) + case _ => mapOver(t) + val tp1 = widenSkolems(tp) + if widenSkolems.didWiden then tp1 else tp + /** If true, print info for some successful checkConforms operations (failing ones give * an error message in any case). 
*/ @@ -424,11 +524,16 @@ abstract class Recheck extends Phase, SymTransformer: def checkConformsExpr(actual: Type, expected: Type, tree: Tree)(using Context): Unit = //println(i"check conforms $actual <:< $expected") - val isCompatible = + + def isCompatible(expected: Type): Boolean = actual <:< expected || expected.isRepeatedParam - && actual <:< expected.translateFromRepeated(toArray = tree.tpe.isRef(defn.ArrayClass)) - if !isCompatible then + && isCompatible(expected.translateFromRepeated(toArray = tree.tpe.isRef(defn.ArrayClass))) + || { + val widened = widenSkolems(expected) + (widened ne expected) && isCompatible(widened) + } + if !isCompatible(expected) then recheckr.println(i"conforms failed for ${tree}: $actual vs $expected") err.typeMismatch(tree.withType(actual), expected) else if debugSuccesses then @@ -436,6 +541,7 @@ abstract class Recheck extends Phase, SymTransformer: case _: Ident => println(i"SUCCESS $tree:\n${TypeComparer.explained(_.isSubType(actual, expected))}") case _ => + end checkConformsExpr def checkUnit(unit: CompilationUnit)(using Context): Unit = recheck(unit.tpdTree) @@ -444,12 +550,6 @@ abstract class Recheck extends Phase, SymTransformer: /** Show tree with rechecked types instead of the types stored in the `.tpe` field */ override def show(tree: untpd.Tree)(using Context): String = - val addRecheckedTypes = new TreeMap: - override def transform(tree: Tree)(using Context): Tree = - val tree1 = super.transform(tree) - tree.getAttachment(RecheckedType) match - case Some(tpe) => tree1.withType(tpe) - case None => tree1 atPhase(thisPhase) { super.show(addRecheckedTypes.transform(tree.asInstanceOf[tpd.Tree])) } diff --git a/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala b/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala index e8f8a80e1a0d..1cf687187eeb 100644 --- a/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala +++ 
b/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala @@ -10,6 +10,7 @@ import Symbols.defn import Constants._ import Types._ import Decorators._ +import Flags._ import scala.collection.mutable @@ -33,7 +34,7 @@ class RepeatableAnnotations extends MiniPhase: val annsByType = stableGroupBy(annotations, _.symbol) annsByType.flatMap { case (_, a :: Nil) => a :: Nil - case (sym, anns) if sym.derivesFrom(defn.ClassfileAnnotationClass) => + case (sym, anns) if sym.is(JavaDefined) => sym.getAnnotation(defn.JavaRepeatableAnnot).flatMap(_.argumentConstant(0)) match case Some(Constant(containerTpe: Type)) => val clashingAnns = annsByType.getOrElse(containerTpe.classSymbol, Nil) diff --git a/compiler/src/dotty/tools/dotc/transform/Splicer.scala b/compiler/src/dotty/tools/dotc/transform/Splicer.scala index 31c28d7b1854..ea83f276a59c 100644 --- a/compiler/src/dotty/tools/dotc/transform/Splicer.scala +++ b/compiler/src/dotty/tools/dotc/transform/Splicer.scala @@ -19,6 +19,8 @@ import dotty.tools.dotc.core.Denotations.staticRef import dotty.tools.dotc.core.TypeErasure import dotty.tools.dotc.core.Constants.Constant +import dotty.tools.dotc.quoted.Interpreter + import scala.util.control.NonFatal import dotty.tools.dotc.util.SrcPos import dotty.tools.repl.AbstractFileClassLoader @@ -32,7 +34,8 @@ import scala.quoted.runtime.impl._ /** Utility class to splice quoted expressions */ object Splicer { - import tpd._ + import tpd.* + import Interpreter.* /** Splice the Tree for a Quoted expression. 
`${'{xyz}}` becomes `xyz` * and for `$xyz` the tree of `xyz` is interpreted for which the @@ -50,7 +53,7 @@ object Splicer { val oldContextClassLoader = Thread.currentThread().getContextClassLoader Thread.currentThread().setContextClassLoader(classLoader) try { - val interpreter = new Interpreter(splicePos, classLoader) + val interpreter = new SpliceInterpreter(splicePos, classLoader) // Some parts of the macro are evaluated during the unpickling performed in quotedExprToTree val interpretedExpr = interpreter.interpret[Quotes => scala.quoted.Expr[Any]](tree) @@ -219,24 +222,13 @@ object Splicer { checkIfValidStaticCall(tree)(using Set.empty) } - /** Tree interpreter that evaluates the tree */ - private class Interpreter(pos: SrcPos, classLoader: ClassLoader)(using Context) { - - type Env = Map[Symbol, Object] - - /** Returns the interpreted result of interpreting the code a call to the symbol with default arguments. - * Return Some of the result or None if some error happen during the interpretation. - */ - def interpret[T](tree: Tree)(implicit ct: ClassTag[T]): Option[T] = - interpretTree(tree)(Map.empty) match { - case obj: T => Some(obj) - case obj => - // TODO upgrade to a full type tag check or something similar - report.error(s"Interpreted tree returned a result of an unexpected type. Expected ${ct.runtimeClass} but was ${obj.getClass}", pos) - None - } + /** Tree interpreter that evaluates the tree. + * Interpreter is assumed to start at quotation level -1. 
+ */ + private class SpliceInterpreter(pos: SrcPos, classLoader: ClassLoader)(using Context) extends Interpreter(pos, classLoader) { - def interpretTree(tree: Tree)(implicit env: Env): Object = tree match { + override protected def interpretTree(tree: Tree)(implicit env: Env): Object = tree match { + // Interpret level -1 quoted code `'{...}` (assumed without level 0 splices) case Apply(Select(Apply(TypeApply(fn, _), quoted :: Nil), nme.apply), _) if fn.symbol == defn.QuotedRuntime_exprQuote => val quoted1 = quoted match { case quoted: Ident if quoted.symbol.isAllOf(InlineByNameProxy) => @@ -245,324 +237,14 @@ object Splicer { case Inlined(EmptyTree, _, quoted) => quoted case _ => quoted } - interpretQuote(quoted1) + new ExprImpl(Inlined(EmptyTree, Nil, QuoteUtils.changeOwnerOfTree(quoted1, ctx.owner)).withSpan(quoted1.span), SpliceScope.getCurrent) + // Interpret level -1 `Type.of[T]` case Apply(TypeApply(fn, quoted :: Nil), _) if fn.symbol == defn.QuotedTypeModule_of => - interpretTypeQuote(quoted) - - case Literal(Constant(value)) => - interpretLiteral(value) - - case tree: Ident if tree.symbol.is(Inline, butNot = Method) => - tree.tpe.widenTermRefExpr match - case ConstantType(c) => c.value.asInstanceOf[Object] - case _ => throw new StopInterpretation(em"${tree.symbol} could not be inlined", tree.srcPos) - - // TODO disallow interpreted method calls as arguments - case Call(fn, args) => - if (fn.symbol.isConstructor && fn.symbol.owner.owner.is(Package)) - interpretNew(fn.symbol, args.flatten.map(interpretTree)) - else if (fn.symbol.is(Module)) - interpretModuleAccess(fn.symbol) - else if (fn.symbol.is(Method) && fn.symbol.isStatic) { - val staticMethodCall = interpretedStaticMethodCall(fn.symbol.owner, fn.symbol) - staticMethodCall(interpretArgs(args, fn.symbol.info)) - } - else if fn.symbol.isStatic then - assert(args.isEmpty) - interpretedStaticFieldAccess(fn.symbol) - else if (fn.qualifier.symbol.is(Module) && fn.qualifier.symbol.isStatic) - if (fn.name == 
nme.asInstanceOfPM) - interpretModuleAccess(fn.qualifier.symbol) - else { - val staticMethodCall = interpretedStaticMethodCall(fn.qualifier.symbol.moduleClass, fn.symbol) - staticMethodCall(interpretArgs(args, fn.symbol.info)) - } - else if (env.contains(fn.symbol)) - env(fn.symbol) - else if (tree.symbol.is(InlineProxy)) - interpretTree(tree.symbol.defTree.asInstanceOf[ValOrDefDef].rhs) - else - unexpectedTree(tree) - - case closureDef((ddef @ DefDef(_, ValDefs(arg :: Nil) :: Nil, _, _))) => - (obj: AnyRef) => interpretTree(ddef.rhs)(using env.updated(arg.symbol, obj)) - - // Interpret `foo(j = x, i = y)` which it is expanded to - // `val j$1 = x; val i$1 = y; foo(i = i$1, j = j$1)` - case Block(stats, expr) => interpretBlock(stats, expr) - case NamedArg(_, arg) => interpretTree(arg) - - case Inlined(_, bindings, expansion) => interpretBlock(bindings, expansion) - - case Typed(expr, _) => - interpretTree(expr) - - case SeqLiteral(elems, _) => - interpretVarargs(elems.map(e => interpretTree(e))) + new TypeImpl(QuoteUtils.changeOwnerOfTree(quoted, ctx.owner), SpliceScope.getCurrent) case _ => - unexpectedTree(tree) - } - - private def interpretArgs(argss: List[List[Tree]], fnType: Type)(using Env): List[Object] = { - def interpretArgsGroup(args: List[Tree], argTypes: List[Type]): List[Object] = - assert(args.size == argTypes.size) - val view = - for (arg, info) <- args.lazyZip(argTypes) yield - info match - case _: ExprType => () => interpretTree(arg) // by-name argument - case _ => interpretTree(arg) // by-value argument - view.toList - - fnType.dealias match - case fnType: MethodType if fnType.isErasedMethod => interpretArgs(argss, fnType.resType) - case fnType: MethodType => - val argTypes = fnType.paramInfos - assert(argss.head.size == argTypes.size) - interpretArgsGroup(argss.head, argTypes) ::: interpretArgs(argss.tail, fnType.resType) - case fnType: AppliedType if defn.isContextFunctionType(fnType) => - val argTypes :+ resType = fnType.args: @unchecked - 
interpretArgsGroup(argss.head, argTypes) ::: interpretArgs(argss.tail, resType) - case fnType: PolyType => interpretArgs(argss, fnType.resType) - case fnType: ExprType => interpretArgs(argss, fnType.resType) - case _ => - assert(argss.isEmpty) - Nil - } - - private def interpretBlock(stats: List[Tree], expr: Tree)(implicit env: Env) = { - var unexpected: Option[Object] = None - val newEnv = stats.foldLeft(env)((accEnv, stat) => stat match { - case stat: ValDef => - accEnv.updated(stat.symbol, interpretTree(stat.rhs)(accEnv)) - case stat => - if (unexpected.isEmpty) - unexpected = Some(unexpectedTree(stat)) - accEnv - }) - unexpected.getOrElse(interpretTree(expr)(newEnv)) - } - - private def interpretQuote(tree: Tree)(implicit env: Env): Object = - new ExprImpl(Inlined(EmptyTree, Nil, QuoteUtils.changeOwnerOfTree(tree, ctx.owner)).withSpan(tree.span), SpliceScope.getCurrent) - - private def interpretTypeQuote(tree: Tree)(implicit env: Env): Object = - new TypeImpl(QuoteUtils.changeOwnerOfTree(tree, ctx.owner), SpliceScope.getCurrent) - - private def interpretLiteral(value: Any)(implicit env: Env): Object = - value.asInstanceOf[Object] - - private def interpretVarargs(args: List[Object])(implicit env: Env): Object = - args.toSeq - - private def interpretedStaticMethodCall(moduleClass: Symbol, fn: Symbol)(implicit env: Env): List[Object] => Object = { - val (inst, clazz) = - try - if (moduleClass.name.startsWith(str.REPL_SESSION_LINE)) - (null, loadReplLineClass(moduleClass)) - else { - val inst = loadModule(moduleClass) - (inst, inst.getClass) - } - catch - case MissingClassDefinedInCurrentRun(sym) if ctx.compilationUnit.isSuspendable => - if (ctx.settings.XprintSuspension.value) - report.echo(i"suspension triggered by a dependency on $sym", pos) - ctx.compilationUnit.suspend() // this throws a SuspendException - - val name = fn.name.asTermName - val method = getMethod(clazz, name, paramsSig(fn)) - (args: List[Object]) => stopIfRuntimeException(method.invoke(inst, 
args: _*), method) - } - - private def interpretedStaticFieldAccess(sym: Symbol)(implicit env: Env): Object = { - val clazz = loadClass(sym.owner.fullName.toString) - val field = clazz.getField(sym.name.toString) - field.get(null) - } - - private def interpretModuleAccess(fn: Symbol)(implicit env: Env): Object = - loadModule(fn.moduleClass) - - private def interpretNew(fn: Symbol, args: => List[Object])(implicit env: Env): Object = { - val clazz = loadClass(fn.owner.fullName.toString) - val constr = clazz.getConstructor(paramsSig(fn): _*) - constr.newInstance(args: _*).asInstanceOf[Object] - } - - private def unexpectedTree(tree: Tree)(implicit env: Env): Object = - throw new StopInterpretation("Unexpected tree could not be interpreted: " + tree, tree.srcPos) - - private def loadModule(sym: Symbol): Object = - if (sym.owner.is(Package)) { - // is top level object - val moduleClass = loadClass(sym.fullName.toString) - moduleClass.getField(str.MODULE_INSTANCE_FIELD).get(null) - } - else { - // nested object in an object - val className = { - val pack = sym.topLevelClass.owner - if (pack == defn.RootPackage || pack == defn.EmptyPackageClass) sym.flatName.toString - else pack.showFullName + "." + sym.flatName - } - val clazz = loadClass(className) - clazz.getConstructor().newInstance().asInstanceOf[Object] - } - - private def loadReplLineClass(moduleClass: Symbol)(implicit env: Env): Class[?] = { - val lineClassloader = new AbstractFileClassLoader(ctx.settings.outputDir.value, classLoader) - lineClassloader.loadClass(moduleClass.name.firstPart.toString) - } - - private def loadClass(name: String): Class[?] 
= - try classLoader.loadClass(name) - catch { - case _: ClassNotFoundException => - val msg = s"Could not find class $name in classpath" - throw new StopInterpretation(msg, pos) - } - - private def getMethod(clazz: Class[?], name: Name, paramClasses: List[Class[?]]): JLRMethod = - try clazz.getMethod(name.toString, paramClasses: _*) - catch { - case _: NoSuchMethodException => - val msg = em"Could not find method ${clazz.getCanonicalName}.$name with parameters ($paramClasses%, %)" - throw new StopInterpretation(msg, pos) - case MissingClassDefinedInCurrentRun(sym) if ctx.compilationUnit.isSuspendable => - if (ctx.settings.XprintSuspension.value) - report.echo(i"suspension triggered by a dependency on $sym", pos) - ctx.compilationUnit.suspend() // this throws a SuspendException - } - - private def stopIfRuntimeException[T](thunk: => T, method: JLRMethod): T = - try thunk - catch { - case ex: RuntimeException => - val sw = new StringWriter() - sw.write("A runtime exception occurred while executing macro expansion\n") - sw.write(ex.getMessage) - sw.write("\n") - ex.printStackTrace(new PrintWriter(sw)) - sw.write("\n") - throw new StopInterpretation(sw.toString, pos) - case ex: InvocationTargetException => - ex.getTargetException match { - case ex: scala.quoted.runtime.StopMacroExpansion => - throw ex - case MissingClassDefinedInCurrentRun(sym) if ctx.compilationUnit.isSuspendable => - if (ctx.settings.XprintSuspension.value) - report.echo(i"suspension triggered by a dependency on $sym", pos) - ctx.compilationUnit.suspend() // this throws a SuspendException - case targetException => - val sw = new StringWriter() - sw.write("Exception occurred while executing macro expansion.\n") - if (!ctx.settings.Ydebug.value) { - val end = targetException.getStackTrace.lastIndexWhere { x => - x.getClassName == method.getDeclaringClass.getCanonicalName && x.getMethodName == method.getName - } - val shortStackTrace = targetException.getStackTrace.take(end + 1) - 
targetException.setStackTrace(shortStackTrace) - } - targetException.printStackTrace(new PrintWriter(sw)) - sw.write("\n") - throw new StopInterpretation(sw.toString, pos) - } - } - - private object MissingClassDefinedInCurrentRun { - def unapply(targetException: NoClassDefFoundError)(using Context): Option[Symbol] = { - val className = targetException.getMessage - if (className == null) None - else { - val sym = staticRef(className.toTypeName).symbol - if (sym.isDefinedInCurrentRun) Some(sym) else None - } - } - } - - /** List of classes of the parameters of the signature of `sym` */ - private def paramsSig(sym: Symbol): List[Class[?]] = { - def paramClass(param: Type): Class[?] = { - def arrayDepth(tpe: Type, depth: Int): (Type, Int) = tpe match { - case JavaArrayType(elemType) => arrayDepth(elemType, depth + 1) - case _ => (tpe, depth) - } - def javaArraySig(tpe: Type): String = { - val (elemType, depth) = arrayDepth(tpe, 0) - val sym = elemType.classSymbol - val suffix = - if (sym == defn.BooleanClass) "Z" - else if (sym == defn.ByteClass) "B" - else if (sym == defn.ShortClass) "S" - else if (sym == defn.IntClass) "I" - else if (sym == defn.LongClass) "J" - else if (sym == defn.FloatClass) "F" - else if (sym == defn.DoubleClass) "D" - else if (sym == defn.CharClass) "C" - else "L" + javaSig(elemType) + ";" - ("[" * depth) + suffix - } - def javaSig(tpe: Type): String = tpe match { - case tpe: JavaArrayType => javaArraySig(tpe) - case _ => - // Take the flatten name of the class and the full package name - val pack = tpe.classSymbol.topLevelClass.owner - val packageName = if (pack == defn.EmptyPackageClass) "" else s"${pack.fullName}." 
- packageName + tpe.classSymbol.fullNameSeparated(FlatName).toString - } - - val sym = param.classSymbol - if (sym == defn.BooleanClass) classOf[Boolean] - else if (sym == defn.ByteClass) classOf[Byte] - else if (sym == defn.CharClass) classOf[Char] - else if (sym == defn.ShortClass) classOf[Short] - else if (sym == defn.IntClass) classOf[Int] - else if (sym == defn.LongClass) classOf[Long] - else if (sym == defn.FloatClass) classOf[Float] - else if (sym == defn.DoubleClass) classOf[Double] - else java.lang.Class.forName(javaSig(param), false, classLoader) - } - def getExtraParams(tp: Type): List[Type] = tp.widenDealias match { - case tp: AppliedType if defn.isContextFunctionType(tp) => - // Call context function type direct method - tp.args.init.map(arg => TypeErasure.erasure(arg)) ::: getExtraParams(tp.args.last) - case _ => Nil - } - val extraParams = getExtraParams(sym.info.finalResultType) - val allParams = TypeErasure.erasure(sym.info) match { - case meth: MethodType => meth.paramInfos ::: extraParams - case _ => extraParams - } - allParams.map(paramClass) - } - } - - - - /** Exception that stops interpretation if some issue is found */ - private class StopInterpretation(val msg: String, val pos: SrcPos) extends Exception - - object Call { - /** Matches an expression that is either a field access or an application - * It retruns a TermRef containing field accessed or a method reference and the arguments passed to it. 
- */ - def unapply(arg: Tree)(using Context): Option[(RefTree, List[List[Tree]])] = - Call0.unapply(arg).map((fn, args) => (fn, args.reverse)) - - private object Call0 { - def unapply(arg: Tree)(using Context): Option[(RefTree, List[List[Tree]])] = arg match { - case Select(Call0(fn, args), nme.apply) if defn.isContextFunctionType(fn.tpe.widenDealias.finalResultType) => - Some((fn, args)) - case fn: Ident => Some((tpd.desugarIdent(fn).withSpan(fn.span), Nil)) - case fn: Select => Some((fn, Nil)) - case Apply(f @ Call0(fn, args1), args2) => - if (f.tpe.widenDealias.isErasedMethod) Some((fn, args1)) - else Some((fn, args2 :: args1)) - case TypeApply(Call0(fn, args), _) => Some((fn, args)) - case _ => None - } + super.interpretTree(tree) } } } diff --git a/compiler/src/dotty/tools/dotc/transform/SymUtils.scala b/compiler/src/dotty/tools/dotc/transform/SymUtils.scala index 0a6fa9217303..6010fe2a2a44 100644 --- a/compiler/src/dotty/tools/dotc/transform/SymUtils.scala +++ b/compiler/src/dotty/tools/dotc/transform/SymUtils.scala @@ -363,6 +363,8 @@ object SymUtils: self.hasAnnotation(defn.ExperimentalAnnot) || isDefaultArgumentOfExperimentalMethod || (!self.is(Package) && self.owner.isInExperimentalScope) + || self.topLevelClass.ownersIterator.exists(p => + p.is(Package) && p.owner.isRoot && p.name == tpnme.dotty) /** The declared self type of this class, as seen from `site`, stripping * all refinements for opaque types. 
diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index 5791d0c7d119..dc8defa90eef 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -91,7 +91,7 @@ class TreeChecker extends Phase with SymTransformer { if (ctx.phaseId <= erasurePhase.id) { val initial = symd.initial assert(symd == initial || symd.signature == initial.signature, - i"""Signature of ${sym.showLocated} changed at phase ${ctx.phase.prevMega} + i"""Signature of ${sym} in ${sym.ownersIterator.toList}%, % changed at phase ${ctx.phase.prevMega} |Initial info: ${initial.info} |Initial sig : ${initial.signature} |Current info: ${symd.info} @@ -305,9 +305,26 @@ class TreeChecker extends Phase with SymTransformer { override def excludeFromDoubleDeclCheck(sym: Symbol)(using Context): Boolean = sym.isEffectivelyErased && sym.is(Private) && !sym.initial.is(Private) + /** Check that all invariants related to Super and SuperType are met */ + def checkSuper(tree: Tree)(implicit ctx: Context): Unit = tree match + case Super(qual, mix) => + tree.tpe match + case tp @ SuperType(thistpe, supertpe) => + if (!mix.isEmpty) + assert(supertpe.isInstanceOf[TypeRef], + s"Precondition of pickling violated: the supertpe in $tp is not a TypeRef even though $tree has a non-empty mix") + case tp => + assert(false, s"The type of a Super tree must be a SuperType, but $tree has type $tp") + case _ => + tree.tpe match + case tp: SuperType => + assert(false, s"The type of a non-Super tree must not be a SuperType, but $tree has type $tp") + case _ => + override def typed(tree: untpd.Tree, pt: Type = WildcardType)(using Context): Tree = { val tpdTree = super.typed(tree, pt) Typer.assertPositioned(tree) + checkSuper(tpdTree) if (ctx.erasedTypes) // Can't be checked in earlier phases since `checkValue` is only run in // Erasure (because running it in Typer would force too 
much) @@ -359,7 +376,7 @@ class TreeChecker extends Phase with SymTransformer { override def typedIdent(tree: untpd.Ident, pt: Type)(using Context): Tree = { assert(tree.isTerm || !ctx.isAfterTyper, tree.show + " at " + ctx.phase) - assert(tree.isType || ctx.mode.is(Mode.Pattern) && untpd.isWildcardArg(tree) || !needsSelect(tree.tpe), i"bad type ${tree.tpe} for $tree # ${tree.uniqueId}") + assert(tree.isType || ctx.mode.is(Mode.Pattern) && untpd.isWildcardArg(tree) || !needsSelect(tree.typeOpt), i"bad type ${tree.tpe} for $tree # ${tree.uniqueId}") assertDefined(tree) checkNotRepeated(super.typedIdent(tree, pt)) @@ -385,7 +402,7 @@ class TreeChecker extends Phase with SymTransformer { val sym = tree.symbol val symIsFixed = tpe match { - case tpe: TermRef => ctx.erasedTypes || !tpe.isMemberRef + case tpe: TermRef => ctx.erasedTypes || !tpe.isPrefixDependentMemberRef case _ => false } if (sym.exists && !sym.is(Private) && diff --git a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala index fc914e9b03bf..b2a101649457 100644 --- a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala +++ b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala @@ -31,27 +31,32 @@ object TypeTestsCasts { import typer.Inferencing.maximizeType import typer.ProtoTypes.constrained - /** Whether `(x: X).isInstanceOf[P]` can be checked at runtime? + /** Tests whether `(x: X).isInstanceOf[P]` is uncheckable at runtime, returning the reason, + * or the empty string if it is checkable. * * First do the following substitution: * (a) replace `T @unchecked` and pattern binder types (e.g., `_$1`) in P with WildcardType * * Then check: * - * 1. if `X <:< P`, TRUE - * 2. if `P` is a singleton type, TRUE - * 3. if `P` refers to an abstract type member or type parameter, FALSE + * 1. if `X <:< P`, "" + * 2. if `P` is a singleton type, "" + * 3. 
if `P` refers to an abstract type member or type parameter, "it refers to an abstract type member or type parameter" * 4. if `P = Array[T]`, checkable(E, T) where `E` is the element type of `X`, defaults to `Any`. * 5. if `P` is `pre.F[Ts]` and `pre.F` refers to a class which is not `Array`: * (a) replace `Ts` with fresh type variables `Xs` * (b) constrain `Xs` with `pre.F[Xs] <:< X` - * (c) maximize `pre.F[Xs]` and check `pre.F[Xs] <:< P` + * (c) maximize `pre.F[Xs]` + * (d) if !`pre.F[Xs] <:< P`, "its type arguments can't be determined from $X" * 6. if `P = T1 | T2` or `P = T1 & T2`, checkable(X, T1) && checkable(X, T2). - * 7. if `P` is a refinement type, FALSE - * 8. if `P` is a local class which is not statically reachable from the scope where `X` is defined, FALSE - * 9. otherwise, TRUE + * 7. if `P` is a refinement type, "it's a refinement type" + * 8. if `P` is a local class which is not statically reachable from the scope where `X` is defined, "it's a local class" + * 9. otherwise, "" */ - def checkable(X: Type, P: Type, span: Span)(using Context): Boolean = atPhase(Phases.refchecksPhase.next) { + def whyUncheckable(X: Type, P: Type, span: Span)(using Context): String = atPhase(Phases.refchecksPhase.next) { + extension (inline s1: String) inline def &&(inline s2: String): String = if s1 == "" then s2 else s1 + extension (inline b: Boolean) inline def |||(inline s: String): String = if b then "" else s + // Run just before ElimOpaque transform (which follows RefChecks) def isAbstract(P: Type) = !P.dealias.typeSymbol.isClass @@ -124,10 +129,10 @@ object TypeTestsCasts { } - def recur(X: Type, P: Type): Boolean = (X <:< P) || (P.dealias match { - case _: SingletonType => true + def recur(X: Type, P: Type): String = (X <:< P) ||| (P.dealias match { + case _: SingletonType => "" case _: TypeProxy - if isAbstract(P) => false + if isAbstract(P) => i"it refers to an abstract type member or type parameter" case defn.ArrayOf(tpT) => X match { case defn.ArrayOf(tpE) 
=> recur(tpE, tpT) @@ -147,21 +152,23 @@ object TypeTestsCasts { X.classSymbol.exists && P.classSymbol.exists && !X.classSymbol.asClass.mayHaveCommonChild(P.classSymbol.asClass) || typeArgsTrivial(X, tpe) + ||| i"its type arguments can't be determined from $X" } case AndType(tp1, tp2) => recur(X, tp1) && recur(X, tp2) case OrType(tp1, tp2) => recur(X, tp1) && recur(X, tp2) case AnnotatedType(t, _) => recur(X, t) - case tp2: RefinedType => recur(X, tp2.parent) && TypeComparer.hasMatchingMember(tp2.refinedName, X, tp2) + case tp2: RefinedType => recur(X, tp2.parent) + && (TypeComparer.hasMatchingMember(tp2.refinedName, X, tp2) ||| i"it's a refinement type") case tp2: RecType => recur(X, tp2.parent) case _ if P.classSymbol.isLocal && foundClasses(X).exists(P.classSymbol.isInaccessibleChildOf) => // 8 - false - case _ => true + i"it's a local class" + case _ => "" }) - val res = X.widenTermRefExpr.hasAnnotation(defn.UncheckedAnnot) || recur(X.widen, replaceP(P)) + val res = recur(X.widen, replaceP(P)) - debug.println(i"checking ${X.show} isInstanceOf ${P} = $res") + debug.println(i"checking $X isInstanceOf $P = $res") res } @@ -348,9 +355,12 @@ object TypeTestsCasts { if (sym.isTypeTest) { val argType = tree.args.head.tpe val isTrusted = tree.hasAttachment(PatternMatcher.TrustedTypeTestKey) - if (!isTrusted && !checkable(expr.tpe, argType, tree.span)) - report.uncheckedWarning(i"the type test for $argType cannot be checked at runtime", expr.srcPos) - transformTypeTest(expr, tree.args.head.tpe, + val isUnchecked = expr.tpe.widenTermRefExpr.hasAnnotation(defn.UncheckedAnnot) + if !isTrusted && !isUnchecked then + val whyNot = whyUncheckable(expr.tpe, argType, tree.span) + if whyNot.nonEmpty then + report.uncheckedWarning(i"the type test for $argType cannot be checked at runtime because $whyNot", expr.srcPos) + transformTypeTest(expr, argType, flagUnrelated = enclosingInlineds.isEmpty) // if test comes from inlined code, dont't flag it even if it always false } else if 
(sym.isTypeCast) diff --git a/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala b/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala index 00c09a3ebf07..5b6e36343379 100644 --- a/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala +++ b/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala @@ -107,6 +107,11 @@ object TypeUtils { self.superType.mirrorCompanionRef } + /** Is this type a methodic type that takes at least one parameter? */ + def takesParams(using Context): Boolean = self.stripPoly match + case mt: MethodType => mt.paramNames.nonEmpty || mt.resType.takesParams + case _ => false + /** Is this type a methodic type that takes implicit parameters (both old and new) at some point? */ def takesImplicitParams(using Context): Boolean = self.stripPoly match case mt: MethodType => mt.isImplicitMethod || mt.resType.takesImplicitParams diff --git a/compiler/src/dotty/tools/dotc/transform/UncacheGivenAliases.scala b/compiler/src/dotty/tools/dotc/transform/UncacheGivenAliases.scala index 56fd4f754d60..95d40102c5a7 100644 --- a/compiler/src/dotty/tools/dotc/transform/UncacheGivenAliases.scala +++ b/compiler/src/dotty/tools/dotc/transform/UncacheGivenAliases.scala @@ -53,7 +53,7 @@ class UncacheGivenAliases extends MiniPhase with IdentityDenotTransformer: */ override def transformValDef(tree: ValDef)(using Context): Tree = val sym = tree.symbol - if sym.isAllOf(Given, Lazy) && !needsCache(sym, tree.rhs) then + if sym.isAllOf(LazyGiven) && !needsCache(sym, tree.rhs) then sym.copySymDenotation( initFlags = sym.flags &~ Lazy | Method, info = ExprType(sym.info)) diff --git a/compiler/src/dotty/tools/dotc/transform/init/Checker.scala b/compiler/src/dotty/tools/dotc/transform/init/Checker.scala index a8ade3acae71..1efb3c88149e 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Checker.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Checker.scala @@ -15,10 +15,11 @@ import StdNames._ import dotty.tools.dotc.transform._ import Phases._ +import 
scala.collection.mutable import Semantic._ -class Checker extends Phase { +class Checker extends Phase: override def phaseName: String = Checker.name @@ -31,17 +32,23 @@ class Checker extends Phase { override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = val checkCtx = ctx.fresh.setPhase(this.start) - Semantic.checkTasks(using checkCtx) { - val traverser = new InitTreeTraverser() - units.foreach { unit => traverser.traverse(unit.tpdTree) } - } + val traverser = new InitTreeTraverser() + units.foreach { unit => traverser.traverse(unit.tpdTree) } + val classes = traverser.getClasses() + + Semantic.checkClasses(classes)(using checkCtx) + units - def run(using Context): Unit = { + def run(using Context): Unit = // ignore, we already called `Semantic.check()` in `runOn` - } + () + + class InitTreeTraverser extends TreeTraverser: + private val classes: mutable.ArrayBuffer[ClassSymbol] = new mutable.ArrayBuffer + + def getClasses(): List[ClassSymbol] = classes.toList - class InitTreeTraverser(using WorkList) extends TreeTraverser { override def traverse(tree: Tree)(using Context): Unit = traverseChildren(tree) tree match { @@ -53,29 +60,12 @@ class Checker extends Phase { mdef match case tdef: TypeDef if tdef.isClassDef => val cls = tdef.symbol.asClass - val thisRef = ThisRef(cls) - if shouldCheckClass(cls) then Semantic.addTask(thisRef) + classes.append(cls) case _ => case _ => } - } - - private def shouldCheckClass(cls: ClassSymbol)(using Context) = { - val instantiable: Boolean = - cls.is(Flags.Module) || - !cls.isOneOf(Flags.AbstractOrTrait) && { - // see `Checking.checkInstantiable` in typer - val tp = cls.appliedRef - val stp = SkolemType(tp) - val selfType = cls.givenSelfType.asSeenFrom(stp, cls) - !selfType.exists || stp <:< selfType - } - - // A concrete class may not be instantiated if the self type is not satisfied - instantiable && cls.enclosingPackageClass != defn.StdLibPatchesPackage.moduleClass - } -} + end 
InitTreeTraverser object Checker: val name: String = "initChecker" diff --git a/compiler/src/dotty/tools/dotc/transform/init/Errors.scala b/compiler/src/dotty/tools/dotc/transform/init/Errors.scala index 38d2263b54c3..7d92d2b2a921 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Errors.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Errors.scala @@ -79,7 +79,7 @@ object Errors: override def toString() = this.getClass.getName.nn /** Access non-initialized field */ - case class AccessNonInit(field: Symbol, trace: Seq[Tree]) extends Error: + case class AccessNonInit(field: Symbol)(val trace: Seq[Tree]) extends Error: def source: Tree = trace.last def show(using Context): String = "Access non-initialized " + field.show + "." + stacktrace @@ -87,27 +87,27 @@ object Errors: override def pos(using Context): SourcePosition = field.sourcePos /** Promote a value under initialization to fully-initialized */ - case class PromoteError(msg: String, trace: Seq[Tree]) extends Error: + case class PromoteError(msg: String)(val trace: Seq[Tree]) extends Error: def show(using Context): String = msg + stacktrace - case class AccessCold(field: Symbol, trace: Seq[Tree]) extends Error: + case class AccessCold(field: Symbol)(val trace: Seq[Tree]) extends Error: def show(using Context): String = "Access field " + field.show + " on a cold object." + stacktrace - case class CallCold(meth: Symbol, trace: Seq[Tree]) extends Error: + case class CallCold(meth: Symbol)(val trace: Seq[Tree]) extends Error: def show(using Context): String = "Call method " + meth.show + " on a cold object." + stacktrace - case class CallUnknown(meth: Symbol, trace: Seq[Tree]) extends Error: + case class CallUnknown(meth: Symbol)(val trace: Seq[Tree]) extends Error: def show(using Context): String = val prefix = if meth.is(Flags.Method) then "Calling the external method " else "Accessing the external field" prefix + meth.show + " may cause initialization errors." 
+ stacktrace /** Promote a value under initialization to fully-initialized */ - case class UnsafePromotion(msg: String, trace: Seq[Tree], error: Error) extends Error: + case class UnsafePromotion(msg: String, error: Error)(val trace: Seq[Tree]) extends Error: def show(using Context): String = msg + stacktrace + "\n" + - "Promoting the value to hot failed due to the following problem:\n" + { + "Promoting the value to hot (transitively initialized) failed due to the following problem:\n" + { val ctx2 = ctx.withProperty(IsFromPromotion, Some(true)) error.show(using ctx2) } @@ -116,7 +116,7 @@ object Errors: * * Invariant: argsIndices.nonEmpty */ - case class UnsafeLeaking(trace: Seq[Tree], error: Error, nonHotOuterClass: Symbol, argsIndices: List[Int]) extends Error: + case class UnsafeLeaking(error: Error, nonHotOuterClass: Symbol, argsIndices: List[Int])(val trace: Seq[Tree]) extends Error: def show(using Context): String = "Problematic object instantiation: " + argumentInfo() + stacktrace + "\n" + "It leads to the following error during object initialization:\n" + @@ -141,5 +141,5 @@ object Errors: acc + text2 } val verb = if multiple then " are " else " is " - val adjective = "not hot." + val adjective = "not hot (transitively initialized)." 
subject + verb + adjective diff --git a/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala b/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala index 4cb08312b0c9..a48aa77fe79f 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala @@ -156,7 +156,7 @@ object Semantic: def hasField(f: Symbol) = fields.contains(f) object Promoted: - class PromotionInfo: + class PromotionInfo(val entryClass: ClassSymbol): var isCurrentObjectPromoted: Boolean = false val values = mutable.Set.empty[Value] override def toString(): String = values.toString() @@ -165,7 +165,7 @@ object Semantic: opaque type Promoted = PromotionInfo /** Note: don't use `val` to avoid incorrect sharing */ - def empty: Promoted = new PromotionInfo + def empty(entryClass: ClassSymbol): Promoted = new PromotionInfo(entryClass) extension (promoted: Promoted) def isCurrentObjectPromoted: Boolean = promoted.isCurrentObjectPromoted @@ -173,6 +173,7 @@ object Semantic: def contains(value: Value): Boolean = promoted.values.contains(value) def add(value: Value): Unit = promoted.values += value def remove(value: Value): Unit = promoted.values -= value + def entryClass: ClassSymbol = promoted.entryClass end extension end Promoted type Promoted = Promoted.Promoted @@ -658,12 +659,12 @@ object Semantic: def select(field: Symbol, receiver: Type, needResolve: Boolean = true): Contextual[Value] = log("select " + field.show + ", this = " + value, printer, (_: Value).show) { if promoted.isCurrentObjectPromoted then Hot - else value match { + else value match case Hot => Hot case Cold => - val error = AccessCold(field, trace.toVector) + val error = AccessCold(field)(trace.toVector) reporter.report(error) Hot @@ -688,11 +689,11 @@ object Semantic: val rhs = target.defTree.asInstanceOf[ValOrDefDef].rhs eval(rhs, ref, target.owner.asClass, cacheResult = true) else - val error = CallUnknown(field, trace.toVector) + val error = 
CallUnknown(field)(trace.toVector) reporter.report(error) Hot else - val error = AccessNonInit(target, trace.toVector) + val error = AccessNonInit(target)(trace.toVector) reporter.report(error) Hot else @@ -710,7 +711,6 @@ object Semantic: case RefSet(refs) => refs.map(_.select(field, receiver)).join - } } def call(meth: Symbol, args: List[ArgInfo], receiver: Type, superType: Type, needResolve: Boolean = true): Contextual[Value] = log("call " + meth.show + ", args = " + args.map(_.value.show), printer, (_: Value).show) { @@ -779,7 +779,7 @@ object Semantic: case Cold => promoteArgs() - val error = CallCold(meth, trace.toVector) + val error = CallCold(meth)(trace.toVector) reporter.report(error) Hot @@ -820,7 +820,7 @@ object Semantic: // try promoting the receiver as last resort val hasErrors = Reporter.hasErrors { ref.promote("try promote value to hot") } if hasErrors then - val error = CallUnknown(target, trace.toVector) + val error = CallUnknown(target)(trace.toVector) reporter.report(error) Hot else if target.exists then @@ -899,7 +899,7 @@ object Semantic: Hot else // no source code available - val error = CallUnknown(ctor, trace.toVector) + val error = CallUnknown(ctor)(trace.toVector) reporter.report(error) Hot } @@ -922,7 +922,7 @@ object Semantic: yield i + 1 - val error = UnsafeLeaking(trace.toVector, errors.head, nonHotOuterClass, indices) + val error = UnsafeLeaking(errors.head, nonHotOuterClass, indices)(trace.toVector) reporter.report(error) Hot else @@ -947,7 +947,7 @@ object Semantic: tryLeak(warm, NoSymbol, args2) case Cold => - val error = CallCold(ctor, trace.toVector) + val error = CallCold(ctor)(trace.toVector) reporter.report(error) Hot @@ -1078,7 +1078,7 @@ object Semantic: case Hot => case Cold => - reporter.report(PromoteError(msg, trace.toVector)) + reporter.report(PromoteError(msg)(trace.toVector)) case thisRef: ThisRef => val emptyFields = thisRef.nonInitFields() @@ -1086,7 +1086,7 @@ object Semantic: promoted.promoteCurrent(thisRef) 
else val fields = "Non initialized field(s): " + emptyFields.map(_.show).mkString(", ") + "." - reporter.report(PromoteError(msg + "\n" + fields, trace.toVector)) + reporter.report(PromoteError(msg + "\n" + fields)(trace.toVector)) case warm: Warm => if !promoted.contains(warm) then @@ -1106,7 +1106,7 @@ object Semantic: res.promote("The function return value is not hot. Found = " + res.show + ".") } if errors.nonEmpty then - reporter.report(UnsafePromotion(msg, trace.toVector, errors.head)) + reporter.report(UnsafePromotion(msg, errors.head)(trace.toVector)) else promoted.add(fun) @@ -1156,7 +1156,7 @@ object Semantic: if !isHotSegment then for member <- klass.info.decls do if member.isClass then - val error = PromoteError("Promotion cancelled as the value contains inner " + member.show + ".", Vector.empty) + val error = PromoteError("Promotion cancelled as the value contains inner " + member.show + ".")(Vector.empty) reporter.report(error) else if !member.isType && !member.isConstructor && !member.is(Flags.Deferred) then given Trace = Trace.empty @@ -1189,7 +1189,7 @@ object Semantic: } if errors.isEmpty then Nil - else UnsafePromotion(msg, trace.toVector, errors.head) :: Nil + else UnsafePromotion(msg, errors.head)(trace.toVector) :: Nil } end extension @@ -1206,72 +1206,49 @@ object Semantic: cls == defn.AnyValClass || cls == defn.ObjectClass -// ----- Work list --------------------------------------------------- - case class Task(value: ThisRef) - - class WorkList private[Semantic](): - private val pendingTasks: mutable.ArrayBuffer[Task] = new mutable.ArrayBuffer - - def addTask(task: Task): Unit = - if !pendingTasks.contains(task) then pendingTasks.append(task) - - /** Process the worklist until done */ - final def work()(using Cache, Context): Unit = - for task <- pendingTasks - do doTask(task) - - /** Check an individual class - * - * This method should only be called from the work list scheduler. 
- */ - private def doTask(task: Task)(using Cache, Context): Unit = - val thisRef = task.value - val tpl = thisRef.klass.defTree.asInstanceOf[TypeDef].rhs.asInstanceOf[Template] - - @tailrec - def iterate(): Unit = { - given Promoted = Promoted.empty - given Trace = Trace.empty.add(thisRef.klass.defTree) - given reporter: Reporter.BufferedReporter = new Reporter.BufferedReporter +// ----- API -------------------------------- - thisRef.ensureFresh() + /** Check an individual class + * + * The class to be checked must be an instantiable concrete class. + */ + private def checkClass(classSym: ClassSymbol)(using Cache, Context): Unit = + val thisRef = ThisRef(classSym) + val tpl = classSym.defTree.asInstanceOf[TypeDef].rhs.asInstanceOf[Template] - // set up constructor parameters - for param <- tpl.constr.termParamss.flatten do - thisRef.updateField(param.symbol, Hot) + @tailrec + def iterate(): Unit = { + given Promoted = Promoted.empty(classSym) + given Trace = Trace.empty.add(classSym.defTree) + given reporter: Reporter.BufferedReporter = new Reporter.BufferedReporter - log("checking " + task) { eval(tpl, thisRef, thisRef.klass) } - reporter.errors.foreach(_.issue) + thisRef.ensureFresh() - if cache.hasChanged && reporter.errors.isEmpty then - // code to prepare cache and heap for next iteration - cache.prepareForNextIteration() - iterate() - else - cache.prepareForNextClass() - } + // set up constructor parameters + for param <- tpl.constr.termParamss.flatten do + thisRef.updateField(param.symbol, Hot) - iterate() - end doTask - end WorkList - inline def workList(using wl: WorkList): WorkList = wl + log("checking " + classSym) { eval(tpl, thisRef, classSym) } + reporter.errors.foreach(_.issue) -// ----- API -------------------------------- + if cache.hasChanged && reporter.errors.isEmpty then + // code to prepare cache and heap for next iteration + cache.prepareForNextIteration() + iterate() + else + cache.prepareForNextClass() + } - /** Add a checking task to the 
work list */ - def addTask(thisRef: ThisRef)(using WorkList) = workList.addTask(Task(thisRef)) + iterate() + end checkClass - /** Check the specified tasks - * - * Semantic.checkTasks { - * Semantic.addTask(...) - * } + /** + * Check the specified concrete classes */ - def checkTasks(using Context)(taskBuilder: WorkList ?=> Unit): Unit = - val workList = new WorkList - val cache = new Cache - taskBuilder(using workList) - workList.work()(using cache, ctx) + def checkClasses(classes: List[ClassSymbol])(using Context): Unit = + given Cache() + for classSym <- classes if isConcreteClass(classSym) do + checkClass(classSym) // ----- Semantic definition -------------------------------- @@ -1296,7 +1273,10 @@ object Semantic: * * This method only handles cache logic and delegates the work to `cases`. * - * The parameter `cacheResult` is used to reduce the size of the cache. + * @param expr The expression to be evaluated. + * @param thisV The value for `C.this` where `C` is represented by the parameter `klass`. + * @param klass The enclosing class where the expression is located. + * @param cacheResult It is used to reduce the size of the cache. */ def eval(expr: Tree, thisV: Ref, klass: ClassSymbol, cacheResult: Boolean = false): Contextual[Value] = log("evaluating " + expr.show + ", this = " + thisV.show + " in " + klass.show, printer, (_: Value).show) { cache.get(thisV, expr) match @@ -1326,6 +1306,10 @@ object Semantic: /** Handles the evaluation of different expressions * * Note: Recursive call should go to `eval` instead of `cases`. + * + * @param expr The expression to be evaluated. + * @param thisV The value for `C.this` where `C` is represented by the parameter `klass`. + * @param klass The enclosing class where the expression `expr` is located. 
*/ def cases(expr: Tree, thisV: Ref, klass: ClassSymbol): Contextual[Value] = val trace2 = trace.add(expr) @@ -1503,7 +1487,14 @@ object Semantic: report.error("[Internal error] unexpected tree" + Trace.show, expr) Hot - /** Handle semantics of leaf nodes */ + /** Handle semantics of leaf nodes + * + * For leaf nodes, their semantics is determined by their types. + * + * @param tp The type to be evaluated. + * @param thisV The value for `C.this` where `C` is represented by the parameter `klass`. + * @param klass The enclosing class where the type `tp` is located. + */ def cases(tp: Type, thisV: Ref, klass: ClassSymbol): Contextual[Value] = log("evaluating " + tp.show, printer, (_: Value).show) { tp match case _: ConstantType => @@ -1513,7 +1504,16 @@ object Semantic: thisV.accessLocal(tmref, klass) case tmref: TermRef => - cases(tmref.prefix, thisV, klass).select(tmref.symbol, receiver = tmref.prefix) + val cls = tmref.widenSingleton.classSymbol + if cls.exists && cls.isStaticOwner then + if klass.isContainedIn(cls) then + resolveThis(cls.asClass, thisV, klass) + else if cls.isContainedIn(promoted.entryClass) then + cases(tmref.prefix, thisV, klass).select(tmref.symbol, receiver = tmref.prefix) + else + Hot + else + cases(tmref.prefix, thisV, klass).select(tmref.symbol, receiver = tmref.prefix) case tp @ ThisType(tref) => val cls = tref.classSymbol.asClass @@ -1521,8 +1521,7 @@ object Semantic: // O.this outside the body of the object O Hot else - val value = resolveThis(cls, thisV, klass) - value + resolveThis(cls, thisV, klass) case _: TermParamRef | _: RecThis => // possible from checking effects of types @@ -1533,7 +1532,12 @@ object Semantic: Hot } - /** Resolve C.this that appear in `klass` */ + /** Resolve C.this that appear in `klass` + * + * @param target The class symbol for `C` for which `C.this` is to be resolved. + * @param thisV The value for `D.this` where `D` is represented by the parameter `klass`. 
+ * @param klass The enclosing class where the type `C.this` is located. + */ def resolveThis(target: ClassSymbol, thisV: Value, klass: ClassSymbol): Contextual[Value] = log("resolving " + target.show + ", this = " + thisV.show + " in " + klass.show, printer, (_: Value).show) { if target == klass then thisV else if target.is(Flags.Package) then Hot @@ -1558,7 +1562,12 @@ object Semantic: } - /** Compute the outer value that correspond to `tref.prefix` */ + /** Compute the outer value that correspond to `tref.prefix` + * + * @param tref The type whose prefix is to be evaluated. + * @param thisV The value for `C.this` where `C` is represented by the parameter `klass`. + * @param klass The enclosing class where the type `tref` is located. + */ def outerValue(tref: TypeRef, thisV: Ref, klass: ClassSymbol): Contextual[Value] = val cls = tref.classSymbol.asClass if tref.prefix == NoPrefix then @@ -1569,7 +1578,12 @@ object Semantic: if cls.isAllOf(Flags.JavaInterface) then Hot else cases(tref.prefix, thisV, klass) - /** Initialize part of an abstract object in `klass` of the inheritance chain */ + /** Initialize part of an abstract object in `klass` of the inheritance chain + * + * @param tpl The class body to be evaluated. + * @param thisV The value of the current object to be initialized. + * @param klass The class to which the template belongs. 
+ */ def init(tpl: Template, thisV: Ref, klass: ClassSymbol): Contextual[Value] = log("init " + klass.show, printer, (_: Value).show) { val paramsMap = tpl.constr.termParamss.flatten.map { vdef => vdef.name -> thisV.objekt.field(vdef.symbol) @@ -1664,7 +1678,16 @@ object Semantic: if thisV.isThisRef || !thisV.asInstanceOf[Warm].isPopulatingParams then tpl.body.foreach { case vdef : ValDef if !vdef.symbol.is(Flags.Lazy) && !vdef.rhs.isEmpty => val res = eval(vdef.rhs, thisV, klass) - thisV.updateField(vdef.symbol, res) + // TODO: Improve promotion to avoid handling enum initialization specially + // + // The failing case is tests/init/pos/i12544.scala due to promotion failure. + if vdef.symbol.name == nme.DOLLAR_VALUES + && vdef.symbol.is(Flags.Synthetic) + && vdef.symbol.owner.companionClass.is(Flags.Enum) + then + thisV.updateField(vdef.symbol, Hot) + else + thisV.updateField(vdef.symbol, res) fieldsChanged = true case _: MemberDef => @@ -1765,3 +1788,18 @@ object Semantic: if (sym.isEffectivelyFinal || sym.isConstructor) sym else sym.matchingMember(cls.appliedRef) } + + private def isConcreteClass(cls: ClassSymbol)(using Context) = { + val instantiable: Boolean = + cls.is(Flags.Module) || + !cls.isOneOf(Flags.AbstractOrTrait) && { + // see `Checking.checkInstantiable` in typer + val tp = cls.appliedRef + val stp = SkolemType(tp) + val selfType = cls.givenSelfType.asSeenFrom(stp, cls) + !selfType.exists || stp <:< selfType + } + + // A concrete class may not be instantiated if the self type is not satisfied + instantiable && cls.enclosingPackageClass != defn.StdLibPatchesPackage.moduleClass + } diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index a8432833d42a..8e891f822255 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -306,6 +306,7 @@ object SpaceEngine { val isEmptyTp = 
extractorMemberType(unappResult, nme.isEmpty, NoSourcePosition) isEmptyTp <:< ConstantType(Constant(false)) } + || unappResult.derivesFrom(defn.NonEmptyTupleClass) } /** Is the unapply or unapplySeq irrefutable? @@ -534,16 +535,15 @@ class SpaceEngine(using Context) extends SpaceLogic { val mt: MethodType = unapp.widen match { case mt: MethodType => mt case pt: PolyType => - inContext(ctx.fresh.setExploreTyperState()) { val tvars = pt.paramInfos.map(newTypeVar(_)) val mt = pt.instantiate(tvars).asInstanceOf[MethodType] scrutineeTp <:< mt.paramInfos(0) // force type inference to infer a narrower type: could be singleton // see tests/patmat/i4227.scala mt.paramInfos(0) <:< scrutineeTp + instantiateSelected(mt, tvars) isFullyDefined(mt, ForceDegree.all) mt - } } // Case unapply: diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala index 75323e30bfb9..48e6802e0f6c 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala @@ -974,14 +974,16 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP tree.rhs match { case sel: Select if sel.symbol == jsdefn.JSPackage_native => // ok + case rhs: Ident if rhs.symbol == jsdefn.JSPackage_native => + // ok case _ => val pos = if (tree.rhs != EmptyTree) tree.rhs.srcPos else tree.srcPos report.error(s"$longKindStr may only call js.native.", pos) } - // Check that the resul type was explicitly specified + // Check that the result type was explicitly specified // (This is stronger than Scala 2, which only warns, and only if it was inferred as Nothing.) 
- if (tree.tpt.span.isSynthetic) + if (tree.tpt.isInstanceOf[InferredTypeTree]) report.error(i"The type of ${tree.name} must be explicitly specified because it is JS native.", tree) } diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index b7cabbebee71..ec72c48b2422 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -6,7 +6,6 @@ import core._ import ast.{Trees, tpd, untpd, desugar} import util.Stats.record import util.{SrcPos, NoSourcePosition} -import Trees.Untyped import Contexts._ import Flags._ import Symbols._ @@ -30,6 +29,7 @@ import config.Feature import collection.mutable import config.Printers.{overload, typr, unapp} import TypeApplications._ +import Annotations.Annotation import Constants.{Constant, IntTag} import Denotations.SingleDenotation @@ -210,63 +210,81 @@ object Applications { def wrapDefs(defs: mutable.ListBuffer[Tree] | Null, tree: Tree)(using Context): Tree = if (defs != null && defs.nonEmpty) tpd.Block(defs.toList, tree) else tree + /** Optionally, if `sym` is a symbol created by `resolveMapped`, i.e. representing + * a mapped alternative, the original prefix of the alternative and the number of + * skipped term parameters. + */ + private def mappedAltInfo(sym: Symbol)(using Context): Option[(Type, Int)] = + for ann <- sym.getAnnotation(defn.MappedAlternativeAnnot) yield + val AppliedType(_, pre :: ConstantType(c) :: Nil) = ann.tree.tpe: @unchecked + (pre, c.intValue) + /** Find reference to default parameter getter for parameter #n in current - * parameter list, or NoType if none was found - */ + * parameter list, or EmptyTree if none was found. 
+ * @param fn the tree referring to the function part of this call + * @param n the index of the parameter in the parameter list of the call + * @param testOnly true iff we just to find out whether a getter exists + */ def findDefaultGetter(fn: Tree, n: Int, testOnly: Boolean)(using Context): Tree = - if fn.symbol.isTerm then + def reifyPrefix(pre: Type): Tree = pre match + case pre: SingletonType => singleton(pre, needLoad = !testOnly) + case pre if testOnly => + // In this case it is safe to skolemize now; we will produce a stable prefix for the actual call. + ref(pre.narrow) + case _ => EmptyTree + + if fn.symbol.hasDefaultParams then val meth = fn.symbol.asTerm - val receiver: Tree = methPart(fn) match { - case Select(receiver, _) => receiver - case mr => mr.tpe.normalizedPrefix match { - case mr: TermRef => ref(mr) - case mr: ThisType => singleton(mr) - case mr => - if testOnly then - // In this case it is safe to skolemize now; we will produce a stable prefix for the actual call. - ref(mr.narrow) - else - EmptyTree - } - } - val getterPrefix = - if (meth.is(Synthetic) && meth.name == nme.apply) nme.CONSTRUCTOR else meth.name - def getterName = DefaultGetterName(getterPrefix, n + numArgs(fn)) - if !meth.hasDefaultParams then - EmptyTree - else if (receiver.isEmpty) { - def findGetter(cx: Context): Tree = - if (cx eq NoContext) EmptyTree - else if (cx.scope != cx.outer.scope && - cx.denotNamed(meth.name).hasAltWith(_.symbol == meth)) { - val denot = cx.denotNamed(getterName) - if (denot.exists) ref(TermRef(cx.owner.thisType, getterName, denot)) - else findGetter(cx.outer) - } + val idx = n + numArgs(fn) + methPart(fn) match + case Select(receiver, _) => + findDefaultGetter(meth, receiver, idx) + case mr => mappedAltInfo(meth) match + case Some((pre, skipped)) => + findDefaultGetter(meth, reifyPrefix(pre), idx + skipped) + case None => + findDefaultGetter(meth, reifyPrefix(mr.tpe.normalizedPrefix), idx) + else EmptyTree // structural applies don't have symbols 
or defaults + end findDefaultGetter + + /** Find reference to default parameter getter for method `meth` numbered `idx` + * selected from given `receiver`, or EmptyTree if none was found. + * @param meth the called method (can be mapped by resolveMapped) + * @param receiver the receiver of the original method call, which determines + * where default getters are found + * @param idx the index of the searched for default getter, as encoded in its name + */ + def findDefaultGetter(meth: TermSymbol, receiver: Tree, idx: Int)(using Context): Tree = + val getterPrefix = + if (meth.is(Synthetic) && meth.name == nme.apply) nme.CONSTRUCTOR else meth.name + val getterName = DefaultGetterName(getterPrefix, idx) + + if receiver.isEmpty then + def findGetter(cx: Context): Tree = + if cx eq NoContext then EmptyTree + else if cx.scope != cx.outer.scope + && cx.denotNamed(meth.name).hasAltWith(_.symbol == meth) then + val denot = cx.denotNamed(getterName) + if denot.exists then ref(TermRef(cx.owner.thisType, getterName, denot)) else findGetter(cx.outer) - findGetter(ctx) - } - else { - def selectGetter(qual: Tree): Tree = { - val getterDenot = qual.tpe.member(getterName) - if (getterDenot.exists) qual.select(TermRef(qual.tpe, getterName, getterDenot)) - else EmptyTree - } - if (!meth.isClassConstructor) - selectGetter(receiver) - else { - // default getters for class constructors are found in the companion object - val cls = meth.owner - val companion = cls.companionModule - if (companion.isTerm) { - val prefix = receiver.tpe.baseType(cls).normalizedPrefix - if (prefix.exists) selectGetter(ref(TermRef(prefix, companion.asTerm))) - else EmptyTree - } + else findGetter(cx.outer) + findGetter(ctx) + else + def selectGetter(qual: Tree): Tree = + val getterDenot = qual.tpe.member(getterName) + if (getterDenot.exists) qual.select(TermRef(qual.tpe, getterName, getterDenot)) + else EmptyTree + if !meth.isClassConstructor then + selectGetter(receiver) + else + // default getters for class 
constructors are found in the companion object + val cls = meth.owner + val companion = cls.companionModule + if companion.isTerm then + val prefix = receiver.tpe.baseType(cls).normalizedPrefix + if prefix.exists then selectGetter(ref(TermRef(prefix, companion.asTerm))) else EmptyTree - } - } - else EmptyTree // structural applies don't have symbols or defaults + else EmptyTree end findDefaultGetter /** Splice new method reference `meth` into existing application `app` */ @@ -460,7 +478,7 @@ trait Applications extends Compatibility { matchArgs(orderedArgs, methType.paramInfos, 0) case _ => if (methType.isError) ok = false - else fail(s"$methString does not take parameters") + else fail(s"$methString does not take parameters".toMessage) } /** The application was successful */ @@ -472,7 +490,7 @@ trait Applications extends Compatibility { i"${err.refStr(methRef)}$infoStr" /** Re-order arguments to correctly align named arguments */ - def reorder[T >: Untyped](args: List[Trees.Tree[T]]): List[Trees.Tree[T]] = { + def reorder[T <: Untyped](args: List[Trees.Tree[T]]): List[Trees.Tree[T]] = { /** @param pnames The list of parameter names that are missing arguments * @param args The list of arguments that are not yet passed, or that are waiting to be dropped @@ -503,7 +521,7 @@ trait Applications extends Compatibility { s"parameter $aname of $methString is already instantiated" else s"$methString does not have a parameter $aname" - fail(msg, arg.asInstanceOf[Arg]) + fail(msg.toMessage, arg.asInstanceOf[Arg]) arg :: handleNamed(pnamesRest, args1, nameToArg, toDrop) } case arg :: args1 => @@ -529,7 +547,7 @@ trait Applications extends Compatibility { /** Is `sym` a constructor of a Java-defined annotation? 
*/ def isJavaAnnotConstr(sym: Symbol): Boolean = - sym.is(JavaDefined) && sym.isConstructor && sym.owner.derivesFrom(defn.AnnotationClass) + sym.is(JavaDefined) && sym.isConstructor && sym.owner.is(JavaAnnotation) /** Match re-ordered arguments against formal parameters * @param n The position of the first parameter in formals in `methType`. @@ -545,7 +563,7 @@ trait Applications extends Compatibility { i"it is not the only argument to be passed to the corresponding repeated parameter $formal" else i"the corresponding parameter has type $formal which is not a repeated parameter type" - fail(em"Sequence argument type annotation `*` cannot be used here:\n$addendum", arg) + fail(em"Sequence argument type annotation `*` cannot be used here:\n$addendum".toMessage, arg) /** Add result of typing argument `arg` against parameter type `formal`. * @return The remaining formal parameter types. If the method is parameter-dependent @@ -561,15 +579,12 @@ trait Applications extends Compatibility { else formals1 - def missingArg(n: Int): Unit = { - val pname = methodType.paramNames(n) - fail( - if (pname.firstPart contains '$') s"not enough arguments for $methString" - else s"missing argument for parameter $pname of $methString") - } + def missingArg(n: Int): Unit = + fail(MissingArgument(methodType.paramNames(n), methString)) def tryDefault(n: Int, args1: List[Arg]): Unit = { val sym = methRef.symbol + val testOnly = this.isInstanceOf[TestApplication[?]] val defaultArg = if (isJavaAnnotConstr(sym)) { @@ -585,12 +600,14 @@ trait Applications extends Compatibility { else EmptyTree } - else defaultArgument(normalizedFun, n, this.isInstanceOf[TestApplication[?]]) + else defaultArgument(normalizedFun, n, testOnly) def implicitArg = implicitArgTree(formal, appPos.span) if !defaultArg.isEmpty then - matchArgs(args1, addTyped(treeToArg(defaultArg)), n + 1) + defaultArg.tpe.widen match + case _: MethodOrPoly if testOnly => matchArgs(args1, formals1, n + 1) + case _ => matchArgs(args1, 
addTyped(treeToArg(defaultArg)), n + 1) else if methodType.isContextualMethod && ctx.mode.is(Mode.ImplicitsEnabled) then matchArgs(args1, addTyped(treeToArg(implicitArg)), n + 1) else @@ -633,7 +650,7 @@ trait Applications extends Compatibility { i"can't supply unit value with infix notation because nullary $methString takes no arguments; use dotted invocation instead: (...).${methRef.name}()" case _ => i"too many arguments for $methString" - fail(msg, arg) + fail(msg.toMessage, arg) case nil => } } @@ -736,7 +753,7 @@ trait Applications extends Compatibility { /** Subclass of Application for type checking an Apply node, where * types of arguments are either known or unknown. */ - abstract class TypedApply[T >: Untyped]( + abstract class TypedApply[T <: Untyped]( app: untpd.Apply, fun: Tree, methRef: TermRef, args: List[Trees.Tree[T]], resultType: Type, override val applyKind: ApplyKind)(using Context) extends Application(methRef, fun.tpe, args, resultType) { @@ -994,7 +1011,7 @@ trait Applications extends Compatibility { case CaseDef(pat, _, _) => report.error(UnexpectedPatternForSummonFrom(pat), pat.srcPos) } - typed(untpd.InlineMatch(EmptyTree, cases).withSpan(arg.span), pt) + typed(untpd.InlineMatch(EmptyTree, cases).withSpan(tree.span), pt) case _ => errorTree(tree, em"argument to summonFrom must be a pattern matching closure") } @@ -1473,12 +1490,28 @@ trait Applications extends Compatibility { } } - /** Drop any implicit parameter section */ - def stripImplicit(tp: Type)(using Context): Type = tp match { + /** Drop any leading type or implicit parameter sections */ + def stripInferrable(tp: Type)(using Context): Type = tp match { + case mt: MethodType if mt.isImplicitMethod => + stripInferrable(resultTypeApprox(mt)) + case pt: PolyType => + stripInferrable(pt.resType) + case _ => + tp + } + + /** Drop any leading implicit parameter sections */ + def stripImplicit(tp: Type, wildcardOnly: Boolean = false)(using Context): Type = tp match { case mt: MethodType 
if mt.isImplicitMethod => - stripImplicit(resultTypeApprox(mt)) + stripImplicit(resultTypeApprox(mt, wildcardOnly)) case pt: PolyType => - pt.derivedLambdaType(pt.paramNames, pt.paramInfos, stripImplicit(pt.resultType)).asInstanceOf[PolyType].flatten + pt.derivedLambdaType(pt.paramNames, pt.paramInfos, + stripImplicit(pt.resultType, wildcardOnly = true)) + // can't use TypeParamRefs for parameter references in `resultTypeApprox` + // since their bounds can refer to type parameters in `pt` that are not + // bound by the constraint. This can lead to hygiene violations if subsequently + // `pt` itself is added to the constraint. Test case is run/enrich-gentraversable.scala. + .asInstanceOf[PolyType].flatten case _ => tp } @@ -1935,11 +1968,18 @@ trait Applications extends Compatibility { val ptypes = tp.paramInfos val numParams = ptypes.length def isVarArgs = ptypes.nonEmpty && ptypes.last.isRepeatedParam - def hasDefault = alt.symbol.hasDefaultParams - if (numParams == numArgs) true - else if (numParams < numArgs) isVarArgs - else if (numParams > numArgs + 1) hasDefault - else isVarArgs || hasDefault + def numDefaultParams = + if alt.symbol.hasDefaultParams then + val fn = ref(alt, needLoad = false) + ptypes.indices.count(n => !findDefaultGetter(fn, n, testOnly = true).isEmpty) + else 0 + if numParams < numArgs then isVarArgs + else if numParams == numArgs then true + else + val numNecessaryArgs = numParams - numDefaultParams + if numNecessaryArgs <= numArgs then true + else if numNecessaryArgs == numArgs + 1 then isVarArgs + else false case _ => numArgs == 0 } @@ -2034,7 +2074,7 @@ trait Applications extends Compatibility { skip(alt.widen) def resultIsMethod(tp: Type): Boolean = tp.widen.stripPoly match - case tp: MethodType => stripImplicit(tp.resultType).isInstanceOf[MethodType] + case tp: MethodType => stripInferrable(tp.resultType).isInstanceOf[MethodType] case _ => false record("resolveOverloaded.narrowedApplicable", candidates.length) @@ -2080,6 +2120,23 @@ 
trait Applications extends Compatibility { } end resolveOverloaded1 + /** The largest suffix of `paramss` that has the same first parameter name as `t`, + * plus the number of term parameters in `paramss` that come before that suffix. + */ + def trimParamss(t: Type, paramss: List[List[Symbol]])(using Context): (List[List[Symbol]], Int) = t match + case MethodType(Nil) => trimParamss(t.resultType, paramss) + case t: MethodOrPoly => + val firstParamName = t.paramNames.head + def recur(pss: List[List[Symbol]], skipped: Int): (List[List[Symbol]], Int) = + (pss: @unchecked) match + case (ps @ (p :: _)) :: pss1 => + if p.name == firstParamName then (pss, skipped) + else recur(pss1, if p.name.isTermName then skipped + ps.length else skipped) + case Nil => + (pss, skipped) + recur(paramss, 0) + case _ => (Nil, 0) + /** Resolve overloading by mapping to a different problem where each alternative's * type is mapped with `f`, alternatives with non-existing types are dropped, and the * expected type is `pt`. Map the results back to the original alternatives. @@ -2088,14 +2145,19 @@ trait Applications extends Compatibility { val reverseMapping = alts.flatMap { alt => val t = f(alt) if t.exists then + val (trimmed, skipped) = trimParamss(t, alt.symbol.rawParamss) val mappedSym = alt.symbol.asTerm.copy(info = t) - mappedSym.rawParamss = alt.symbol.rawParamss - // we need rawParamss to find parameters with default arguments, - // but we do not need to be precise right now, since this is just a pre-test before - // we look up default getters. If at some point we extract default arguments from the - // parameter symbols themselves, we have to find the right parameter by name, not position. - // That means it's OK to copy parameters wholesale rather than tailoring them to always - // correspond to the type transformation. 
+ mappedSym.rawParamss = trimmed + val (pre, totalSkipped) = mappedAltInfo(alt.symbol) match + case Some((pre, prevSkipped)) => + mappedSym.removeAnnotation(defn.MappedAlternativeAnnot) + (pre, skipped + prevSkipped) + case None => + (alt.prefix, skipped) + mappedSym.addAnnotation( + Annotation(TypeTree( + defn.MappedAlternativeAnnot.typeRef.appliedTo( + pre, ConstantType(Constant(totalSkipped)))))) Some((TermRef(NoPrefix, mappedSym), alt)) else None diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index a89fa91dc6a6..99399832085f 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -67,11 +67,12 @@ object Checking { */ def checkBounds(args: List[tpd.Tree], boundss: List[TypeBounds], instantiate: (Type, List[Type]) => Type, app: Type = NoType, tpt: Tree = EmptyTree)(using Context): Unit = - args.lazyZip(boundss).foreach { (arg, bound) => - if !bound.isLambdaSub && !arg.tpe.hasSimpleKind then - errorTree(arg, - showInferred(MissingTypeParameterInTypeApp(arg.tpe), app, tpt)) - } + if ctx.phase != Phases.checkCapturesPhase then + args.lazyZip(boundss).foreach { (arg, bound) => + if !bound.isLambdaSub && !arg.tpe.hasSimpleKind then + errorTree(arg, + showInferred(MissingTypeParameterInTypeApp(arg.tpe), app, tpt)) + } for (arg, which, bound) <- TypeOps.boundsViolations(args, boundss, instantiate, app) do report.error( showInferred(DoesNotConformToBound(arg.tpe, which, bound), app, tpt), @@ -472,9 +473,10 @@ object Checking { if (sym.isOneOf(flag)) fail(AbstractMemberMayNotHaveModifier(sym, flag)) def checkNoConflict(flag1: FlagSet, flag2: FlagSet, msg: => String) = - if (sym.isAllOf(flag1 | flag2)) fail(msg) + if (sym.isAllOf(flag1 | flag2)) fail(msg.toMessage) def checkCombination(flag1: FlagSet, flag2: FlagSet) = - if sym.isAllOf(flag1 | flag2) then fail(i"illegal combination of modifiers: `${flag1.flagsString}` and 
`${flag2.flagsString}` for: $sym") + if sym.isAllOf(flag1 | flag2) then + fail(i"illegal combination of modifiers: `${flag1.flagsString}` and `${flag2.flagsString}` for: $sym".toMessage) def checkApplicable(flag: Flag, ok: Boolean) = if sym.is(flag, butNot = Synthetic) && !ok then fail(ModifierNotAllowedForDefinition(flag)) @@ -494,15 +496,15 @@ object Checking { } if sym.is(Transparent) then if sym.isType then - if !sym.is(Trait) then fail(em"`transparent` can only be used for traits") + if !sym.isExtensibleClass then fail(em"`transparent` can only be used for extensible classes and traits".toMessage) else - if !sym.isInlineMethod then fail(em"`transparent` can only be used for inline methods") + if !sym.isInlineMethod then fail(em"`transparent` can only be used for inline methods".toMessage) if (!sym.isClass && sym.is(Abstract)) fail(OnlyClassesCanBeAbstract(sym)) // note: this is not covered by the next test since terms can be abstract (which is a dual-mode flag) // but they can never be one of ClassOnlyFlags if !sym.isClass && sym.isOneOf(ClassOnlyFlags) then - fail(em"only classes can be ${(sym.flags & ClassOnlyFlags).flagsString}") + fail(em"only classes can be ${(sym.flags & ClassOnlyFlags).flagsString}".toMessage) if (sym.is(AbsOverride) && !sym.owner.is(Trait)) fail(AbstractOverrideOnlyInTraits(sym)) if sym.is(Trait) then @@ -519,7 +521,7 @@ object Checking { if !sym.isOneOf(Method | ModuleVal) then fail(TailrecNotApplicable(sym)) else if sym.is(Inline) then - fail("Inline methods cannot be @tailrec") + fail("Inline methods cannot be @tailrec".toMessage) if sym.hasAnnotation(defn.TargetNameAnnot) && sym.isClass && sym.isTopLevelClass then fail(TargetNameOnTopLevelClass(sym)) if (sym.hasAnnotation(defn.NativeAnnot)) { @@ -538,7 +540,7 @@ object Checking { fail(CannotExtendAnyVal(sym)) if (sym.isConstructor && !sym.isPrimaryConstructor && sym.owner.is(Trait, butNot = JavaDefined)) val addendum = if ctx.settings.Ydebug.value then s" ${sym.owner.flagsString}" 
else "" - fail("Traits cannot have secondary constructors" + addendum) + fail(s"Traits cannot have secondary constructors$addendum".toMessage) checkApplicable(Inline, sym.isTerm && !sym.isOneOf(Mutable | Module)) checkApplicable(Lazy, !sym.isOneOf(Method | Mutable)) if (sym.isType && !sym.isOneOf(Deferred | JavaDefined)) @@ -1108,6 +1110,8 @@ trait Checking { def checkParentCall(call: Tree, caller: ClassSymbol)(using Context): Unit = if (!ctx.isAfterTyper) { val called = call.tpe.classSymbol + if (called.is(JavaAnnotation)) + report.error(i"${called.name} must appear without any argument to be a valid class parent because it is a Java annotation", call.srcPos) if (caller.is(Trait)) report.error(i"$caller may not call constructor of $called", call.srcPos) else if (called.is(Trait) && !caller.mixins.contains(called)) @@ -1261,6 +1265,23 @@ trait Checking { if !Inlines.inInlineMethod && !ctx.isInlineContext then report.error(em"$what can only be used in an inline method", pos) + /** Check that the class corresponding to this tree is either a Scala or Java annotation. + * + * @return The original tree or an error tree in case `tree` isn't a valid + * annotation or already an error tree. 
+ */ + def checkAnnotClass(tree: Tree)(using Context): Tree = + if tree.tpe.isError then + return tree + val cls = Annotations.annotClass(tree) + if cls.is(JavaDefined) then + if !cls.is(JavaAnnotation) then + errorTree(tree, em"$cls is not a valid Java annotation: it was not declared with `@interface`") + else tree + else if !cls.derivesFrom(defn.AnnotationClass) then + errorTree(tree, em"$cls is not a valid Scala annotation: it does not extend `scala.annotation.Annotation`") + else tree + /** Check arguments of compiler-defined annotations */ def checkAnnotArgs(tree: Tree)(using Context): tree.type = val cls = Annotations.annotClass(tree) @@ -1460,6 +1481,15 @@ trait Checking { |CanThrow capabilities can only be generated $req.""", pat.srcPos) + /** Check that tree does not define a context function type */ + def checkNoContextFunctionType(tree: Tree)(using Context): Unit = + def recur(tp: Type): Unit = tp.dealias match + case tp: HKTypeLambda => recur(tp.resType) + case tp if defn.isContextFunctionType(tp) => + report.error(em"context function type cannot have opaque aliases", tree.srcPos) + case _ => + recur(tree.tpe) + /** (1) Check that every named import selector refers to a type or value member of the * qualifier type. * (2) Check that no import selector is renamed more than once. 
@@ -1495,6 +1525,7 @@ trait ReChecking extends Checking { override def checkNoModuleClash(sym: Symbol)(using Context) = () override def checkCanThrow(tp: Type, span: Span)(using Context): Tree = EmptyTree override def checkCatch(pat: Tree, guard: Tree)(using Context): Unit = () + override def checkNoContextFunctionType(tree: Tree)(using Context): Unit = () override def checkFeature(name: TermName, description: => String, featureUseSite: Symbol, pos: SrcPos)(using Context): Unit = () } diff --git a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala index 044dd7bb8528..746b01c934a3 100644 --- a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala @@ -115,15 +115,6 @@ class CrossVersionChecks extends MiniPhase: } } - /** Check that classes extending experimental classes or nested in experimental classes have the @experimental annotation. */ - private def checkExperimentalInheritance(cls: ClassSymbol)(using Context): Unit = - if !cls.isAnonymousClass && !cls.hasAnnotation(defn.ExperimentalAnnot) then - cls.info.parents.find(_.typeSymbol.isExperimental) match - case Some(parent) => - report.error(em"extension of experimental ${parent.typeSymbol} must have @experimental annotation", cls.srcPos) - case _ => - end checkExperimentalInheritance - override def transformValDef(tree: ValDef)(using Context): ValDef = checkDeprecatedOvers(tree) checkExperimentalAnnots(tree.symbol) @@ -136,12 +127,6 @@ class CrossVersionChecks extends MiniPhase: checkExperimentalSignature(tree.symbol, tree) tree - override def transformTemplate(tree: Template)(using Context): Tree = - val cls = ctx.owner.asClass - checkExperimentalInheritance(cls) - checkExperimentalAnnots(cls) - tree - override def transformIdent(tree: Ident)(using Context): Ident = { checkUndesiredProperties(tree.symbol, tree.srcPos) tree diff --git 
a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala index cdd37a2f0be7..b2e0a4481297 100644 --- a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala +++ b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala @@ -26,6 +26,9 @@ object ErrorReporting { def errorTree(tree: untpd.Tree, msg: Message)(using Context): tpd.Tree = errorTree(tree, msg, tree.srcPos) + def errorTree(tree: untpd.Tree, msg: => String)(using Context): tpd.Tree = + errorTree(tree, msg.toMessage) + def errorTree(tree: untpd.Tree, msg: TypeError, pos: SrcPos)(using Context): tpd.Tree = tree.withType(errorType(msg, pos)) @@ -34,6 +37,9 @@ object ErrorReporting { ErrorType(msg) } + def errorType(msg: => String, pos: SrcPos)(using Context): ErrorType = + errorType(msg.toMessage, pos) + def errorType(ex: TypeError, pos: SrcPos)(using Context): ErrorType = { report.error(ex, pos) ErrorType(ex.toMessage) @@ -69,15 +75,28 @@ object ErrorReporting { "\n(Note that variables need to be initialized to be defined)" else "" + /** Reveal arguments in FunProtos that are protected by an IgnoredProto but were + * revealed during type inference. This gives clearer error messages for overloading + * resolution errors that need to show argument lists after the first. We do not + * reveal other kinds of ignored prototypes since these might be misleading because + * there might be a possible implicit conversion on the result.
+ */ + def revealDeepenedArgs(tp: Type): Type = tp match + case tp @ IgnoredProto(deepTp: FunProto) if tp.wasDeepened => deepTp + case _ => tp + def expectedTypeStr(tp: Type): String = tp match { case tp: PolyProto => - em"type arguments [${tp.targs.tpes}%, %] and ${expectedTypeStr(tp.resultType)}" + em"type arguments [${tp.targs.tpes}%, %] and ${expectedTypeStr(revealDeepenedArgs(tp.resultType))}" case tp: FunProto => - val result = tp.resultType match { - case _: WildcardType | _: IgnoredProto => "" - case tp => em" and expected result type $tp" - } - em"arguments (${tp.typedArgs().tpes}%, %)$result" + def argStr(tp: FunProto): String = + val result = revealDeepenedArgs(tp.resultType) match { + case restp: FunProto => argStr(restp) + case _: WildcardType | _: IgnoredProto => "" + case tp => em" and expected result type $tp" + } + em"(${tp.typedArgs().tpes}%, %)$result" + s"arguments ${argStr(tp)}" case _ => em"expected type $tp" } @@ -168,7 +187,9 @@ object ErrorReporting { |The tests were made under $constraintText""" def whyFailedStr(fail: FailedExtension) = - i""" failed with + i""" + | + | failed with: | |${fail.whyFailed.message.indented(8)}""" diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index ede44c2b7f86..0400d241e367 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -447,7 +447,7 @@ object Implicits: /** An explanation of the cause of the failure as a string */ def explanation(using Context): String - def msg(using Context): Message = explanation + def msg(using Context): Message = explanation.toMessage /** If search was for an implicit conversion, a note describing the failure * in more detail - this is either empty or starts with a '\n' @@ -568,9 +568,9 @@ object Implicits: if reasons.length > 1 then reasons.mkString("\n\t* ", "\n\t* ", "") else - reasons.mkString + reasons.mkString(" ", "", "") - def 
explanation(using Context) = em"Failed to synthesize an instance of type ${clarify(expectedType)}: ${formatReasons}" + def explanation(using Context) = em"Failed to synthesize an instance of type ${clarify(expectedType)}:${formatReasons}" end Implicits @@ -627,6 +627,9 @@ trait ImplicitRunInfo: traverse(t.underlying) case t: TermParamRef => traverse(t.underlying) + case t: TypeLambda => + for p <- t.paramRefs do partSeen += p + traverseChildren(t) case t => traverseChildren(t) diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala index 27b83e025cf9..9d2db773c4d4 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala @@ -6,15 +6,14 @@ import core._ import ast._ import Contexts._, Types._, Flags._, Symbols._ import ProtoTypes._ -import NameKinds.{AvoidNameKind, UniqueName} +import NameKinds.UniqueName import util.Spans._ -import util.{Stats, SimpleIdentityMap, SrcPos} +import util.{Stats, SimpleIdentityMap, SimpleIdentitySet, SrcPos} import Decorators._ import config.Printers.{gadts, typr} import annotation.tailrec import reporting._ import collection.mutable - import scala.annotation.internal.sharable object Inferencing { @@ -574,7 +573,7 @@ trait Inferencing { this: Typer => * Then `Y` also occurs co-variantly in `T` because it needs to be minimized in order to constrain * `T` the least. See `variances` for more detail. */ - def interpolateTypeVars(tree: Tree, pt: Type, locked: TypeVars)(using Context): tree.type = { + def interpolateTypeVars(tree: Tree, pt: Type, locked: TypeVars)(using Context): tree.type = val state = ctx.typerState // Note that some variables in `locked` might not be in `state.ownedVars` @@ -583,7 +582,7 @@ trait Inferencing { this: Typer => // `qualifying`. 
val ownedVars = state.ownedVars - if ((ownedVars ne locked) && !ownedVars.isEmpty) { + if (ownedVars ne locked) && !ownedVars.isEmpty then val qualifying = ownedVars -- locked if (!qualifying.isEmpty) { typr.println(i"interpolate $tree: ${tree.tpe.widen} in $state, pt = $pt, owned vars = ${state.ownedVars.toList}%, %, qualifying = ${qualifying.toList}%, %, previous = ${locked.toList}%, % / ${state.constraint}") @@ -619,44 +618,67 @@ trait Inferencing { this: Typer => if state.reporter.hasUnreportedErrors then return tree def constraint = state.constraint - type InstantiateQueue = mutable.ListBuffer[(TypeVar, Boolean)] - val toInstantiate = new InstantiateQueue - for tvar <- qualifying do - if !tvar.isInstantiated && constraint.contains(tvar) && tvar.nestingLevel >= ctx.nestingLevel then - constrainIfDependentParamRef(tvar, tree) - // Needs to be checked again, since previous interpolations could already have - // instantiated `tvar` through unification. - val v = vs(tvar) - if v == null then - // Even though `tvar` is non-occurring in `v`, the specific - // instantiation we pick still matters because `tvar` might appear - // in the bounds of a non-`qualifying` type variable in the - // constraint. - // In particular, if `tvar` was created as the upper or lower - // bound of an existing variable by `LevelAvoidMap`, we - // instantiate it in the direction corresponding to the - // original variable which might be further constrained later. - // Otherwise, we simply rely on `hasLowerBound`. 
- val name = tvar.origin.paramName - val fromBelow = - name.is(AvoidNameKind.UpperBound) || - !name.is(AvoidNameKind.LowerBound) && tvar.hasLowerBound - typr.println(i"interpolate non-occurring $tvar in $state in $tree: $tp, fromBelow = $fromBelow, $constraint") - toInstantiate += ((tvar, fromBelow)) - else if v.intValue != 0 then - typr.println(i"interpolate $tvar in $state in $tree: $tp, fromBelow = ${v.intValue == 1}, $constraint") - toInstantiate += ((tvar, v.intValue == 1)) - else comparing(cmp => - if !cmp.levelOK(tvar.nestingLevel, ctx.nestingLevel) then - // Invariant: The type of a tree whose enclosing scope is level - // N only contains type variables of level <= N. - typr.println(i"instantiate nonvariant $tvar of level ${tvar.nestingLevel} to a type variable of level <= ${ctx.nestingLevel}, $constraint") - cmp.atLevel(ctx.nestingLevel, tvar.origin) - else - typr.println(i"no interpolation for nonvariant $tvar in $state") - ) - /** Instantiate all type variables in `buf` in the indicated directions. + /** Values of this type report type variables to instantiate with variance indication: + * +1 variable appears covariantly, can be instantiated from lower bound + * -1 variable appears contravariantly, can be instantiated from upper bound + * 0 variable does not appear at all, can be instantiated from either bound + */ + type ToInstantiate = List[(TypeVar, Int)] + + val toInstantiate: ToInstantiate = + val buf = new mutable.ListBuffer[(TypeVar, Int)] + for tvar <- qualifying do + if !tvar.isInstantiated && constraint.contains(tvar) && tvar.nestingLevel >= ctx.nestingLevel then + constrainIfDependentParamRef(tvar, tree) + if !tvar.isInstantiated then + // isInstantiated needs to be checked again, since previous interpolations could already have + // instantiated `tvar` through unification. 
+ val v = vs(tvar) + if v == null then buf += ((tvar, 0)) + else if v.intValue != 0 then buf += ((tvar, v.intValue)) + else comparing(cmp => + if !cmp.levelOK(tvar.nestingLevel, ctx.nestingLevel) then + // Invariant: The type of a tree whose enclosing scope is level + // N only contains type variables of level <= N. + typr.println(i"instantiate nonvariant $tvar of level ${tvar.nestingLevel} to a type variable of level <= ${ctx.nestingLevel}, $constraint") + cmp.atLevel(ctx.nestingLevel, tvar.origin) + else + typr.println(i"no interpolation for nonvariant $tvar in $state") + ) + buf.toList + + def typeVarsIn(xs: ToInstantiate): TypeVars = + xs.foldLeft(SimpleIdentitySet.empty: TypeVars)((tvs, tvi) => tvs + tvi._1) + + /** Filter list of proposed instantiations so that they don't constrain further + * the current constraint. + */ + def filterByDeps(tvs0: ToInstantiate): ToInstantiate = + val excluded = // ignore dependencies from other variables that are being instantiated + typeVarsIn(tvs0) + def step(tvs: ToInstantiate): ToInstantiate = tvs match + case tvs @ (hd @ (tvar, v)) :: tvs1 => + def aboveOK = !constraint.dependsOn(tvar, excluded, co = true) + def belowOK = !constraint.dependsOn(tvar, excluded, co = false) + if v == 0 && !aboveOK then + step((tvar, 1) :: tvs1) + else if v == 0 && !belowOK then + step((tvar, -1) :: tvs1) + else if v == -1 && !aboveOK || v == 1 && !belowOK then + typr.println(i"drop $tvar, $v in $tp, $pt, qualifying = ${qualifying.toList}, tvs0 = ${tvs0.toList}%, %, excluded = ${excluded.toList}, $constraint") + step(tvs1) + else // no conflict, keep the instantiation proposal + tvs.derivedCons(hd, step(tvs1)) + case Nil => + Nil + val tvs1 = step(tvs0) + if tvs1 eq tvs0 then tvs1 + else filterByDeps(tvs1) // filter again with smaller excluded set + end filterByDeps + + /** Instantiate all type variables in `tvs` in the indicated directions, + * as described in the doc comment of `ToInstantiate`. 
* If a type variable A is instantiated from below, and there is another * type variable B in `buf` that is known to be smaller than A, wait and * instantiate all other type variables before trying to instantiate A again. @@ -685,29 +707,37 @@ trait Inferencing { this: Typer => * * V2 := V3, O2 := O3 */ - def doInstantiate(buf: InstantiateQueue): Unit = - if buf.nonEmpty then - val suspended = new InstantiateQueue - while buf.nonEmpty do - val first @ (tvar, fromBelow) = buf.head - buf.dropInPlace(1) - if !tvar.isInstantiated then - val suspend = buf.exists{ (following, _) => - if fromBelow then - constraint.isLess(following.origin, tvar.origin) - else - constraint.isLess(tvar.origin, following.origin) + def doInstantiate(tvs: ToInstantiate): Unit = + + /** Try to instantiate `tvs`, return any suspended type variables */ + def tryInstantiate(tvs: ToInstantiate): ToInstantiate = tvs match + case (hd @ (tvar, v)) :: tvs1 => + val fromBelow = v == 1 || (v == 0 && tvar.hasLowerBound) + typr.println( + i"interpolate${if v == 0 then " non-occurring" else ""} $tvar in $state in $tree: $tp, fromBelow = $fromBelow, $constraint") + if tvar.isInstantiated then + tryInstantiate(tvs1) + else + val suspend = tvs1.exists{ (following, _) => + if fromBelow + then constraint.isLess(following.origin, tvar.origin) + else constraint.isLess(tvar.origin, following.origin) } - if suspend then suspended += first else tvar.instantiate(fromBelow) - end if - end while - doInstantiate(suspended) + if suspend then + typr.println(i"suspended: $hd") + hd :: tryInstantiate(tvs1) + else + tvar.instantiate(fromBelow) + tryInstantiate(tvs1) + case Nil => Nil + if tvs.nonEmpty then doInstantiate(tryInstantiate(tvs)) end doInstantiate - doInstantiate(toInstantiate) + + doInstantiate(filterByDeps(toInstantiate)) } - } + end if tree - } + end interpolateTypeVars /** If `tvar` represents a parameter of a dependent method type in the current `call` * approximate it from below with the type of the actual 
argument. Skolemize that diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index ad8d0e50d348..6aab561c44b7 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -833,7 +833,7 @@ class Namer { typer: Typer => if (cls eq sym) report.error("An annotation class cannot be annotated with iself", annotTree.srcPos) else { - val ann = Annotation.deferred(cls)(typedAheadAnnotation(annotTree)(using annotCtx)) + val ann = Annotation.deferred(cls)(typedAheadExpr(annotTree)(using annotCtx)) sym.addAnnotation(ann) } } @@ -1618,9 +1618,6 @@ class Namer { typer: Typer => def typedAheadExpr(tree: Tree, pt: Type = WildcardType)(using Context): tpd.Tree = typedAhead(tree, typer.typedExpr(_, pt)) - def typedAheadAnnotation(tree: Tree)(using Context): tpd.Tree = - typedAheadExpr(tree, defn.AnnotationClass.typeRef) - def typedAheadAnnotationClass(tree: Tree)(using Context): Symbol = tree match { case Apply(fn, _) => typedAheadAnnotationClass(fn) case TypeApply(fn, _) => typedAheadAnnotationClass(fn) diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index b53ef28dc8f7..6fb019ee057c 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -131,10 +131,18 @@ object ProtoTypes { /** A class marking ignored prototypes that can be revealed by `deepenProto` */ abstract case class IgnoredProto(ignored: Type) extends CachedGroundType with MatchAlways: + private var myWasDeepened = false override def revealIgnored = ignored - override def deepenProto(using Context): Type = ignored + override def deepenProto(using Context): Type = + myWasDeepened = true + ignored override def deepenProtoTrans(using Context): Type = ignored.deepenProtoTrans + /** Did someone look inside via deepenProto? 
Used for error diagnostics + * to give a more extensive expected type. + */ + def wasDeepened: Boolean = myWasDeepened + override def computeHash(bs: Hashable.Binders): Int = doHash(bs, ignored) override def eql(that: Type): Boolean = that match diff --git a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala index ee5a156ca5c7..b53b2f9ec57a 100644 --- a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala +++ b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala @@ -71,7 +71,7 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking promote(tree) override def typedRefinedTypeTree(tree: untpd.RefinedTypeTree)(using Context): TypTree = - promote(TypeTree(tree.tpe).withSpan(tree.span)) + promote(TypeTree(tree.typeOpt).withSpan(tree.span)) override def typedExport(exp: untpd.Export)(using Context): Export = promote(exp) @@ -87,8 +87,8 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking // retract PatternOrTypeBits like in typedExpr withoutMode(Mode.PatternOrTypeBits)(typedUnadapted(tree.fun, AnyFunctionProto)) val implicits1 = tree.implicits.map(typedExpr(_)) - val patterns1 = tree.patterns.mapconserve(pat => typed(pat, pat.tpe)) - untpd.cpy.UnApply(tree)(fun1, implicits1, patterns1).withType(tree.tpe) + val patterns1 = tree.patterns.mapconserve(pat => typed(pat, pat.typeOpt)) + untpd.cpy.UnApply(tree)(fun1, implicits1, patterns1).withType(tree.typeOpt) } override def typedUnApply(tree: untpd.Apply, selType: Type)(using Context): Tree = @@ -108,7 +108,7 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking override def completeAnnotations(mdef: untpd.MemberDef, sym: Symbol)(using Context): Unit = () - override def ensureConstrCall(cls: ClassSymbol, parent: Tree)(using Context): Tree = + override def ensureConstrCall(cls: ClassSymbol, parent: Tree, psym: Symbol)(using Context): Tree = parent override def handleUnexpectedFunType(tree: 
untpd.Apply, fun: Tree)(using Context): Tree = fun.tpe match { diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index 94eacca5c7db..8afcec4dee63 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -15,8 +15,8 @@ import config.Printers.{checks, noPrinter} import Decorators._ import OverridingPairs.isOverridingPair import typer.ErrorReporting._ -import config.Feature.{warnOnMigration, migrateTo3} -import config.SourceVersion.`3.0` +import config.Feature.{warnOnMigration, migrateTo3, sourceVersion} +import config.SourceVersion.{`3.0`, `future`} import config.Printers.refcheck import reporting._ import Constants.Constant @@ -91,24 +91,42 @@ object RefChecks { cls.thisType } + /** - Check that self type of `cls` conforms to self types of all `parents` as seen from + * `cls.thisType` + * - If self type of `cls` is explicit, check that it conforms to the self types + * of all its class symbols. + * @param deep If true and a self type of a parent is not given explicitly, recurse to + * check against the parents of the parent. 
This is needed when capture checking, + * since we assume (& check) that the capture set of an inferred self type + * is the intersection of the capture sets of all its parents + */ + def checkSelfAgainstParents(cls: ClassSymbol, parents: List[Symbol])(using Context): Unit = + withMode(Mode.CheckBoundsOrSelfType) { + val cinfo = cls.classInfo + + def checkSelfConforms(other: ClassSymbol, category: String, relation: String) = + val otherSelf = other.declaredSelfTypeAsSeenFrom(cls.thisType) + if otherSelf.exists then + if !(cinfo.selfType <:< otherSelf) then + report.error(DoesNotConformToSelfType(category, cinfo.selfType, cls, otherSelf, relation, other), + cls.srcPos) + + for psym <- parents do + checkSelfConforms(psym.asClass, "illegal inheritance", "parent") + for reqd <- cls.asClass.givenSelfType.classSymbols do + if reqd != cls then + checkSelfConforms(reqd, "missing requirement", "required") + } + end checkSelfAgainstParents + /** Check that self type of this class conforms to self types of parents * and required classes. Also check that only `enum` constructs extend * `java.lang.Enum` and no user-written class extends ContextFunctionN. 
*/ def checkParents(cls: Symbol, parentTrees: List[Tree])(using Context): Unit = cls.info match { case cinfo: ClassInfo => - def checkSelfConforms(other: ClassSymbol, category: String, relation: String) = { - val otherSelf = other.declaredSelfTypeAsSeenFrom(cls.thisType) - if otherSelf.exists && !(cinfo.selfType <:< otherSelf) then - report.error(DoesNotConformToSelfType(category, cinfo.selfType, cls, otherSelf, relation, other), - cls.srcPos) - } val psyms = cls.asClass.parentSyms - for (psym <- psyms) - checkSelfConforms(psym.asClass, "illegal inheritance", "parent") - for reqd <- cinfo.cls.givenSelfType.classSymbols do - if reqd != cls then - checkSelfConforms(reqd, "missing requirement", "required") + checkSelfAgainstParents(cls.asClass, psyms) def isClassExtendingJavaEnum = !cls.isOneOf(Enum | Trait) && psyms.contains(defn.JavaEnumClass) @@ -264,6 +282,8 @@ object RefChecks { * 1.10. If O is inline (and deferred, otherwise O would be final), M must be inline * 1.11. If O is a Scala-2 macro, M must be a Scala-2 macro. * 1.12. If O is non-experimental, M must be non-experimental. + * 1.13 Under -source future, if O is a val parameter, M must be a val parameter + * that passes its value on to O. * 2. Check that only abstract classes have deferred members * 3. Check that concrete classes do not have deferred definitions * that are not implemented in a subclass. 
@@ -462,7 +482,7 @@ object RefChecks { if (autoOverride(member) || other.owner.isAllOf(JavaInterface) && warnOnMigration( - "`override` modifier required when a Java 8 default method is re-implemented", + "`override` modifier required when a Java 8 default method is re-implemented".toMessage, member.srcPos, version = `3.0`)) member.setFlag(Override) else if (member.isType && self.memberInfo(member) =:= self.memberInfo(other)) @@ -514,12 +534,26 @@ object RefChecks { overrideError(i"needs to be declared with @targetName(${"\""}${other.targetName}${"\""}) so that external names match") else overrideError("cannot have a @targetName annotation since external names would be different") + else if other.is(ParamAccessor) && !isInheritedAccessor(member, other) then // (1.13) + if sourceVersion.isAtLeast(`future`) then + overrideError(i"cannot override val parameter ${other.showLocated}") + else + report.deprecationWarning( + i"overriding val parameter ${other.showLocated} is deprecated, will be illegal in a future version", + member.srcPos) else if !other.isExperimental && member.hasAnnotation(defn.ExperimentalAnnot) then // (1.12) overrideError("may not override non-experimental member") else if other.hasAnnotation(defn.DeprecatedOverridingAnnot) then overrideDeprecation("", member, other, "removed or renamed") end checkOverride + def isInheritedAccessor(mbr: Symbol, other: Symbol): Boolean = + mbr.is(ParamAccessor) + && { + val next = ParamForwarding.inheritedAccessor(mbr) + next == other || isInheritedAccessor(next, other) + } + OverridingPairsChecker(clazz, self).checkAll(checkOverride) printMixinOverrideErrors() @@ -758,17 +792,19 @@ object RefChecks { // For each member, check that the type of its symbol, as seen from `self` // can override the info of this member - for (name <- membersToCheck) - for (mbrd <- self.member(name).alternatives) { - val mbr = mbrd.symbol - val mbrType = mbr.info.asSeenFrom(self, mbr.owner) - if (!mbrType.overrides(mbrd.info, relaxedCheck 
= false, matchLoosely = true)) - report.errorOrMigrationWarning( - em"""${mbr.showLocated} is not a legal implementation of `$name` in $clazz - | its type $mbrType - | does not conform to ${mbrd.info}""", - (if (mbr.owner == clazz) mbr else clazz).srcPos, from = `3.0`) + withMode(Mode.IgnoreCaptures) { + for (name <- membersToCheck) + for (mbrd <- self.member(name).alternatives) { + val mbr = mbrd.symbol + val mbrType = mbr.info.asSeenFrom(self, mbr.owner) + if (!mbrType.overrides(mbrd.info, relaxedCheck = false, matchLoosely = true)) + report.errorOrMigrationWarning( + em"""${mbr.showLocated} is not a legal implementation of `$name` in $clazz + | its type $mbrType + | does not conform to ${mbrd.info}""", + (if (mbr.owner == clazz) mbr else clazz).srcPos, from = `3.0`) } + } } /** Check that inheriting a case class does not constitute a variant refinement diff --git a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala index c8b01b3407b7..b90409e72364 100644 --- a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -167,7 +167,7 @@ trait TypeAssigner { case _ => false def addendum = err.selectErrorAddendum(tree, qual, qualType, importSuggestionAddendum, foundWithoutNull) val msg: Message = - if tree.name == nme.CONSTRUCTOR then ex"$qualType does not have a constructor" + if tree.name == nme.CONSTRUCTOR then ex"$qualType does not have a constructor".toMessage else NotAMember(qualType, tree.name, kind, addendum) errorType(msg, tree.srcPos) @@ -255,7 +255,7 @@ trait TypeAssigner { errorType("ambiguous parent class qualifier", pos) } val owntype = - if (mixinClass.exists) mixinClass.appliedRef + if (mixinClass.exists) mixinClass.typeRef else if (!mix.isEmpty) findMixinSuper(cls.info) else if (ctx.erasedTypes) cls.info.firstParent.typeConstructor else { diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala 
b/compiler/src/dotty/tools/dotc/typer/Typer.scala index fbdbd3df5580..032bed38482c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -283,7 +283,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def checkUnambiguous(found: Type) = val other = recur(selectors.tail) if other.exists && found.exists && found != other then - fail(em"reference to `$name` is ambiguous; it is imported twice") + fail(em"reference to `$name` is ambiguous; it is imported twice".toMessage) found if selector.rename == termName && selector.rename != nme.WILDCARD then @@ -476,13 +476,15 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * (x: T | Null) => x.$asInstanceOf$[x.type & T] */ def toNotNullTermRef(tree: Tree, pt: Type)(using Context): Tree = tree.tpe match - case ref @ OrNull(tpnn) : TermRef + case ref: TermRef if pt != AssignProto && // Ensure it is not the lhs of Assign ctx.notNullInfos.impliesNotNull(ref) && // If a reference is in the context, it is already trackable at the point we add it. // Hence, we don't use isTracked in the next line, because checking use out of order is enough. 
!ref.usedOutOfOrder => - tree.cast(AndType(ref, tpnn)) + ref match + case OrNull(tpnn) => tree.cast(AndType(ref, tpnn)) + case _ => tree case _ => tree @@ -614,6 +616,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if selName.isTypeName then checkStable(qual.tpe, qual.srcPos, "type prefix") checkLegalValue(select, pt) ConstFold(select) + else if selName == nme.apply && qual.tpe.widen.isInstanceOf[MethodType] then + // Simplify `m.apply(...)` to `m(...)` + qual else if couldInstantiateTypeVar(qual.tpe.widen) then // there's a simply visible type variable in the result; try again with a more defined qualifier type // There's a second trial where we try to instantiate all type variables in `qual.tpe.widen`, @@ -989,7 +994,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def canAssign(sym: Symbol) = sym.is(Mutable, butNot = Accessor) || - ctx.owner.isPrimaryConstructor && !sym.is(Method) && sym.owner == ctx.owner.owner || + ctx.owner.isPrimaryConstructor && !sym.is(Method) && sym.maybeOwner == ctx.owner.owner || // allow assignments from the primary constructor to class fields ctx.owner.name.is(TraitSetterName) || ctx.owner.isStaticConstructor @@ -1606,6 +1611,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } val result = pt match { + case mt: MatchType if isMatchTypeShaped(mt) => + typedDependentMatchFinish(tree, sel1, selType, tree.cases, mt) case MatchType.InDisguise(mt) if isMatchTypeShaped(mt) => typedDependentMatchFinish(tree, sel1, selType, tree.cases, mt) case _ => @@ -1874,7 +1881,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val expr1 = typed(tree.expr, defn.ThrowableType) val cap = checkCanThrow(expr1.tpe.widen, tree.span) val res = Throw(expr1).withSpan(tree.span) - if ctx.settings.Ycc.value && !cap.isEmpty && !ctx.isAfterTyper then + if Feature.ccEnabled && !cap.isEmpty && !ctx.isAfterTyper then // Record access to the CanThrow capability recovered in `cap` by
wrapping // the type of the `throw` (i.e. Nothing) in a `@requiresCapability` annotation. Typed(res, @@ -2254,7 +2261,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } def typedAnnotation(annot: untpd.Tree)(using Context): Tree = - checkAnnotArgs(typed(annot, defn.AnnotationClass.typeRef)) + checkAnnotClass(checkAnnotArgs(typed(annot))) def registerNowarn(tree: Tree, mdef: untpd.Tree)(using Context): Unit = val annot = Annotations.Annotation(tree) @@ -2417,6 +2424,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case rhs => typedType(rhs) checkFullyAppliedType(rhs1) + if sym.isOpaqueAlias then checkNoContextFunctionType(rhs1) assignType(cpy.TypeDef(tdef)(name, rhs1), sym) } @@ -2426,65 +2434,32 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val TypeDef(name, impl @ Template(constr, _, self, _)) = cdef: @unchecked val parents = impl.parents val superCtx = ctx.superCallContext - - /** If `ref` is an implicitly parameterized trait, pass an implicit argument list. - * Otherwise, if `ref` is a parameterized trait, error. - * Note: Traits and classes have sometimes a synthesized empty parameter list () - * in front or after the implicit parameter(s). See NamerOps.normalizeIfConstructor. - * We synthesize a () argument at the correct place in this case.
- * @param ref The tree referring to the (parent) trait - * @param psym Its type symbol - */ - def maybeCall(ref: Tree, psym: Symbol): Tree = - def appliedRef = - typedExpr(untpd.New(untpd.TypedSplice(ref)(using superCtx), Nil))(using superCtx) - def dropContextual(tp: Type): Type = tp.stripPoly match - case mt: MethodType if mt.isContextualMethod => dropContextual(mt.resType) - case _ => tp - psym.primaryConstructor.info.stripPoly match - case cinfo @ MethodType(Nil) - if cinfo.resultType.isImplicitMethod && !cinfo.resultType.isContextualMethod => - appliedRef - case cinfo => - val cinfo1 = dropContextual(cinfo) - cinfo1 match - case cinfo1 @ MethodType(Nil) if !cinfo1.resultType.isInstanceOf[MethodType] => - if cinfo1 ne cinfo then appliedRef else ref - case cinfo1: MethodType if !ctx.erasedTypes => - report.error(ParameterizedTypeLacksArguments(psym), ref.srcPos) - ref - case _ => - ref - val seenParents = mutable.Set[Symbol]() - def typedParent(tree: untpd.Tree): Tree = { - def isTreeType(t: untpd.Tree): Boolean = t match { - case _: untpd.Apply => false - case _ => true - } - var result = - if isTreeType(tree) then typedType(tree)(using superCtx) - else typedExpr(tree)(using superCtx) - val psym = result.tpe.dealias.typeSymbol - if (seenParents.contains(psym) && !cls.isRefinementClass) { - // Desugaring can adds parents to classes, but we don't want to emit an + def typedParent(tree: untpd.Tree): Tree = + val parent = tree match + case _: untpd.Apply => typedExpr(tree)(using superCtx) + case _ => typedType(tree)(using superCtx) + val psym = parent.tpe.dealias.typeSymbol + if seenParents.contains(psym) && !cls.isRefinementClass then + // Desugaring can add parents to classes, but we don't want to emit an // error if the same parent was explicitly added in user code. 
- if (!tree.span.isSourceDerived) + if !tree.span.isSourceDerived then return EmptyTree - - if (!ctx.isAfterTyper) report.error(i"$psym is extended twice", tree.srcPos) - } - else seenParents += psym - if (tree.isType) { - checkSimpleKinded(result) // Not needed for constructor calls, as type arguments will be inferred. - if (psym.is(Trait) && !cls.is(Trait) && !cls.superClass.isSubClass(psym)) - result = maybeCall(result, psym) - } - else checkParentCall(result, cls) - if (cls is Case) checkCaseInheritance(psym, cls, tree.srcPos) + if !ctx.isAfterTyper then report.error(i"$psym is extended twice", tree.srcPos) + else + seenParents += psym + val result = ensureConstrCall(cls, parent, psym)(using superCtx) + if parent.isType then + if !result.symbol.info.takesImplicitParams then + checkSimpleKinded(parent) + // allow missing type parameters if there are implicit arguments to pass + // since we can infer type arguments from them + else + checkParentCall(result, cls) + if cls is Case then + checkCaseInheritance(psym, cls, tree.srcPos) result - } def ensureCorrectSuperClass(): Unit = val parents0 = cls.classInfo.declaredParents @@ -2499,23 +2474,27 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer /** Augment `ptrees` to have the same class symbols as `parents`. Generate TypeTrees * or New trees to fill in any parents for which no tree exists yet. 
*/ - def parentTrees(parents: List[Type], ptrees: List[Tree]): List[Tree] = parents match - case parent :: parents1 => - val psym = parent.classSymbol - def hasSameParent(ptree: Tree) = ptree.tpe.classSymbol == psym - ptrees match - case ptree :: ptrees1 if hasSameParent(ptree) => - ptree :: parentTrees(parents1, ptrees1) - case ptree :: ptrees1 if ptrees1.exists(hasSameParent) => - ptree :: parentTrees(parents, ptrees1) - case _ => - var added: Tree = TypeTree(parent).withSpan(cdef.nameSpan.focus) - if psym.is(Trait) && psym.primaryConstructor.info.takesImplicitParams then - // classes get a constructor separately using a different context - added = ensureConstrCall(cls, added)(using superCtx) - added :: parentTrees(parents1, ptrees) - case _ => - ptrees + def parentTrees(parents: List[Type], ptrees: List[Tree]): List[Tree] = + if ptrees.exists(_.tpe.isError) then ptrees + else parents match + case parent :: parents1 => + val psym = parent.classSymbol + def hasSameParent(ptree: Tree) = + psym == ( + if ptree.symbol.isConstructor then ptree.symbol.owner + else ptree.tpe.classSymbol + ) + ptrees match + case ptree :: ptrees1 if hasSameParent(ptree) => + ptree :: parentTrees(parents1, ptrees1) + case ptree :: ptrees1 if ptrees1.exists(hasSameParent) => + ptree :: parentTrees(parents, ptrees1) + case _ => + val added: Tree = ensureConstrCall( + cls, TypeTree(parent).withSpan(cdef.nameSpan.focus), psym)(using superCtx) + added :: parentTrees(parents1, ptrees) + case _ => + ptrees /** Checks if one of the decls is a type with the same name as class type member in selfType */ def classExistsOnSelf(decls: Scope, self: tpd.ValDef): Boolean = { @@ -2538,10 +2517,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer ensureCorrectSuperClass() completeAnnotations(cdef, cls) val constr1 = typed(constr).asInstanceOf[DefDef] - val parents0 = parentTrees( + val parents1 = parentTrees( cls.classInfo.declaredParents, 
parents.mapconserve(typedParent).filterConserve(!_.isEmpty)) - val parents1 = ensureConstrCall(cls, parents0)(using superCtx) val firstParentTpe = parents1.head.tpe.dealias val firstParent = firstParentTpe.typeSymbol @@ -2610,23 +2588,22 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer protected def addAccessorDefs(cls: Symbol, body: List[Tree])(using Context): List[Tree] = PrepareInlineable.addAccessorDefs(cls, body) - /** If this is a real class, make sure its first parent is a - * constructor call. Cannot simply use a type. Overridden in ReTyper. + /** Turn a parent type into a constructor call where needed. This is the case where + * - we are in a Scala class or module (not a Java class, nor a trait), and + * - the parent symbol is a non-trait class, or + * - the parent symbol is a trait that takes at least one (explicit or implicit) parameter + * and the parent symbol is directly extended by the current class (i.e. not + * extended by the superclass). */ - def ensureConstrCall(cls: ClassSymbol, parents: List[Tree])(using Context): List[Tree] = parents match - case parents @ (first :: others) => - parents.derivedCons(ensureConstrCall(cls, first), others) - case parents => - parents - - /** If this is a real class, make sure its first parent is a - * constructor call. Cannot simply use a type. Overridden in ReTyper. - */ - def ensureConstrCall(cls: ClassSymbol, parent: Tree)(using Context): Tree = - if (parent.isType && !cls.is(Trait) && !cls.is(JavaDefined)) - typed(untpd.New(untpd.TypedSplice(parent), Nil)) - else - parent + def ensureConstrCall(cls: ClassSymbol, parent: Tree, psym: Symbol)(using Context): Tree = + if parent.isType && !cls.is(Trait) && !cls.is(JavaDefined) && psym.isClass + // Annotations are represented as traits with constructors, but should + // never be called as such outside of annotation trees. 
+ && !psym.is(JavaAnnotation) + && (!psym.is(Trait) + || psym.primaryConstructor.info.takesParams && !cls.superClass.isSubClass(psym)) + then typed(untpd.New(untpd.TypedSplice(parent), Nil)) + else parent def localDummy(cls: ClassSymbol, impl: untpd.Template)(using Context): Symbol = newLocalDummy(cls, impl.span) @@ -2693,17 +2670,20 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // Package will not exist if a duplicate type has already been entered, see `tests/neg/1708.scala` errorTree(tree, if pkg.exists then PackageNameAlreadyDefined(pkg) - else i"package ${tree.pid.name} does not exist") + else i"package ${tree.pid.name} does not exist".toMessage) end typedPackageDef def typedAnnotated(tree: untpd.Annotated, pt: Type)(using Context): Tree = { - val annot1 = typedExpr(tree.annot, defn.AnnotationClass.typeRef) - if Annotations.annotClass(annot1) == defn.NowarnAnnot then + val annot1 = checkAnnotClass(typedExpr(tree.annot)) + val annotCls = Annotations.annotClass(annot1) + if annotCls == defn.NowarnAnnot then registerNowarn(annot1, tree) val arg1 = typed(tree.arg, pt) if (ctx.mode is Mode.Type) { val cls = annot1.symbol.maybeOwner - if cls == defn.RetainsAnnot || cls == defn.RetainsByNameAnnot then + if Feature.ccEnabled + && (cls == defn.RetainsAnnot || cls == defn.RetainsByNameAnnot) + then CheckCaptures.checkWellformed(annot1) if arg1.isType then assignType(cpy.Annotated(tree)(arg1, annot1), arg1, annot1) @@ -2843,7 +2823,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val tupleXXLobj = untpd.ref(defn.TupleXXLModule.termRef) val app = untpd.cpy.Apply(tree)(tupleXXLobj, elems.map(untpd.TypedSplice(_))) .withSpan(tree.span) - val app1 = typed(app, defn.TupleXXLClass.typeRef) + val app1 = typed(app, if ctx.mode.is(Mode.Pattern) then pt else defn.TupleXXLClass.typeRef) if (ctx.mode.is(Mode.Pattern)) app1 else { val elemTpes = elems.lazyZip(pts).map((elem, pt) => @@ -3446,42 +3426,59 @@ class Typer(@constructorOnly 
nestingLevel: Int = 0) extends Namer ErrorReporting.missingArgs(tree, mt) tree.withType(mt.resultType) - def adaptOverloaded(ref: TermRef) = { + def adaptOverloaded(ref: TermRef) = + // get all the alternatives val altDenots = val allDenots = ref.denot.alternatives if pt.isExtensionApplyProto then allDenots.filter(_.symbol.is(ExtensionMethod)) else allDenots + typr.println(i"adapt overloaded $ref with alternatives ${altDenots map (_.info)}%\n\n %") + + /** Search for an alternative that does not take parameters. + * If there is one, return it, otherwise emit an error. + */ + def tryParameterless(alts: List[TermRef])(error: => tpd.Tree): Tree = + alts.filter(_.info.isParameterless) match + case alt :: Nil => readaptSimplified(tree.withType(alt)) + case _ => + if altDenots.exists(_.info.paramInfoss == ListOfNil) then + typed(untpd.Apply(untpd.TypedSplice(tree), Nil), pt, locked) + else + error + def altRef(alt: SingleDenotation) = TermRef(ref.prefix, ref.name, alt) val alts = altDenots.map(altRef) - resolveOverloaded(alts, pt) match { + + resolveOverloaded(alts, pt) match case alt :: Nil => readaptSimplified(tree.withType(alt)) case Nil => - // If alternative matches, there are still two ways to recover: + // If no alternative matches, there are still two ways to recover: // 1. If context is an application, try to insert an apply or implicit // 2. If context is not an application, pick an alternative that does // not take parameters.
- def noMatches = - errorTree(tree, NoMatchingOverload(altDenots, pt)) - def hasEmptyParams(denot: SingleDenotation) = denot.info.paramInfoss == ListOfNil - pt match { + + def errorNoMatch = errorTree(tree, NoMatchingOverload(altDenots, pt)) + + pt match case pt: FunOrPolyProto if pt.applyKind != ApplyKind.Using => // insert apply or convert qualifier, but only for a regular application - tryInsertApplyOrImplicit(tree, pt, locked)(noMatches) + tryInsertApplyOrImplicit(tree, pt, locked)(errorNoMatch) case _ => - alts.filter(_.info.isParameterless) match { - case alt :: Nil => readaptSimplified(tree.withType(alt)) - case _ => - if (altDenots exists (_.info.paramInfoss == ListOfNil)) - typed(untpd.Apply(untpd.TypedSplice(tree), Nil), pt, locked) - else - noMatches - } - } + tryParameterless(alts)(errorNoMatch) + case ambiAlts => - if tree.tpe.isErroneous || pt.isErroneous then tree.withType(UnspecifiedErrorType) - else + // If there are ambiguous alternatives, and: + // 1. the types aren't erroneous + // 2. the expected type is not a function type + // 3. there exists a parameterless alternative + // + // Then, pick the parameterless alternative. + // See tests/pos/i10715-scala and tests/pos/i10715-java.
+ + /** Constructs an "ambiguous overload" error */ + def errorAmbiguous = val remainingDenots = altDenots.filter(denot => ambiAlts.contains(altRef(denot))) val addendum = if ambiAlts.exists(!_.symbol.exists) then @@ -3490,8 +3487,19 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer |Note: Overloaded definitions introduced by refinements cannot be resolved""" else "" errorTree(tree, AmbiguousOverload(tree, remainingDenots, pt, addendum)) - } - } + end errorAmbiguous + + if tree.tpe.isErroneous || pt.isErroneous then + tree.withType(UnspecifiedErrorType) + else + pt match + case _: FunProto => + errorAmbiguous + case _ => + tryParameterless(alts)(errorAmbiguous) + + end match + end adaptOverloaded def adaptToArgs(wtp: Type, pt: FunProto): Tree = wtp match { case wtp: MethodOrPoly => @@ -3699,6 +3707,16 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer || Feature.warnOnMigration(MissingEmptyArgumentList(sym.show), tree.srcPos, version = `3.0`) && { patch(tree.span.endPos, "()"); true } + /** If this is a selection prototype of the form `.apply(...): R`, return the nested + * function prototype `(...)R`. Otherwise `pt`. 
+ */ + def ptWithoutRedundantApply: Type = pt.revealIgnored match + case SelectionProto(nme.apply, mpt, _, _) => + mpt.revealIgnored match + case fpt: FunProto => fpt + case _ => pt + case _ => pt + // Reasons NOT to eta expand: // - we reference a constructor // - we reference a typelevel method @@ -3710,13 +3728,18 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer && !ctx.mode.is(Mode.Pattern) && !(isSyntheticApply(tree) && !functionExpected) then - if (!defn.isFunctionType(pt)) - pt match { - case SAMType(_) if !pt.classSymbol.hasAnnotation(defn.FunctionalInterfaceAnnot) => - report.warning(ex"${tree.symbol} is eta-expanded even though $pt does not have the @FunctionalInterface annotation.", tree.srcPos) - case _ => - } - simplify(typed(etaExpand(tree, wtp, arity), pt), pt, locked) + val pt1 = ptWithoutRedundantApply + if pt1 ne pt then + // Ignore `.apply` in `m.apply(...)`; it will later be simplified in typedSelect to `m(...)` + adapt1(tree, pt1, locked) + else + if (!defn.isFunctionType(pt)) + pt match { + case SAMType(_) if !pt.classSymbol.hasAnnotation(defn.FunctionalInterfaceAnnot) => + report.warning(ex"${tree.symbol} is eta-expanded even though $pt does not have the @FunctionalInterface annotation.", tree.srcPos) + case _ => + } + simplify(typed(etaExpand(tree, wtp, arity), pt), pt, locked) else if (wtp.paramInfos.isEmpty && isAutoApplied(tree.symbol)) readaptSimplified(tpd.Apply(tree, Nil)) else if (wtp.isImplicitMethod) @@ -3787,7 +3810,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer adaptToSubType(wtp) case CompareResult.OKwithGADTUsed if pt.isValueType - && !inContext(ctx.fresh.setGadt(EmptyGadtConstraint)) { + && !inContext(ctx.fresh.setGadt(GadtConstraint.empty)) { val res = (tree.tpe.widenExpr frozen_<:< pt) if res then // we overshot; a cast is not needed, after all. 
@@ -3825,11 +3848,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def adaptNoArgs(wtp: Type): Tree = { val ptNorm = underlyingApplied(pt) def functionExpected = defn.isFunctionType(ptNorm) - def needsEta = pt match { - case _: SingletonType => false - case IgnoredProto(_: FunOrPolyProto) => false + def needsEta = pt.revealIgnored match + case _: SingletonType | _: FunOrPolyProto => false case _ => true - } var resMatch: Boolean = false wtp match { case wtp: ExprType => @@ -3846,17 +3867,16 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case wtp: MethodType if needsEta => val funExpected = functionExpected val arity = - if (funExpected) - if (!isFullyDefined(pt, ForceDegree.none) && isFullyDefined(wtp, ForceDegree.none)) + if funExpected then + if !isFullyDefined(pt, ForceDegree.none) && isFullyDefined(wtp, ForceDegree.none) then // if method type is fully defined, but expected type is not, // prioritize method parameter types as parameter types of the eta-expanded closure 0 else defn.functionArity(ptNorm) - else { + else val nparams = wtp.paramInfos.length - if (nparams > 0 || pt.eq(AnyFunctionProto)) nparams + if nparams > 0 || pt.eq(AnyFunctionProto) then nparams else -1 // no eta expansion in this case - } adaptNoArgsUnappliedMethod(wtp, funExpected, arity) case _ => adaptNoArgsOther(wtp, functionExpected) @@ -4032,6 +4052,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer cpy.Select(qual)(pre, name.toTypeName) case qual: This if qual.symbol.is(ModuleClass) => cpy.Ident(qual)(qual.symbol.name.sourceModuleName.toTypeName) + case _ => + errorTree(tree, em"cannot convert from $tree to an instance creation expression") val tycon = tree.tpe.widen.finalResultType.underlyingClassRef(refinementOK = false) typed( untpd.Select( diff --git a/compiler/src/dotty/tools/dotc/util/Stats.scala b/compiler/src/dotty/tools/dotc/util/Stats.scala index 60465e519452..f04957f26400 100644 --- 
a/compiler/src/dotty/tools/dotc/util/Stats.scala +++ b/compiler/src/dotty/tools/dotc/util/Stats.scala @@ -55,7 +55,7 @@ import collection.mutable } def maybeMonitored[T](op: => T)(using Context): T = - if (ctx.settings.YdetailedStats.value) { + if (ctx.settings.YdetailedStats.value && hits.nonEmpty) { monitored = true try op finally { diff --git a/compiler/src/dotty/tools/package.scala b/compiler/src/dotty/tools/package.scala index 79488c70cf6b..4ec9ae40c421 100644 --- a/compiler/src/dotty/tools/package.scala +++ b/compiler/src/dotty/tools/package.scala @@ -18,7 +18,7 @@ package object tools { * Flow-typing under explicit nulls will automatically insert many necessary * occurrences of uncheckedNN. */ - inline def uncheckedNN: T = x.asInstanceOf[T] + transparent inline def uncheckedNN: T = x.asInstanceOf[T] inline def toOption: Option[T] = if x == null then None else Some(x.asInstanceOf[T]) @@ -42,4 +42,11 @@ package object tools { def unreachable(x: Any = "<< this case was declared unreachable >>"): Nothing = throw new MatchError(x) + + transparent inline def assertShort(inline assertion: Boolean, inline message: Any = null): Unit = + if !assertion then + val msg = message + val e = if msg == null then AssertionError() else AssertionError("assertion failed: " + msg) + e.setStackTrace(Array()) + throw e } diff --git a/compiler/src/dotty/tools/repl/JLineTerminal.scala b/compiler/src/dotty/tools/repl/JLineTerminal.scala index 9da12ae955d1..8e048d786ae1 100644 --- a/compiler/src/dotty/tools/repl/JLineTerminal.scala +++ b/compiler/src/dotty/tools/repl/JLineTerminal.scala @@ -16,7 +16,7 @@ import org.jline.reader.impl.history.DefaultHistory import org.jline.terminal.TerminalBuilder import org.jline.utils.AttributedString -final class JLineTerminal extends java.io.Closeable { +class JLineTerminal extends java.io.Closeable { // import java.util.logging.{Logger, Level} // Logger.getLogger("org.jline").setLevel(Level.FINEST) @@ -30,7 +30,8 @@ final class JLineTerminal 
extends java.io.Closeable { private def blue(str: String)(using Context) = if (ctx.settings.color.value != "never") Console.BLUE + str + Console.RESET else str - private def prompt(using Context) = blue("\nscala> ") + protected def promptStr = "scala" + private def prompt(using Context) = blue(s"\n$promptStr> ") private def newLinePrompt(using Context) = blue(" | ") /** Blockingly read line from `System.in` diff --git a/compiler/src/dotty/tools/repl/Rendering.scala b/compiler/src/dotty/tools/repl/Rendering.scala index 608ca23c5fec..5cba672ce7b0 100644 --- a/compiler/src/dotty/tools/repl/Rendering.scala +++ b/compiler/src/dotty/tools/repl/Rendering.scala @@ -28,12 +28,10 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): import Rendering._ - private val MaxStringElements: Int = 1000 // no need to mkString billions of elements - - private var myClassLoader: AbstractFileClassLoader = _ - - private var myReplStringOf: Object => String = _ + var myClassLoader: AbstractFileClassLoader = _ + /** (value, maxElements, maxCharacters) => String */ + var myReplStringOf: (Object, Int, Int) => String = _ /** Class loader used to load compiled code */ private[repl] def classLoader()(using Context) = @@ -59,40 +57,50 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): // `ScalaRunTime.replStringOf`. Probe for new API without extraneous newlines. // For old API, try to clean up extraneous newlines by stripping suffix and maybe prefix newline. 
val scalaRuntime = Class.forName("scala.runtime.ScalaRunTime", true, myClassLoader) - val renderer = "stringOf" // was: replStringOf - try { - val meth = scalaRuntime.getMethod(renderer, classOf[Object], classOf[Int], classOf[Boolean]) - val truly = java.lang.Boolean.TRUE - - (value: Object) => meth.invoke(null, value, Integer.valueOf(MaxStringElements), truly).asInstanceOf[String] - } catch { - case _: NoSuchMethodException => - val meth = scalaRuntime.getMethod(renderer, classOf[Object], classOf[Int]) + val renderer = "stringOf" + def stringOfMaybeTruncated(value: Object, maxElements: Int): String = { + try { + val meth = scalaRuntime.getMethod(renderer, classOf[Object], classOf[Int], classOf[Boolean]) + val truly = java.lang.Boolean.TRUE + meth.invoke(null, value, maxElements, truly).asInstanceOf[String] + } catch { + case _: NoSuchMethodException => + val meth = scalaRuntime.getMethod(renderer, classOf[Object], classOf[Int]) + meth.invoke(null, value, maxElements).asInstanceOf[String] + } + } - (value: Object) => meth.invoke(null, value, Integer.valueOf(MaxStringElements)).asInstanceOf[String] + (value: Object, maxElements: Int, maxCharacters: Int) => { + // `ScalaRuntime.stringOf` may truncate the output, in which case we want to indicate that fact to the user + // In order to figure out if it did get truncated, we invoke it twice - once with the `maxElements` that we + // want to print, and once without a limit. If the first is shorter, truncation did occur. + val notTruncated = stringOfMaybeTruncated(value, Int.MaxValue) + val maybeTruncatedByElementCount = stringOfMaybeTruncated(value, maxElements) + val maybeTruncated = truncate(maybeTruncatedByElementCount, maxCharacters) + + // our string representation may have been truncated by element and/or character count + // if so, append an info string - but only once + if (notTruncated.length == maybeTruncated.length) maybeTruncated + else s"$maybeTruncated ... 
large output truncated, print value to show all" } + } myClassLoader } - /** Used to elide long output in replStringOf. - * - * TODO: Perhaps implement setting scala.repl.maxprintstring as in Scala 2, but - * then this bug will surface, so perhaps better not? - * https://github.com/scala/bug/issues/12337 - */ - private[repl] def truncate(str: String): String = - val showTruncated = " ... large output truncated, print value to show all" + private[repl] def truncate(str: String, maxPrintCharacters: Int)(using ctx: Context): String = val ncp = str.codePointCount(0, str.length) // to not cut inside code point - if ncp <= MaxStringElements then str - else str.substring(0, str.offsetByCodePoints(0, MaxStringElements - 1)) + showTruncated + if ncp <= maxPrintCharacters then str + else str.substring(0, str.offsetByCodePoints(0, maxPrintCharacters - 1)) /** Return a String representation of a value we got from `classLoader()`. */ private[repl] def replStringOf(value: Object)(using Context): String = assert(myReplStringOf != null, "replStringOf should only be called on values creating using `classLoader()`, but `classLoader()` has not been called so far") - val res = myReplStringOf(value) - if res == null then "null // non-null reference has null-valued toString" else truncate(res) + val maxPrintElements = ctx.settings.VreplMaxPrintElements.valueIn(ctx.settingsState) + val maxPrintCharacters = ctx.settings.VreplMaxPrintCharacters.valueIn(ctx.settingsState) + val res = myReplStringOf(value, maxPrintElements, maxPrintCharacters) + if res == null then "null // non-null reference has null-valued toString" else res /** Load the value of the symbol using reflection. 
* diff --git a/compiler/src/dotty/tools/repl/ReplDriver.scala b/compiler/src/dotty/tools/repl/ReplDriver.scala index 4fab4b119a08..f076333cf449 100644 --- a/compiler/src/dotty/tools/repl/ReplDriver.scala +++ b/compiler/src/dotty/tools/repl/ReplDriver.scala @@ -118,7 +118,7 @@ class ReplDriver(settings: Array[String], private var rootCtx: Context = _ private var shouldStart: Boolean = _ private var compiler: ReplCompiler = _ - private var rendering: Rendering = _ + protected var rendering: Rendering = _ // initialize the REPL session as part of the constructor so that once `run` // is called, we're in business @@ -138,7 +138,7 @@ class ReplDriver(settings: Array[String], * observable outside of the CLI, for this reason, most helper methods are * `protected final` to facilitate testing. */ - final def runUntilQuit(using initialState: State = initialState)(): State = { + def runUntilQuit(using initialState: State = initialState)(): State = { val terminal = new JLineTerminal out.println( @@ -176,7 +176,12 @@ class ReplDriver(settings: Array[String], interpret(ParseResult.complete(input)) } - private def runBody(body: => State): State = rendering.classLoader()(using rootCtx).asContext(withRedirectedOutput(body)) + final def runQuietly(input: String)(using State): State = runBody { + val parsed = ParseResult(input) + interpret(parsed, quiet = true) + } + + protected def runBody(body: => State): State = rendering.classLoader()(using rootCtx).asContext(withRedirectedOutput(body)) // TODO: i5069 final def bind(name: String, value: Any)(using state: State): State = state @@ -242,10 +247,10 @@ class ReplDriver(settings: Array[String], .getOrElse(Nil) end completions - private def interpret(res: ParseResult)(using state: State): State = { + protected def interpret(res: ParseResult, quiet: Boolean = false)(using state: State): State = { res match { case parsed: Parsed if parsed.trees.nonEmpty => - compile(parsed, state) + compile(parsed, state, quiet) case SyntaxErrors(_, errs, 
_) => displayErrors(errs) @@ -263,7 +268,7 @@ class ReplDriver(settings: Array[String], } /** Compile `parsed` trees and evolve `state` in accordance */ - private def compile(parsed: Parsed, istate: State): State = { + private def compile(parsed: Parsed, istate: State, quiet: Boolean = false): State = { def extractNewestWrapper(tree: untpd.Tree): Name = tree match { case PackageDef(_, (obj: untpd.ModuleDef) :: Nil) => obj.name.moduleClassName case _ => nme.NO_NAME @@ -314,9 +319,11 @@ class ReplDriver(settings: Array[String], given Ordering[Diagnostic] = Ordering[(Int, Int, Int)].on(d => (d.pos.line, -d.level, d.pos.column)) - (definitions ++ warnings) - .sorted - .foreach(printDiagnostic) + if (!quiet) { + (definitions ++ warnings) + .sorted + .foreach(printDiagnostic) + } updatedState } diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala index f8e439baeb0e..3cab93f247ae 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala @@ -2765,6 +2765,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def Invisible: Flags = dotc.core.Flags.Invisible def JavaDefined: Flags = dotc.core.Flags.JavaDefined def JavaStatic: Flags = dotc.core.Flags.JavaStatic + def JavaAnnotation: Flags = dotc.core.Flags.JavaAnnotation def Lazy: Flags = dotc.core.Flags.Lazy def Local: Flags = dotc.core.Flags.Local def Macro: Flags = dotc.core.Flags.Macro diff --git a/compiler/test-resources/repl/i11377 b/compiler/test-resources/repl/i11377 deleted file mode 100644 index 4e971fb89749..000000000000 --- a/compiler/test-resources/repl/i11377 +++ /dev/null @@ -1,14 +0,0 @@ -scala> val smallArray = Array.fill(100)(0) -val smallArray: Array[Int] = Array(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) -scala> val bigArray = Array.fill(10000)(0) -val bigArray: Array[Int] = Array(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ... large output truncated, print value to show all -scala> val notTruncated = "!" 
* 999 -val notTruncated: String = !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -scala> val onTruncationLimit = "!" 
* 1000 -val onTruncationLimit: String = !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -scala> val onTruncationLimitPlus = "!" 
* 1001 -val onTruncationLimitPlus: String = !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! ... large output truncated, print value to show all -scala> val veryBigTruncated = "!" 
* 10000 -val veryBigTruncated: String = !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! ... 
large output truncated, print value to show all -scala> val beh = "\u08A0"*10000 -val beh: String = ࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠ ... large output truncated, print value to show all diff --git a/compiler/test-resources/repl/settings-repl-disable-display b/compiler/test-resources/repl/settings-repl-disable-display new file mode 100644 index 000000000000..ba2c1c64574b --- /dev/null +++ b/compiler/test-resources/repl/settings-repl-disable-display @@ -0,0 +1,12 @@ +scala> 1 +val res0: Int = 1 + +scala>:settings -Xrepl-disable-display + +scala> 2 + +scala>:reset +Resetting REPL state. 
+ +scala> 3 +val res0: Int = 3 \ No newline at end of file diff --git a/compiler/test-resources/repl/settings-repl-max-print-both-truncation-settings b/compiler/test-resources/repl/settings-repl-max-print-both-truncation-settings new file mode 100644 index 000000000000..a7f7d6c10dd6 --- /dev/null +++ b/compiler/test-resources/repl/settings-repl-max-print-both-truncation-settings @@ -0,0 +1,10 @@ +scala> Seq(1,2,3) +val res0: Seq[Int] = List(1, 2, 3) + +scala>:settings -Vrepl-max-print-elements:2 + +scala>:settings -Vrepl-max-print-characters:50 + +scala> Seq(1,2,3) +val res1: Seq[Int] = List(1, 2) ... large output truncated, print value to show all + diff --git a/compiler/test-resources/repl/settings-repl-max-print-characters b/compiler/test-resources/repl/settings-repl-max-print-characters new file mode 100644 index 000000000000..9263680b95cc --- /dev/null +++ b/compiler/test-resources/repl/settings-repl-max-print-characters @@ -0,0 +1,7 @@ +scala> 1.to(10).mkString +val res0: String = 12345678910 + +scala>:settings -Vrepl-max-print-characters:10 + +scala> 1.to(10).mkString +val res1: String = 123456789 ... 
large output truncated, print value to show all diff --git a/compiler/test-resources/repl/settings-repl-max-print-elements b/compiler/test-resources/repl/settings-repl-max-print-elements new file mode 100644 index 000000000000..b203e689f020 --- /dev/null +++ b/compiler/test-resources/repl/settings-repl-max-print-elements @@ -0,0 +1,7 @@ +scala> 1.to(200).toList +val res0: List[Int] = List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200) + +scala>:settings -Vrepl-max-print-elements:20 + +scala> 1.to(300).toList +val res1: List[Int] = List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20) ... 
large output truncated, print value to show all diff --git a/compiler/test-resources/type-printer/source-compatible b/compiler/test-resources/type-printer/source-compatible new file mode 100644 index 000000000000..d0773a11a795 --- /dev/null +++ b/compiler/test-resources/type-printer/source-compatible @@ -0,0 +1,17 @@ +scala> case class Bag() extends reflect.Selectable +// defined case class Bag +scala> val m = new Bag { val f = 23; def g = 47; def h(i: Int): Int = i; var i = 101; type N = Int; val l = List(42); def p[T](t: T) = t.toString() } +val m: + Bag{ + val f: Int; def g: Int; def h(i: Int): Int; val i: Int; + def i_=(x$1: Int): Unit; type N = Int; val l: List[Int]; + def p[T](t: T): String + } = Bag() +scala> type t = Bag { val f: Int; def g: Int; def h(i: Int): Int; val i: Int; def i_=(x$1: Int): Unit; type N = Int; val l: List[Int]; val s: String @unchecked } +// defined alias type t + = + Bag{ + val f: Int; def g: Int; def h(i: Int): Int; val i: Int; + def i_=(x$1: Int): Unit; type N = Int; val l: List[Int]; + val s: String @unchecked + } diff --git a/compiler/test/dotc/comptest.scala b/compiler/test/dotc/comptest.scala index bd0d800e641c..fb53f561a94d 100644 --- a/compiler/test/dotc/comptest.scala +++ b/compiler/test/dotc/comptest.scala @@ -12,6 +12,7 @@ object comptest extends ParallelTesting { def isInteractive = true def testFilter = Nil def updateCheckFiles: Boolean = false + def failedTests = None val posDir = "./tests/pos/" val negDir = "./tests/neg/" diff --git a/compiler/test/dotc/pos-lazy-vals-tests.allowlist b/compiler/test/dotc/pos-lazy-vals-tests.allowlist new file mode 100644 index 000000000000..21667a9265d7 --- /dev/null +++ b/compiler/test/dotc/pos-lazy-vals-tests.allowlist @@ -0,0 +1,37 @@ +Repeated.scala +byname-implicits-8.scala +existentials.scala +i1235.scala +i13332super.scala +i13349.scala +i13460.scala +i14626.scala +i1753.scala +i4031.scala +i4328.scala +i6450.scala +i6565.scala +i8031.scala +i8111.scala +i8900-unflip.scala 
+lazyvals.scala +singletons.scala +spec-traits.scala +spurious-overload.scala +t1591_pos.scala +t2910.scala +t3411.scala +t3420.scala +t3452f.scala +t3670.scala +t3927.scala +t4432.scala +t4716.scala +t4717.scala +t5099.scala +t5796.scala +t6278-synth-def.scala +t6925b.scala +t7011.scala +t8306.scala +zipped.scala diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index 71292f4590b1..30126f07b49a 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -2,7 +2,6 @@ i94-nada.scala i1812.scala i1867.scala i3067.scala -t247.scala t2712-5.scala t284-pos.scala t3249 @@ -19,6 +18,7 @@ i6507b.scala i12299a.scala i13871.scala i15181.scala +i15922.scala # Tree is huge and blows stack for printing Text i7034.scala @@ -78,6 +78,12 @@ i2797a # allows to simplify a type that was already computed i13842.scala +# Position change under captureChecking +boxmap-paper.scala + +# Function types print differnt after unpickling since test mispredicts Feature.preFundsEnabled +caps-universal.scala + # GADT cast applied to singleton type difference i4176-gadt.scala diff --git a/compiler/test/dotc/run-lazy-vals-tests.allowlist b/compiler/test/dotc/run-lazy-vals-tests.allowlist new file mode 100644 index 000000000000..98973dc2893d --- /dev/null +++ b/compiler/test/dotc/run-lazy-vals-tests.allowlist @@ -0,0 +1,66 @@ +IArrayOps.scala +Lazies1.scala +Lazies2.scala +OrderingTest.scala +anon-mirror-gen-local.scala +byname-implicits-28.scala +byname-implicits-30.scala +byname-implicits-5.scala +exports.scala +i13146.scala +i13146a.scala +i13332a.scala +i13332shapeless.scala +i13358.scala +i1692.scala +i1692b.scala +i1856.scala +i2266.scala +i2275.scala +i4451.scala +i4559.scala +i5340.scala +i5350.scala +i7675.scala +i9473.scala +isInstanceOf-eval.scala +lazy-exprs.scala +lazy-impl.scala +lazy-implicit-lists.scala +lazy-override-run.scala +lazy-traits.scala +lazyVals.scala 
+lazyVals_c3.0.0.scala +lazyVals_c3.1.0.scala +nonLocalReturns.scala +nothing-lazy-val.scala +null-lazy-val.scala +patmatch-classtag.scala +priorityQueue.scala +serialization-new-legacy.scala +serialization-new.scala +singletons.scala +statics.scala +stream_flatmap_odds.scala +t1535.scala +t1591.scala +t2333.scala +t3038.scala +t3670.scala +t3699.scala +t3877.scala +t3895.scala +t3980.scala +t429.scala +t5552.scala +t5610a.scala +t603.scala +t6272.scala +t6443-by-name.scala +t6443-varargs.scala +t704.scala +t7406.scala +t8245.scala +unapply.scala +unit-lazy-val.scala +view-iterator-stream.scala diff --git a/compiler/test/dotty/Properties.scala b/compiler/test/dotty/Properties.scala index f4e0ed5f615f..71569f2f0e08 100644 --- a/compiler/test/dotty/Properties.scala +++ b/compiler/test/dotty/Properties.scala @@ -13,6 +13,10 @@ object Properties { prop == null || prop == "TRUE" } + /** If property is unset or FALSE we consider it `false` */ + private def propIsTrue(name: String): Boolean = + sys.props.getOrElse(name, "FALSE") == "TRUE" + /** Are we running on the CI? 
*/ val isRunByCI: Boolean = sys.env.isDefinedAt("DOTTY_CI_RUN") || sys.env.isDefinedAt("DRONE") // TODO remove this when we drop Drone @@ -30,9 +34,11 @@ object Properties { */ val testsFilter: List[String] = sys.props.get("dotty.tests.filter").fold(Nil)(_.split(',').toList) + /** Run only failed tests */ + val rerunFailed: Boolean = propIsTrue("dotty.tests.rerunFailed") + /** Tests should override the checkfiles with the current output */ - val testsUpdateCheckfile: Boolean = - sys.props.getOrElse("dotty.tests.updateCheckfiles", "FALSE") == "TRUE" + val testsUpdateCheckfile: Boolean = propIsTrue("dotty.tests.updateCheckfiles") /** When set, the run tests are only compiled - not run, a warning will be * issued diff --git a/compiler/test/dotty/tools/AnnotationsTests.scala b/compiler/test/dotty/tools/AnnotationsTests.scala index 59e9f3129294..3998bf7c93c0 100644 --- a/compiler/test/dotty/tools/AnnotationsTests.scala +++ b/compiler/test/dotty/tools/AnnotationsTests.scala @@ -89,3 +89,9 @@ class AnnotationsTest: s"A missing annotation while parsing a Java class should be silently ignored but: ${ctx.reporter.summary}") } } + + @Test def hasNativeAnnot: Unit = + inCompilerContext(TestConfiguration.basicClasspath) { + val term: TermSymbol = requiredClass("java.lang.invoke.MethodHandle").requiredMethod("invokeExact") + assert(term.hasAnnotation(defn.NativeAnnot), i"${term.annotations}") + } diff --git a/compiler/test/dotty/tools/TestSources.scala b/compiler/test/dotty/tools/TestSources.scala index c4d36b16c90b..6961a61b69b6 100644 --- a/compiler/test/dotty/tools/TestSources.scala +++ b/compiler/test/dotty/tools/TestSources.scala @@ -13,21 +13,26 @@ object TestSources { def posFromTastyBlacklistFile: String = "compiler/test/dotc/pos-from-tasty.blacklist" def posTestPicklingBlacklistFile: String = "compiler/test/dotc/pos-test-pickling.blacklist" - def posTestRecheckExcludesFile = "compiler/test/dotc/pos-test-recheck.excludes" + def posTestRecheckExcludesFile: String = 
"compiler/test/dotc/pos-test-recheck.excludes" + def posLazyValsAllowlistFile: String = "compiler/test/dotc/pos-lazy-vals-tests.allowlist" def posFromTastyBlacklisted: List[String] = loadList(posFromTastyBlacklistFile) def posTestPicklingBlacklisted: List[String] = loadList(posTestPicklingBlacklistFile) - def posTestRecheckExcluded = loadList(posTestRecheckExcludesFile) + def posTestRecheckExcluded: List[String] = loadList(posTestRecheckExcludesFile) + def posLazyValsAllowlist: List[String] = loadList(posLazyValsAllowlistFile) // run tests lists def runFromTastyBlacklistFile: String = "compiler/test/dotc/run-from-tasty.blacklist" def runTestPicklingBlacklistFile: String = "compiler/test/dotc/run-test-pickling.blacklist" - def runTestRecheckExcludesFile = "compiler/test/dotc/run-test-recheck.excludes" + def runTestRecheckExcludesFile: String = "compiler/test/dotc/run-test-recheck.excludes" + def runLazyValsAllowlistFile: String = "compiler/test/dotc/run-lazy-vals-tests.allowlist" + def runFromTastyBlacklisted: List[String] = loadList(runFromTastyBlacklistFile) def runTestPicklingBlacklisted: List[String] = loadList(runTestPicklingBlacklistFile) - def runTestRecheckExcluded = loadList(runTestRecheckExcludesFile) + def runTestRecheckExcluded: List[String] = loadList(runTestRecheckExcludesFile) + def runLazyValsAllowlist: List[String] = loadList(runLazyValsAllowlistFile) // load lists diff --git a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala index cce23cb5c9a6..e9d0e26f33b0 100644 --- a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala @@ -10,6 +10,7 @@ import org.junit.Assume._ import org.junit.experimental.categories.Category import scala.concurrent.duration._ +import reporting.TestReporter import vulpix._ import java.nio.file._ @@ -35,6 +36,12 @@ class 
BootstrappedOnlyCompilationTests { ).checkCompile() } + @Test def posWithCompilerCC: Unit = + implicit val testGroup: TestGroup = TestGroup("compilePosWithCompilerCC") + aggregateTests( + compileDir("tests/pos-with-compiler-cc/dotc", withCompilerOptions.and("-language:experimental.captureChecking")) + ).checkCompile() + @Test def posWithCompiler: Unit = { implicit val testGroup: TestGroup = TestGroup("compilePosWithCompiler") aggregateTests( @@ -214,6 +221,7 @@ object BootstrappedOnlyCompilationTests extends ParallelTesting { def isInteractive = SummaryReport.isInteractive def testFilter = Properties.testsFilter def updateCheckFiles: Boolean = Properties.testsUpdateCheckfile + def failedTests = TestReporter.lastRunFailedTests implicit val summaryReport: SummaryReporting = new SummaryReport @AfterClass def tearDown(): Unit = { diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index 8d7a16dad8a4..c1b465ad4a88 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -16,6 +16,7 @@ import scala.jdk.CollectionConverters._ import scala.util.matching.Regex import scala.concurrent.duration._ import TestSources.sources +import reporting.TestReporter import vulpix._ class CompilationTests { @@ -40,9 +41,11 @@ class CompilationTests { compileFilesInDir("tests/pos-special/isInstanceOf", allowDeepSubtypes.and("-Xfatal-warnings")), compileFilesInDir("tests/new", defaultOptions.and("-source", "3.2")), // just to see whether 3.2 works compileFilesInDir("tests/pos-scala2", scala2CompatMode), - compileFilesInDir("tests/pos-custom-args/captures", defaultOptions.and("-Ycc")), + compileFilesInDir("tests/pos-custom-args/captures", defaultOptions.and("-language:experimental.captureChecking")), compileFilesInDir("tests/pos-custom-args/erased", defaultOptions.and("-language:experimental.erasedDefinitions")), compileFilesInDir("tests/pos", 
defaultOptions.and("-Ysafe-init")), + // Run tests for experimental lightweight lazy vals + compileFilesInDir("tests/pos", defaultOptions.and("-Ysafe-init", "-Ylightweight-lazy-vals", "-Ycheck-constraint-deps"), FileFilter.include(TestSources.posLazyValsAllowlist)), compileFilesInDir("tests/pos-deep-subtype", allowDeepSubtypes), compileFilesInDir("tests/pos-custom-args/no-experimental", defaultOptions.and("-Yno-experimental")), compileDir("tests/pos-special/java-param-names", defaultOptions.withJavacOnlyOptions("-parameters")), @@ -140,7 +143,7 @@ class CompilationTests { compileFilesInDir("tests/neg-custom-args/allow-double-bindings", allowDoubleBindings), compileFilesInDir("tests/neg-custom-args/allow-deep-subtypes", allowDeepSubtypes), compileFilesInDir("tests/neg-custom-args/no-experimental", defaultOptions.and("-Yno-experimental")), - compileFilesInDir("tests/neg-custom-args/captures", defaultOptions.and("-Ycc")), + compileFilesInDir("tests/neg-custom-args/captures", defaultOptions.and("-language:experimental.captureChecking")), compileDir("tests/neg-custom-args/impl-conv", defaultOptions.and("-Xfatal-warnings", "-feature")), compileDir("tests/neg-custom-args/i13946", defaultOptions.and("-Xfatal-warnings", "-feature")), compileFile("tests/neg-custom-args/avoid-warn-deprecation.scala", defaultOptions.and("-Xfatal-warnings", "-feature")), @@ -185,7 +188,7 @@ class CompilationTests { compileFile("tests/neg-custom-args/deptypes.scala", defaultOptions.and("-language:experimental.dependent")), compileFile("tests/neg-custom-args/matchable.scala", defaultOptions.and("-Xfatal-warnings", "-source", "future")), compileFile("tests/neg-custom-args/i7314.scala", defaultOptions.and("-Xfatal-warnings", "-source", "future")), - compileFile("tests/neg-custom-args/capt-wf.scala", defaultOptions.and("-Ycc", "-Xfatal-warnings")), + compileFile("tests/neg-custom-args/capt-wf.scala", defaultOptions.and("-language:experimental.captureChecking", "-Xfatal-warnings")), 
compileFile("tests/neg-custom-args/feature-shadowing.scala", defaultOptions.and("-Xfatal-warnings", "-feature")), compileDir("tests/neg-custom-args/hidden-type-errors", defaultOptions.and("-explain")), compileFile("tests/neg-custom-args/i13026.scala", defaultOptions.and("-print-lines")), @@ -213,9 +216,11 @@ class CompilationTests { compileFilesInDir("tests/run-custom-args/fatal-warnings", defaultOptions.and("-Xfatal-warnings")), compileDir("tests/run-custom-args/Xmacro-settings/simple", defaultOptions.and("-Xmacro-settings:one,two,three")), compileDir("tests/run-custom-args/Xmacro-settings/compileTimeEnv", defaultOptions.and("-Xmacro-settings:a,b=1,c.b.a=x.y.z=1,myLogger.level=INFO")), - compileFilesInDir("tests/run-custom-args/captures", allowDeepSubtypes.and("-Ycc")), + compileFilesInDir("tests/run-custom-args/captures", allowDeepSubtypes.and("-language:experimental.captureChecking")), compileFilesInDir("tests/run-deep-subtype", allowDeepSubtypes), - compileFilesInDir("tests/run", defaultOptions.and("-Ysafe-init")) + compileFilesInDir("tests/run", defaultOptions.and("-Ysafe-init"), FileFilter.exclude("serialization-new.scala")), + // Run tests for experimental lightweight lazy vals and stable lazy vals. + compileFilesInDir("tests/run", defaultOptions.and("-Ysafe-init", "-Ylightweight-lazy-vals", "-Ycheck-constraint-deps"), FileFilter.include(TestSources.runLazyValsAllowlist)), ).checkRuns() } @@ -237,7 +242,8 @@ class CompilationTests { ).checkCompile() } - @Test def recheck: Unit = + //@Test disabled in favor of posWithCompilerCC to save time. 
+ def recheck: Unit = given TestGroup = TestGroup("recheck") aggregateTests( compileFilesInDir("tests/new", recheckOptions), @@ -313,6 +319,7 @@ object CompilationTests extends ParallelTesting { def isInteractive = SummaryReport.isInteractive def testFilter = Properties.testsFilter def updateCheckFiles: Boolean = Properties.testsUpdateCheckfile + def failedTests = TestReporter.lastRunFailedTests implicit val summaryReport: SummaryReporting = new SummaryReport @AfterClass def tearDown(): Unit = { diff --git a/compiler/test/dotty/tools/dotc/FromTastyTests.scala b/compiler/test/dotty/tools/dotc/FromTastyTests.scala index 2684a47b870c..1d46cbbce95c 100644 --- a/compiler/test/dotty/tools/dotc/FromTastyTests.scala +++ b/compiler/test/dotty/tools/dotc/FromTastyTests.scala @@ -5,6 +5,7 @@ package dotc import scala.language.unsafeNulls import org.junit.{AfterClass, Test} +import reporting.TestReporter import vulpix._ import java.io.{File => JFile} @@ -48,6 +49,7 @@ object FromTastyTests extends ParallelTesting { def isInteractive = SummaryReport.isInteractive def testFilter = Properties.testsFilter def updateCheckFiles: Boolean = Properties.testsUpdateCheckfile + def failedTests = TestReporter.lastRunFailedTests implicit val summaryReport: SummaryReporting = new SummaryReport @AfterClass def tearDown(): Unit = { diff --git a/compiler/test/dotty/tools/dotc/IdempotencyTests.scala b/compiler/test/dotty/tools/dotc/IdempotencyTests.scala index 84b3f1f8a48f..b515ebb05f96 100644 --- a/compiler/test/dotty/tools/dotc/IdempotencyTests.scala +++ b/compiler/test/dotty/tools/dotc/IdempotencyTests.scala @@ -12,6 +12,7 @@ import org.junit.{AfterClass, Test} import org.junit.experimental.categories.Category import scala.concurrent.duration._ +import reporting.TestReporter import vulpix._ @@ -76,6 +77,7 @@ object IdempotencyTests extends ParallelTesting { def isInteractive = SummaryReport.isInteractive def testFilter = Properties.testsFilter def updateCheckFiles: Boolean = 
Properties.testsUpdateCheckfile + def failedTests = TestReporter.lastRunFailedTests implicit val summaryReport: SummaryReporting = new SummaryReport @AfterClass def tearDown(): Unit = { diff --git a/compiler/test/dotty/tools/dotc/StringFormatterTest.scala b/compiler/test/dotty/tools/dotc/StringFormatterTest.scala index 7df64ad5bf3f..e745fa515443 100644 --- a/compiler/test/dotty/tools/dotc/StringFormatterTest.scala +++ b/compiler/test/dotty/tools/dotc/StringFormatterTest.scala @@ -22,6 +22,7 @@ class StringFormatterTest extends AbstractStringFormatterTest: @Test def flagsSeq = check(", final", i"${Seq(JavaStatic, Final)}%, %") @Test def flagsTup = check("(,final)", i"${(JavaStatic, Final)}") @Test def seqOfTup2 = check("(final,given), (private,lazy)", i"${Seq((Final, Given), (Private, Lazy))}%, %") + @Test def seqOfTup3 = check("(Foo,given, (right is approximated))", i"${Seq((Foo, Given, TypeComparer.ApproxState.None.addHigh))}%, %") class StorePrinter extends Printer: var string: String = "" @@ -76,13 +77,18 @@ class ExStringFormatterTest extends AbstractStringFormatterTest: |where: Foo is a type | Foo² is a type |""".stripMargin, ex"${(Foo, Foo)}") + @Test def seqOfTup3Amb = check("""[(Foo,Foo²,type Err)] + | + |where: Foo is a type + | Foo² is a type + |""".stripMargin, ex"${Seq((Foo, Foo, Err))}") end ExStringFormatterTest abstract class AbstractStringFormatterTest extends DottyTest: override def initializeCtx(fc: FreshContext) = super.initializeCtx(fc.setSetting(fc.settings.color, "never")) def Foo = newSymbol(defn.RootClass, typeName("Foo"), EmptyFlags, TypeBounds.empty).typeRef - def Err = newErrorSymbol(defn.RootClass, typeName("Err"), "") + def Err = newErrorSymbol(defn.RootClass, typeName("Err"), "".toMessage) def Big = (1 to 120).foldLeft(defn.StringType)((tp, i) => RefinedType(tp, typeName("A" * 69 + i), TypeAlias(defn.IntType))) def mkCstrd = diff --git a/compiler/test/dotty/tools/dotc/TastyBootstrapTests.scala 
b/compiler/test/dotty/tools/dotc/TastyBootstrapTests.scala index 9e71b10b206d..50e07f388dc4 100644 --- a/compiler/test/dotty/tools/dotc/TastyBootstrapTests.scala +++ b/compiler/test/dotty/tools/dotc/TastyBootstrapTests.scala @@ -17,6 +17,7 @@ import scala.util.matching.Regex import scala.concurrent.duration._ import TestSources.sources import vulpix._ +import reporting.TestReporter class TastyBootstrapTests { import ParallelTesting._ @@ -114,6 +115,7 @@ object TastyBootstrapTests extends ParallelTesting { def isInteractive = SummaryReport.isInteractive def testFilter = Properties.testsFilter def updateCheckFiles: Boolean = Properties.testsUpdateCheckfile + def failedTests = TestReporter.lastRunFailedTests implicit val summaryReport: SummaryReporting = new SummaryReport @AfterClass def tearDown(): Unit = { diff --git a/compiler/test/dotty/tools/dotc/TupleShowTests.scala b/compiler/test/dotty/tools/dotc/TupleShowTests.scala new file mode 100644 index 000000000000..2d76c480b001 --- /dev/null +++ b/compiler/test/dotty/tools/dotc/TupleShowTests.scala @@ -0,0 +1,96 @@ +package dotty.tools +package dotc + +import core.*, Decorators.*, Symbols.* +import printing.Texts.* + +import java.lang.System.{ lineSeparator => EOL } +import org.junit.Test + +class TupleShowTests extends DottyTest: + def IntType = defn.IntType + def LongType = defn.LongType + def ShortType = defn.ShortType + def Types_10 = List.fill(5)(IntType) ::: List.fill(5)(LongType) + def Types_20 = Types_10 ::: Types_10 + + val tup0 = defn.tupleType(Nil) + val tup1 = defn.tupleType(IntType :: Nil) + val tup2 = defn.tupleType(IntType :: LongType :: Nil) + val tup3 = defn.tupleType(IntType :: LongType :: ShortType :: Nil) + val tup21 = defn.tupleType(Types_20 ::: IntType :: Nil) + val tup22 = defn.tupleType(Types_20 ::: IntType :: LongType :: Nil) + val tup23 = defn.tupleType(Types_20 ::: IntType :: LongType :: ShortType :: Nil) + val tup24 = defn.tupleType(Types_20 ::: IntType :: LongType :: ShortType :: ShortType 
:: Nil) + + @Test def tup0_show = chkEq("EmptyTuple.type", i"$tup0") + @Test def tup1_show = chkEq("Tuple1[Int]", i"$tup1") + @Test def tup2_show = chkEq("(Int, Long)", i"$tup2") + @Test def tup3_show = chkEq("(Int, Long, Short)", i"$tup3") + @Test def tup21_show = chkEq(res21, i"$tup21") + @Test def tup22_show = chkEq(res22, i"$tup22") + @Test def tup23_show = chkEq(res23, i"$tup23") + @Test def tup24_show = chkEq(res24, i"$tup24") + + @Test def tup3_text = + val obt = tup3.toText(ctx.printer) + val exp = Fluid(List( + Str(")"), + Str("Short"), + Closed(List(Str(", "), Str("Long"))), + Closed(List(Str(", "), Str("Int"))), + Str("("), + )) + chkEq(exp, obt) + + @Test def tup3_layout10 = + val obt = tup3.toText(ctx.printer).layout(10) + val exp = Fluid(List( + Str(" Short)"), + Str(" Long, "), + Str("(Int, "), + )) + chkEq(exp, obt) + + @Test def tup3_show10 = chkEq("(Int,\n Long,\n Short)".normEOL, tup3.toText(ctx.printer).mkString(10, false)) + + val res21 = """|(Int, Int, Int, Int, Int, Long, Long, Long, Long, Long, Int, Int, Int, Int, + | Int, Long, Long, Long, Long, Long, Int)""".stripMargin.normEOL + + val res22 = """|(Int, Int, Int, Int, Int, Long, Long, Long, Long, Long, Int, Int, Int, Int, + | Int, Long, Long, Long, Long, Long, Int, Long)""".stripMargin.normEOL + + val res23 = """|(Int, Int, Int, Int, Int, Long, Long, Long, Long, Long, Int, Int, Int, Int, + | Int, Long, Long, Long, Long, Long, Int, Long, Short)""".stripMargin.normEOL + + val res24 = """|(Int, Int, Int, Int, Int, Long, Long, Long, Long, Long, Int, Int, Int, Int, + | Int, Long, Long, Long, Long, Long, Int, Long, Short, Short)""".stripMargin.normEOL + + def chkEq[A](expected: A, obtained: A) = assert(expected == obtained, diff(s"$expected", s"$obtained")) + + /** On Windows the string literal in this test source file will be read with `\n` (b/c of "-encoding UTF8") + * but the compiler will correctly emit \r\n as the line separator. 
+ * So we align the expected result to faithfully compare test results. */ + extension (str: String) def normEOL = if EOL == "\n" then str else str.replace("\n", EOL).nn + + def diff(exp: String, obt: String) = + val min = math.min(exp.length, obt.length) + val pre = + var i = 0 + while i < min && exp(i) == obt(i) do i += 1 + exp.take(i) + val suf = + val max = min - pre.length - 1 + var i = 0 + while i <= max && exp(exp.length - 1 - i) == obt(obt.length - 1 - i) do i += 1 + exp.drop(exp.length - 1) + + import scala.io.AnsiColor.* + val ellip = BLACK + BOLD + "..." + RESET + val compactPre = if pre.length <= 20 then pre else ellip + pre.drop(pre.length - 20) + val compactSuf = if suf.length <= 20 then suf else suf.take(20) + ellip + def extractDiff(s: String) = s.slice(pre.length, s.length - suf.length) + s"""|Comparison Failure: + | expected: $compactPre${CYAN }${extractDiff(exp)}$RESET$compactSuf + | obtained: $compactPre$MAGENTA${extractDiff(obt)}$RESET$compactSuf + |""".stripMargin diff --git a/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala b/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala index 05f218059f02..44cf83b521f4 100644 --- a/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala +++ b/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala @@ -6,6 +6,7 @@ import Settings._ import org.junit.Test import org.junit.Assert._ +import core.Decorators.toMessage class ScalaSettingsTests: @@ -72,14 +73,14 @@ class ScalaSettingsTests: val proc = sets.processArguments(sumy, processAll = true, skipped = Nil) val conf = sets.Wconf.valueIn(proc.sstate) val sut = reporting.WConf.fromSettings(conf).getOrElse(???) 
- val msg = NoExplanation("There was a problem!") + val msg = "There was a problem!".toMessage val depr = new Diagnostic.DeprecationWarning(msg, util.NoSourcePosition) assertEquals(Action.Silent, sut.action(depr)) val feat = new Diagnostic.FeatureWarning(msg, util.NoSourcePosition) assertEquals(Action.Error, sut.action(feat)) val warn = new Diagnostic.Warning(msg, util.NoSourcePosition) assertEquals(Action.Warning, sut.action(warn)) - val nowr = new Diagnostic.Warning(NoExplanation("This is a problem."), util.NoSourcePosition) + val nowr = new Diagnostic.Warning("This is a problem.".toMessage, util.NoSourcePosition) assertEquals(Action.Silent, sut.action(nowr)) end ScalaSettingsTests diff --git a/compiler/test/dotty/tools/dotc/coverage/CoverageTests.scala b/compiler/test/dotty/tools/dotc/coverage/CoverageTests.scala index 1a9248c49a82..77e172f61167 100644 --- a/compiler/test/dotty/tools/dotc/coverage/CoverageTests.scala +++ b/compiler/test/dotty/tools/dotc/coverage/CoverageTests.scala @@ -4,13 +4,13 @@ import org.junit.Test import org.junit.AfterClass import org.junit.Assert.* import org.junit.experimental.categories.Category - import dotty.{BootstrappedOnlyTests, Properties} import dotty.tools.vulpix.* import dotty.tools.vulpix.TestConfiguration.* import dotty.tools.dotc.Main +import dotty.tools.dotc.reporting.TestReporter -import java.nio.file.{Files, FileSystems, Path, Paths, StandardCopyOption} +import java.nio.file.{FileSystems, Files, Path, Paths, StandardCopyOption} import scala.jdk.CollectionConverters.* import scala.util.Properties.userDir import scala.language.unsafeNulls @@ -33,11 +33,12 @@ class CoverageTests: checkCoverageIn(rootSrc.resolve("run"), true) def checkCoverageIn(dir: Path, run: Boolean)(using TestGroup): Unit = - /** Converts \ to / on windows, to make the tests pass without changing the serialization. */ + /** Converts \\ (escaped \) to / on windows, to make the tests pass without changing the serialization. 
*/ def fixWindowsPaths(lines: Buffer[String]): Buffer[String] = val separator = java.io.File.separatorChar - if separator != '/' then - lines.map(_.replace(separator, '/')) + if separator == '\\' then + val escapedSep = "\\\\" + lines.map(_.replace(escapedSep, "/")) else lines end fixWindowsPaths @@ -84,6 +85,7 @@ object CoverageTests extends ParallelTesting: def testFilter = Properties.testsFilter def isInteractive = SummaryReport.isInteractive def updateCheckFiles = Properties.testsUpdateCheckfile + def failedTests = TestReporter.lastRunFailedTests given summaryReport: SummaryReporting = SummaryReport() @AfterClass def tearDown(): Unit = diff --git a/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala b/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala index 710ceee0a7c0..2c970e93f573 100644 --- a/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala +++ b/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala @@ -19,20 +19,26 @@ import dotty.tools.io.Directory import scala.io.Source import org.junit.Test +import scala.util.Using +import java.io.File class PrintingTest { - def options(phase: String) = - List(s"-Xprint:$phase", "-color:never", "-classpath", TestConfiguration.basicClasspath) + def options(phase: String, flags: List[String]) = + List(s"-Xprint:$phase", "-color:never", "-classpath", TestConfiguration.basicClasspath) ::: flags private def compileFile(path: JPath, phase: String): Boolean = { val baseFilePath = path.toString.stripSuffix(".scala") val checkFilePath = baseFilePath + ".check" + val flagsFilePath = baseFilePath + ".flags" val byteStream = new ByteArrayOutputStream() val reporter = TestReporter.reporter(new PrintStream(byteStream), INFO) + val flags = + if (!(new File(flagsFilePath)).exists) Nil + else Using(Source.fromFile(flagsFilePath, StandardCharsets.UTF_8.name))(_.getLines().toList).get try { - Main.process((path.toString::options(phase)).toArray, reporter, null) + Main.process((path.toString :: options(phase, 
flags)).toArray, reporter, null) } catch { case e: Throwable => println(s"Compile $path exception:") @@ -40,7 +46,7 @@ class PrintingTest { } val actualLines = byteStream.toString(StandardCharsets.UTF_8.name).linesIterator - FileDiff.checkAndDump(path.toString, actualLines.toIndexedSeq, checkFilePath) + FileDiff.checkAndDumpOrUpdate(path.toString, actualLines.toIndexedSeq, checkFilePath) } def testIn(testsDir: String, phase: String) = @@ -63,4 +69,7 @@ class PrintingTest { @Test def untypedPrinting: Unit = testIn("tests/printing/untyped", "parser") + + @Test + def transformedPrinting: Unit = testIn("tests/printing/transformed", "repeatableAnnotations") } diff --git a/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala b/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala index c6ec06d0bb4e..940fc875a021 100644 --- a/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala +++ b/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala @@ -3,18 +3,20 @@ package dotc package reporting import scala.language.unsafeNulls - -import java.io.{ PrintStream, PrintWriter, File => JFile, FileOutputStream, StringWriter } +import java.io.{BufferedReader, FileInputStream, FileOutputStream, FileReader, PrintStream, PrintWriter, StringReader, StringWriter, File as JFile} import java.text.SimpleDateFormat import java.util.Date -import core.Decorators._ +import core.Decorators.* import scala.collection.mutable - +import scala.jdk.CollectionConverters.* import util.SourcePosition -import core.Contexts._ -import Diagnostic._ -import interfaces.Diagnostic.{ ERROR, WARNING } +import core.Contexts.* +import Diagnostic.* +import dotty.Properties +import interfaces.Diagnostic.{ERROR, WARNING} + +import scala.io.Codec class TestReporter protected (outWriter: PrintWriter, filePrintln: String => Unit, logLevel: Int) extends Reporter with UniqueMessagePositions with HideNonSensicalMessages with MessageRendering { @@ -32,6 +34,10 @@ extends Reporter with 
UniqueMessagePositions with HideNonSensicalMessages with M private var _didCrash = false final def compilerCrashed: Boolean = _didCrash + private var _skip: Boolean = false + final def setSkip(): Unit = _skip = true + final def skipped: Boolean = _skip + protected final def inlineInfo(pos: SourcePosition)(using Context): String = if (pos.exists) { if (pos.outer.exists) @@ -80,17 +86,23 @@ extends Reporter with UniqueMessagePositions with HideNonSensicalMessages with M } object TestReporter { + private val testLogsDirName: String = "testlogs" + private val failedTestsFileName: String = "last-failed.log" + private val failedTestsFile: JFile = new JFile(s"$testLogsDirName/$failedTestsFileName") + private var outFile: JFile = _ private var logWriter: PrintWriter = _ + private var failedTestsWriter: PrintWriter = _ private def initLog() = if (logWriter eq null) { val date = new Date val df0 = new SimpleDateFormat("yyyy-MM-dd") val df1 = new SimpleDateFormat("yyyy-MM-dd-'T'HH-mm-ss") - val folder = s"testlogs/tests-${df0.format(date)}" + val folder = s"$testLogsDirName/tests-${df0.format(date)}" new JFile(folder).mkdirs() outFile = new JFile(s"$folder/tests-${df1.format(date)}.log") logWriter = new PrintWriter(new FileOutputStream(outFile, true)) + failedTestsWriter = new PrintWriter(new FileOutputStream(failedTestsFile, false)) } def logPrintln(str: String) = { @@ -140,4 +152,16 @@ object TestReporter { } rep } + + def lastRunFailedTests: Option[List[String]] = + Option.when( + Properties.rerunFailed && + failedTestsFile.exists() && + failedTestsFile.isFile + )(java.nio.file.Files.readAllLines(failedTestsFile.toPath).asScala.toList) + + def writeFailedTests(tests: List[String]): Unit = + initLog() + tests.foreach(failed => failedTestsWriter.println(failed)) + failedTestsWriter.flush() } diff --git a/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala b/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala index 
eb6ab8e8fb5f..1e7d7ef2c708 100644 --- a/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala +++ b/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala @@ -20,7 +20,7 @@ class PatmatExhaustivityTest { val testsDir = "tests/patmat" // pagewidth/color: for a stable diff as the defaults are based on the terminal (e.g size) // stop-after: patmatexhaust-huge.scala crash compiler (but also hides other warnings..) - val options = List("-pagewidth", "80", "-color:never", "-Ystop-after:explicitSelf", "-classpath", TestConfiguration.basicClasspath) + val options = List("-pagewidth", "80", "-color:never", "-Ystop-after:explicitSelf", "-Ycheck-constraint-deps", "-classpath", TestConfiguration.basicClasspath) private def compile(files: List[JPath]): Seq[String] = { val opts = toolArgsFor(files).get(ToolName.Scalac).getOrElse(Nil) diff --git a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala index 866647476888..bcb08cd232d7 100644 --- a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala +++ b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala @@ -347,27 +347,6 @@ class ReplCompilerTests extends ReplTest: assertEquals("java.lang.AssertionError: assertion failed", all.head) } - @Test def i14491 = - initially { - run("import language.experimental.fewerBraces") - } andThen { - run("""|val x = Seq(7,8,9).apply: - | 1 - |""".stripMargin) - assertEquals("val x: Int = 8", storedOutput().trim) - } - initially { - run("""|import language.experimental.fewerBraces - |import language.experimental.fewerBraces as _ - |""".stripMargin) - } andThen { - run("""|val x = Seq(7,8,9).apply: - | 1 - |""".stripMargin) - assert("expected error if fewerBraces is unimported", - lines().exists(_.contains("missing arguments for method apply"))) - } - object ReplCompilerTests: private val pattern = Pattern.compile("\\r[\\n]?|\\n"); diff --git a/compiler/test/dotty/tools/repl/ShadowingTests.scala 
b/compiler/test/dotty/tools/repl/ShadowingTests.scala index 1ba58a4babff..457819966346 100644 --- a/compiler/test/dotty/tools/repl/ShadowingTests.scala +++ b/compiler/test/dotty/tools/repl/ShadowingTests.scala @@ -80,7 +80,7 @@ class ShadowingTests extends ReplTest(options = ShadowingTests.options): shadowed = "class C(val c: Int)", script = """|scala> new C().c - |-- Error: ---------------------------------------------------------------------- + |-- [E171] Type Error: ---------------------------------------------------------- |1 | new C().c | | ^^^^^^^ | | missing argument for parameter c of constructor C in class C: (c: Int): C @@ -122,7 +122,7 @@ class ShadowingTests extends ReplTest(options = ShadowingTests.options): |val y: String = foo | |scala> if (true) x else y - |val res0: Matchable = 42 + |val res0: Int | String = 42 |""".stripMargin.linesIterator.toList ) diff --git a/compiler/test/dotty/tools/vulpix/FailedTestInfo.scala b/compiler/test/dotty/tools/vulpix/FailedTestInfo.scala new file mode 100644 index 000000000000..c7172f54aadc --- /dev/null +++ b/compiler/test/dotty/tools/vulpix/FailedTestInfo.scala @@ -0,0 +1,3 @@ +package dotty.tools.vulpix + +case class FailedTestInfo(title: String, extra: String) diff --git a/compiler/test/dotty/tools/vulpix/FileDiff.scala b/compiler/test/dotty/tools/vulpix/FileDiff.scala index c060c4d3938c..5e882be6425a 100644 --- a/compiler/test/dotty/tools/vulpix/FileDiff.scala +++ b/compiler/test/dotty/tools/vulpix/FileDiff.scala @@ -50,21 +50,6 @@ object FileDiff { outFile.writeAll(content.mkString("", EOL, EOL)) } - def checkAndDump(sourceTitle: String, actualLines: Seq[String], checkFilePath: String): Boolean = { - val outFilePath = checkFilePath + ".out" - FileDiff.check(sourceTitle, actualLines, checkFilePath) match { - case Some(msg) => - FileDiff.dump(outFilePath, actualLines) - println(msg) - println(FileDiff.diffMessage(checkFilePath, outFilePath)) - false - case _ => - val jOutFilePath = Paths.get(outFilePath) - 
Files.deleteIfExists(jOutFilePath) - true - } - } - def checkAndDumpOrUpdate(sourceTitle: String, actualLines: Seq[String], checkFilePath: String): Boolean = { val outFilePath = checkFilePath + ".out" FileDiff.check(sourceTitle, actualLines, checkFilePath) match { diff --git a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala index fb60d98ea5cf..b64142c0021f 100644 --- a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala +++ b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala @@ -57,6 +57,9 @@ trait ParallelTesting extends RunnerOrchestration { self => /** Tests should override the checkfiles with the current output */ def updateCheckFiles: Boolean + /** Contains a list of failed tests to run, if list is empty no tests will run */ + def failedTests: Option[List[String]] + /** A test source whose files or directory of files is to be compiled * in a specific way defined by the `Test` */ @@ -204,6 +207,14 @@ trait ParallelTesting extends RunnerOrchestration { self => protected def shouldSkipTestSource(testSource: TestSource): Boolean = false + protected def shouldReRun(testSource: TestSource): Boolean = + failedTests.forall(rerun => testSource match { + case JointCompilationSource(_, files, _, _, _, _) => + rerun.exists(filter => files.exists(file => file.getPath.contains(filter))) + case SeparateCompilationSource(_, dir, _, _) => + rerun.exists(dir.getPath.contains) + }) + private trait CompilationLogic { this: Test => def suppressErrors = false @@ -281,10 +292,12 @@ trait ParallelTesting extends RunnerOrchestration { self => private final def onComplete(testSource: TestSource, reportersOrCrash: Try[Seq[TestReporter]], logger: LoggedRunnable): Unit = reportersOrCrash match { case TryFailure(exn) => onFailure(testSource, Nil, logger, Some(s"Fatal compiler crash when compiling: ${testSource.title}:\n${exn.getMessage}${exn.getStackTrace.map("\n\tat " + _).mkString}")) - case TrySuccess(reporters) => 
maybeFailureMessage(testSource, reporters) match { - case Some(msg) => onFailure(testSource, reporters, logger, Option(msg).filter(_.nonEmpty)) - case None => onSuccess(testSource, reporters, logger) - } + case TrySuccess(reporters) if !reporters.exists(_.skipped) => + maybeFailureMessage(testSource, reporters) match { + case Some(msg) => onFailure(testSource, reporters, logger, Option(msg).filter(_.nonEmpty)) + case None => onSuccess(testSource, reporters, logger) + } + case _ => } /** @@ -357,7 +370,7 @@ trait ParallelTesting extends RunnerOrchestration { self => case SeparateCompilationSource(_, dir, _, _) => testFilter.exists(dir.getPath.contains) } - filteredByName.filterNot(shouldSkipTestSource(_)) + filteredByName.filterNot(shouldSkipTestSource(_)).filter(shouldReRun(_)) /** Total amount of test sources being compiled by this test */ val sourceCount = filteredSources.length @@ -391,6 +404,10 @@ trait ParallelTesting extends RunnerOrchestration { self => /** Number of failed tests */ def failureCount: Int = _failureCount + private var _skipCount = 0 + protected final def registerSkip(): Unit = synchronized { _skipCount += 1 } + def skipCount: Int = _skipCount + protected def logBuildInstructions(testSource: TestSource, reporters: Seq[TestReporter]) = { val (errCount, warnCount) = countErrorsAndWarnings(reporters) val errorMsg = testSource.buildInstructions(errCount, warnCount) @@ -403,14 +420,14 @@ trait ParallelTesting extends RunnerOrchestration { self => synchronized { reproduceInstructions.append(ins) } /** The test sources that failed according to the implementing subclass */ - private val failedTestSources = mutable.ArrayBuffer.empty[String] + private val failedTestSources = mutable.ArrayBuffer.empty[FailedTestInfo] protected final def failTestSource(testSource: TestSource, reason: Failure = Generic) = synchronized { val extra = reason match { case TimeoutFailure(title) => s", test '$title' timed out" case JavaCompilationFailure(msg) => s", java test 
sources failed to compile with: \n$msg" case Generic => "" } - failedTestSources.append(testSource.title + s" failed" + extra) + failedTestSources.append(FailedTestInfo(testSource.title, s" failed" + extra)) fail(reason) } @@ -464,13 +481,13 @@ trait ParallelTesting extends RunnerOrchestration { self => val toolArgs = toolArgsFor(files.toList.map(_.toPath), getCharsetFromEncodingOpt(flags0)) val spec = raw"(\d+)(\+)?".r - val testFilter = toolArgs.get(ToolName.Test) match + val testIsFiltered = toolArgs.get(ToolName.Test) match case Some("-jvm" :: spec(n, more) :: Nil) => if more == "+" then isJavaAtLeast(n) else javaSpecVersion == n case Some(args) => throw new IllegalStateException(args.mkString("unknown test option: ", ", ", "")) case None => true - def scalacOptions = toolArgs.get(ToolName.Scalac).getOrElse(Nil) + def scalacOptions = toolArgs.getOrElse(ToolName.Scalac, Nil) val flags = flags0 .and(scalacOptions: _*) @@ -509,7 +526,7 @@ trait ParallelTesting extends RunnerOrchestration { self => val allArgs = flags.all - if testFilter then + if testIsFiltered then // If a test contains a Java file that cannot be parsed by Dotty's Java source parser, its // name must contain the string "JAVA_ONLY". 
val dottyFiles = files.filterNot(_.getName.contains("JAVA_ONLY")).map(_.getPath) @@ -523,6 +540,9 @@ trait ParallelTesting extends RunnerOrchestration { self => echo(s"\njava compilation failed: \n${ javaErrors.get }") fail(failure = JavaCompilationFailure(javaErrors.get)) } + else + registerSkip() + reporter.setSkip() end if reporter @@ -724,7 +744,7 @@ trait ParallelTesting extends RunnerOrchestration { self => } private def verifyOutput(checkFile: Option[JFile], dir: JFile, testSource: TestSource, warnings: Int, reporters: Seq[TestReporter], logger: LoggedRunnable) = { - if (Properties.testsNoRun) addNoRunWarning() + if Properties.testsNoRun then addNoRunWarning() else runMain(testSource.runClassPath, testSource.allToolArgs) match { case Success(output) => checkFile match { case Some(file) if file.exists => diffTest(testSource, file, output.linesIterator.toList, reporters, logger) @@ -748,7 +768,7 @@ trait ParallelTesting extends RunnerOrchestration { self => extends Test(testSources, times, threadLimit, suppressAllOutput) { override def suppressErrors = true - override def maybeFailureMessage(testSource: TestSource, reporters: Seq[TestReporter]): Option[String] = { + override def maybeFailureMessage(testSource: TestSource, reporters: Seq[TestReporter]): Option[String] = def compilerCrashed = reporters.exists(_.compilerCrashed) lazy val (errorMap, expectedErrors) = getErrorMapAndExpectedCount(testSource.sourceFiles.toIndexedSeq) lazy val actualErrors = reporters.foldLeft(0)(_ + _.errorCount) @@ -772,7 +792,7 @@ trait ParallelTesting extends RunnerOrchestration { self => else if !errorMap.isEmpty then s"\nExpected error(s) have {=}: $errorMap" else null } - } + end maybeFailureMessage override def onSuccess(testSource: TestSource, reporters: Seq[TestReporter], logger: LoggedRunnable): Unit = checkFile(testSource).foreach(diffTest(testSource, _, reporterOutputLines(reporters), reporters, logger)) @@ -780,12 +800,13 @@ trait ParallelTesting extends 
RunnerOrchestration { self => def reporterOutputLines(reporters: Seq[TestReporter]): List[String] = reporters.flatMap(_.consoleOutput.split("\n")).toList - // In neg-tests we allow two types of error annotations, - // "nopos-error" which doesn't care about position and "error" which - // has to be annotated on the correct line number. + // In neg-tests we allow two or three types of error annotations. + // Normally, `// error` must be annotated on the correct line number. + // `// nopos-error` allows for an error reported without a position. + // `// anypos-error` allows for an error reported with a position that can't be annotated in the check file. // // We collect these in a map `"file:row" -> numberOfErrors`, for - // nopos errors we save them in `"file" -> numberOfNoPosErrors` + // nopos and anypos errors we save them in `"file" -> numberOfNoPosErrors` def getErrorMapAndExpectedCount(files: Seq[JFile]): (HashMap[String, Integer], Int) = val comment = raw"//( *)(nopos-|anypos-)?error".r val errorMap = new HashMap[String, Integer]() @@ -950,8 +971,7 @@ trait ParallelTesting extends RunnerOrchestration { self => * =============== * Since this is a parallel test suite, it is essential to be able to * compose tests to take advantage of the concurrency. This is done using - * the `+` function. This function will make sure that tests being combined - * are compatible according to the `require`s in `+`. + * `aggregateTests` in the companion, which will ensure that aggregation is allowed. 
*/ final class CompilationTest private ( private[ParallelTesting] val targets: List[TestSource], @@ -969,6 +989,14 @@ trait ParallelTesting extends RunnerOrchestration { self => def this(targets: List[TestSource]) = this(targets, 1, true, None, false, false) + def copy(targets: List[TestSource], + times: Int = times, + shouldDelete: Boolean = shouldDelete, + threadLimit: Option[Int] = threadLimit, + shouldFail: Boolean = shouldFail, + shouldSuppressOutput: Boolean = shouldSuppressOutput): CompilationTest = + CompilationTest(targets, times, shouldDelete, threadLimit, shouldFail, shouldSuppressOutput) + /** Creates a "pos" test run, which makes sure that all tests pass * compilation without generating errors and that they do not crash the * compiler @@ -981,7 +1009,7 @@ trait ParallelTesting extends RunnerOrchestration { self => if (!shouldFail && test.didFail) { fail(s"Expected no errors when compiling, failed for the following reason(s):\n${reasonsForFailure(test)}\n") } - else if (shouldFail && !test.didFail) { + else if (shouldFail && !test.didFail && test.skipCount == 0) { fail("Pos test should have failed, but didn't") } @@ -989,23 +1017,21 @@ trait ParallelTesting extends RunnerOrchestration { self => } /** Creates a "neg" test run, which makes sure that each test generates the - * correct amount of errors at the correct positions. It also makes sure - * that none of these tests crash the compiler + * correct number of errors at the correct positions. It also makes sure + * that none of these tests crashes the compiler. */ - def checkExpectedErrors()(implicit summaryReport: SummaryReporting): this.type = { + def checkExpectedErrors()(implicit summaryReport: SummaryReporting): this.type = val test = new NegTest(targets, times, threadLimit, shouldFail || shouldSuppressOutput).executeTestSuite() cleanup() - if (shouldFail && !test.didFail) { + if shouldFail && !test.didFail && test.skipCount == 0 then fail(s"Neg test shouldn't have failed, but did. 
Reasons:\n${ reasonsForFailure(test) }") - } - else if (!shouldFail && test.didFail) { + else if !shouldFail && test.didFail then fail("Neg test should have failed, but did not") - } this - } + end checkExpectedErrors /** Creates a "fuzzy" test run, which makes sure that each test compiles (or not) without crashing */ def checkNoCrash()(implicit summaryReport: SummaryReporting): this.type = { @@ -1030,12 +1056,10 @@ trait ParallelTesting extends RunnerOrchestration { self => cleanup() - if (!shouldFail && test.didFail) { + if !shouldFail && test.didFail then fail(s"Run test failed, but should not, reasons:\n${ reasonsForFailure(test) }") - } - else if (shouldFail && !test.didFail) { + else if shouldFail && !test.didFail && test.skipCount == 0 then fail("Run test should have failed, but did not") - } this } @@ -1160,35 +1184,32 @@ trait ParallelTesting extends RunnerOrchestration { self => } } - object CompilationTest { + object CompilationTest: /** Compose test targets from `tests` - * - * It does this, only if the two tests are compatible. Otherwise it throws - * an `IllegalArgumentException`. - * - * Grouping tests together like this allows us to take advantage of the - * concurrency offered by this test suite as each call to an executing - * method (`pos()` / `checkExpectedErrors()`/ `run()`) will spin up a thread pool with the - * maximum allowed level of concurrency. Doing this for only a few targets - * does not yield any real benefit over sequential compilation. - * - * As such, each `CompilationTest` should contain as many targets as - * possible. - */ - def aggregateTests(tests: CompilationTest*): CompilationTest = { + * + * It does this, only if all the tests are mutally compatible. + * Otherwise it throws an `IllegalArgumentException`. 
+ * + * Grouping tests together like this allows us to take advantage of the + * concurrency offered by this test suite, as each call to an executing + * method (`pos()` / `checkExpectedErrors()`/ `run()`) will spin up a thread pool with the + * maximum allowed level of concurrency. Doing this for only a few targets + * does not yield any real benefit over sequential compilation. + * + * As such, each `CompilationTest` should contain as many targets as + * possible. + */ + def aggregateTests(tests: CompilationTest*): CompilationTest = assert(tests.nonEmpty) - def aggregate(test1: CompilationTest, test2: CompilationTest) = { + def aggregate(test1: CompilationTest, test2: CompilationTest) = require(test1.times == test2.times, "can't combine tests that are meant to be benchmark compiled") require(test1.shouldDelete == test2.shouldDelete, "can't combine tests that differ on deleting output") require(test1.shouldFail == test2.shouldFail, "can't combine tests that have different expectations on outcome") require(test1.shouldSuppressOutput == test2.shouldSuppressOutput, "can't combine tests that both suppress and don't suppress output") - new CompilationTest(test1.targets ++ test2.targets, test1.times, test1.shouldDelete, test1.threadLimit, test1.shouldFail, test1.shouldSuppressOutput) - } + test1.copy(test1.targets ++ test2.targets) // what if thread limit differs? 
currently threads are limited on aggregate only tests.reduce(aggregate) - } - - } + end CompilationTest /** Create out directory for directory `d` */ def createOutputDirsForDir(d: JFile, sourceDir: JFile, outDir: String): JFile = { diff --git a/compiler/test/dotty/tools/vulpix/SummaryReport.scala b/compiler/test/dotty/tools/vulpix/SummaryReport.scala index e216ac1c5d4f..74612387015f 100644 --- a/compiler/test/dotty/tools/vulpix/SummaryReport.scala +++ b/compiler/test/dotty/tools/vulpix/SummaryReport.scala @@ -3,7 +3,6 @@ package tools package vulpix import scala.language.unsafeNulls - import scala.collection.mutable import dotc.reporting.TestReporter @@ -23,7 +22,7 @@ trait SummaryReporting { def reportPassed(): Unit /** Add the name of the failed test */ - def addFailedTest(msg: String): Unit + def addFailedTest(msg: FailedTestInfo): Unit /** Add instructions to reproduce the error */ def addReproduceInstruction(instr: String): Unit @@ -49,7 +48,7 @@ trait SummaryReporting { final class NoSummaryReport extends SummaryReporting { def reportFailed(): Unit = () def reportPassed(): Unit = () - def addFailedTest(msg: String): Unit = () + def addFailedTest(msg: FailedTestInfo): Unit = () def addReproduceInstruction(instr: String): Unit = () def addStartingMessage(msg: String): Unit = () def addCleanup(f: () => Unit): Unit = () @@ -66,7 +65,7 @@ final class SummaryReport extends SummaryReporting { import scala.jdk.CollectionConverters._ private val startingMessages = new java.util.concurrent.ConcurrentLinkedDeque[String] - private val failedTests = new java.util.concurrent.ConcurrentLinkedDeque[String] + private val failedTests = new java.util.concurrent.ConcurrentLinkedDeque[FailedTestInfo] private val reproduceInstructions = new java.util.concurrent.ConcurrentLinkedDeque[String] private val cleanUps = new java.util.concurrent.ConcurrentLinkedDeque[() => Unit] @@ -79,7 +78,7 @@ final class SummaryReport extends SummaryReporting { def reportPassed(): Unit = passed += 1 - 
def addFailedTest(msg: String): Unit = + def addFailedTest(msg: FailedTestInfo): Unit = failedTests.add(msg) def addReproduceInstruction(instr: String): Unit = @@ -108,7 +107,8 @@ final class SummaryReport extends SummaryReporting { startingMessages.asScala.foreach(rep.append) - failedTests.asScala.map(x => s" $x\n").foreach(rep.append) + failedTests.asScala.map(x => s" ${x.title}${x.extra}\n").foreach(rep.append) + TestReporter.writeFailedTests(failedTests.asScala.toList.map(_.title)) // If we're compiling locally, we don't need instructions on how to // reproduce failures diff --git a/compiler/test/dotty/tools/vulpix/VulpixMetaTests.scala b/compiler/test/dotty/tools/vulpix/VulpixMetaTests.scala index 75af0aa94893..0044ab8a94e5 100644 --- a/compiler/test/dotty/tools/vulpix/VulpixMetaTests.scala +++ b/compiler/test/dotty/tools/vulpix/VulpixMetaTests.scala @@ -30,6 +30,7 @@ object VulpixMetaTests extends ParallelTesting { def isInteractive = false // Don't beautify output for interactive use. def testFilter = Nil // Run all the tests. 
def updateCheckFiles: Boolean = false + def failedTests = None @AfterClass def tearDown() = this.cleanup() diff --git a/compiler/test/dotty/tools/vulpix/VulpixUnitTests.scala b/compiler/test/dotty/tools/vulpix/VulpixUnitTests.scala index 8a32fd636e76..baf61c845d96 100644 --- a/compiler/test/dotty/tools/vulpix/VulpixUnitTests.scala +++ b/compiler/test/dotty/tools/vulpix/VulpixUnitTests.scala @@ -108,6 +108,7 @@ object VulpixUnitTests extends ParallelTesting { def isInteractive = !sys.env.contains("DRONE") def testFilter = Nil def updateCheckFiles: Boolean = false + def failedTests = None @AfterClass def tearDown() = this.cleanup() diff --git a/docs/_assets/css/frontpage.css b/docs/_assets/css/frontpage.css index a3a5c0d7dd8a..d0894fbb5052 100644 --- a/docs/_assets/css/frontpage.css +++ b/docs/_assets/css/frontpage.css @@ -28,6 +28,7 @@ h1#main { /* navigation */ header { font-size: 24px; + margin-block-end: calc(2* var(--base-spacing)); } header .nav-item i { diff --git a/docs/_assets/docsScalaLangResources/scaladoc-assets.html b/docs/_assets/docsScalaLangResources/scaladoc-assets.html deleted file mode 100644 index 504a93b25fa8..000000000000 --- a/docs/_assets/docsScalaLangResources/scaladoc-assets.html +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/docs/_docs/contributing/procedures/index.md b/docs/_docs/contributing/procedures/index.md new file mode 100644 index 000000000000..01c76f72c00c --- /dev/null +++ b/docs/_docs/contributing/procedures/index.md @@ -0,0 +1,4 @@ +--- +layout: index +title: Procedures +--- diff --git a/docs/_docs/contributing/tools/index.md b/docs/_docs/contributing/tools/index.md new file mode 100644 index 000000000000..92503ee82013 --- /dev/null +++ b/docs/_docs/contributing/tools/index.md @@ -0,0 +1,4 @@ +--- +layout: index +title: IDEs and Tools +--- diff --git a/docs/_docs/internals/backend.md b/docs/_docs/internals/backend.md index 1861ed6c1508..e3215c3993ae 100644 --- a/docs/_docs/internals/backend.md +++ 
b/docs/_docs/internals/backend.md @@ -30,7 +30,7 @@ BCodeIdiomatic ----------------> utilities for code generation, e.g. The `BTypes.scala` class contains the `BType` class and predefined BTypes -### Data Flow ### +## Data Flow ## Compiler creates a `GenBCode` `Phase`, calls `runOn(compilationUnits)`, which calls `run(context)`. This: @@ -51,12 +51,12 @@ which calls `run(context)`. This: - `GenBCodePipeline.drainQ3` writes byte arrays to disk -### Architecture ### +## Architecture ## The architecture of `GenBCode` is the same as in Scalac. It can be partitioned into weakly coupled components (called "subsystems" below): -#### (a) The queue subsystem #### +### (a) The queue subsystem ### Queues mediate between processors, queues don't know what each processor does. The first queue contains AST trees for compilation units, the second queue @@ -70,7 +70,7 @@ serialization to disk. This subsystem is described in detail in `GenBCode.scala` -#### (b) Bytecode-level types, BType #### +### (b) Bytecode-level types, BType ### The previous bytecode emitter goes to great lengths to reason about bytecode-level types in terms of Symbols. @@ -89,7 +89,7 @@ spec (that's why they aren't documented in `GenBCode`, just read the [JVM 8 spec All things `BType` can be found in `BCodeGlue.scala` -#### (c) Utilities offering a more "high-level" API to bytecode emission #### +### (c) Utilities offering a more "high-level" API to bytecode emission ### Bytecode can be emitted one opcode at a time, but there are recurring patterns that call for a simpler API. @@ -100,7 +100,7 @@ of two strategies. All these utilities are encapsulated in file `BCodeIdiomatic.scala`. They know nothing about the type checker (because, just between us, they don't need to). 
-#### (d) Mapping between type-checker types and BTypes #### +### (d) Mapping between type-checker types and BTypes ### So that (c) can remain oblivious to what AST trees contain, some bookkeepers are needed: @@ -115,7 +115,7 @@ final def exemplar(csym0: Symbol): Tracked = { ... } Details in `BTypes.scala` -#### (e) More "high-level" utilities for bytecode emission #### +### (e) More "high-level" utilities for bytecode emission ### In the spirit of `BCodeIdiomatic`, utilities are added in `BCodeHelpers` for emitting: @@ -125,5 +125,5 @@ emitting: - annotations -#### (f) Building an ASM ClassNode given an AST TypeDef #### +### (f) Building an ASM ClassNode given an AST TypeDef ### It's done by `PlainClassBuilder`(see `GenBCode.scala`). diff --git a/docs/_docs/internals/contexts.md b/docs/_docs/internals/contexts.md index 3674f03e2e51..4129787ad3f6 100644 --- a/docs/_docs/internals/contexts.md +++ b/docs/_docs/internals/contexts.md @@ -16,7 +16,7 @@ The `Context` contains the state of the compiler, for example * `typerState` (for example undetermined type variables) * ... -### Contexts in the typer ### +## Contexts in the typer ## The type checker passes contexts through all methods and adapts fields where necessary, e.g. @@ -26,7 +26,7 @@ case tree: untpd.Block => typedBlock(desugar.block(tree), pt)(ctx.fresh.withNewS A number of fields in the context are typer-specific (`mode`, `typerState`). -### In other phases ### +## In other phases ## Other phases need a context for many things, for example to access the denotation of a symbols (depends on the period). However they typically don't need to modify / extend the context while traversing the AST. For these phases @@ -36,7 +36,7 @@ all members. **Careful**: beware of memory leaks. Don't hold on to contexts in long lived objects. 
-### Using contexts ### +## Using contexts ## Nested contexts should be named `ctx` to enable implicit shadowing: ```scala diff --git a/docs/_docs/internals/dotc-scalac.md b/docs/_docs/internals/dotc-scalac.md index a8de3bfff00e..3f88502934b7 100644 --- a/docs/_docs/internals/dotc-scalac.md +++ b/docs/_docs/internals/dotc-scalac.md @@ -6,7 +6,7 @@ title: "Differences between Scalac and Dotty" Overview explanation how symbols, named types and denotations hang together: [Denotations1] -### Denotation ### +## Denotation ## Comment with a few details: [Denotations2] A `Denotation` is the result of a name lookup during a given period @@ -21,7 +21,7 @@ A `Denotation` is the result of a name lookup during a given period Denotations of methods have a signature ([Signature1]), which uniquely identifies overloaded methods. -#### Denotation vs. SymDenotation #### +### Denotation vs. SymDenotation ### A `SymDenotation` is an extended denotation that has symbol-specific properties (that may change over phases) * `flags` @@ -31,7 +31,7 @@ A `SymDenotation` is an extended denotation that has symbol-specific properties `SymDenotation` implements lazy types (similar to scalac). The type completer assigns the denotation's `info`. -#### Implicit Conversion #### +### Implicit Conversion ### There is an implicit conversion: ```scala core.Symbols.toDenot(sym: Symbol)(implicit ctx: Context): SymDenotation @@ -42,7 +42,7 @@ implicit conversion does **not** need to be imported, it is part of the implicit scope of the type `Symbol` (check the Scala spec). However, it can only be applied if an implicit `Context` is in scope. -### Symbol ### +## Symbol ## * `Symbol` instances have a `SymDenotation` * Most symbol properties in the Scala 2 compiler are now in the denotation (in the Scala 3 compiler). @@ -57,7 +57,7 @@ if (sym is Flags.PackageClass) // Scala 3 (*) `(*)` Symbols are implicitly converted to their denotation, see above. 
Each `SymDenotation` has flags that can be queried using the `is` method. -### Flags ### +## Flags ## * Flags are instances of the value class `FlagSet`, which encapsulates a `Long` * Each flag is either valid for types, terms, or both @@ -74,7 +74,7 @@ if (sym is Flags.PackageClass) // Scala 3 (*) `ModuleVal` / `ModuleClass` for either of the two. * `flags.is(Method | Param)`: true if `flags` has either of the two -### Tree ### +## Tree ## * Trees don't have symbols - `tree.symbol` is `tree.denot.symbol` - `tree.denot` is `tree.tpe.denot` where the `tpe` is a `NamdedType` (see @@ -87,7 +87,7 @@ if (sym is Flags.PackageClass) // Scala 3 (*) using `prefix.member(name)`. -### Type ### +## Type ## * `MethodType(paramSyms, resultType)` from scalac => `mt @ MethodType(paramNames, paramTypes)`. Result type is `mt.resultType` diff --git a/docs/_docs/internals/gadts.md b/docs/_docs/internals/gadts.md index 777b9dd32e39..58f511c946c3 100644 --- a/docs/_docs/internals/gadts.md +++ b/docs/_docs/internals/gadts.md @@ -1,4 +1,9 @@ -# GADTs - Broad overview +--- +layout: doc-page +title: "GADTs - Broad overview" +--- + +## Introduction There are multiple levels to the implementation. They deal with slightly different problems. The most important levels are the following ones: @@ -18,9 +23,9 @@ There are also other parts to supporting GADTs. Roughly in order of importance, 1. Attachment key is named `inferredGadtConstraints`. 4. When we select members on a type that may have GADT constraints, we perform special "healing" by approximating the type using those constraints. We cannot take the constraints into account because member lookup is cached, and GADT constraints are only valid for specific scopes. 
-# Useful widgets +## Useful widgets -## Expr +### Expr This is the classical GADT example: @@ -36,7 +41,7 @@ enum Expr[T] { } ``` -## EQ +### EQ The following enum will result in an equality constraint between `S` and `T` if we match on it: @@ -46,7 +51,7 @@ enum EQ[S, T] { } ``` -## SUB +### SUB The following enum will result in a subtyping constraint `S <: T` if we match on it: @@ -56,9 +61,9 @@ enum SUB[-S, +T] { } ``` -# Details of above +## Details of above -## What abstract types can have GADT constraints +### What abstract types can have GADT constraints Right now, we record GADT constraints for: @@ -67,9 +72,9 @@ Right now, we record GADT constraints for: There is a branch on the way which will also record them for type members (so path-dependent types) and singleton types. It has a paper associated: "Implementing path-depepdent GADTs for Scala 3". -## What are necessary relationships? Any examples? +### What are necessary relationships? Any examples? -### Covariance means no constraint is necessary +#### Covariance means no constraint is necessary Standard (non-case) classes allow "strange" inheritance which means that we cannot infer any information from covariant type parameters. @@ -90,7 +95,7 @@ class Weird(list: List[String]) extends IntList with Expr[Nothing] Case classes have a special check which disallows inheritance like `Weird`. This means we can infer extra information from them. -## Breaking down the constraints +### Breaking down the constraints ```scala class Expr[A] @@ -113,9 +118,9 @@ def foo[T](e: Expr[List[T]]): T = } ``` -## Relation betweeen GadtConstraint and OrderingConstraint +### Relation betweeen GadtConstraint and OrderingConstraint -### Internal and external types +#### Internal and external types GadtConstraint uses OrderingConstraint as the datastructure to record information about GADT constraints. 
@@ -127,9 +132,9 @@ To solve this, GadtConstraint internally creates TypeParamRefs which it adds to The TypeParamRefs and TypeVars registered in one constraint cannot ever be present in types mentioned in the other type constraint. The internal TypeParamRefs and TypeVars cannot ever leak out of the GadtConstraint. We cannot ever record a bound in GadtConstraint which mentions TypeParamRefs used for type inference. (That part is ensured by the way TypeComparer is organised – we will always try to record bounds in the "normal" constraint before recording a GADT bound.) -# Other details +## Other details -## TypeComparer approximations +### TypeComparer approximations TypeComparer sometimes approximates the types it compares. Let's see an example based on these definitions: @@ -142,11 +147,11 @@ when comparing if `IntList <: Expr[Int]`, `TypeComparer` will approximate `IntLi The variables which TypeComparer sets are `approxState` and `frozenGadt`. -## Necessary/sufficient either +### Necessary/sufficient either TypeComparer sometimes needs to approximate some constraints, specifically when dealing with intersection and union types. The way this approximation works changes if we're currently inferring GADT constraints. This is hopefully documented well in TypeComparer in doc comments for `necessaryEither` and `sufficientEither`. -## Types bound in patterns +### Types bound in patterns ```scala (list : List[Int]) match { @@ -161,7 +166,7 @@ TypeComparer sometimes needs to approximate some constraints, specifically when } ``` -## Internal structure of OrderingConstraint +### Internal structure of OrderingConstraint Imagine we have two type parameters in scope, `A` and `B`. @@ -184,19 +189,19 @@ B <: A The first two constraints are "entries" – they are easy to look up whenever we ask for bounds of `A` or `B`. The third constraint is an ordering – it helps with correctly propagating the bounds we record. 
-# Possible broad improvements +## Possible broad improvements -## Allow OrderingConstraint to record bounds for things other than TypeParamRefs +### Allow OrderingConstraint to record bounds for things other than TypeParamRefs This would mean we no longer need to keep the bidirectional mapping in GadtConstraint. -## Not mixing OrderingConstraint and ConstraintHandling in GadtConstraint +### Not mixing OrderingConstraint and ConstraintHandling in GadtConstraint GadtConstraint right now mixes OrderingConstraint and ConstraintHandling. The first one is supposed to be the immutable constraint datastructure. The second one implements mutable functionality around a variable containing the immutable datastructure. GadtConstraint mixes them both. Things would be better organised if GadtConstraint was split like the normal constraint. -## Creating a separate TypeComparer for breaking down types into GADT constraints +### Creating a separate TypeComparer for breaking down types into GADT constraints TypeComparer is biased towards one specific way of approximating constraints. When we infer types, it's ok to be "optimistic". When inferring GADT constraints, we should be as pessimistic as possible, in order to only infer constraints which are necessary. diff --git a/docs/_docs/internals/syntax-3.1.md b/docs/_docs/internals/syntax-3.1.md index ef18bd1486b1..4d4d3b6d858d 100644 --- a/docs/_docs/internals/syntax-3.1.md +++ b/docs/_docs/internals/syntax-3.1.md @@ -16,7 +16,7 @@ hexDigit ::= ‘0’ | … | ‘9’ | ‘A’ | … | ‘F’ | ‘a’ | Informal descriptions are typeset as `“some comment”`. -### Lexical Syntax +## Lexical Syntax The lexical syntax of Scala is given by the following grammar in EBNF form. 
diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index 8c8f83450397..7fce82cbebbc 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -30,7 +30,7 @@ hexDigit ::= ‘0’ | … | ‘9’ | ‘A’ | … | ‘F’ | ‘a’ | Informal descriptions are typeset as `“some comment”`. -### Lexical Syntax +## Lexical Syntax The lexical syntax of Scala is given by the following grammar in EBNF form. @@ -318,7 +318,10 @@ TypeCaseClauses ::= TypeCaseClause { TypeCaseClause } TypeCaseClause ::= ‘case’ (InfixType | ‘_’) ‘=>’ Type [semi] Pattern ::= Pattern1 { ‘|’ Pattern1 } Alternative(pats) -Pattern1 ::= Pattern2 [‘:’ RefinedType] Bind(name, Typed(Ident(wildcard), tpe)) +Pattern1 ::= PatVar ‘:’ RefinedType Bind(name, Typed(Ident(wildcard), tpe)) + | [‘-’] integerLiteral ‘:’ RefinedType Typed(pat, tpe) + | [‘-’] floatingPointLiteral ‘:’ RefinedType Typed(pat, tpe) + | Pattern2 Pattern2 ::= [id ‘@’] InfixPattern [‘*’] Bind(name, pat) InfixPattern ::= SimplePattern { id [nl] SimplePattern } InfixOp(pat, op, pat) SimplePattern ::= PatVar Ident(wildcard) diff --git a/docs/_docs/reference/changed-features/imports.md b/docs/_docs/reference/changed-features/imports.md index 43e8704192ee..2058ef08b7db 100644 --- a/docs/_docs/reference/changed-features/imports.md +++ b/docs/_docs/reference/changed-features/imports.md @@ -38,13 +38,13 @@ import scala.annotation as ann import java as j ``` -### Migration +## Migration To support cross-building, Scala 3.0 supports the old import syntax with `_` for wildcards and `=>` for renamings in addition to the new one. The old syntax will be dropped in a future versions. Automatic rewritings from old to new syntax are offered under settings `-source 3.1-migration -rewrite`. 
-### Syntax +## Syntax ``` Import ::= ‘import’ ImportExpr {‘,’ ImportExpr} diff --git a/docs/_docs/reference/changed-features/pattern-matching.md b/docs/_docs/reference/changed-features/pattern-matching.md index a9e4b427830f..fed76ff9260d 100644 --- a/docs/_docs/reference/changed-features/pattern-matching.md +++ b/docs/_docs/reference/changed-features/pattern-matching.md @@ -253,7 +253,7 @@ object CharList: For example, where `V = S`, `U = Option[S] <: R`, `S = (String, PN) <: Product`, `PN = Seq[Int]` -```Scala +```scala class Foo(val name: String, val children: Int*) object Foo: def unapplySeq(f: Foo): Option[(String, Seq[Int])] = diff --git a/docs/_docs/reference/changed-features/wildcards.md b/docs/_docs/reference/changed-features/wildcards.md index 6ef3e66c5e24..0d3e13c3d7e0 100644 --- a/docs/_docs/reference/changed-features/wildcards.md +++ b/docs/_docs/reference/changed-features/wildcards.md @@ -10,7 +10,7 @@ List[?] Map[? <: AnyRef, ? >: Null] ``` -### Motivation +## Motivation We would like to use the underscore syntax `_` to stand for an anonymous type parameter, aligning it with its meaning in value parameter lists. So, just as `f(_)` is a shorthand for the lambda `x => f(x)`, in the future `C[_]` will be a shorthand @@ -21,7 +21,7 @@ In the future, `F[_]` will mean the same thing, no matter where it is used. We pick `?` as a replacement syntax for wildcard types, since it aligns with [Java's syntax](https://docs.oracle.com/javase/tutorial/java/generics/wildcardGuidelines.html). -### Migration Strategy +## Migration Strategy The migration to the new scheme is complicated, in particular since the [kind projector](https://github.com/typelevel/kind-projector) compiler plugin still uses the reverse convention, with `?` meaning parameter placeholder instead of wildcard. Fortunately, kind projector has added `*` as an alternative syntax for `?`. 
diff --git a/docs/_docs/reference/contextual/by-name-context-parameters.md b/docs/_docs/reference/contextual/by-name-context-parameters.md index 92a399940822..3515efd78fa5 100644 --- a/docs/_docs/reference/contextual/by-name-context-parameters.md +++ b/docs/_docs/reference/contextual/by-name-context-parameters.md @@ -59,7 +59,7 @@ val s = summon[Test.Codec[Option[Int]]]( No local given instance was generated because the synthesized argument is not recursive. -### Reference +## Reference For more information, see [Issue #1998](https://github.com/lampepfl/dotty/issues/1998) and the associated [Scala SIP](https://docs.scala-lang.org/sips/byname-implicits.html). diff --git a/docs/_docs/reference/contextual/context-functions-spec.md b/docs/_docs/reference/contextual/context-functions-spec.md index 66a5bed432ce..109513e9da86 100644 --- a/docs/_docs/reference/contextual/context-functions-spec.md +++ b/docs/_docs/reference/contextual/context-functions-spec.md @@ -74,6 +74,6 @@ See the section on Expressiveness from [Simplicitly: foundations and applications of implicit function types](https://dl.acm.org/citation.cfm?id=3158130). -### Type Checking +## Type Checking After desugaring no additional typing rules are required for context function types. diff --git a/docs/_docs/reference/contextual/context-functions.md b/docs/_docs/reference/contextual/context-functions.md index 6eb838896fc9..0ad3c8757782 100644 --- a/docs/_docs/reference/contextual/context-functions.md +++ b/docs/_docs/reference/contextual/context-functions.md @@ -48,7 +48,7 @@ For example, continuing with the previous definitions, g((ctx: ExecutionContext) ?=> f(3)(using ctx)) // is left as it is ``` -### Example: Builder Pattern +## Example: Builder Pattern Context function types have considerable expressive power. 
For instance, here is how they can support the "builder pattern", where @@ -112,7 +112,7 @@ With that setup, the table construction code above compiles and expands to: }(using $t) } ``` -### Example: Postconditions +## Example: Postconditions As a larger example, here is a way to define constructs for checking arbitrary postconditions using an extension method `ensuring` so that the checked result can be referred to simply by `result`. The example combines opaque type aliases, context function types, and extension methods to provide a zero-overhead abstraction. @@ -146,7 +146,7 @@ val s = assert(result == 6) result ``` -### Reference +## Reference For more information, see the [blog article](https://www.scala-lang.org/blog/2016/12/07/implicit-function-types.html), (which uses a different syntax that has been superseded). diff --git a/docs/_docs/reference/contextual/contextual.md b/docs/_docs/reference/contextual/contextual.md index a2eaa8a27d2f..fda63397f8f9 100644 --- a/docs/_docs/reference/contextual/contextual.md +++ b/docs/_docs/reference/contextual/contextual.md @@ -4,7 +4,7 @@ title: "Contextual Abstractions" nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual.html --- -### Critique of the Status Quo +## Critique of the Status Quo Scala's implicits are its most distinguished feature. They are _the_ fundamental way to abstract over context. They represent a unified paradigm with a great variety of use cases, among them: implementing type classes, establishing context, dependency injection, expressing capabilities, computing new types and proving relationships between them. @@ -46,7 +46,7 @@ Historically, many of these shortcomings come from the way implicits were gradua Existing Scala programmers by and large have gotten used to the status quo and see little need for change. But for newcomers this status quo presents a big hurdle. 
I believe if we want to overcome that hurdle, we should take a step back and allow ourselves to consider a radically new design. -### The New Design +## The New Design The following pages introduce a redesign of contextual abstractions in Scala. They introduce four fundamental changes: diff --git a/docs/_docs/reference/contextual/derivation.md b/docs/_docs/reference/contextual/derivation.md index 83305917528b..31040615d38c 100644 --- a/docs/_docs/reference/contextual/derivation.md +++ b/docs/_docs/reference/contextual/derivation.md @@ -284,7 +284,7 @@ Note the following properties of `Mirror` types, + The methods `ordinal` and `fromProduct` are defined in terms of `MirroredMonoType` which is the type of kind-`*` which is obtained from `MirroredType` by wildcarding its type parameters. -### Implementing `derived` with `Mirror` +## Implementing `derived` with `Mirror` As seen before, the signature and implementation of a `derived` method for a type class `TC[_]` are arbitrary, but we expect it to typically be of the following form: @@ -310,7 +310,7 @@ authors would normally implement a `derived` method in this way, however this wa authors of the higher level derivation libraries that we expect typical type class authors will use (for a fully worked out example of such a library, see [Shapeless 3](https://github.com/milessabin/shapeless/tree/shapeless-3)). -#### How to write a type class `derived` method using low level mechanisms +## How to write a type class `derived` method using low level mechanisms The low-level method we will use to implement a type class `derived` method in this example exploits three new type-level constructs in Scala 3: inline methods, inline matches, and implicit searches via `summonInline` or `summonFrom`. 
Given this definition of the @@ -484,7 +484,7 @@ The framework described here enables all three of these approaches without manda For a brief discussion on how to use macros to write a type class `derived` method please read more at [How to write a type class `derived` method using macros](./derivation-macro.md). -### Syntax +## Syntax ``` Template ::= InheritClauses [TemplateBody] @@ -507,7 +507,7 @@ It is equivalent to the old form class A extends B with C { ... } ``` -### Discussion +## Discussion This type class derivation framework is intentionally very small and low-level. There are essentially two pieces of infrastructure in compiler-generated `Mirror` instances, diff --git a/docs/_docs/reference/contextual/extension-methods.md b/docs/_docs/reference/contextual/extension-methods.md index 77e06ee687b2..d23cadf513d7 100644 --- a/docs/_docs/reference/contextual/extension-methods.md +++ b/docs/_docs/reference/contextual/extension-methods.md @@ -20,7 +20,7 @@ val circle = Circle(0, 0, 1) circle.circumference ``` -### Translation of Extension Methods +## Translation of Extension Methods An extension method translates to a specially labelled method that takes the leading parameter section as its first argument list. The label, expressed as `` here, is compiler-internal. So, the definition of `circumference` above translates to the following method, and can also be invoked as such: @@ -31,7 +31,7 @@ as `` here, is compiler-internal. So, the definition of `circumferenc assert(circle.circumference == circumference(circle)) ``` -### Operators +## Operators The extension method syntax can also be used to define operators. Examples: @@ -63,7 +63,7 @@ compiler preprocesses an infix operation `x +: xs` to `xs.+:(x)`, so the extensi method ends up being applied to the sequence as first argument (in other words, the two swaps cancel each other out). See [here for details](./right-associative-extension-methods.md). 
-### Generic Extensions +## Generic Extensions It is also possible to extend generic types by adding type parameters to an extension. For instance: @@ -109,7 +109,7 @@ extension [T](x: T)(using n: Numeric[T]) def + (y: T): T = n.plus(x, y) ``` -### Collective Extensions +## Collective Extensions Sometimes, one wants to define several extension methods that share the same left-hand parameter type. In this case one can "pull out" the common parameters into @@ -166,7 +166,7 @@ extension [T](xs: List[T])(using Ordering[T]) xs.zipWithIndex.collect { case (x, i) if x <= limit => i } ``` -### Translation of Calls to Extension Methods +## Translation of Calls to Extension Methods To convert a reference to an extension method, the compiler has to know about the extension method. We say in this case that the extension method is _applicable_ at the point of reference. @@ -280,7 +280,7 @@ def position(s: String)(ch: Char, n: Int): Int = else n ``` -### Syntax +## Syntax Here are the syntax changes for extension methods and collective extensions relative to the [current syntax](../syntax.md). diff --git a/docs/_docs/reference/contextual/given-imports.md b/docs/_docs/reference/contextual/given-imports.md index c5936a58a4a6..6a55368979b1 100644 --- a/docs/_docs/reference/contextual/given-imports.md +++ b/docs/_docs/reference/contextual/given-imports.md @@ -40,7 +40,7 @@ There are two main benefits arising from these rules: can be anonymous, so the usual recourse of using named imports is not practical. -### Importing By Type +## Importing By Type Since givens can be anonymous it is not always practical to import them by their name, and wildcard imports are typically used instead. By-type imports provide a more specific alternative to wildcard imports, which makes it clearer what is imported. Example: @@ -82,7 +82,7 @@ import Instances.{im, given Ordering[?]} would import `im`, `intOrd`, and `listOrd` but leave out `ec`. 
-### Migration +## Migration The rules for imports stated above have the consequence that a library would have to migrate in lockstep with all its users from old style implicits and @@ -101,7 +101,7 @@ These rules mean that library users can use `given` selectors to access old-styl and will be gently nudged and then forced to do so in later versions. Libraries can then switch to given instances once their user base has migrated. -### Syntax +## Syntax ``` Import ::= ‘import’ ImportExpr {‘,’ ImportExpr} diff --git a/docs/_docs/reference/contextual/type-classes.md b/docs/_docs/reference/contextual/type-classes.md index f56b91df022c..6a15ac3a83d4 100644 --- a/docs/_docs/reference/contextual/type-classes.md +++ b/docs/_docs/reference/contextual/type-classes.md @@ -12,7 +12,7 @@ A _type class_ is an abstract, parameterized type that lets you add new behavior Therefore in Scala 3, _type classes_ are just _traits_ with one or more parameters whose implementations are not defined through the `extends` keyword, but by **given instances**. Here are some examples of common type classes: -### Semigroups and monoids +## Semigroups and monoids Here's the `Monoid` type class definition: @@ -61,7 +61,7 @@ def combineAll[T: Monoid](xs: List[T]): T = xs.foldLeft(Monoid[T].unit)(_.combine(_)) ``` -### Functors +## Functors A `Functor` for a type provides the ability for its values to be "mapped over", i.e. apply a function that transforms inside a value while remembering its shape. For example, to modify every element of a collection without dropping or adding elements. We can represent all types that can be "mapped over" with `F`. It's a type constructor: the type of its values becomes concrete when provided a type argument. @@ -127,7 +127,7 @@ The `map` method is now directly used on `original`. It is available as an exten since `original`'s type is `F[A]` and a given instance for `Functor[F[A]]` which defines `map` is in scope. 
-### Monads +## Monads Applying `map` in `Functor[List]` to a mapping function of type `A => B` results in a `List[B]`. So applying it to a mapping function of type `A => List[B]` results in a `List[List[B]]`. To avoid managing lists of lists, we may want to "flatten" the values in a single list. @@ -154,7 +154,7 @@ trait Monad[F[_]] extends Functor[F]: end Monad ``` -#### List +### List A `List` can be turned into a monad via this `given` instance: @@ -171,7 +171,7 @@ Since `Monad` is a subtype of `Functor`, `List` is also a functor. The Functor's operation is already provided by the `Monad` trait, so the instance does not need to define it explicitly. -#### Option +### Option `Option` is an other type having the same kind of behaviour: @@ -185,7 +185,7 @@ given optionMonad: Monad[Option] with case None => None ``` -#### Reader +### Reader Another example of a `Monad` is the _Reader_ Monad, which acts on functions instead of data types like `List` or `Option`. It can be used to combine multiple functions @@ -271,7 +271,7 @@ given readerMonad[Ctx]: Monad[[X] =>> Ctx => X] with end readerMonad ``` -### Summary +## Summary The definition of a _type class_ is expressed with a parameterised type with abstract members, such as a `trait`. The main difference between subtype polymorphism and ad-hoc polymorphism with _type classes_ is how the definition of the _type class_ is implemented, in relation to the type it acts upon. diff --git a/docs/_docs/reference/dropped-features/auto-apply.md b/docs/_docs/reference/dropped-features/auto-apply.md index 154bd2620635..eadfe2f429ea 100644 --- a/docs/_docs/reference/dropped-features/auto-apply.md +++ b/docs/_docs/reference/dropped-features/auto-apply.md @@ -84,13 +84,13 @@ class B extends A: Methods overriding Java or Scala 2 methods are again exempted from this requirement. -### Migrating code +## Migrating code Existing Scala code with inconsistent parameters can still be compiled in Scala 3 under `-source 3.0-migration`. 
When paired with the `-rewrite` option, the code will be automatically rewritten to conform to Scala 3's stricter checking. -### Reference +## Reference For more information, see [Issue #2570](https://github.com/lampepfl/dotty/issues/2570) and [PR #2716](https://github.com/lampepfl/dotty/pull/2716). diff --git a/docs/_docs/reference/dropped-features/do-while.md b/docs/_docs/reference/dropped-features/do-while.md index be86a73fd40d..08a730b8b5a7 100644 --- a/docs/_docs/reference/dropped-features/do-while.md +++ b/docs/_docs/reference/dropped-features/do-while.md @@ -35,7 +35,7 @@ while do print(".") ``` -### Why Drop The Construct? +## Why Drop The Construct? - `do-while` is used relatively rarely and it can be expressed faithfully using just `while`. So there seems to be little point in having it as a separate syntax construct. - Under the [new syntax rules](../other-new-features/control-syntax.md) `do` is used as a statement continuation, which would clash with its meaning as a statement introduction. diff --git a/docs/_docs/reference/dropped-features/weak-conformance-spec.md b/docs/_docs/reference/dropped-features/weak-conformance-spec.md index 78fa0f553fd5..07625dcfe885 100644 --- a/docs/_docs/reference/dropped-features/weak-conformance-spec.md +++ b/docs/_docs/reference/dropped-features/weak-conformance-spec.md @@ -38,7 +38,7 @@ assigning a type to a constant expression. The new rule is: - an `Int -> Short` conversion of a constant `c` if `c.toShort.toInt != c`. 
-### Examples +## Examples ```scala inline val b = 33 diff --git a/docs/_docs/reference/enums/adts.md b/docs/_docs/reference/enums/adts.md index 818db9ee61be..3ab8c9f3b45b 100644 --- a/docs/_docs/reference/enums/adts.md +++ b/docs/_docs/reference/enums/adts.md @@ -91,7 +91,7 @@ enum Color(val rgb: Int): case Mix(mix: Int) extends Color(mix) ``` -### Parameter Variance of Enums +## Parameter Variance of Enums By default, parameterized cases of enums with type parameters will copy the type parameters of their parent, along with any variance notations. As usual, it is important to use type parameters carefully when they are variant, as shown @@ -147,7 +147,7 @@ enum View[-T, +U] extends (T => U): case refl: Refl[r] => refl.f(t) ``` -### Syntax of Enums +## Syntax of Enums Changes to the syntax fall in two categories: enum definitions and cases inside enums. The changes are specified below as deltas with respect to the Scala syntax given [here](../syntax.md) @@ -168,6 +168,6 @@ The changes are specified below as deltas with respect to the Scala syntax given EnumCase ::= `case' (id ClassConstr [`extends' ConstrApps]] | ids) ``` -### Reference +## Reference For more information, see [Issue #1970](https://github.com/lampepfl/dotty/issues/1970). diff --git a/docs/_docs/reference/enums/desugarEnums.md b/docs/_docs/reference/enums/desugarEnums.md index a06736feb1f0..477653d670bb 100644 --- a/docs/_docs/reference/enums/desugarEnums.md +++ b/docs/_docs/reference/enums/desugarEnums.md @@ -159,7 +159,7 @@ map into `case class`es or `val`s. as long as that type is still compatible with the expected type at the point of application. A call `t.copy(ts)` of `C`'s `copy` method is treated in the same way. 
-### Translation of Enums with Singleton Cases +## Translation of Enums with Singleton Cases An enum `E` (possibly generic) that defines one or more singleton cases will define the following additional synthetic members in its companion object (where `E'` denotes `E` with @@ -189,7 +189,7 @@ The `ordinal` method is only generated if the enum does not extend from `java.la `java.lang.Enum` defines it. Similarly there is no need to override `toString` as that is defined in terms of `name` in `java.lang.Enum`. Finally, `productPrefix` will call `this.name` when `E` extends `java.lang.Enum`. -### Scopes for Enum Cases +## Scopes for Enum Cases A case in an `enum` is treated similarly to a secondary constructor. It can access neither the enclosing `enum` using `this`, nor its value parameters or instance members using simple identifiers. @@ -197,7 +197,7 @@ identifiers. Even though translated enum cases are located in the enum's companion object, referencing this object or its members via `this` or a simple identifier is also illegal. The compiler typechecks enum cases in the scope of the enclosing companion object but flags any such illegal accesses as errors. -### Translation of Java-compatible enums +## Translation of Java-compatible enums A Java-compatible enum is an enum that extends `java.lang.Enum`. The translation rules are the same as above, with the reservations defined in this section. @@ -205,7 +205,7 @@ It is a compile-time error for a Java-compatible enum to have class cases. Cases such as `case C` expand to a `@static val` as opposed to a `val`. This allows them to be generated as static fields of the enum type, thus ensuring they are represented the same way as Java enums. -### Other Rules +## Other Rules - A normal case class which is not produced from an enum case is not allowed to extend `scala.reflect.Enum`. 
This ensures that the only cases of an enum are the ones that are diff --git a/docs/_docs/reference/enums/enums-index.md b/docs/_docs/reference/enums/enums-index.md index fb46b3e3ed6b..80d703c3e897 100644 --- a/docs/_docs/reference/enums/enums-index.md +++ b/docs/_docs/reference/enums/enums-index.md @@ -1,7 +1,7 @@ --- layout: index title: "Enums" -movedTo: https://docs.scala-lang.org/scala3/reference/enums/index.html +nightlyOf: https://docs.scala-lang.org/scala3/reference/enums/index.html --- This chapter documents enums in Scala 3. diff --git a/docs/_docs/reference/enums/enums.md b/docs/_docs/reference/enums/enums.md index 2760fb037b2d..65051bdfb39f 100644 --- a/docs/_docs/reference/enums/enums.md +++ b/docs/_docs/reference/enums/enums.md @@ -15,7 +15,7 @@ This defines a new `sealed` class, `Color`, with three values, `Color.Red`, `Color.Green`, `Color.Blue`. The color values are members of `Color`s companion object. -### Parameterized enums +## Parameterized enums Enums can be parameterized. @@ -29,7 +29,7 @@ enum Color(val rgb: Int): As the example shows, you can define the parameter value by using an explicit extends clause. -### Methods defined for enums +## Methods defined for enums The values of an enum correspond to unique integers. The integer associated with an enum value is returned by its `ordinal` method: @@ -56,7 +56,7 @@ scala> Color.fromOrdinal(0) val res2: Color = Red ``` -### User-defined members of enums +## User-defined members of enums It is possible to add your own definitions to an enum. 
Example: @@ -77,7 +77,7 @@ enum Planet(mass: Double, radius: Double): end Planet ``` -### User-defined companion object of enums +## User-defined companion object of enums It is also possible to define an explicit companion object for an enum: ```scala @@ -90,7 +90,7 @@ object Planet: end Planet ``` -### Restrictions on Enum Cases +## Restrictions on Enum Cases Enum case declarations are similar to secondary constructors: they are scoped outside of the enum template, despite being declared within it. @@ -117,7 +117,7 @@ The fields referenced by `Mercury` are not visible, and the fields referenced by be referenced directly (using `import Planet.*`). You must use an indirect reference, such as demonstrated with `Earth`. -### Deprecation of Enum Cases +## Deprecation of Enum Cases As a library author, you may want to signal that an enum case is no longer intended for use. However you could still want to gracefully handle the removal of a case from your public API, such as special casing deprecated cases. @@ -161,7 +161,7 @@ object Planet { We could imagine that a library may use [type class derivation](../contextual/derivation.md) to automatically provide an instance for `Deprecations`. -### Compatibility with Java Enums +## Compatibility with Java Enums If you want to use the Scala-defined enums as [Java enums](https://docs.oracle.com/javase/tutorial/java/javaOO/enum.html), you can do so by extending the class `java.lang.Enum`, which is imported by default, as follows: @@ -182,7 +182,7 @@ val res15: Int = -1 For a more in-depth example of using Scala 3 enums from Java, see [this test](https://github.com/lampepfl/dotty/tree/main/tests/run/enum-java). In the test, the enums are defined in the `MainScala.scala` file and used from a Java source, `Test.java`. -### Implementation +## Implementation Enums are represented as `sealed` classes that extend the `scala.reflect.Enum` trait. 
This trait defines a single public method, `ordinal`: @@ -216,7 +216,7 @@ definition of value `Color.Red` above would expand to: val Red: Color = $new(0, "Red") ``` -### Reference +## Reference For more information, see [Issue #1970](https://github.com/lampepfl/dotty/issues/1970) and [PR #4003](https://github.com/lampepfl/dotty/pull/4003). diff --git a/docs/_docs/reference/experimental/cc.md b/docs/_docs/reference/experimental/cc.md index fe2b95a284aa..2a7236453eab 100644 --- a/docs/_docs/reference/experimental/cc.md +++ b/docs/_docs/reference/experimental/cc.md @@ -4,7 +4,10 @@ title: "Capture Checking" nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/cc.html --- -Capture checking is a research project that modifies the Scala type system to track references to capabilities in values. It can be enabled with a `-Ycc` compiler option. +Capture checking is a research project that modifies the Scala type system to track references to capabilities in values. It can be enabled by the language import +```scala +import language.experimental.captureChecking +``` At present, capture checking is still highly experimental and unstable. To get an idea what capture checking can do, let's start with a small example: @@ -78,10 +81,6 @@ The following sections explain in detail how capture checking works in Scala 3. The capture checker extension introduces a new kind of types and it enforces some rules for working with these types. -Capture checking is enabled by the compiler option `-Ycc`. If the option is not given, the new -type forms can still be written but they are not checked for consistency, because they are -treated simply as certain uninterpreted annotated types. - ## Capabilities and Capturing Types Capture checking is done in terms of _capturing types_ of the form @@ -129,7 +128,8 @@ any capturing type that adds a capture set to `T`. 
## Function Types The usual function type `A => B` now stands for a function that can capture arbitrary capabilities. We call such functions -_impure_. By contrast, the new single arrow function type `A -> B` stands for a function that cannot capture any capabilities, or otherwise said, is _pure_. One can add a capture set in front of an otherwise pure function. +_impure_. By contrast, the new single arrow function type `A -> B` stands for a function that cannot capture any capabilities, or otherwise said, is _pure_. +One can add a capture set in front of an otherwise pure function. For instance, `{c, d} A -> B` would be a function that can capture capabilities `c` and `d`, but no others. The impure function type `A => B` is treated as an alias for `{*} A -> B`. That is, impure functions are functions that can capture anything. @@ -503,7 +503,7 @@ crasher() This code needs to be rejected since otherwise the call to `crasher()` would cause an unhandled `LimitExceeded` exception to be thrown. -Under `-Ycc`, the code is indeed rejected +Under the language import `language.experimental.captureChecking`, the code is indeed rejected ``` 14 | try () => xs.map(f).sum | ^ @@ -655,7 +655,6 @@ TBD The following options are relevant for capture checking. - - **-Ycc** Enables capture checking. - **-Xprint:cc** Prints the program with capturing types as inferred by capture checking. - **-Ycc-debug** Gives more detailed, implementation-oriented information about capture checking, as described in the next section. diff --git a/docs/_docs/reference/experimental/erased-defs-spec.md b/docs/_docs/reference/experimental/erased-defs-spec.md index 5395a8468399..24ae89c7e28b 100644 --- a/docs/_docs/reference/experimental/erased-defs-spec.md +++ b/docs/_docs/reference/experimental/erased-defs-spec.md @@ -62,3 +62,9 @@ TODO: complete 7. 
Overriding * Member definitions overriding each other must both be `erased` or not be `erased` * `def foo(x: T): U` cannot be overridden by `def foo(erased x: T): U` and vice-versa + * + + +8. Type Restrictions + * For dependent functions, `erased` parameters are limited to realizable types, that is, types that are inhabited by non-null values. + This restriction stops us from using a bad bound introduced by an erased value, which leads to unsoundness (see #4060). diff --git a/docs/_docs/reference/experimental/overview.md b/docs/_docs/reference/experimental/overview.md index 62109837290b..254f103896e4 100644 --- a/docs/_docs/reference/experimental/overview.md +++ b/docs/_docs/reference/experimental/overview.md @@ -5,7 +5,7 @@ nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/overview.ht redirectFrom: overview.html --- -### Experimental language features +## Experimental language features All experimental language features can be found under the `scala.language.experimental` package. They are enabled by importing the feature or using the `-language` compiler flag. @@ -16,12 +16,12 @@ They are enabled by importing the feature or using the `-language` compiler flag * [`namedTypeArguments`](./named-typeargs.md): Enable support for named type arguments * [`saferExceptions`](./canthrow.md): Enable support for checked exceptions. -### Experimental language imports +## Experimental language imports In general, experimental language features can be imported in an experimental scope (see [experimental definitions](../other-new-features/experimental-defs.md)). They can be imported at the top-level if all top-level definitions are `@experimental`. -### Experimental language features supported by special compiler options +## Experimental language features supported by special compiler options Some experimental language features that are still in research and development can be enabled with special compiler options. 
These include diff --git a/docs/_docs/reference/experimental/purefuns.md b/docs/_docs/reference/experimental/purefuns.md new file mode 100644 index 000000000000..7c369f85f010 --- /dev/null +++ b/docs/_docs/reference/experimental/purefuns.md @@ -0,0 +1,32 @@ +--- +layout: doc-page +title: "Pure Function Syntax" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/purefuns.html +--- + +Pure functions are an experimental feature that can be enabled by the language import +```scala +import language.experimental.pureFunctions +``` +Under that import the syntax `A -> B` is available with the intention that it should denote a pure, side effect-free function from `A` to `B`. Some other variants are also supported: +```scala + (A1, ..., An) -> B // a multi-argument pure function + (x1: A1, ..., xn: An) -> B // a dependent pure function + A ?-> B // a pure context function + (A1, ..., An) ?-> B // a multi-argument pure context function + (x1: A1, ..., xn: An) ?-> B // a dependent pure context function + -> B // a pure call-by-name parameter +``` +A function's purity can be checked by capture tracking, another experimental language feature which is presently in a very early stage. Until that second feature matures, the pure function syntax should be understood to be for documentation only. A pure function type is a requirement that all its instances should be side effect-free. This requirement currently needs to be checked manually, but checking might be automated in the future. + +## Why Enable It Now? + +There are at least three reasons why one might want to enable `pureFunctions` today: + + - to get better documentation since it makes the intent clear, + - to prepare the code base for a time when full effect checking is implemented, + - to have a common code base that can be compiled with or without capture checking enabled. 
+ +## More info: + +TBD \ No newline at end of file diff --git a/docs/_docs/reference/language-versions/binary-compatibility.md b/docs/_docs/reference/language-versions/binary-compatibility.md index d0409d32e6b7..df1c19f97868 100644 --- a/docs/_docs/reference/language-versions/binary-compatibility.md +++ b/docs/_docs/reference/language-versions/binary-compatibility.md @@ -1,7 +1,7 @@ --- layout: doc-page title: "Binary Compatibility" -movedTo: https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html +nightlyOf: https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html --- In Scala 2 different minor versions of the compiler were free to change the way how they encode different language features in JVM bytecode so each bump of the compiler's minor version resulted in breaking binary compatibility and if a project had any Scala dependencies they all needed to be (cross-)compiled to the same minor Scala version that was used in that project itself. On the contrary, Scala 3 has a stable encoding into JVM bytecode. diff --git a/docs/_docs/reference/language-versions/language-versions.md b/docs/_docs/reference/language-versions/language-versions.md index 1bc8d939a7e9..2dfd04857cab 100644 --- a/docs/_docs/reference/language-versions/language-versions.md +++ b/docs/_docs/reference/language-versions/language-versions.md @@ -1,7 +1,7 @@ --- layout: index title: "Language Versions" -movedTo: https://docs.scala-lang.org/scala3/reference/language-versions/index.html +nightlyOf: https://docs.scala-lang.org/scala3/reference/language-versions/index.html --- Additional information on interoperability and migration between Scala 2 and 3 can be found [here](https://docs.scala-lang.org/scala3/guides/migration/compatibility-intro.html). 
diff --git a/docs/_docs/reference/language-versions/source-compatibility.md b/docs/_docs/reference/language-versions/source-compatibility.md index 077f06b2b4db..699ba0d5c75d 100644 --- a/docs/_docs/reference/language-versions/source-compatibility.md +++ b/docs/_docs/reference/language-versions/source-compatibility.md @@ -1,7 +1,7 @@ --- layout: doc-page title: "Source Compatibility" -movedTo: https://docs.scala-lang.org/scala3/reference/language-versions/source-compatibility.html +nightlyOf: https://docs.scala-lang.org/scala3/reference/language-versions/source-compatibility.html --- Scala 3 does NOT guarantee source compatibility between different minor language versions (e.g. some syntax valid in 3.x might get deprecated and then phased out in 3.y for y > x). There are also some syntax structures that were valid in Scala 2 but are not anymore in Scala 3. However the compiler provides a possibility to specify the desired version of syntax used in a particular file or globally for a run of the compiler to make migration between versions easier. diff --git a/docs/_docs/reference/metaprogramming/inline.md b/docs/_docs/reference/metaprogramming/inline.md index 065b7a1e9e71..0c4800069bad 100644 --- a/docs/_docs/reference/metaprogramming/inline.md +++ b/docs/_docs/reference/metaprogramming/inline.md @@ -224,7 +224,7 @@ If a `inline` modifier is given for parameters, corresponding arguments must be pure expressions of constant type. 
--> -#### The definition of constant expression +### The definition of constant expression Right-hand sides of inline values and of arguments for inline parameters must be constant expressions in the sense defined by the [SLS §6.24](https://www.scala-lang.org/files/archive/spec/2.13/06-expressions.html#constant-expressions), @@ -344,10 +344,10 @@ In a transparent inline, an `inline if` will force the inlining of any inline de ## Inline Matches A `match` expression in the body of an `inline` method definition may be -prefixed by the `inline` modifier. If there is enough static information to -unambiguously take a branch, the expression is reduced to that branch and the -type of the result is taken. If not, a compile-time error is raised that -reports that the match cannot be reduced. +prefixed by the `inline` modifier. If there is enough type information +at compile time to select a branch, the expression is reduced to that branch and the +type of the expression is the type of the right-hand side of that result. +If not, a compile-time error is raised that reports that the match cannot be reduced. The example below defines an inline method with a single inline match expression that picks a case based on its static type: @@ -363,8 +363,9 @@ g("test") // Has type (String, String) ``` The scrutinee `x` is examined statically and the inline match is reduced -accordingly returning the corresponding value (with the type specialized because `g` is declared `transparent`). This example performs a simple type test over the -scrutinee. The type can have a richer structure like the simple ADT below. +accordingly returning the corresponding value (with the type specialized because `g` is declared `transparent`). +This example performs a simple type test over the scrutinee. +The type can have a richer structure like the simple ADT below. 
`toInt` matches the structure of a number in [Church-encoding](https://en.wikipedia.org/wiki/Church_encoding) and _computes_ the corresponding integer. @@ -384,6 +385,6 @@ val intTwo: 2 = natTwo `natTwo` is inferred to have the singleton type 2. -### Reference +## Reference For more information about the semantics of `inline`, see the [Scala 2020: Semantics-preserving inlining for metaprogramming](https://dl.acm.org/doi/10.1145/3426426.3428486) paper. diff --git a/docs/_docs/reference/new-types/dependent-function-types-spec.md b/docs/_docs/reference/new-types/dependent-function-types-spec.md index 984a44d25e37..f3237ddf7b9a 100644 --- a/docs/_docs/reference/new-types/dependent-function-types-spec.md +++ b/docs/_docs/reference/new-types/dependent-function-types-spec.md @@ -120,6 +120,6 @@ def composeFn[A, B, C]: assert(composeFn(i2s)(s2i)(22) == 2) ``` -### Type Checking +## Type Checking After desugaring no additional typing rules are required for dependent function types. diff --git a/docs/_docs/reference/new-types/match-types.md b/docs/_docs/reference/new-types/match-types.md index 9fcee09c34f5..d646dd11880b 100644 --- a/docs/_docs/reference/new-types/match-types.md +++ b/docs/_docs/reference/new-types/match-types.md @@ -83,6 +83,12 @@ following conditions are met: and these types are `=:=` to their corresponding type patterns in the match type +So you know, while the case body will be expected to have the type on the right-hand +side of the corresponding match type case, that doesn't imply the match type argument +is constrained. Using the example, the last case body must conform to X, but that +doesn't constrain X to be AnyVal, and therefore a LeafElem[X] inside the body wouldn't +reduce; it would remain stuck, and as such just an abstract type. 
+ ## Representation of Match Types The internal representation of a match type diff --git a/docs/_docs/reference/new-types/new-types.md b/docs/_docs/reference/new-types/new-types.md index 8eb1d7b3bd1b..84c157495d6f 100644 --- a/docs/_docs/reference/new-types/new-types.md +++ b/docs/_docs/reference/new-types/new-types.md @@ -1,7 +1,7 @@ --- layout: index title: "New Types" -movedTo: https://docs.scala-lang.org/scala3/reference/new-types/index.html +nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/index.html --- This chapter documents the new types introduced in Scala 3. diff --git a/docs/_docs/reference/new-types/type-lambdas-spec.md b/docs/_docs/reference/new-types/type-lambdas-spec.md index 2aa90ddbbba8..76937e5160f7 100644 --- a/docs/_docs/reference/new-types/type-lambdas-spec.md +++ b/docs/_docs/reference/new-types/type-lambdas-spec.md @@ -13,7 +13,7 @@ TypeParam ::= {Annotation} (id [HkTypeParamClause] | ‘_’) TypeBounds TypeBounds ::= [‘>:’ Type] [‘<:’ Type] ``` -### Type Checking +## Type Checking A type lambda such as `[X] =>> F[X]` defines a function from types to types. The parameter(s) may carry bounds. If a parameter is bounded, as in `[X >: L <: U] =>> F[X]` it is checked that arguments to the parameters conform to the bounds `L` and `U`. diff --git a/docs/_docs/reference/new-types/union-types-spec.md b/docs/_docs/reference/new-types/union-types-spec.md index d250d3f11713..1093631e7c63 100644 --- a/docs/_docs/reference/new-types/union-types-spec.md +++ b/docs/_docs/reference/new-types/union-types-spec.md @@ -72,6 +72,10 @@ a non-union type, for this purpose we define the _join_ of a union type `T1 | `T1`,...,`Tn`. Note that union types might still appear as type arguments in the resulting type, this guarantees that the join is always finite. +The _visible join_ of a union type is its join where all operands of the intersection that +are instances of [transparent](../other-new-features/transparent-traits.md) traits or classes are removed. 
+is the soft union type `Int | String`. Similarly for match expressions. The type of
-**Note:** Since this behavior limits the usability of union types, it might -be changed in the future. For example by not widening unions that have been -explicitly written down by the user and not inferred, or by not widening a type -argument when the corresponding type parameter is covariant. - -See [PR #2330](https://github.com/lampepfl/dotty/pull/2330) and -[Issue #4867](https://github.com/lampepfl/dotty/issues/4867) for further discussions. - ### Example ```scala diff --git a/docs/_docs/reference/new-types/union-types.md b/docs/_docs/reference/new-types/union-types.md index ebc4565e36fb..978e08649d9e 100644 --- a/docs/_docs/reference/new-types/union-types.md +++ b/docs/_docs/reference/new-types/union-types.md @@ -8,8 +8,9 @@ A union type `A | B` has as values all values of type `A` and also all values of ```scala -case class UserName(name: String) -case class Password(hash: Hash) +trait ID +case class UserName(name: String) extends ID +case class Password(hash: Hash) extends ID def help(id: UserName | Password) = val user = id match @@ -22,7 +23,10 @@ Union types are duals of intersection types. `|` is _commutative_: `A | B` is the same type as `B | A`. The compiler will assign a union type to an expression only if such a -type is explicitly given. This can be seen in the following [REPL](https://docs.scala-lang.org/overviews/repl/overview.html) transcript: +type is explicitly given or if the common supertype of all alternatives is [transparent](../other-new-features/transparent-traits.md). 
+ + +This can be seen in the following [REPL](https://docs.scala-lang.org/overviews/repl/overview.html) transcript: ```scala scala> val password = Password(123) @@ -32,15 +36,36 @@ scala> val name = UserName("Eve") val name: UserName = UserName(Eve) scala> if true then name else password -val res2: Object = UserName(Eve) +val res1: ID = UserName(Eve) scala> val either: Password | UserName = if true then name else password -val either: Password | UserName = UserName(Eve) +val either: UserName | Password = UserName(Eve) ``` - -The type of `res2` is `Object & Product`, which is a supertype of -`UserName` and `Password`, but not the least supertype `Password | -UserName`. If we want the least supertype, we have to give it +The type of `res1` is `ID`, which is a supertype of +`UserName` and `Password`, but not the least supertype `UserName | Password`. +If we want the least supertype, we have to give it explicitly, as is done for the type of `either`. +The inference behavior changes if the common supertrait `ID` is declared `transparent`: +```scala +transparent trait ID +``` +In that case the union type is not widened. +```scala +scala> if true then name else password +val res2: UserName | Password = UserName(Eve) +``` +The more precise union type is also inferred if `UserName` and `Password` are declared without an explicit +parent, since in that case their implied superclass is `Object`, which is among the classes that are +assumed to be transparent. See [Transparent Traits and Classes](../other-new-features/transparent-traits.md) +for a list of such classes. 
+```scala +case class UserName(name: String) +case class Password(hash: Hash) + +scala> if true then UserName("Eve") else Password(123) +val res3: UserName | Password = UserName(Eve) +``` + + [More details](./union-types-spec.md) diff --git a/docs/_docs/reference/other-new-features/control-syntax.md b/docs/_docs/reference/other-new-features/control-syntax.md index 574e53406e27..92204690f0b7 100644 --- a/docs/_docs/reference/other-new-features/control-syntax.md +++ b/docs/_docs/reference/other-new-features/control-syntax.md @@ -41,7 +41,7 @@ The rules in detail are: - A `catch` can be followed by a single case on the same line. If there are multiple cases, these have to appear within braces (just like in Scala 2) or an indented block. -### Rewrites +## Rewrites The Scala 3 compiler can rewrite source code from old syntax to new syntax and back. When invoked with options `-rewrite -new-syntax` it will rewrite from old to new syntax, dropping parentheses and braces in conditions and enumerators. When invoked with options `-rewrite -old-syntax` it will rewrite in the reverse direction, inserting parentheses and braces as needed. diff --git a/docs/_docs/reference/other-new-features/creator-applications.md b/docs/_docs/reference/other-new-features/creator-applications.md index a7b0366210c0..81f09d897955 100644 --- a/docs/_docs/reference/other-new-features/creator-applications.md +++ b/docs/_docs/reference/other-new-features/creator-applications.md @@ -50,7 +50,7 @@ Constructor proxies are also not allowed to shadow normal definitions. That is, if an identifier resolves to a constructor proxy, and the same identifier is also defined or imported in some other scope, an ambiguity is reported. -### Motivation +## Motivation Leaving out `new` hides an implementation detail and makes code more pleasant to read. 
Even though it requires a new rule, it will likely increase the perceived regularity of the language, since case diff --git a/docs/_docs/reference/other-new-features/experimental-defs.md b/docs/_docs/reference/other-new-features/experimental-defs.md index d110c8bc079b..88815ad1e136 100644 --- a/docs/_docs/reference/other-new-features/experimental-defs.md +++ b/docs/_docs/reference/other-new-features/experimental-defs.md @@ -216,6 +216,7 @@ Experimental definitions can only be referenced in an experimental scope. Experi
Example 1 + ```scala import scala.annotation.experimental @@ -241,6 +242,7 @@ Experimental definitions can only be referenced in an experimental scope. Experi } } ``` +
5. Annotations of an experimental definition are in experimental scopes. Examples: @@ -268,13 +270,6 @@ Can use the `-Yno-experimental` compiler flag to disable it and run as a proper In any other situation, a reference to an experimental definition will cause a compilation error. -## Experimental inheritance - -All subclasses of an experimental `class` or `trait` must be marked as [`@experimental`](https://scala-lang.org/api/3.x/scala/annotation/experimental.html) even if they are in an experimental scope. -Anonymous classes and SAMs of experimental classes are considered experimental. - -We require explicit annotations to make sure we do not have completion or cycles issues with nested classes. This restriction could be relaxed in the future. - ## Experimental overriding For an overriding member `M` and overridden member `O`, if `O` is non-experimental then `M` must be non-experimental. diff --git a/docs/_docs/reference/other-new-features/indentation.md b/docs/_docs/reference/other-new-features/indentation.md index 57c8b8ad2d50..40e2fc6fb38c 100644 --- a/docs/_docs/reference/other-new-features/indentation.md +++ b/docs/_docs/reference/other-new-features/indentation.md @@ -12,7 +12,7 @@ Scala 3 enforces some rules on indentation and allows some occurrences of braces These changes can be turned off with the compiler flag `-no-indent`. -### Indentation Rules +## Indentation Rules The compiler enforces two rules for well-indented programs, flagging violations as warnings. @@ -42,7 +42,7 @@ any restrictions on indentation within expressions, nor do they require that all The rules are generally helpful in pinpointing the root cause of errors related to missing opening or closing braces. These errors are often quite hard to diagnose, in particular in large programs. -### Optional Braces +## Optional Braces The compiler will insert `` or `` tokens at certain line breaks. Grammatically, pairs of `` and `` tokens have the same effect as pairs of braces `{` and `}`. 
@@ -130,7 +130,7 @@ else d ``` is parsed as `if x then a + b + c else d`. -### Optional Braces Around Template Bodies +## Optional Braces Around Template Bodies The Scala grammar uses the term _template body_ for the definitions of a class, trait, or object that are normally enclosed in braces. The braces around a template body can also be omitted by means of the following rule. @@ -186,11 +186,65 @@ Refinement ::= :<<< [RefineDcl] {semi [RefineDcl]} >>> Packaging ::= ‘package’ QualId :<<< TopStats >>> ``` -### Spaces vs Tabs +## Optional Braces for Method Arguments + +Starting with Scala 3.3, a `` token is also recognized where a function argument would be expected. Examples: + +```scala +times(10): + println("ah") + println("ha") +``` + +or + +```scala +credentials `++`: + val file = Path.userHome / ".credentials" + if file.exists + then Seq(Credentials(file)) + else Seq() +``` + +or + +```scala +xs.map: + x => + val y = x - 1 + y * y +``` +What's more, a `:` in these settings can also be followed on the same line by the parameter part and arrow of a lambda. So the last example could be compressed to this: + +```scala +xs.map: x => + val y = x - 1 + y * y +``` +and the following would also be legal: +```scala +xs.foldLeft(0): (x, y) => + x + y +``` + +The grammar changes for optional braces around arguments are as follows. + +``` +SimpleExpr ::= ... + | SimpleExpr ColonArgument +InfixExpr ::= ... + | InfixExpr id ColonArgument +ColonArgument ::= colon [LambdaStart] + indent (CaseClauses | Block) outdent +LambdaStart ::= FunParams (‘=>’ | ‘?=>’) + | HkTypeParamClause ‘=>’ +``` + +## Spaces vs Tabs Indentation prefixes can consist of spaces and/or tabs. Indentation widths are the indentation prefixes themselves, ordered by the string prefix relation. So, so for instance "2 tabs, followed by 4 spaces" is strictly less than "2 tabs, followed by 5 spaces", but "2 tabs, followed by 4 spaces" is incomparable to "6 tabs" or to "4 spaces, followed by 2 tabs". 
It is an error if the indentation width of some line is incomparable with the indentation width of the region that's current at that point. To avoid such errors, it is a good idea not to mix spaces and tabs in the same source file. -### Indentation and Braces +## Indentation and Braces Indentation can be mixed freely with braces `{...}`, as well as brackets `[...]` and parentheses `(...)`. For interpreting indentation inside such regions, the following rules apply. @@ -224,7 +278,7 @@ statement starting with `val`). (i.e. the indentation width of `y + 1`). - Finally, the indentation width of the last region in parentheses starting with `(x` is 6 (i.e. the indentation width of the indented region following the `=>`. -### Special Treatment of Case Clauses +## Special Treatment of Case Clauses The indentation rules for `match` expressions and `catch` clauses are refined as follows: @@ -247,7 +301,7 @@ case 5 => print("V") println(".") ``` -### Using Indentation to Signal Statement Continuation +## Using Indentation to Signal Statement Continuation Indentation is used in some situations to decide whether to insert a virtual semicolon between two consecutive lines or to treat them as one statement. Virtual semicolon insertion is @@ -279,7 +333,7 @@ The Scala-2 behavior is retained under source `-no-indent` or `-source 3.0-migra -### The End Marker +## The End Marker Indentation-based syntax has many advantages over other conventions. But one possible problem is that it makes it hard to discern when a large indentation region ends, since there is no specific token that delineates the end. Braces are not much better since a brace by itself also contains no information about what region is closed. @@ -364,7 +418,7 @@ package p1.p2: end p2 ``` -#### When to Use End Markers +### When to Use End Markers It is recommended that `end` markers are used for code where the extent of an indentation region is not immediately apparent "at a glance". 
People will have different preferences what this means, but one can nevertheless give some guidelines that stem from experience. An end marker makes sense if @@ -374,7 +428,7 @@ It is recommended that `end` markers are used for code where the extent of an in If none of these criteria apply, it's often better to not use an end marker since the code will be just as clear and more concise. If there are several ending regions that satisfy one of the criteria above, we usually need an end marker only for the outermost closed region. So cascades of end markers as in the example above are usually better avoided. -#### Syntax +### Syntax ``` EndMarker ::= ‘end’ EndMarkerTag -- when followed by EOL @@ -385,7 +439,7 @@ TemplateStat ::= ... | EndMarker TopStat ::= ... | EndMarker ``` -### Example +## Example Here is a (somewhat meta-circular) example of code using indentation. It provides a concrete representation of indentation widths as defined above together with efficient operations for constructing and comparing indentation widths. @@ -438,7 +492,7 @@ object IndentWidth: end IndentWidth ``` -### Settings and Rewrites +## Settings and Rewrites Significant indentation is enabled by default. It can be turned off by giving any of the options `-no-indent`, `-old-syntax` and `-source 3.0-migration`. If indentation is turned off, it is nevertheless checked that indentation conforms to the logical program structure as defined by braces. If that is not the case, the compiler issues a warning. @@ -448,62 +502,3 @@ indented regions where possible. When invoked with options `-rewrite -no-indent` The `-indent` option only works on [new-style syntax](./control-syntax.md). So to go from old-style syntax to new-style indented code one has to invoke the compiler twice, first with options `-rewrite -new-syntax`, then again with options `-rewrite -indent`. 
To go in the opposite direction, from indented code to old-style syntax, it's `-rewrite -no-indent`, followed by `-rewrite -old-syntax`. -### Variant: Indentation Marker `:` for Arguments - -Generally, the possible indentation regions coincide with those regions where braces `{...}` are also legal, no matter whether the braces enclose an expression or a set of definitions. There is one exception, though: Arguments to functions can be enclosed in braces but they cannot be simply indented instead. Making indentation always significant for function arguments would be too restrictive and fragile. - -To allow such arguments to be written without braces, a variant of the indentation scheme is implemented under language import -```scala -import language.experimental.fewerBraces -``` -In this variant, a `` token is also recognized where function argument would be expected. Examples: - -```scala -times(10): - println("ah") - println("ha") -``` - -or - -```scala -credentials `++`: - val file = Path.userHome / ".credentials" - if file.exists - then Seq(Credentials(file)) - else Seq() -``` - -or - -```scala -xs.map: - x => - val y = x - 1 - y * y -``` -What's more, a `:` in these settings can also be followed on the same line by the parameter part and arrow of a lambda. So the last example could be compressed to this: - -```scala -xs.map: x => - val y = x - 1 - y * y -``` -and the following would also be legal: -```scala -xs.foldLeft(0): (x, y) => - x + y -``` - -The grammar changes for this variant are as follows. - -``` -SimpleExpr ::= ... - | SimpleExpr ColonArgument -InfixExpr ::= ... 
- | InfixExpr id ColonArgument -ColonArgument ::= colon [LambdaStart] - indent (CaseClauses | Block) outdent -LambdaStart ::= FunParams (‘=>’ | ‘?=>’) - | HkTypeParamClause ‘=>’ -``` diff --git a/docs/_docs/reference/other-new-features/matchable.md b/docs/_docs/reference/other-new-features/matchable.md index bd0daba189ee..234fdf03220c 100644 --- a/docs/_docs/reference/other-new-features/matchable.md +++ b/docs/_docs/reference/other-new-features/matchable.md @@ -6,7 +6,7 @@ nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/match A new trait [`Matchable`](https://scala-lang.org/api/3.x/scala/Matchable.html) controls the ability to pattern match. -### The Problem +## The Problem The Scala 3 standard library has a type [`IArray`](https://scala-lang.org/api/3.x/scala.html#IArray-0) for immutable arrays that is defined like this: @@ -47,7 +47,7 @@ f(imm) Finally, note that the problem is not linked to just [opaque types](opaques.md). No unbounded type parameter or abstract type should be decomposable with a pattern match. -### The Solution +## The Solution There is a new type [`scala.Matchable`](https://scala-lang.org/api/3.x/scala/Matchable.html) that controls pattern matching. When typing a pattern match of a constructor pattern `C(...)` or a type pattern `_: C` it is required that the selector type conforms @@ -95,7 +95,7 @@ class Object extends Any, Matchable [`Matchable`](https://scala-lang.org/api/3.x/scala/Matchable.html) is currently a marker trait without any methods. Over time we might migrate methods `getClass` and `isInstanceOf` to it, since these are closely related to pattern-matching. -### `Matchable` and Universal Equality +## `Matchable` and Universal Equality Methods that pattern-match on selectors of type `Any` will need a cast once the Matchable warning is turned on. 
The most common such method is the universal diff --git a/docs/_docs/reference/other-new-features/opaques-details.md b/docs/_docs/reference/other-new-features/opaques-details.md index 6506699305ef..87e56e240481 100644 --- a/docs/_docs/reference/other-new-features/opaques-details.md +++ b/docs/_docs/reference/other-new-features/opaques-details.md @@ -52,6 +52,7 @@ def id(x: o.T): o.T = x ``` Opaque type aliases cannot be `private` and cannot be overridden in subclasses. +Opaque type aliases cannot have a context function type as right-hand side. ## Type Parameters of Opaque Types diff --git a/docs/_docs/reference/other-new-features/opaques.md b/docs/_docs/reference/other-new-features/opaques.md index b87d45485a93..567b51098016 100644 --- a/docs/_docs/reference/other-new-features/opaques.md +++ b/docs/_docs/reference/other-new-features/opaques.md @@ -59,7 +59,7 @@ l * 2 // error: found: Int(2), required: Logarithm l / l2 // error: `/` is not a member of Logarithm ``` -### Bounds For Opaque Type Aliases +## Bounds For Opaque Type Aliases Opaque type aliases can also come with bounds. Example: @@ -147,7 +147,7 @@ On the other hand, the call `roItem.rights.isOneOf(ReadWrite)` would give a type `Permissions` and `PermissionChoice` are different, unrelated types outside `Access`. -### Opaque Type Members on Classes +## Opaque Type Members on Classes While typically, opaque types are used together with objects to hide implementation details of a module, they can also be used with classes. For example, we can redefine the above example of Logarithms as a class. @@ -174,6 +174,6 @@ val z = l2(3.1) l1.mul(x, y) // type checks l1.mul(x, z) // error: found l2.Logarithm, required l1.Logarithm ``` -In general, one can think of an opaque type as being only transparent in the scope of `private[this]`. 
+In general, one can think of an opaque type as being only transparent in the scope of `private[this]` (unless the type is a top level definition - in this case, it's transparent only within the file it's defined in). [More details](opaques-details.md) diff --git a/docs/_docs/reference/other-new-features/targetName.md b/docs/_docs/reference/other-new-features/targetName.md index 1a7ca33f6950..717ce4247a1f 100644 --- a/docs/_docs/reference/other-new-features/targetName.md +++ b/docs/_docs/reference/other-new-features/targetName.md @@ -23,7 +23,7 @@ VecOps.append(vec1, vec2) The [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotation has no bearing on Scala usages. Any application of that method in Scala has to use `++=`, not `append`. -### Details +## Details 1. `@targetName` is defined in package `scala.annotation`. It takes a single argument of type `String`. That string is called the _external name_ of the definition @@ -40,7 +40,7 @@ The [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.h 5. Definitions with names in backticks that are not legal host platform names should also have a [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotation. -### Relationship with Overriding +## Relationship with Overriding [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotations are significant for matching two method definitions to decide whether they conflict or override each other. Two method definitions match if they have the same name, signature, and erased name. 
Here, diff --git a/docs/_docs/reference/other-new-features/threadUnsafe-annotation.md b/docs/_docs/reference/other-new-features/threadUnsafe-annotation.md index 0068d0a6fe53..ae1af1e4b671 100644 --- a/docs/_docs/reference/other-new-features/threadUnsafe-annotation.md +++ b/docs/_docs/reference/other-new-features/threadUnsafe-annotation.md @@ -8,7 +8,7 @@ A new annotation [`@threadUnsafe`](https://scala-lang.org/api/3.x/scala/annotati a `lazy val`. When this annotation is used, the initialization of the [`lazy val`](../changed-features/lazy-vals-init.md) will use a faster mechanism which is not thread-safe. -### Example +## Example ```scala import scala.annotation.threadUnsafe diff --git a/docs/_docs/reference/other-new-features/trait-parameters.md b/docs/_docs/reference/other-new-features/trait-parameters.md index 1d13574bce03..c704e73ce9b8 100644 --- a/docs/_docs/reference/other-new-features/trait-parameters.md +++ b/docs/_docs/reference/other-new-features/trait-parameters.md @@ -53,7 +53,7 @@ The correct way to write `E` is to extend both `Greeting` and class E extends Greeting("Bob"), FormalGreeting ``` -### Traits With Context Parameters +## Traits With Context Parameters This "explicit extension required" rule is relaxed if the missing trait contains only [context parameters](../contextual/using-clauses.md). 
In that case the trait reference is diff --git a/docs/_docs/reference/other-new-features/transparent-traits.md b/docs/_docs/reference/other-new-features/transparent-traits.md index 699ce0b9ddd8..b930ffbfde00 100644 --- a/docs/_docs/reference/other-new-features/transparent-traits.md +++ b/docs/_docs/reference/other-new-features/transparent-traits.md @@ -1,6 +1,6 @@ --- layout: doc-page -title: "Transparent Traits" +title: "Transparent Traits and Classes" nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/transparent-traits.html --- @@ -20,12 +20,13 @@ val x = Set(if condition then Val else Var) Here, the inferred type of `x` is `Set[Kind & Product & Serializable]` whereas one would have hoped it to be `Set[Kind]`. The reasoning for this particular type to be inferred is as follows: -- The type of the conditional above is the [union type](../new-types/union-types.md) `Val | Var`. -- A union type is widened in type inference to the least supertype that is not a union type. - In the example, this type is `Kind & Product & Serializable` since all three traits are traits of both `Val` and `Var`. +- The type of the conditional above is the [union type](../new-types/union-types.md) `Val | Var`. This union type is treated as "soft", which means it was not explicitly written in the source program, but came from forming an upper bound of the types of +some alternatives. +- A soft union type is widened in type inference to the least product of class or trait types that is a supertype of the union type. + In the example, this type is `Kind & Product & Serializable` since all three traits are super-traits of both `Val` and `Var`. So that type becomes the inferred element type of the set. -Scala 3 allows one to mark a mixin trait as `transparent`, which means that it can be suppressed in type inference. 
Here's an example that follows the lines of the code above, but now with a new transparent trait `S` instead of `Product`: +Scala 3 allows one to mark a trait or class as `transparent`, which means that it can be suppressed in type inference. Here's an example that follows the lines of the code above, but now with a new transparent trait `S` instead of `Product`: ```scala transparent trait S @@ -38,13 +39,40 @@ val x = Set(if condition then Val else Var) Now `x` has inferred type `Set[Kind]`. The common transparent trait `S` does not appear in the inferred type. -## Transparent Traits +In the previous example, one could also declare `Kind` as `transparent`: +```scala +transparent trait Kind +``` +The widened union type of `if condition then Val else Var` would then +_only_ contain the transparent traits `Kind` and `S`. In this case, +the widening is not performed at all, so `x` would have type `Set[Val | Var]`. + +The root classes and traits `Any`, `AnyVal`, `Object`, and `Matchable` are +considered to be transparent. This means that an expression such +as +```scala +if condition then 1 else "hello" +``` +will have type `Int | String` instead of the widened type `Any`. + -The traits [`scala.Product`](https://scala-lang.org/api/3.x/scala/Product.html), [`java.io.Serializable`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/io/Serializable.html) and [`java.lang.Comparable`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/Comparable.html) -are treated automatically as transparent. Other traits are turned into transparent traits using the modifier `transparent`. Scala 2 traits can also be made transparent -by adding a [`@transparentTrait`](https://scala-lang.org/api/3.x/scala/annotation/transparentTrait.html) annotation. This annotation is defined in [`scala.annotation`](https://scala-lang.org/api/3.x/scala/annotation.html). It will be deprecated and phased out once Scala 2/3 interoperability is no longer needed. 
-Typically, transparent traits are traits +## Which Traits and Classes Are Transparent? + +Traits and classes are declared transparent by adding the modifier `transparent`. Scala 2 traits and classes can also be declared transparent by adding a [`@transparentTrait`](https://scala-lang.org/api/3.x/scala/annotation/transparentTrait.html) annotation. This annotation is defined in [`scala.annotation`](https://scala-lang.org/api/3.x/scala/annotation.html). It will be deprecated and phased out once Scala 2/3 interoperability is no longer needed. + +The following classes and traits are automatically treated as transparent: +```scala + scala.Any + scala.AnyVal + scala.Matchable + scala.Product + java.lang.Object + java.lang.Comparable + java.io.Serializable +``` + +Typically, transparent types other than the root classes are traits that influence the implementation of inheriting classes and traits that are not usually used as types by themselves. Two examples from the standard collection library are: - [`IterableOps`](https://scala-lang.org/api/3.x/scala/collection/IterableOps.html), which provides method implementations for an [`Iterable`](https://scala-lang.org/api/3.x/scala/collection/Iterable.html). @@ -55,7 +83,10 @@ declared transparent. ## Rules for Inference -Transparent traits can be given as explicit types as usual. But they are often elided when types are inferred. Roughly, the rules for type inference say that transparent traits are dropped from intersections where possible. +Transparent traits and classes can be given as explicit types as usual. But they are often elided when types are inferred. Roughly, the rules for type inference imply the following. + + - Transparent traits are dropped from intersections where possible. + - Union types are not widened if widening would result in only transparent supertypes. 
The precise rules are as follows: @@ -63,8 +94,8 @@ The precise rules are as follows: - where that type is not higher-kinded, - and where `B` is its known upper bound or `Any` if none exists: - If the type inferred so far is of the form `T1 & ... & Tn` where - `n >= 1`, replace the maximal number of transparent `Ti`s by `Any`, while ensuring that + `n >= 1`, replace the maximal number of transparent traits `Ti`s by `Any`, while ensuring that the resulting type is still a subtype of the bound `B`. -- However, do not perform this widening if all transparent traits `Ti` can get replaced in that way. +- However, do not perform this widening if all types `Ti` can get replaced in that way. This clause ensures that a single transparent trait instance such as [`Product`](https://scala-lang.org/api/3.x/scala/Product.html) is not widened to [`Any`](https://scala-lang.org/api/3.x/scala/Any.html). Transparent trait instances are only dropped when they appear in conjunction with some other type. -The last clause ensures that a single transparent trait instance such as [`Product`](https://scala-lang.org/api/3.x/scala/Product.html) is not widened to [`Any`](https://scala-lang.org/api/3.x/scala/Any.html). Transparent trait instances are only dropped when they appear in conjunction with some other type. +- If the original type was a is union type that got widened in a previous step to a product consisting only of transparent traits and classes, keep the original union type instead of its widened form. \ No newline at end of file diff --git a/docs/_docs/reference/syntax.md b/docs/_docs/reference/syntax.md index 2072da165966..7e4b81b1ef5a 100644 --- a/docs/_docs/reference/syntax.md +++ b/docs/_docs/reference/syntax.md @@ -31,7 +31,7 @@ hexDigit ::= ‘0’ | … | ‘9’ | ‘A’ | … | ‘F’ | ‘a’ | Informal descriptions are typeset as `“some comment”`. -### Lexical Syntax +## Lexical Syntax The lexical syntax of Scala is given by the following grammar in EBNF form. 
@@ -249,6 +249,7 @@ Catches ::= ‘catch’ (Expr | ExprCaseClause) PostfixExpr ::= InfixExpr [id] -- only if language.postfixOperators is enabled InfixExpr ::= PrefixExpr | InfixExpr id [nl] InfixExpr + | InfixExpr id ColonArgument | InfixExpr MatchClause MatchClause ::= ‘match’ <<< CaseClauses >>> PrefixExpr ::= [PrefixOperator] SimpleExpr @@ -267,6 +268,11 @@ SimpleExpr ::= SimpleRef | SimpleExpr ‘.’ MatchClause | SimpleExpr TypeArgs | SimpleExpr ArgumentExprs + | SimpleExpr ColonArgument +ColonArgument ::= colon [LambdaStart] + indent (CaseClauses | Block) outdent +LambdaStart ::= FunParams (‘=>’ | ‘?=>’) + | HkTypeParamClause ‘=>’ Quoted ::= ‘'’ ‘{’ Block ‘}’ | ‘'’ ‘[’ Type ‘]’ ExprSplice ::= spliceId -- if inside quoted block @@ -306,7 +312,10 @@ TypeCaseClauses ::= TypeCaseClause { TypeCaseClause } TypeCaseClause ::= ‘case’ (InfixType | ‘_’) ‘=>’ Type [semi] Pattern ::= Pattern1 { ‘|’ Pattern1 } -Pattern1 ::= Pattern2 [‘:’ RefinedType] +Pattern1 ::= PatVar ‘:’ RefinedType + | [‘-’] integerLiteral ‘:’ RefinedType + | [‘-’] floatingPointLiteral ‘:’ RefinedType + | Pattern2 Pattern2 ::= [id ‘@’] InfixPattern [‘*’] InfixPattern ::= SimplePattern { id [nl] SimplePattern } SimplePattern ::= PatVar diff --git a/docs/_layouts/doc-page.html b/docs/_layouts/doc-page.html index 36e806678136..5f88a3301875 100644 --- a/docs/_layouts/doc-page.html +++ b/docs/_layouts/doc-page.html @@ -5,9 +5,7 @@
{% if urls.editSource %} - - Edit this page on GitHub - + Edit this page on GitHub {% endif %}

{{ page.title }}

diff --git a/docs/_layouts/index.html b/docs/_layouts/index.html index 72e3bb609d56..247f916530dd 100644 --- a/docs/_layouts/index.html +++ b/docs/_layouts/index.html @@ -1,15 +1,24 @@ --- layout: static-site-main --- -

{{ page.title }}

- -{{ content }} - -

Table of Contents

- + +
+
+ {% if urls.editSource %} + + {% endif %} +

{{ page.title }}

+
+ + {{ content }} + +

Table of Contents

+ + +
\ No newline at end of file diff --git a/docs/_layouts/main.html b/docs/_layouts/main.html index a22f912e3eef..6adc6cacda46 100644 --- a/docs/_layouts/main.html +++ b/docs/_layouts/main.html @@ -1,9 +1,5 @@ --- layout: base -extraJS: - - js/contributors.js -extraCSS: - - css/content-contributors.css ---
{{ content }}
diff --git a/docs/_layouts/static-site-main.html b/docs/_layouts/static-site-main.html index 618525782626..508cf61efe2d 100644 --- a/docs/_layouts/static-site-main.html +++ b/docs/_layouts/static-site-main.html @@ -4,12 +4,15 @@
- {% if page.movedTo %} + {% if page.nightlyOf %} {% endif %} {{ content }}
@@ -28,7 +31,7 @@
{% endif %} {% if page.next %}
- Next + Next
diff --git a/project/scripts/check-cla.sh b/project/scripts/check-cla.sh index 21efa74eb2eb..1a91363f5079 100755 --- a/project/scripts/check-cla.sh +++ b/project/scripts/check-cla.sh @@ -2,15 +2,19 @@ set -eux echo "Pull request submitted by $AUTHOR"; -signed=$(curl -s https://www.lightbend.com/contribute/cla/scala/check/$AUTHOR | jq -r ".signed"); -if [ "$signed" = "true" ] ; then +if [ "$AUTHOR" = "github-actions[bot]" ] ; then echo "CLA check for $AUTHOR successful"; else - echo "CLA check for $AUTHOR failed"; - echo "Please sign the Scala CLA to contribute to the Scala compiler."; - echo "Go to https://www.lightbend.com/contribute/cla/scala and then"; - echo "comment on the pull request to ask for a new check."; - echo ""; - echo "Check if CLA is signed: https://www.lightbend.com/contribute/cla/scala/check/$AUTHOR"; - exit 1; + signed=$(curl -s "https://www.lightbend.com/contribute/cla/scala/check/$AUTHOR" | jq -r ".signed"); + if [ "$signed" = "true" ] ; then + echo "CLA check for $AUTHOR successful"; + else + echo "CLA check for $AUTHOR failed"; + echo "Please sign the Scala CLA to contribute to the Scala compiler."; + echo "Go to https://www.lightbend.com/contribute/cla/scala and then"; + echo "comment on the pull request to ask for a new check."; + echo ""; + echo "Check if CLA is signed: https://www.lightbend.com/contribute/cla/scala/check/$AUTHOR"; + exit 1; + fi; fi; diff --git a/project/scripts/cmdScaladocTests b/project/scripts/cmdScaladocTests index 343be777cea7..ad97b568e29d 100755 --- a/project/scripts/cmdScaladocTests +++ b/project/scripts/cmdScaladocTests @@ -16,16 +16,16 @@ DOTTY_NONBOOTSTRAPPED_VERSION=$(eval $DOTTY_NONBOOTSTRAPPED_VERSION_COMMAND | ta DOTTY_BOOTSTRAPPED_VERSION_COMMAND="$SBT \"eval println(Build.dottyVersion)\"" DOTTY_BOOTSTRAPPED_VERSION=$(eval $DOTTY_BOOTSTRAPPED_VERSION_COMMAND | tail -n 2 | head -n 1) -GITHUB_REPOSITORY="lampepfl/dotty" -GITHUB_SHA="3.0.0" +SOURCE_LINKS_REPOSITORY="lampepfl/dotty" 
+SOURCE_LINKS_VERSION="${GITHUB_SHA:-$DOTTY_BOOTSTRAPPED_VERSION}" "$SBT" "scaladoc/generateTestcasesDocumentation" > "$tmp" 2>&1 || echo "generated testcases project with sbt" dist/target/pack/bin/scaladoc \ -d "$OUT1" \ -project "scaladoc testcases" \ -source-links:out/bootstrap/stdlib-bootstrapped/scala-"${DOTTY_NONBOOTSTRAPPED_VERSION}"/src_managed/main/scala-library-src=github://scala/scala/v"${STDLIB_VERSION}"#src/library \ - -source-links:out/bootstrap/stdlib-bootstrapped/scala-"${DOTTY_NONBOOTSTRAPPED_VERSION}"/src_managed/main/dotty-library-src=github://"${GITHUB_REPOSITORY}"/"${GITHUB_SHA}"\#library/src \ - -source-links:github://"${GITHUB_REPOSITORY}"/"${GITHUB_SHA}" \ + -source-links:out/bootstrap/stdlib-bootstrapped/scala-"${DOTTY_NONBOOTSTRAPPED_VERSION}"/src_managed/main/dotty-library-src=github://"${SOURCE_LINKS_REPOSITORY}"/"${SOURCE_LINKS_VERSION}"\#library/src \ + -source-links:github://"${SOURCE_LINKS_REPOSITORY}"/"${SOURCE_LINKS_VERSION}" \ "-external-mappings:.*scala/.*::scaladoc3::https://dotty.epfl.ch/api/,.*java/.*::javadoc::https://docs.oracle.com/javase/8/docs/api/" \ "-skip-by-regex:.+\.internal($|\..+)" \ "-skip-by-regex:.+\.impl($|\..+)" \ @@ -40,6 +40,6 @@ dist/target/pack/bin/scaladoc \ -project-footer "Copyright (c) 2002-2022, LAMP/EPFL" \ -default-template static-site-main \ -author -groups -revision main -project-version "${DOTTY_BOOTSTRAPPED_VERSION}" \ - "-quick-links:Learn::https://docs.scala-lang.org/,Install::https://www.scala-lang.org/download/,Playground::https://scastie.scala-lang.org,Find A Library::https://index.scala-lang.org,Community::https://www.scala-lang.org/community/,Blog::https://www.scala-lang.org/blog/" \ + "-quick-links:Learn::https://docs.scala-lang.org/,Install::https://www.scala-lang.org/download/,Playground::https://scastie.scala-lang.org,Find A Library::https://index.scala-lang.org,Community::https://www.scala-lang.org/community/,Blog::https://www.scala-lang.org/blog/," \ 
out/bootstrap/scaladoc-testcases/scala-"${DOTTY_NONBOOTSTRAPPED_VERSION}"/classes > "$tmp" 2>&1 || echo "generated testcases project with scripts" diff -rq "$OUT1" "scaladoc/output/testcases" diff --git a/project/scripts/expected-links/reference-expected-links.txt b/project/scripts/expected-links/reference-expected-links.txt index 755aee589e60..a09737574c1d 100644 --- a/project/scripts/expected-links/reference-expected-links.txt +++ b/project/scripts/expected-links/reference-expected-links.txt @@ -40,7 +40,6 @@ ./contextual/type-classes.html ./contextual/using-clauses.html ./docs/reference/other-new-features/named-typeargs.html -./docsScalaLangResources/scaladoc-assets.html ./dropped-features.html ./dropped-features/auto-apply.html ./dropped-features/class-shadowing-spec.html diff --git a/sbt-test/scala2-compat/i16351/app/App.scala b/sbt-test/scala2-compat/i16351/app/App.scala new file mode 100644 index 000000000000..5c152f515ada --- /dev/null +++ b/sbt-test/scala2-compat/i16351/app/App.scala @@ -0,0 +1,8 @@ +package app + +import lib.* + +object App { + def main(args: Array[String]): Unit = + new Lib(Value("Foo"), b = 2) {} +} diff --git a/sbt-test/scala2-compat/i16351/build.sbt b/sbt-test/scala2-compat/i16351/build.sbt new file mode 100644 index 000000000000..433a5e8baddf --- /dev/null +++ b/sbt-test/scala2-compat/i16351/build.sbt @@ -0,0 +1,13 @@ +val scala3Version = sys.props("plugin.scalaVersion") +val scala2Version = sys.props("plugin.scala2Version") + +lazy val lib = project.in(file("lib")) + .settings( + scalaVersion := scala2Version + ) + +lazy val app = project.in(file("app")) + .dependsOn(lib) + .settings( + scalaVersion := scala3Version + ) diff --git a/sbt-test/scala2-compat/i16351/lib/lib.scala b/sbt-test/scala2-compat/i16351/lib/lib.scala new file mode 100644 index 000000000000..cfc3c6c780d9 --- /dev/null +++ b/sbt-test/scala2-compat/i16351/lib/lib.scala @@ -0,0 +1,10 @@ +// Should be compiled with 2.13 +package lib + +class Value(val value: String) 
+ +class Lib( + value: => Value, + a: Int = 0, + b: Int +) diff --git a/sbt-test/scala2-compat/i16351/test b/sbt-test/scala2-compat/i16351/test new file mode 100644 index 000000000000..63092ffa4a03 --- /dev/null +++ b/sbt-test/scala2-compat/i16351/test @@ -0,0 +1 @@ +> app/run diff --git a/scaladoc-js/common/src/code-snippets/CodeSnippets.scala b/scaladoc-js/common/src/code-snippets/CodeSnippets.scala index c857d7d7df98..9ac09c93fb06 100644 --- a/scaladoc-js/common/src/code-snippets/CodeSnippets.scala +++ b/scaladoc-js/common/src/code-snippets/CodeSnippets.scala @@ -147,5 +147,7 @@ class CodeSnippets: ) } - enrichSnippets() + window.addEventListener("dynamicPageLoad", (e: Event) => { + enrichSnippets() + }) diff --git a/scaladoc-js/common/src/utils/html.scala b/scaladoc-js/common/src/utils/html.scala index fd49179752a2..1a7f108a3555 100644 --- a/scaladoc-js/common/src/utils/html.scala +++ b/scaladoc-js/common/src/utils/html.scala @@ -2,7 +2,7 @@ package dotty.tools.scaladoc package utils import scala.scalajs.js -import org.scalajs.dom.{html => domhtml, _} +import org.scalajs.dom.{html as domhtml, *} object HTML { type TagArg = domhtml.Element | Seq[domhtml.Element | String] | String @@ -50,6 +50,11 @@ object HTML { extension (key: String) def :=(value: String): AppliedAttr = (key, value) + def aRaw(content: String): domhtml.Element = + val x = document.createElement("a").asInstanceOf[domhtml.Element] + x.innerHTML = content + x + opaque type AppliedAttr = (String, String) val div = Tag[domhtml.Div]("div") diff --git a/scaladoc-js/contributors/src/Globals.scala b/scaladoc-js/contributors/src/Globals.scala deleted file mode 100644 index 51bbf39ab3a3..000000000000 --- a/scaladoc-js/contributors/src/Globals.scala +++ /dev/null @@ -1,12 +0,0 @@ -package dotty.tools.scaladoc - -import scala.scalajs.js -import scala.scalajs.js.annotation.JSGlobalScope - -@js.native -@JSGlobalScope -object Globals extends js.Object { - val githubContributorsUrl: String = js.native - val 
githubContributorsFilename: String = js.native -} - diff --git a/scaladoc-js/contributors/src/content-contributors/ContentContributors.scala b/scaladoc-js/contributors/src/content-contributors/ContentContributors.scala index 28b78cef0296..2e10e4fae0fa 100644 --- a/scaladoc-js/contributors/src/content-contributors/ContentContributors.scala +++ b/scaladoc-js/contributors/src/content-contributors/ContentContributors.scala @@ -49,7 +49,10 @@ trait FileChange extends js.Object: class ContentContributors: val indenticonsUrl = "https://github.com/identicons" - def linkForFilename(filename: String) = Globals.githubContributorsUrl + s"/commits?path=$filename" + val htmlElement = window.document.documentElement + def githubContributorsUrl() = htmlElement.getAttribute("data-githubContributorsUrl") + def githubContributorsFilename() = htmlElement.getAttribute("data-githubContributorsFilename") + def linkForFilename(filename: String) = githubContributorsUrl() + s"/commits?path=$filename" def getAuthorsForFilename(filename: String): Future[List[FullAuthor]] = { val link = linkForFilename(filename) Ajax.get(link).map(_.responseText).flatMap { json => @@ -85,29 +88,3 @@ class ContentContributors: .map(_.previous_filename) } } - document.addEventListener("DOMContentLoaded", (e: Event) => { - if js.typeOf(Globals.githubContributorsUrl) != "undefined" && - js.typeOf(Globals.githubContributorsFilename) != "undefined" - then { - getAuthorsForFilename(Globals.githubContributorsFilename.stripPrefix("/")).onComplete { - case Success(authors) => - val maybeDiv = Option(document.getElementById("documentation-contributors")) - maybeDiv.foreach { mdiv => - authors.foreach { case FullAuthor(name, url, imgUrl) => - val inner = div( - img(src := imgUrl)(), - a(href := url)(name) - ) - mdiv.appendChild(inner) - } - - if authors.nonEmpty then - mdiv.asInstanceOf[html.Div].parentElement.classList.toggle("hidden") - } - case Failure(err) => - println(s"Couldn't fetch contributors. 
$err") - None - } - } - }) - diff --git a/scaladoc-js/main/src/Main.scala b/scaladoc-js/main/src/Main.scala index 924b45400eb0..79dce8dba88c 100644 --- a/scaladoc-js/main/src/Main.scala +++ b/scaladoc-js/main/src/Main.scala @@ -3,8 +3,6 @@ import scala.scalajs.js.annotation._ object Main extends App: Searchbar() - SocialLinks() DropdownHandler() - Ux() TooltipNormalizer() CodeSnippets() diff --git a/scaladoc-js/main/src/searchbar/PageEntry.scala b/scaladoc-js/main/src/searchbar/PageEntry.scala index 3910f721192c..a44647d58814 100644 --- a/scaladoc-js/main/src/searchbar/PageEntry.scala +++ b/scaladoc-js/main/src/searchbar/PageEntry.scala @@ -11,11 +11,13 @@ trait PageEntryJS extends js.Object { val l: String = js.native val e: Boolean = js.native val k: String = js.native + val x: String = js.native } case class PageEntry( fullName: String, description: String, + extraDescription: String, extensionTarget: String, location: String, isLocationExternal: Boolean, @@ -37,6 +39,7 @@ object PageEntry { def apply(jsObj: PageEntryJS): PageEntry = PageEntry( jsObj.t, jsObj.d, + jsObj.x, jsObj.i, jsObj.l, jsObj.e, diff --git a/scaladoc-js/main/src/searchbar/Searchbar.scala b/scaladoc-js/main/src/searchbar/Searchbar.scala index 687888cbae61..c1c66104ccf1 100644 --- a/scaladoc-js/main/src/searchbar/Searchbar.scala +++ b/scaladoc-js/main/src/searchbar/Searchbar.scala @@ -3,7 +3,7 @@ package dotty.tools.scaladoc class Searchbar { val pages = SearchbarGlobals.pages.toList.map(PageEntry.apply) val parser = QueryParser() - val searchEngine = SearchbarEngine(pages) + val searchEngine = PageSearchEngine(pages) val inkuireEngine = InkuireJSSearchEngine() val component = SearchbarComponent(searchEngine, inkuireEngine, parser) } \ No newline at end of file diff --git a/scaladoc-js/main/src/searchbar/SearchbarComponent.scala b/scaladoc-js/main/src/searchbar/SearchbarComponent.scala index 5b6f798b9e0b..010129eb9f59 100644 --- a/scaladoc-js/main/src/searchbar/SearchbarComponent.scala +++ 
b/scaladoc-js/main/src/searchbar/SearchbarComponent.scala @@ -1,5 +1,7 @@ package dotty.tools.scaladoc +import scala.concurrent.{ Future, ExecutionContext } +import concurrent.ExecutionContext.Implicits.global import utils.HTML._ import scala.scalajs.js.Date @@ -8,20 +10,20 @@ import org.scalajs.dom.ext._ import org.scalajs.dom.html.Input import scala.scalajs.js.timers._ import scala.concurrent.duration.{span => dspan, _} - import scala.util.chaining._ import java.net.URI -class SearchbarComponent(engine: SearchbarEngine, inkuireEngine: InkuireJSSearchEngine, parser: QueryParser): +class SearchbarComponent(engine: PageSearchEngine, inkuireEngine: InkuireJSSearchEngine, parser: QueryParser): val initialChunkSize = 5 val resultsChunkSize = 20 + def pathToRoot() = window.document.documentElement.getAttribute("data-pathToRoot") extension (p: PageEntry) def toHTML(boldChars: Set[Int]) = val location = if (p.isLocationExternal) { p.location } else { - Globals.pathToRoot + p.location + pathToRoot() + p.location } val extensionTargetMessage = if (p.extensionTarget.isEmpty()) { @@ -30,27 +32,32 @@ class SearchbarComponent(engine: SearchbarEngine, inkuireEngine: InkuireJSSearch " extension on " + p.extensionTarget } - div(cls := "scaladoc-searchbar-row mono-small-inline", "result" := "")( - a(href := location)( - p.fullName.zipWithIndex.map((c, i) => if boldChars.contains(i) then b(c.toString) else c.toString), - span(i(extensionTargetMessage)), - span(cls := "pull-right scaladoc-searchbar-location")(p.description) - ).tap { _.onclick = (event: Event) => - if (document.body.contains(rootDiv)) { - document.body.removeChild(rootDiv) - } + a(cls := "scaladoc-searchbar-row mono-small-inline", href := location)( + p.fullName.zipWithIndex.map((c, i) => + if c == ' ' then aRaw(" ") + else if boldChars.contains(i) then b(c.toString) + else c.toString), + span(i(extensionTargetMessage)), + span(cls := "pull-right scaladoc-searchbar-location")(p.description), + if p.extraDescription 
== "" then "" + else div(cls := "scaladoc-searchbar-extra-info")(p.extraDescription) + ).tap { _.onclick = (event: Event) => + if (document.body.contains(rootDiv)) { + document.body.removeChild(rootDiv) } - ).tap { wrapper => wrapper.addEventListener("mouseover", { + }.tap { wrapper => + wrapper.addEventListener("mouseover", { case e: MouseEvent => handleHover(wrapper) }) } + extension (m: InkuireMatch) def toHTML = val location = if (m.pageLocation(0) == 'e') { m.pageLocation.substring(1) } else { - Globals.pathToRoot + m.pageLocation.substring(1) + pathToRoot() + m.pageLocation.substring(1) } div(cls := "scaladoc-searchbar-row mono-small-inline", "result" := "", "inkuire-result" := "", "mq" := m.mq.toString)( @@ -91,49 +98,82 @@ class SearchbarComponent(engine: SearchbarEngine, inkuireEngine: InkuireJSSearch span(cls := s"micon ${kind.take(2)} $customClass"), span(kind) ) - - def handleNewFluffQuery(matchers: List[Matchers]) = - val result: List[(PageEntry, Set[Int])] = engine.query(matchers) - val fragment = document.createDocumentFragment() - def createLoadMoreElement = - div(cls := "scaladoc-searchbar-row mono-small-inline", "loadmore" := "")( - a( - span("Load more") + def handleNewFluffQuery(query: NameAndKindQuery) = + val searchTask: Future[List[MatchResult]] = Future(engine.query(query)) + searchTask.map { result => + if result.isEmpty then + val noResultsDiv = div(id := "no-results-container")( + div(cls := "no-result-icon"), + h2(cls := "h200 no-result-header")("No results match your filter criteria."), + p(cls := "body-small no-result-content")("Try adjusting or clearing your filters", p("to display better result")), + button(id := "searchbar-clear-button", cls := "clearButton label-only-button")("Clear all filters").tap(_.addEventListener("click", _ => { + inputElem.value = "" + inputElem.dispatchEvent(new Event("input")) + })) ) - ).tap { loadMoreElement => loadMoreElement - .addEventListener("mouseover", _ => handleHover(loadMoreElement)) - } - - 
result.groupBy(_._1.kind).map { - case (kind, entries) => - val kindSeparator = createKindSeparator(kind) - val htmlEntries = entries.map((p, set) => p.toHTML(set)) - val loadMoreElement = createLoadMoreElement - def loadMoreResults(entries: List[raw.HTMLElement]): Unit = { - loadMoreElement.onclick = (event: Event) => { - entries.take(resultsChunkSize).foreach(_.classList.remove("hidden")) - val nextElems = entries.drop(resultsChunkSize) - if nextElems.nonEmpty then loadMoreResults(nextElems) else loadMoreElement.classList.add("hidden") + resultsDiv.scrollTop = 0 + resultsDiv.appendChild(noResultsDiv) + else + val resultWithDocBonus = result + .map(entry => + // add bonus score for static pages when in documentation section + if entry.pageEntry.kind == "static" && !window.location.href.contains("api") then + entry.copy(score = entry.score + 7) + else entry + ) + val fragment = document.createDocumentFragment() + + def createLoadMoreElement = + div(cls := "scaladoc-searchbar-row mono-small-inline", "loadmore" := "")( + a( + span("Load more") + ) + ).tap { loadMoreElement => + loadMoreElement + .addEventListener("mouseover", _ => handleHover(loadMoreElement)) } + + val groupedResults = resultWithDocBonus.groupBy(_.pageEntry.kind) + val groupedResultsSortedByScore = groupedResults.map { + case (kind, results) => (kind, results.maxByOption(_.score).map(_.score), results) + }.toList.sortBy { + case (_, topScore, _) => -topScore.getOrElse(0) + }.map { + case (kind, _, results) => (kind, results.take(40)) // limit to 40 results per category } - fragment.appendChild(kindSeparator) - htmlEntries.foreach(fragment.appendChild) - fragment.appendChild(loadMoreElement) + groupedResultsSortedByScore.map { + case (kind, results) => + val kindSeparator = createKindSeparator(kind) + val htmlEntries = results.map(result => result.pageEntry.toHTML(result.indices)) + val loadMoreElement = createLoadMoreElement + + def loadMoreResults(entries: List[raw.HTMLElement]): Unit = { + 
loadMoreElement.onclick = (event: Event) => { + entries.take(resultsChunkSize).foreach(_.classList.remove("hidden")) + val nextElems = entries.drop(resultsChunkSize) + if nextElems.nonEmpty then loadMoreResults(nextElems) else loadMoreElement.classList.add("hidden") + } + } + + fragment.appendChild(kindSeparator) + htmlEntries.foreach(fragment.appendChild) + fragment.appendChild(loadMoreElement) + + val nextElems = htmlEntries.drop(initialChunkSize) + if nextElems.nonEmpty then { + nextElems.foreach(_.classList.add("hidden")) + loadMoreResults(nextElems) + } else { + loadMoreElement.classList.add("hidden") + } - val nextElems = htmlEntries.drop(initialChunkSize) - if nextElems.nonEmpty then { - nextElems.foreach(_.classList.add("hidden")) - loadMoreResults(nextElems) - } else { - loadMoreElement.classList.add("hidden") } + resultsDiv.scrollTop = 0 + resultsDiv.appendChild(fragment) } - resultsDiv.scrollTop = 0 - resultsDiv.appendChild(fragment) - def handleRecentQueries(query: String) = { val recentQueries = RecentQueryStorage.getData if query != "" then RecentQueryStorage.addEntry(RecentQuery(query, Date.now())) @@ -167,13 +207,13 @@ class SearchbarComponent(engine: SearchbarEngine, inkuireEngine: InkuireJSSearch resultsDiv.scrollTop = 0 resultsDiv.onscroll = (event: Event) => { } val fragment = document.createDocumentFragment() - timeoutHandle = setTimeout(600.millisecond) { + timeoutHandle = setTimeout(300.millisecond) { clearResults() handleRecentQueries(query) parser.parse(query) match { - case EngineMatchersQuery(matchers) => - handleNewFluffQuery(matchers) - case BySignature(signature) => + case query: NameAndKindQuery => + handleNewFluffQuery(query) + case SignatureQuery(signature) => val loading = createLoadingAnimation val kindSeparator = createKindSeparator("inkuire") resultsDiv.appendChild(loading) @@ -257,7 +297,11 @@ class SearchbarComponent(engine: SearchbarEngine, inkuireEngine: InkuireJSSearch inputContainer, resultsDiv ).tap { elem => - 
elem.addEventListener("mousedown", (e: Event) => e.stopPropagation()) + elem.addEventListener("mousedown", (e: Event) => + val evTargetId = e.target.asInstanceOf[html.Element].id + + if evTargetId != "scaladoc-searchbar" then + e.stopPropagation()) elem.addEventListener("keydown", { case e: KeyboardEvent => if e.keyCode == 40 then handleArrowDown() @@ -270,8 +314,8 @@ class SearchbarComponent(engine: SearchbarEngine, inkuireEngine: InkuireJSSearch val searchbarFooter = div(id := "searchbar-footer", cls := "body-small")( span(cls := "searchbar-footer-left-container")( span("Smart search:"), - span(b("CC "), "to find CamcelCase phrases"), - span(b("A=>B "), "to find CamcelCase signatures"), + span(b("CC "), "to find CamelCase phrases"), + span(b("A=>B "), "to find CamelCase signatures"), ), span(cls := "searchbar-footer-right-container")( span(b("Esc "), "to close"), diff --git a/scaladoc-js/main/src/searchbar/engine/InkuireJSSearchEngine.scala b/scaladoc-js/main/src/searchbar/engine/InkuireJSSearchEngine.scala index f3e7cfd52417..2dde7178e3f7 100644 --- a/scaladoc-js/main/src/searchbar/engine/InkuireJSSearchEngine.scala +++ b/scaladoc-js/main/src/searchbar/engine/InkuireJSSearchEngine.scala @@ -45,4 +45,4 @@ class InkuireJSSearchEngine { worker.postMessage(s) } -} \ No newline at end of file +} diff --git a/scaladoc-js/main/src/searchbar/engine/Matchers.scala b/scaladoc-js/main/src/searchbar/engine/Matchers.scala index 944578b43fa0..8b78644d95fc 100644 --- a/scaladoc-js/main/src/searchbar/engine/Matchers.scala +++ b/scaladoc-js/main/src/searchbar/engine/Matchers.scala @@ -1,29 +1,8 @@ package dotty.tools.scaladoc -sealed trait EngineQuery -case class EngineMatchersQuery(matchers: List[Matchers]) extends EngineQuery -case class BySignature(signature: String) extends EngineQuery - -case class Match(priority: Int, matchedIndexes: Set[Int]) // matchedIndexes - indexes of chars that got matched +case class MatchResult(score: Int, pageEntry: PageEntry, indices: Set[Int]) 
-sealed trait Matchers extends Function1[PageEntry, Match] - -case class ByName(query: String) extends Matchers: - val tokens = StringUtils.createCamelCaseTokens(query) - def apply(p: PageEntry): Match = { - val nameOption = Option(p.shortName.toLowerCase) - //Edge case for empty query string - if query == "" then Match(1, Set.empty) - else { - val (result, indexes) = p.shortName.toLowerCase.zipWithIndex.foldLeft((query.toLowerCase, Set.empty[Int])) { - case ((pattern, indexes), (nextChar, index)) => - if !pattern.isEmpty then { - if pattern.head.toString.equalsIgnoreCase(nextChar.toString) then (pattern.tail, indexes + index) else (pattern, indexes) - } else ("", indexes) - } - if result.isEmpty then Match(p.shortName.size - query.size + 1, indexes) else Match(-1, Set.empty) - } - } +sealed trait EngineQuery +case class NameAndKindQuery(name: Option[String], kind: Option[String]) extends EngineQuery -case class ByKind(kind: String) extends Matchers: - def apply(p: PageEntry): Match = Match(if p.kind.equalsIgnoreCase(kind) then 1 else -1, Set.empty) +case class SignatureQuery(signature: String) extends EngineQuery diff --git a/scaladoc-js/main/src/searchbar/engine/PageSearchEngine.scala b/scaladoc-js/main/src/searchbar/engine/PageSearchEngine.scala new file mode 100644 index 000000000000..c6af1d1f1c29 --- /dev/null +++ b/scaladoc-js/main/src/searchbar/engine/PageSearchEngine.scala @@ -0,0 +1,183 @@ +package dotty.tools.scaladoc + +import scala.concurrent.{ Future, ExecutionContext } +import concurrent.ExecutionContext.Implicits.global +import math.Ordering.Implicits.seqOrdering +import org.scalajs.dom.Node + +import scala.annotation.tailrec + +/** + * TODO: + * - Prematcher simple scoring + * - Test first token score + * - Maybe matcher for len > 1? + * - Fix kinds (class List) + * - Search for docs in Docs, for classes/etc. in Api. + * - Write tests! Lists of pages and assert ordering. + * - Optimize. 
+ */ +class PageSearchEngine(pages: List[PageEntry]): + + def query(query: NameAndKindQuery): List[MatchResult] = { + matchPages(query) + .filter { + case MatchResult(score, _, _) => score >= 0 + } + .sortBy { + case MatchResult(score, _, _) => -score + } + } + + private def kindScoreBonus(kind: String): Int = kind match { + case "class" => 5 + case "object" | "enum" => 4 + case "trait" => 3 + case "def" | "val" | "given" | "type" => 1 + case _ => 0 + } + + private val positionScores = List(8,4,2,1).orElse(PartialFunction.fromFunction(_ => 0)) + + private def matchCompletnessBonus(nameCharacters: Int, matchCharacters: Int): Int = + (matchCharacters * 6) / nameCharacters + + (if nameCharacters == matchCharacters then 2 else 0) + + private def matchPages(query: NameAndKindQuery): List[MatchResult] = query match + case NameAndKindQuery(None, None) => List.empty + case NameAndKindQuery(None, Some(kind)) => + filterKind(pages, kind) + .map(MatchResult(1, _, Set.empty)) + case NameAndKindQuery(Some(""), kind) => + kind.fold(pages)(filterKind(pages, _)) + .map(MatchResult(1, _, Set.empty)) + case NameAndKindQuery(Some(nameSearch), kind) => + val kindFiltered = kind.fold(pages)(filterKind(pages, _)) + val prematchedPages = prematchPages(kindFiltered, nameSearch) + + if nameSearch.length > 1 then + prematchedPages.map { prematched => + val finalMatch = matchPage(prematched, nameSearch) + val bonusScore = kindScoreBonus(prematched.pageEntry.kind) + + matchCompletnessBonus(prematched.pageEntry.shortName.length, nameSearch.length) + finalMatch.copy(score = finalMatch.score + bonusScore) + } + else prematchedPages + + private def filterKind(pages: List[PageEntry], kind: String): List[PageEntry] = + pages.filter(_.kind == kind) + + def prematchPages(pages: List[PageEntry], search: String): List[MatchResult] = + pages.map(prematchPage(_, search)).filter(_.indices.nonEmpty) + + private def prematchPage(page: PageEntry, search: String): MatchResult = + val pageName = page.shortName 
+ @tailrec + def prematchPageAcc(nameIndex: Int, searchIndex: Int, acc: Set[Int], scoreAcc: Int, consecutiveMatches: Int): MatchResult = + if searchIndex >= search.length then + MatchResult(scoreAcc, page, acc) + else if nameIndex >= pageName.length then + MatchResult(0, page, Set.empty) + else if pageName(nameIndex).toLower == search(searchIndex).toLower then + val score = (if consecutiveMatches > 0 then 1 else 0) + positionScores(nameIndex) + prematchPageAcc(nameIndex + 1, searchIndex + 1, acc + nameIndex, scoreAcc + score, consecutiveMatches + 1) + else + prematchPageAcc(nameIndex + 1, searchIndex, acc, scoreAcc, 0) + + val result = prematchPageAcc(0, 0, Set.empty, 0, 0) + result.copy(score = result.score + kindScoreBonus(page.kind)) + + private def matchPage(prematched: MatchResult, nameSearch: String): MatchResult = + val searchTokens: List[List[Char]] = StringUtils.createCamelCaseTokens(nameSearch).map(_.toList) //todo extract + val pageTokens: List[List[Char]] = prematched.pageEntry.tokens.map(_.toList) + val pageName = prematched.pageEntry.shortName + val searchTokensLifted = searchTokens.lift + val pageTokensLifted = pageTokens.lift + + @tailrec + def matchTokens(searchTokenIndex: Int, pageTokenIndex: Int, acc: Set[(Int, Int)]): Set[(Int, Int)] = + (searchTokensLifted(searchTokenIndex).map(_.toList), pageTokensLifted(pageTokenIndex).map(_.toList)) match + case (None, _) | (_, None) => acc + case (Some(searchHead :: _), Some(pageHead :: _)) => + if searchHead == pageHead then + matchTokens(searchTokenIndex + 1, pageTokenIndex + 1, acc + ((searchTokenIndex, pageTokenIndex))) + else + matchTokens(searchTokenIndex, pageTokenIndex + 1, acc) + // empty tokens edge cases + case (Some(_), Some(_ :: _)) => matchTokens(searchTokenIndex + 1, pageTokenIndex, acc) + case (Some(_ :: _), Some(_)) => matchTokens(searchTokenIndex, pageTokenIndex + 1, acc) + case _ => matchTokens(searchTokenIndex + 1, pageTokenIndex + 1, acc) + end matchTokens + + val matchedTokens = 
matchTokens(0, 0, Set.empty) + val searchTokenPositions = searchTokens.map(_.length).scanLeft(0)(_ + _) + val pageTokensPositions = pageTokens.map(_.length).scanLeft(0)(_ + _) + + @tailrec + def findHighScoreMatch( + searchTokenIndex: Int, + searchPosition: Int, + pageTokenIndex: Int, + pagePosition: Int, + positionAcc: Set[Int], + scoreAcc: Int, + consecutiveMatches: Int + ): Option[MatchResult] = + if searchPosition >= nameSearch.length then + Some(MatchResult(scoreAcc, prematched.pageEntry, positionAcc)) + else if pagePosition >= pageName.length then + None + else + val currentSearchTokenStart = searchTokenPositions(searchTokenIndex) + val matchingPageToken = matchedTokens.find(_._1 == searchTokenIndex).map(_._2) + val searchChar = nameSearch.charAt(searchPosition).toLower + val pageChar = pageName.charAt(pagePosition).toLower + + def recalculateTokenIndex(tokenPositions: Seq[Int], previousIndex: Int, position: Int): Int = + if tokenPositions.length <= previousIndex + 1 || tokenPositions(previousIndex + 1) > position then + previousIndex + else + previousIndex + 1 + + def getMatchScore(matchedPagePosition: Int, matchedPageTokenStart: Int): Int = + val consecutiveMatchesScore = if consecutiveMatches > 0 then 1 else 0 + val matchPositionScore = positionScores(matchedPagePosition - matchedPageTokenStart) + val firstTokenScore = if matchPositionScore > 0 && matchedPageTokenStart == 0 then 3 else 0 + consecutiveMatchesScore + matchPositionScore + firstTokenScore + + matchingPageToken match + case Some(matchingToken) if searchPosition == currentSearchTokenStart => + val matchedTokenPosition = pageTokensPositions(matchingToken) + findHighScoreMatch( + recalculateTokenIndex(searchTokenPositions, searchTokenIndex, searchPosition + 1), + searchPosition + 1, + recalculateTokenIndex(pageTokensPositions, pageTokenIndex, matchedTokenPosition + 1), + matchedTokenPosition + 1, + positionAcc + matchedTokenPosition, + scoreAcc + getMatchScore(matchedTokenPosition, 
matchedTokenPosition), + consecutiveMatches + 1 + ) + case _ if searchChar == pageChar => + val matchedTokenPosition = matchingPageToken.map(pageTokensPositions).getOrElse(0) + findHighScoreMatch( + recalculateTokenIndex(searchTokenPositions, searchTokenIndex, searchPosition + 1), + searchPosition + 1, + recalculateTokenIndex(pageTokensPositions, pageTokenIndex, pagePosition + 1), + pagePosition + 1, + positionAcc + pagePosition, + scoreAcc + getMatchScore(pagePosition, matchedTokenPosition), + consecutiveMatches + 1 + ) + case _ => + findHighScoreMatch( + searchTokenIndex, + searchPosition, + recalculateTokenIndex(pageTokensPositions, pageTokenIndex, pagePosition + 1), + pagePosition + 1, + positionAcc, + scoreAcc, + 0 + ) + + val highScoreMatch = findHighScoreMatch(0, 0, 0, 0, Set.empty, 0, 0) + highScoreMatch.getOrElse(prematched) diff --git a/scaladoc-js/main/src/searchbar/engine/QueryParser.scala b/scaladoc-js/main/src/searchbar/engine/QueryParser.scala index c03becb0026e..59d198e4804d 100644 --- a/scaladoc-js/main/src/searchbar/engine/QueryParser.scala +++ b/scaladoc-js/main/src/searchbar/engine/QueryParser.scala @@ -17,18 +17,19 @@ class QueryParser: "type" ) val kindRegex = ("(?i)" + kinds.mkString("(","|",")") + " (.*)").r - val restRegex = raw"(.*)".r + val nameRegex = raw"(.*)".r val escapedRegex = raw"`(.*)`".r val signatureRegex = raw"(.*=>.*)".r - def parseMatchers(query: String): List[Matchers] = query match { - case escapedRegex(rest) => List(ByName(rest)) - case kindRegex(kind, rest) => List(ByKind(kind)) ++ parseMatchers(rest) - case restRegex(name) => List(ByName(name)) - case _ => List() + def parseMatchers(query: String): EngineQuery = query match { + case escapedRegex(rest) => NameAndKindQuery(Some(rest), None) + case kindRegex(kind, rest) => NameAndKindQuery(Some(rest), Some(kind)) + case nameRegex(name) => NameAndKindQuery(Some(name), None) + case _ => NameAndKindQuery(None, None) } def parse(query: String): EngineQuery = query match { - 
case signatureRegex(signature) => BySignature(signature) - case other => EngineMatchersQuery(parseMatchers(other)) - } \ No newline at end of file + case signatureRegex(signature) => SignatureQuery(signature) + case other => parseMatchers(other) + } + \ No newline at end of file diff --git a/scaladoc-js/main/src/searchbar/engine/SearchbarEngine.scala b/scaladoc-js/main/src/searchbar/engine/SearchbarEngine.scala deleted file mode 100644 index 95b3eab48811..000000000000 --- a/scaladoc-js/main/src/searchbar/engine/SearchbarEngine.scala +++ /dev/null @@ -1,20 +0,0 @@ -package dotty.tools.scaladoc - -import math.Ordering.Implicits.seqOrdering -import org.scalajs.dom.Node - -class SearchbarEngine(pages: List[PageEntry]): - def query(query: List[Matchers]): List[(PageEntry, Set[Int])] = - pages - .map( page => - page -> query.map(matcher => matcher(page)) - ) - .filterNot { - case (page, matchResults) => matchResults.map(_.priority).exists(_ < 0) - } - .sortBy { - case (page, matchResults) => matchResults.map(_.priority) - } - .map { - case (page, matchResults) => page -> matchResults.map(_.matchedIndexes).reduceLeft(_ ++ _) - } diff --git a/scaladoc-js/main/src/searchbar/recent/recentQueries.scala b/scaladoc-js/main/src/searchbar/recent/recentQueries.scala index 04e966bb4774..c3bb96fa195d 100644 --- a/scaladoc-js/main/src/searchbar/recent/recentQueries.scala +++ b/scaladoc-js/main/src/searchbar/recent/recentQueries.scala @@ -5,10 +5,11 @@ import scala.scalajs.js class RecentQuery(val query: String, val timestamp: Double) extends js.Object object RecentQueryStorage extends SafeLocalStorage[js.Array[RecentQuery]]("__RECENT__QUERIES__", js.Array()) { - val maxEntries = 5 + val maxEntries = 3 def addEntry(rq: RecentQuery): Unit = { - val newData = getData :+ rq - setData(newData.sortBy(_.timestamp).reverse.distinctBy(_.query).take(maxEntries)) + if !getData.exists(_.query.contains(rq.query)) then + val newData = getData.filter(q => !rq.query.contains(q.query)) :+ rq + 
setData(newData.sortBy(_.timestamp).reverse.distinctBy(_.query).take(maxEntries)) } } diff --git a/scaladoc-js/main/src/social-links/SocialLinks.scala b/scaladoc-js/main/src/social-links/SocialLinks.scala deleted file mode 100644 index 3e33e4065955..000000000000 --- a/scaladoc-js/main/src/social-links/SocialLinks.scala +++ /dev/null @@ -1,14 +0,0 @@ -package dotty.tools.scaladoc - -import org.scalajs.dom._ -import org.scalajs.dom.ext._ - -import utils.HTML._ - -class SocialLinks: - def addIcon(elem: html.Element) = - elem.appendChild( - img(src := s"${Globals.pathToRoot}images/${elem.getAttribute("data-icon-path")}")() - ) - - document.querySelectorAll(".social-icon").collect { case e: html.Element => e }.foreach(addIcon) diff --git a/scaladoc-js/main/src/ux/Ux.scala b/scaladoc-js/main/src/ux/Ux.scala deleted file mode 100644 index fc8867508777..000000000000 --- a/scaladoc-js/main/src/ux/Ux.scala +++ /dev/null @@ -1,51 +0,0 @@ -package dotty.tools.scaladoc - -import scala.scalajs.js -import org.scalajs.dom._ -import org.scalajs.dom.ext._ - -import scala.util.matching.Regex._ -import scala.util.matching._ - -class Ux(): - def sideMenuItemsWordBreaking(): Unit = - val matchingRegex = raw"([.A-Z])".r - - def modifySpan = (span: html.Span) => { - val textNodes = span.childNodes.filter(_.nodeType == 3) - val texts = textNodes.map(_.nodeValue).mkString - span.innerHTML = matchingRegex.replaceAllIn(texts, m => s"${m.group(0)}") - } - - val nodes = document.querySelectorAll("#sideMenu2 a span").collect { - case e: html.Span => e - }.foreach(modifySpan) - - def loadConciseView(): Unit = - val localStorageValue = SafeLocalStorage("__CONCISE_VIEW__", js.Array(false)) // One-element js.Array is a hack for having type extending js.Any - val conciseViewSwitchInput = Option(document.getElementById("concise-view-switch")) - .map(_.querySelector("input").asInstanceOf[html.Input]) - - def modifyContent(concise: Boolean) = - if (concise) { - 
document.querySelector(".membersList").classList.add("concise") - } else { - document.querySelector(".membersList").classList.remove("concise") - } - - conciseViewSwitchInput.foreach { input => - val storedValue = localStorageValue.getData.head - modifyContent(storedValue) - input.checked = storedValue - input.addEventListener("change", e => { - val target = e.target.asInstanceOf[html.Input] - localStorageValue.setData(js.Array(target.checked)) - modifyContent(target.checked) - }) - } - - sideMenuItemsWordBreaking() - loadConciseView() - - - diff --git a/scaladoc-js/main/test/dotty/tools/scaladoc/MatchersTest.scala b/scaladoc-js/main/test/dotty/tools/scaladoc/MatchersTest.scala deleted file mode 100644 index 20952fedf675..000000000000 --- a/scaladoc-js/main/test/dotty/tools/scaladoc/MatchersTest.scala +++ /dev/null @@ -1,82 +0,0 @@ -package dotty.tools.scaladoc - -import org.junit.{Test, Assert} -import org.junit.Assert._ - -class MatchersTest: - private val kinds = Seq( - "class", - "trait", - "enum", - "object", - "def", - "val", - "var", - "package", - "given", - "type" - ) - private val names = Seq( - "NullPointerException", - "NPException", - "Seq", - "SeqOps", - "writeBytes", - "lessOrEqual", - "testFuzzySearch1", - "testF", - "testFS" - ) - private val pages = for { - kind <- kinds - name <- names - } yield PageEntry( - s"$kind $name", - "", - "", - "", - false, - s"$name", - kind, - StringUtils.createCamelCaseTokens(name) - ) - - private def result(matchers: List[Matchers]) = { - pages.map { p => - p -> matchers.map(_(p)) - }.filterNot { (page, results) => - results.exists(r => r.priority == -1) - }.map((page, results) => page) - } - - @Test - def testByKind = kinds.foreach { kind => - val res = result(List(ByKind(kind))) - val expected = pages.filter(p => p.fullName.startsWith(kind)).toSet - assertEquals( - s"Matchers test error: for kind: $kind should match $expected but matched $res", - expected, - res.toSet - ) - } - - private def byNameTestCase(query: 
String, expectedMatch: String*) = expectedMatch.foreach { expMatch => - assertTrue( - s"Matchers test error: for query: $query expected $expMatch", - result(List(ByName(query))).exists(p => p.shortName.contains(expMatch)) - ) - } - - @Test - def testByName = { - names.foreach(n => byNameTestCase(n, n)) - byNameTestCase("NPE", "NPException", "NullPointerException") - byNameTestCase("NullPE", "NullPointerException") - byNameTestCase("tFuzzS", "testFuzzySearch1") - byNameTestCase("SO", "SeqOps") - byNameTestCase("teFS", "testFS") - byNameTestCase("writeBy", "writeBytes") - byNameTestCase("seQ", "Seq") - byNameTestCase("lOrEqu", "lessOrEqual") - byNameTestCase("teF", "testFS", "testF") - } diff --git a/scaladoc-js/main/test/dotty/tools/scaladoc/PageSearchEngineTest.scala b/scaladoc-js/main/test/dotty/tools/scaladoc/PageSearchEngineTest.scala new file mode 100644 index 000000000000..7b2fa3ab9571 --- /dev/null +++ b/scaladoc-js/main/test/dotty/tools/scaladoc/PageSearchEngineTest.scala @@ -0,0 +1,153 @@ +package dotty.tools.scaladoc + +import org.junit.{Assert, Test} +import org.junit.Assert.* + +import scala.concurrent.Await +import scala.concurrent.duration.* + +class PageSearchEngineTest { + + def page(kind: String, name: String) = PageEntry( + s"$kind $name", + "", + "", + "", + "", + false, + s"$name", + kind, + StringUtils.createCamelCaseTokens(name) + ) + + case class ExpectedMatch(kind: String, name: String, indices: Set[Int]) + def assertMatches(query: NameAndKindQuery, pages: List[PageEntry], matches: List[String]): Unit = + val expectedMatches = matches.map { mat => + val splitResult = mat.split(" ") + val kind = splitResult(0) + val name = splitResult.tail.mkString(" ") + val (realName, indices, _) = name.foldLeft[(String, Set[Int], Boolean)]("", Set.empty, false) { + case ((name, matchIndices, inParam), c) => + val index = name.length + if c == '(' then + if inParam then + throw new IllegalArgumentException("Nested params not allowed") + else + (name, 
matchIndices, true) + else if c == ')' then + (name, matchIndices, false) + else if inParam then + (name + c, matchIndices + index, true) + else + (name + c, matchIndices, false) + } + ExpectedMatch(kind, realName, indices) + } + val engine = new PageSearchEngine(pages) + val resultingMatches = engine.query(query) + .map(mat => ExpectedMatch(mat.pageEntry.kind, mat.pageEntry.shortName, mat.indices)) + + val matchesNames = resultingMatches.map(s => (s.name, s.kind)) + val expectedNames = expectedMatches.map(s => (s.name, s.kind)) + val missingNames = expectedNames.diff(matchesNames) + val extraNames = matchesNames.diff(expectedNames) + val itemsNotMatchingNames = (resultingMatches.diff(expectedMatches) ++ expectedMatches.diff(resultingMatches)) + .filter(m => !(missingNames ++ extraNames).contains((m.name, m.kind))).map(s => (s.name, s.kind)) + val itemsNotMatching = itemsNotMatchingNames.map { + case pair @ (itemName, itemKind) => + val expectedItem: ExpectedMatch = expectedMatches.find(s => (s.name, s.kind) == pair).get + val matchedItem: ExpectedMatch = resultingMatches.find(s => (s.name, s.kind) == pair).get + s"${itemKind} ${itemName}: ${expectedItem.indices.toList.sorted.mkString("[", ", ", "]")} vs ${matchedItem.indices.toList.sorted.mkString("[", ", ", "]")}" + }.mkString("\n") + + assertTrue( + s"\nFound: ${matchesNames.mkString("[", ", ", "]")} \n" + + s"Expected: ${expectedNames.mkString("[", ", ", "]")} \n" + + s"Extra elements: ${extraNames.mkString(", ")} \n" + + s"Missing elements: ${missingNames.mkString(", ")}\n" + + s"Not matching items: \n${itemsNotMatching}\n", + resultingMatches == expectedMatches + ) + + + private val correctFilterPages = List( + page("class", "ListBuffer"), + page("object", "ListBuffer"), + page("class", "ListBuff"), + page("class", "LisBfufer"), + page("class", "ListBufferTwo"), + page("class", "ListerBuffer") + ) + @Test + def correctFilter(): Unit = { + assertMatches( + NameAndKindQuery(Some("ListBuffer"), Some("class")), + 
correctFilterPages, + List( + "class (ListBuffer)", + "class (List)er(Buffer)", + "class (ListBuffer)Two", + ) + ) + } + + private val abbrevFilterPages = List( + page("class", "NullPointerException"), + page("class", "NullBointerException"), + page("class", "NullBpointerException"), + page("class", "nullpointerexception"), + ) + @Test + def abbrevFilter(): Unit = { + assertMatches( + NameAndKindQuery(Some("NPE"), Some("class")), + abbrevFilterPages, + List( + "class (N)ull(P)ointer(E)xception", + "class (N)ullB(p)oint(e)rException", + "class (n)ull(p)oint(e)rexception", + ) + ) + } + + private val correctOrderPages = List( + page("class", "ListBuffer"), + page("object", "ListBuffer"), + page("static", "Using List Buffers"), + page("class", "ListUnbucle"), + page("object", "Malibu") + ) + @Test + def correctOrder(): Unit = { + assertMatches( + NameAndKindQuery(Some("LiBu"), None), + correctOrderPages, + List( + "class (Li)st(Bu)ffer", + "object (Li)st(Bu)ffer", + "static Using (Li)st (Bu)ffers", + "class (Li)stUn(bu)cle", + "object Ma(libu)" + ) + ) + } + + private val correctSelectionPages = List( + page("class", "FoobarBar"), + page("class", "FooBbar"), + page("class", "FobaroBar") + ) + + @Test + def correctSelection(): Unit = { + assertMatches( + NameAndKindQuery(Some("FooBar"), None), + correctSelectionPages, + List( + "class (Foo)bar(Bar)", + "class (FooB)b(ar)", + "class (Fo)bar(oBar)" + ) + ) + } +} diff --git a/scaladoc-js/main/test/dotty/tools/scaladoc/QueryParserTest.scala b/scaladoc-js/main/test/dotty/tools/scaladoc/QueryParserTest.scala index 8a79a28abdd1..414fa198a86e 100644 --- a/scaladoc-js/main/test/dotty/tools/scaladoc/QueryParserTest.scala +++ b/scaladoc-js/main/test/dotty/tools/scaladoc/QueryParserTest.scala @@ -28,8 +28,8 @@ class QueryParserTest: @Test def queryParserTests() = { - kinds.foreach(k => testCase(s"$k ", EngineMatchersQuery(List(ByKind(k), ByName(""))))) - testCase("trait", EngineMatchersQuery(List(ByName("trait")))) - 
testCase("trait A", EngineMatchersQuery(List(ByKind("trait"), ByName("A")))) - testCase("`trait A`", EngineMatchersQuery(List(ByName("trait A")))) + kinds.foreach(k => testCase(s"$k ", NameAndKindQuery(Some(""), Some(k)))) + testCase("trait", NameAndKindQuery(Some("trait"), None)) + testCase("trait A", NameAndKindQuery(Some("A"), Some("trait"))) + testCase("`trait A`", NameAndKindQuery(Some("trait A"), None)) } diff --git a/scaladoc-testcases/docs/_layouts/static-site-main.html b/scaladoc-testcases/docs/_layouts/static-site-main.html index e9b6d094f2a8..43981dcc01e1 100644 --- a/scaladoc-testcases/docs/_layouts/static-site-main.html +++ b/scaladoc-testcases/docs/_layouts/static-site-main.html @@ -1,9 +1,6 @@ --- -extraJS: - - js/contributors.js extraCSS: - css/bootstrap.min.css - - css/content-contributors.css ---
diff --git a/scaladoc-testcases/src/tests/classSignatureTestSource.scala b/scaladoc-testcases/src/tests/classSignatureTestSource.scala index 27aef1d2f461..4d4ebf9578ec 100644 --- a/scaladoc-testcases/src/tests/classSignatureTestSource.scala +++ b/scaladoc-testcases/src/tests/classSignatureTestSource.scala @@ -6,7 +6,7 @@ import scala.annotation.* import scala.math.{Pi, max} import example.level2.Documentation -abstract class Documentation[T, A <: Int, B >: String, -X, +Y](c1: String, val c2: List[T]) extends Seq[T] with Product with Serializable +abstract class Documentation[T, A <: Int, B >: String, -X, +Y](c1: String, val c2: List[T]) extends Seq[T], Product, Serializable { def this(ac: String) = this(ac, Nil) diff --git a/scaladoc-testcases/src/tests/objectSignatures.scala b/scaladoc-testcases/src/tests/objectSignatures.scala index 54f65ef3be50..627604f4f306 100644 --- a/scaladoc-testcases/src/tests/objectSignatures.scala +++ b/scaladoc-testcases/src/tests/objectSignatures.scala @@ -13,7 +13,7 @@ trait C object Base -object A2 extends A[String] with C +object A2 extends A[String], C object < diff --git a/scaladoc-testcases/src/tests/traitSignatures.scala b/scaladoc-testcases/src/tests/traitSignatures.scala index 1ac63e3b1787..8979c308a61c 100644 --- a/scaladoc-testcases/src/tests/traitSignatures.scala +++ b/scaladoc-testcases/src/tests/traitSignatures.scala @@ -7,4 +7,4 @@ trait B extends A trait C(a: Int) -trait D(b: Double) extends C with A \ No newline at end of file +trait D(b: Double) extends C, A \ No newline at end of file diff --git a/scaladoc/noResultStructure.html b/scaladoc/noResultStructure.html new file mode 100644 index 000000000000..93ef4bbf1396 --- /dev/null +++ b/scaladoc/noResultStructure.html @@ -0,0 +1,6 @@ +
+ Sick face +

No results match your filter criteria

+

Try adjusting or clearing your filters
to display better result

+ +
\ No newline at end of file diff --git a/scaladoc/resources/dotty_res/images/banner-icons/error.svg b/scaladoc/resources/dotty_res/images/banner-icons/error.svg new file mode 100644 index 000000000000..77ca3d7eb210 --- /dev/null +++ b/scaladoc/resources/dotty_res/images/banner-icons/error.svg @@ -0,0 +1,3 @@ + + + diff --git a/scaladoc/resources/dotty_res/images/banner-icons/info.svg b/scaladoc/resources/dotty_res/images/banner-icons/info.svg new file mode 100644 index 000000000000..a5d255207d94 --- /dev/null +++ b/scaladoc/resources/dotty_res/images/banner-icons/info.svg @@ -0,0 +1,4 @@ + + + + diff --git a/scaladoc/resources/dotty_res/images/banner-icons/neutral.svg b/scaladoc/resources/dotty_res/images/banner-icons/neutral.svg new file mode 100644 index 000000000000..ffac1dcc37ba --- /dev/null +++ b/scaladoc/resources/dotty_res/images/banner-icons/neutral.svg @@ -0,0 +1,4 @@ + + + + diff --git a/scaladoc/resources/dotty_res/images/banner-icons/success.svg b/scaladoc/resources/dotty_res/images/banner-icons/success.svg new file mode 100644 index 000000000000..7498b5a7ea28 --- /dev/null +++ b/scaladoc/resources/dotty_res/images/banner-icons/success.svg @@ -0,0 +1,3 @@ + + + diff --git a/scaladoc/resources/dotty_res/images/banner-icons/warning.svg b/scaladoc/resources/dotty_res/images/banner-icons/warning.svg new file mode 100644 index 000000000000..1c89ec95fdc9 --- /dev/null +++ b/scaladoc/resources/dotty_res/images/banner-icons/warning.svg @@ -0,0 +1,3 @@ + + + diff --git a/scaladoc/resources/dotty_res/images/class-dark-big.svg b/scaladoc/resources/dotty_res/images/class-dark-big.svg index dea4e4dbf740..4f6afc4a843f 100644 --- a/scaladoc/resources/dotty_res/images/class-dark-big.svg +++ b/scaladoc/resources/dotty_res/images/class-dark-big.svg @@ -1,4 +1,11 @@ - - + + + + + + + + + diff --git a/scaladoc/resources/dotty_res/images/def-dark-big.svg b/scaladoc/resources/dotty_res/images/def-dark-big.svg index 111dc51f2035..f51ca993d300 100644 --- 
a/scaladoc/resources/dotty_res/images/def-dark-big.svg +++ b/scaladoc/resources/dotty_res/images/def-dark-big.svg @@ -1,4 +1,11 @@ - - + + + + + + + + + diff --git a/scaladoc/resources/dotty_res/images/def-dark.svg b/scaladoc/resources/dotty_res/images/def-dark.svg new file mode 100644 index 000000000000..c7e2077c6fff --- /dev/null +++ b/scaladoc/resources/dotty_res/images/def-dark.svg @@ -0,0 +1,4 @@ + + + + diff --git a/scaladoc/resources/dotty_res/images/def.svg b/scaladoc/resources/dotty_res/images/def.svg new file mode 100644 index 000000000000..7865019d49a0 --- /dev/null +++ b/scaladoc/resources/dotty_res/images/def.svg @@ -0,0 +1,4 @@ + + + + diff --git a/scaladoc/resources/dotty_res/images/enum-dark-big.svg b/scaladoc/resources/dotty_res/images/enum-dark-big.svg index 78f1dfc56a1d..1c00d592a319 100644 --- a/scaladoc/resources/dotty_res/images/enum-dark-big.svg +++ b/scaladoc/resources/dotty_res/images/enum-dark-big.svg @@ -1,4 +1,11 @@ - - + + + + + + + + + diff --git a/scaladoc/resources/dotty_res/images/given-dark-big.svg b/scaladoc/resources/dotty_res/images/given-dark-big.svg index 0f1c61a32d69..0b021140f189 100644 --- a/scaladoc/resources/dotty_res/images/given-dark-big.svg +++ b/scaladoc/resources/dotty_res/images/given-dark-big.svg @@ -1,4 +1,11 @@ - - + + + + + + + + + diff --git a/scaladoc/resources/dotty_res/images/method-dark-big.svg b/scaladoc/resources/dotty_res/images/method-dark-big.svg index 6e0989826ed3..da3a1e3aa54b 100644 --- a/scaladoc/resources/dotty_res/images/method-dark-big.svg +++ b/scaladoc/resources/dotty_res/images/method-dark-big.svg @@ -1,4 +1,11 @@ - - + + + + + + + + + diff --git a/scaladoc/resources/dotty_res/images/no-results-icon.svg b/scaladoc/resources/dotty_res/images/no-results-icon.svg new file mode 100644 index 000000000000..1052724a9d3b --- /dev/null +++ b/scaladoc/resources/dotty_res/images/no-results-icon.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/scaladoc/resources/dotty_res/images/object-dark-big.svg 
b/scaladoc/resources/dotty_res/images/object-dark-big.svg index 4c183f084c7a..b128cef88332 100644 --- a/scaladoc/resources/dotty_res/images/object-dark-big.svg +++ b/scaladoc/resources/dotty_res/images/object-dark-big.svg @@ -1,4 +1,11 @@ - - + + + + + + + + + diff --git a/scaladoc/resources/dotty_res/images/package-dark-big.svg b/scaladoc/resources/dotty_res/images/package-dark-big.svg index 9d80391b6865..0be13a1c76bd 100644 --- a/scaladoc/resources/dotty_res/images/package-dark-big.svg +++ b/scaladoc/resources/dotty_res/images/package-dark-big.svg @@ -1,4 +1,11 @@ - - + + + + + + + + + diff --git a/scaladoc/resources/dotty_res/images/static-dark-big.svg b/scaladoc/resources/dotty_res/images/static-dark-big.svg index b559eb9f451b..ee391358f0e8 100644 --- a/scaladoc/resources/dotty_res/images/static-dark-big.svg +++ b/scaladoc/resources/dotty_res/images/static-dark-big.svg @@ -1,4 +1,11 @@ - - + + + + + + + + + diff --git a/scaladoc/resources/dotty_res/images/trait-dark-big.svg b/scaladoc/resources/dotty_res/images/trait-dark-big.svg index 2cbee3f4e373..9bc80ea659fa 100644 --- a/scaladoc/resources/dotty_res/images/trait-dark-big.svg +++ b/scaladoc/resources/dotty_res/images/trait-dark-big.svg @@ -1,4 +1,11 @@ - - + + + + + + + + + diff --git a/scaladoc/resources/dotty_res/images/type-dark-big.svg b/scaladoc/resources/dotty_res/images/type-dark-big.svg index 2aa2cecdf250..81c6c463b44f 100644 --- a/scaladoc/resources/dotty_res/images/type-dark-big.svg +++ b/scaladoc/resources/dotty_res/images/type-dark-big.svg @@ -1,4 +1,11 @@ - - + + + + + + + + + diff --git a/scaladoc/resources/dotty_res/images/val-dark-big.svg b/scaladoc/resources/dotty_res/images/val-dark-big.svg index 20cf98172163..241f339bba43 100644 --- a/scaladoc/resources/dotty_res/images/val-dark-big.svg +++ b/scaladoc/resources/dotty_res/images/val-dark-big.svg @@ -1,4 +1,11 @@ - - + + + + + + + + + diff --git a/scaladoc/resources/dotty_res/scripts/common/utils.js 
b/scaladoc/resources/dotty_res/scripts/common/utils.js index c600e1f85626..3f3efb7c89d4 100644 --- a/scaladoc/resources/dotty_res/scripts/common/utils.js +++ b/scaladoc/resources/dotty_res/scripts/common/utils.js @@ -9,8 +9,6 @@ const withEvent = (element, listener, callback) => { return () => element && element.removeEventListener(listener, callback); }; -const init = (cb) => window.addEventListener("DOMContentLoaded", cb); - const attachDOM = (element, html) => { if (element) { element.innerHTML = htmlToString(html); diff --git a/scaladoc/resources/dotty_res/scripts/components/DocumentableList.js b/scaladoc/resources/dotty_res/scripts/components/DocumentableList.js index 25c95a81cfda..120f64d60092 100644 --- a/scaladoc/resources/dotty_res/scripts/components/DocumentableList.js +++ b/scaladoc/resources/dotty_res/scripts/components/DocumentableList.js @@ -5,67 +5,85 @@ */ class DocumentableList extends Component { - constructor(props) { - super(props); - - this.refs = { - tabs: findRefs(".names .tab[data-togglable]", findRef(".membersList")).concat( - findRefs(".contents h2[data-togglable]", findRef(".membersList")) - ), - sections: findRefs(".contents .tab[data-togglable]", findRef(".membersList")), - }; - - this.state = { - list: new List(this.refs.tabs, this.refs.sections), - }; - - this.render(this.props); - } - - toggleElementDatasetVisibility(isVisible, ref) { - ref.dataset.visibility = isVisible - } - - toggleDisplayStyles(condition, ref) { - ref.style.display = condition ? 
null : 'none' - } - - render({ filter }) { - this.state.list.sectionsRefs.map(sectionRef => { - const isTabVisible = this.state.list - .getSectionListRefs(sectionRef) - .filter((listRef) => { - const isListVisible = this.state.list - .getSectionListElementsRefs(listRef) - .map(elementRef => this.state.list.toListElement(elementRef)) - .filter(elementData => { - const isElementVisible = this.state.list.isElementVisible(elementData, filter); - - this.toggleDisplayStyles(isElementVisible, elementData.ref); - this.toggleElementDatasetVisibility(isElementVisible, elementData.ref); - - return isElementVisible; - }).length; - - findRefs("span.groupHeader", listRef).forEach(h => { - const headerSiblings = this.state.list.getSectionListElementsRefs(h.parentNode).map(ref => this.state.list.toListElement(ref)) - const isHeaderVisible = headerSiblings.filter(s => this.state.list.isElementVisible(s, filter)) != 0 - - this.toggleDisplayStyles(isHeaderVisible, h) - }) - - this.toggleDisplayStyles(isListVisible, listRef); - - return isListVisible; - }).length; - - const outerThis = this - this.state.list.getTabRefFromSectionRef(sectionRef).forEach(function(tabRef){ - outerThis.toggleDisplayStyles(isTabVisible, tabRef); - }) - }); - } -} + constructor(props) { + super(props); + + this.refs = { + tabs: findRefs( + ".names .tab[data-togglable]", + findRef(".membersList"), + ).concat( + findRefs(".contents h2[data-togglable]", findRef(".membersList")), + ), + sections: findRefs( + ".contents .tab[data-togglable]", + findRef(".membersList"), + ), + }; + + this.state = { + list: new List(this.refs.tabs, this.refs.sections), + }; + + this.render(this.props); + } + + toggleElementDatasetVisibility(isVisible, ref) { + ref.dataset.visibility = isVisible; + } + + toggleDisplayStyles(condition, ref) { + ref.style.display = condition ? 
null : "none"; + } + + render({ filter }) { + this.state.list.sectionsRefs.map((sectionRef) => { + const isTabVisible = this.state.list + .getSectionListRefs(sectionRef) + .filter((listRef) => { + const isListVisible = this.state.list + .getSectionListElementsRefs(listRef) + .map((elementRef) => this.state.list.toListElement(elementRef)) + .filter((elementData) => { + const isElementVisible = this.state.list.isElementVisible( + elementData, + filter, + ); + + this.toggleDisplayStyles(isElementVisible, elementData.ref); + this.toggleElementDatasetVisibility( + isElementVisible, + elementData.ref, + ); + + return isElementVisible; + }).length; + + findRefs("span.groupHeader", listRef).forEach((h) => { + const headerSiblings = this.state.list + .getSectionListElementsRefs(h.parentNode) + .map((ref) => this.state.list.toListElement(ref)); + const isHeaderVisible = + headerSiblings.filter((s) => + this.state.list.isElementVisible(s, filter), + ) != 0; + + this.toggleDisplayStyles(isHeaderVisible, h); + }); + + this.toggleDisplayStyles(isListVisible, listRef); + return isListVisible; + }).length; + + const outerThis = this; + this.state.list + .getTabRefFromSectionRef(sectionRef) + .forEach(function (tabRef) { + outerThis.toggleDisplayStyles(isTabVisible, tabRef); + }); + }); + } + } class List { /** diff --git a/scaladoc/resources/dotty_res/scripts/components/Filter.js b/scaladoc/resources/dotty_res/scripts/components/Filter.js index 931a8e17527a..73b8c647f7fb 100644 --- a/scaladoc/resources/dotty_res/scripts/components/Filter.js +++ b/scaladoc/resources/dotty_res/scripts/components/Filter.js @@ -5,216 +5,236 @@ */ class Filter { - /** - * @param value { string } - * @param filters { Filters } - * @param elementsRefs { Element[] } - */ - constructor(value, filters, elementsRefs, init = false) { - this._init = init; - this._value = value; - this._elementsRefs = elementsRefs; - - this._filters = this._init ? 
this._withNewFilters() : filters; - } - - static get defaultFilters() { - return scaladocData.filterDefaults - } - - get value() { - return this._value; - } - - get filters() { - return this._filters; - } - - get elementsRefs() { - return this._elementsRefs; - } - - /** - * @param key { string } - * @param value { string } - */ - onFilterToggle(key, value) { - return new Filter( - this.value, - this._withToggledFilter(key, value), - this.elementsRefs - ); - } - - /** - * @param key { string } - * @param isActive { boolean } - */ - onGroupSelectionChange(key, isActive) { - return new Filter( - this.value, - this._withNewSelectionOfGroup(key, isActive), - this.elementsRefs - ); - } - - /** - * @param value { string } - */ - onInputValueChange(value) { - return new Filter( - value, - this._generateFiltersOnTyping(value), - this.elementsRefs - ); - } - - /** - * @private - * @param value { string } - * @returns { Filters } - */ - _generateFiltersOnTyping(value) { - const lcValue = value.toLowerCase() - - const elementsDatasets = this.elementsRefs - .filter(element => { - const lcName = getElementTextContent(getElementNameRef(element)).toLowerCase(); - const lcDescription = getElementTextContent(getElementDescription(element)).toLowerCase(); - - return lcName.includes(lcValue) || lcDescription.includes(lcValue); - }) - .map(element => this._getDatasetWithKeywordData(element.dataset)) - - const newFilters = elementsDatasets.reduce((filtersObject, datasets) => { - datasets.forEach(([key, value]) => { - this._splitByComma(value).forEach((val) => { - filtersObject[key] = { ...filtersObject[key], [val]: { ...filtersObject[key][val], visible: true} }; - }); - }); - - return filtersObject; - }, this._allFiltersAreHidden()); - - return this._attachDefaultFilters(newFilters) - - } - - /** - * @private - * @returns { Filters } - */ - _allFiltersAreHidden() { - return Object.entries(this.filters).reduce( - (filters, [key, filterGroup]) => { - filters[key] = 
Object.keys(filterGroup).reduce( - (group, key) => ( - (group[key] = { ...filterGroup[key], visible: false }), group - ), - {} - ); - return filters; - }, - {} - ); - } - - /** - * @private - * @param key { string } - * @param isActive { boolean } - * @returns { Filters } - */ - _withNewSelectionOfGroup(key, isActive) { - return { - ...this.filters, - [key]: Object.keys(this.filters[key]).reduce( - (obj, filterKey) => ( - (obj[filterKey] = { - ...this.filters[key][filterKey], - ...(this.filters[key][filterKey].visible && { selected: isActive }), - }), - obj - ), - {} - ), - }; - } - - /** - * @private - * @returns { Filters } - */ - _withNewFilters() { - const newFilters = this._elementsRefs.reduce((filtersObject, elementRef) => { - this._getDatasetWithKeywordData(elementRef.dataset).forEach(([key, value]) => - this._splitByComma(value).forEach((val) => { - filtersObject[key] = filtersObject[key] - ? { ...filtersObject[key], [val]: filtersObject[key][val] ?? new FilterItem() } - : { [val]: new FilterItem() } - }) - ); - return filtersObject; - }, {}); - - return this._attachDefaultFilters(newFilters) - } - - /** - * @private - * @param {Filters} newFilters - * @returns {Filters} - */ - _attachDefaultFilters(newFilters) { - return Object.entries(Filter.defaultFilters).reduce((acc, [key, defaultFilter]) => { - const filterKey = getFilterKey(key) - const shouldAddDefaultKeywordFilter = this._elementsRefs.some(ref => !!ref.dataset[filterKey]) - - return shouldAddDefaultKeywordFilter - ? 
{ - ...acc, - [filterKey]: { - ...acc[filterKey], - [defaultFilter]: new FilterItem() - } - } - : acc - }, newFilters) - } - - /** - * @private - * @param key { string } - * @param value { string } - * @returns { Filters } - */ - _withToggledFilter(key, value) { - return { - ...this.filters, - [key]: { - ...this.filters[key], - [value]: { - ...this.filters[key][value], - selected: !this.filters[key][value].selected, - }, - }, - }; - } - - /** - * @private - * @param str { string } - */ - _splitByComma = (str) => str.split(","); - - /** - * @private - * @param dataset { DOMStringMap } - * @returns { [key: string, value: string][] } - */ - _getDatasetWithKeywordData = (dataset) => - Object.entries(dataset).filter(([key]) => isFilterData(key)); -} + /** + * @param value { string } + * @param filters { Filters } + * @param elementsRefs { Element[] } + */ + constructor(value, filters, elementsRefs, init = false) { + this._init = init; + this._value = value; + this._elementsRefs = elementsRefs; + + this._filters = this._init ? 
this._withNewFilters() : filters; + } + + static get defaultFilters() { + return scaladocData.filterDefaults; + } + + get value() { + return this._value; + } + + get filters() { + return this._filters; + } + + get elementsRefs() { + return this._elementsRefs; + } + + /** + * @param key { string } + * @param value { string } + */ + onFilterToggle(key, value) { + return new Filter( + this.value, + this._withToggledFilter(key, value), + this.elementsRefs, + ); + } + + /** + * @param key { string } + * @param isActive { boolean } + */ + onGroupSelectionChange(key, isActive) { + return new Filter( + this.value, + this._withNewSelectionOfGroup(key, isActive), + this.elementsRefs, + ); + } + + /** + * @param value { string } + */ + onInputValueChange(value) { + return new Filter( + value, + this._generateFiltersOnTyping(value), + this.elementsRefs, + ); + } + + /** + * @private + * @param value { string } + * @returns { Filters } + */ + _generateFiltersOnTyping(value) { + const lcValue = value.toLowerCase(); + + const elementsDatasets = this.elementsRefs + .filter((element) => { + const lcName = getElementTextContent( + getElementNameRef(element), + ).toLowerCase(); + const lcDescription = getElementTextContent( + getElementDescription(element), + ).toLowerCase(); + + return lcName.includes(lcValue) || lcDescription.includes(lcValue); + }) + .map((element) => this._getDatasetWithKeywordData(element.dataset)); + + const newFilters = elementsDatasets.reduce((filtersObject, datasets) => { + datasets.forEach(([key, value]) => { + this._splitByComma(value).forEach((val) => { + filtersObject[key] = { + ...filtersObject[key], + [val]: { ...filtersObject[key][val], visible: true }, + }; + }); + }); + + return filtersObject; + }, this._allFiltersAreHidden()); + + return this._attachDefaultFilters(newFilters); + } + + /** + * @private + * @returns { Filters } + */ + _allFiltersAreHidden() { + return Object.entries(this.filters).reduce( + (filters, [key, filterGroup]) => { + 
filters[key] = Object.keys(filterGroup).reduce( + (group, key) => ( + (group[key] = { ...filterGroup[key], visible: false }), group + ), + {}, + ); + return filters; + }, + {}, + ); + } + + /** + * @private + * @param key { string } + * @param isActive { boolean } + * @returns { Filters } + */ + _withNewSelectionOfGroup(key, isActive) { + return { + ...this.filters, + [key]: Object.keys(this.filters[key]).reduce( + (obj, filterKey) => ( + (obj[filterKey] = { + ...this.filters[key][filterKey], + ...(this.filters[key][filterKey].visible && { + selected: isActive, + }), + }), + obj + ), + {}, + ), + }; + } + + /** + * @private + * @returns { Filters } + */ + _withNewFilters() { + const newFilters = this._elementsRefs.reduce( + (filtersObject, elementRef) => { + this._getDatasetWithKeywordData(elementRef.dataset).forEach( + ([key, value]) => + this._splitByComma(value).forEach((val) => { + filtersObject[key] = filtersObject[key] + ? { + ...filtersObject[key], + [val]: filtersObject[key][val] ?? new FilterItem(), + } + : { [val]: new FilterItem() }; + }), + ); + return filtersObject; + }, + {}, + ); + + return this._attachDefaultFilters(newFilters); + } + + /** + * @private + * @param {Filters} newFilters + * @returns {Filters} + */ + _attachDefaultFilters(newFilters) { + return Object.entries(Filter.defaultFilters).reduce( + (acc, [key, defaultFilter]) => { + const filterKey = getFilterKey(key); + const shouldAddDefaultKeywordFilter = this._elementsRefs.some( + (ref) => !!ref.dataset[filterKey], + ); + + return shouldAddDefaultKeywordFilter + ? 
{ + ...acc, + [filterKey]: { + ...acc[filterKey], + [defaultFilter]: new FilterItem(), + }, + } + : acc; + }, + newFilters, + ); + } + + /** + * @private + * @param key { string } + * @param value { string } + * @returns { Filters } + */ + _withToggledFilter(key, value) { + return { + ...this.filters, + [key]: { + ...this.filters[key], + [value]: { + ...this.filters[key][value], + selected: !this.filters[key][value].selected, + }, + }, + }; + } + + /** + * @private + * @param str { string } + */ + _splitByComma = (str) => str.split(","); + + /** + * @private + * @param dataset { DOMStringMap } + * @returns { [key: string, value: string][] } + */ + _getDatasetWithKeywordData = (dataset) => + Object.entries(dataset).filter(([key]) => isFilterData(key)); + } class FilterItem { constructor(selected = false, visible = true) { diff --git a/scaladoc/resources/dotty_res/scripts/components/FilterBar.js b/scaladoc/resources/dotty_res/scripts/components/FilterBar.js index 9b3e9ab2412a..6de212f9b6e1 100644 --- a/scaladoc/resources/dotty_res/scripts/components/FilterBar.js +++ b/scaladoc/resources/dotty_res/scripts/components/FilterBar.js @@ -2,7 +2,7 @@ * @typedef { import("./Filter").Filter } Filter */ - class FilterBar extends Component { +class FilterBar extends Component { constructor(props) { super(props); @@ -14,7 +14,7 @@ this.state = { filter: new Filter("", {}, this.refs.elements, true), isVisible: false, - selectedPill: '', + selectedPill: "", }; this.inputComp = new Input({ onInputChange: this.onInputChange }); @@ -37,6 +37,8 @@ this.setState((prevState) => ({ filter: prevState.filter.onInputValueChange(value), })); + this.onChangeDisplayedElements(); + this.onDisplayClearButton(); }; onGroupSelectChange = (key, isActive) => { @@ -46,10 +48,13 @@ }; onClearFilters = () => { - this.setState(() => ({ - filter: "" - })) - } + this.inputComp.inputRef.value = ""; + this.setState((prevState) => ({ + filter: prevState.filter.onInputValueChange(""), + })); + const 
noResultContainer = document.querySelector("#no-results-container"); + if (noResultContainer) noResultContainer.remove(); + }; onFilterVisibilityChange = () => { this.setState((prevState) => ({ isVisible: !prevState.isVisible })); @@ -59,23 +64,65 @@ this.setState((prevState) => ({ filter: prevState.filter.onFilterToggle(key, value), })); + this.onChangeDisplayedElements(); + this.onDisplayClearButton(); }; onPillClick = (key) => { this.setState((prevState) => ({ filter: prevState.filter, - selectedPill: key - })) - } + selectedPill: key, + })); + }; onPillCollapse = () => { this.setState((prevState) => ({ filter: prevState.filter, - selectedPill: "" - })) - } + selectedPill: "", + })); + }; + + onChangeDisplayedElements = () => { + const elementsDisplayed = this.refs.elements.filter( + (member) => member.style.display !== "none", + ); + const noResultContainer = document.querySelector("#no-results-container"); + if (elementsDisplayed.length === 0 && !noResultContainer) { + const emptySpace = document.querySelector("#Value-members"); + emptySpace.insertAdjacentHTML( + "beforeend", + `
+
+

No results match your filter criteria

+

Try adjusting or clearing your filters
to display better result

+ +
`, + ); + } + if(noResultContainer && elementsDisplayed.length !== 0) { + noResultContainer.remove(); + } + }; + + onDisplayClearButton = () => { + const clearButton = document.querySelector(".clearButton"); + + const isPillFilterChecked = Object.values(this.state.filter._filters).some( + (bigFilter) => + Object.values(bigFilter).some((smallFilter) => smallFilter.selected), + ); + + if (clearButton) { + if (this.state.filter._value.length === 0 && !isPillFilterChecked) { + clearButton.style.display = "none"; + } else { + clearButton.style.display = "block"; + } + } + }; render() { + this.onDisplayClearButton(); if (this.refs.filterBar) { if (this.state.isVisible) { this.refs.filterBar.classList.add("active"); @@ -85,8 +132,18 @@ } this.listComp.render({ filter: this.state.filter }); - this.filterGroupComp.render({ filter: this.state.filter, selectedPill: this.state.selectedPill }); + this.filterGroupComp.render({ + filter: this.state.filter, + selectedPill: this.state.selectedPill, + }); } } -init(() => new FilterBar()); +window.addEventListener("dynamicPageLoad", () => { + new FilterBar(); +}); + +document.addEventListener("click", (e) => { + const isClearButton = e.target.classList.contains("clearButton"); + if (isClearButton) new FilterBar().onClearFilters(); +}); diff --git a/scaladoc/resources/dotty_res/scripts/components/Input.js b/scaladoc/resources/dotty_res/scripts/components/Input.js index dbe6ad2d724a..0b6015f50db3 100644 --- a/scaladoc/resources/dotty_res/scripts/components/Input.js +++ b/scaladoc/resources/dotty_res/scripts/components/Input.js @@ -8,7 +8,7 @@ class Input extends Component { } onInputChange = ({ currentTarget: { value } }) => { - this.props.onInputChange(value); + setTimeout(this.props.onInputChange(value), 300); }; onKeydown = (e) => { diff --git a/scaladoc/resources/dotty_res/scripts/ux.js b/scaladoc/resources/dotty_res/scripts/ux.js index f82e055e9c4c..304f2af9e129 100644 --- a/scaladoc/resources/dotty_res/scripts/ux.js +++ 
b/scaladoc/resources/dotty_res/scripts/ux.js @@ -1,103 +1,311 @@ -window.addEventListener("DOMContentLoaded", () => { +let observer = null; + +const attrsToCopy = [ + "data-githubContributorsUrl", + "data-githubContributorsFilename", + "data-pathToRoot", +] + +/** + * @typedef {Object} SavedPageState + * @property {Strign} mainDiv + * @property {String} leftColumn + * @property {String} title + * @property {Record} attrs + */ + +/** + * @param {Document} doc + * @returns {SavedPageState} + */ +function savePageState(doc) { + const attrs = {}; + for (const attr of attrsToCopy) { + attrs[attr] = doc.documentElement.getAttribute(attr); + } + return { + mainDiv: doc.querySelector("#main")?.innerHTML, + leftColumn: doc.querySelector("#leftColumn").innerHTML, + title: doc.title, + attrs, + }; +} + +/** + * @param {Document} doc + * @param {SavedPageState} saved + */ +function loadPageState(doc, saved) { + doc.title = saved.title; + doc.querySelector("#main").innerHTML = saved.mainDiv; + doc.querySelector("#leftColumn").innerHTML = saved.leftColumn; + for (const attr of attrsToCopy) { + doc.documentElement.setAttribute(attr, saved.attrs[attr]); + } +} + +function attachAllListeners() { + if (observer) { + observer.disconnect(); + } + + var anyNodeExpanded = document.querySelectorAll(".ni.n0.expanded").length > 0; + var firstNavNode = document.querySelector(".ni.n0"); + if (!anyNodeExpanded && firstNavNode != null) { + var firstNavNodeAddress = firstNavNode.querySelector("a"); + firstNavNode.classList.add("expanded"); + var button = firstNavNode.querySelector("button.ar"); + if (button != null) { + button.classList.add("expanded"); + } + } + + var scrollPosition = sessionStorage.getItem("scroll_value"); + if (scrollPosition) { + var sideMenu = document.querySelector(".side-menu"); + sideMenu.scrollTo(0, scrollPosition); + } + + const currentLocationHash = window.location.hash; + + const currentSection = [ + ...document.querySelectorAll("#content section[id]"), + 
].find((section) => currentLocationHash === `#${section.id}`); - var toggler = document.getElementById("leftToggler"); - if (toggler) { - toggler.onclick = function () { - document.getElementById("leftColumn").classList.toggle("open"); - }; + if (currentSection) { + document.querySelector("#main").scrollTo(0, currentSection.offsetTop - 100); } - var elements = document.getElementsByClassName("documentableElement") + var elements = document.getElementsByClassName("documentableElement"); if (elements) { for (i = 0; i < elements.length; i++) { - if (elements[i].querySelector(".show-content") !== null) { - elements[i].onclick = function (e) { + var expanderChild = elements[i].querySelector( + ".documentableElement-expander", + ); + if ( + elements[i].querySelector(".show-content") !== null && + expanderChild !== null + ) { + expanderChild.onclick = function (e) { if (!$(e.target).is("a") && e.fromSnippet !== true) { - this.classList.toggle("expand") - this.querySelector(".show-content").classList.toggle("expand") + this.parentElement.classList.toggle("expand"); + this.children[0].classList.toggle("expanded"); + this.querySelector(".show-content").classList.toggle("expand"); } - } + }; } } } - var documentableLists = document.getElementsByClassName("documentableList") - if (documentableLists) { - for (i = 0; i < documentableLists.length; i++) { - documentableLists[i].children[0].onclick = function(e) { - this.classList.toggle("expand"); - this.parentElement.classList.toggle("expand"); - } +document + .querySelectorAll(".documentableElement .signature") + .forEach((signature) => { + const short = signature.querySelector(".signature-short"); + const long = signature.querySelector(".signature-long"); + const extender = document.createElement("span"); + const extenderDots = document.createTextNode("..."); + extender.appendChild(extenderDots); + extender.classList.add("extender"); + if (short && long && signature.children[1].hasChildNodes()) { + 
signature.children[0].append(extender); } - } + }); + + const documentableLists = document.getElementsByClassName("documentableList"); + [...documentableLists].forEach((list) => { + list.children[0].addEventListener("click", () => { + list.classList.toggle("expand"); + list.children[0].children[0].classList.toggle("expand"); + }); + }); - var memberLists = document.getElementsByClassName("tab") + var memberLists = document.getElementsByClassName("tab"); if (memberLists) { for (i = 0; i < memberLists.length; i++) { - if ($(memberLists[i].children[0]).is("button")) { - memberLists[i].children[0].onclick = function(e) { + if ($(memberLists[i].children[0].children[0]).is("button")) { + memberLists[i].children[0].onclick = function (e) { this.classList.toggle("expand"); + this.children[0].classList.toggle("expand"); this.parentElement.classList.toggle("expand"); - } + this.parentElement.parentElement.classList.toggle("expand"); + }; } } } - $(".side-menu span").on('click', function () { - $(this).parent().toggleClass("expanded") + const documentableBriefs = document.querySelectorAll(".documentableBrief"); + [...documentableBriefs].forEach((brief) => { + brief.addEventListener("click", () => { + brief.parentElement.parentElement.parentElement.parentElement.classList.add( + "expand", + ); + brief.parentElement.parentElement.parentElement.previousElementSibling.children[0].classList.add( + "expanded", + ); + }); + }); + + document.querySelectorAll("a").forEach((el) => { + const href = el.href; + if (href === "") { + return; + } + const url = new URL(href); + el.addEventListener("click", (e) => { + if ( + url.href.replace(/#.*/, "") === window.location.href.replace(/#.*/, "") + ) { + return; + } + if (url.origin !== window.location.origin) { + return; + } + if (e.metaKey || e.ctrlKey || e.shiftKey || e.altKey || e.button !== 0) { + return; + } + e.preventDefault(); + e.stopPropagation(); + $.get(href, function (data) { + if (window.history.state === null) { + 
window.history.replaceState(savePageState(document), ""); + } + const parser = new DOMParser(); + const parsedDocument = parser.parseFromString(data, "text/html"); + const state = savePageState(parsedDocument); + window.history.pushState(state, "", href); + loadPageState(document, state); + window.dispatchEvent(new Event(DYNAMIC_PAGE_LOAD)); + document + .querySelector("#main") + .scrollTo({ top: 0, left: 0, behavior: "instant" }); + }); + }); + }); + + $(".ar").on("click", function (e) { + $(this).parent().parent().toggleClass("expanded"); + $(this).toggleClass("expanded"); + e.stopPropagation(); }); - $(".ar").on('click', function (e) { - $(this).parent().parent().toggleClass("expanded") - $(this).toggleClass("expanded") - e.stopPropagation() + document.querySelectorAll(".documentableList .ar").forEach((arrow) => { + arrow.addEventListener("click", () => { + arrow.parentElement.parentElement.classList.toggle("expand"); + arrow.classList.toggle("expand"); + }); }); - document.querySelectorAll(".nh").forEach(el => el.addEventListener('click', () => { - el.lastChild.click() - el.first.addClass("expanded") - el.parent.addClass("expanded") - })) - - document.querySelectorAll(".supertypes").forEach(el => el.firstChild.addEventListener('click', () => { - el.classList.toggle("collapsed"); - el.firstChild.classList.toggle("expand"); - })) - - - document.querySelectorAll(".subtypes").forEach(el => el.firstChild.addEventListener('click', () => { - el.classList.toggle("collapsed"); - el.firstChild.classList.toggle("expand"); - })) - - document.querySelectorAll(".nh").forEach(el => el.addEventListener('click', () => { - el.lastChild.click() - el.first.addClass("expanded") - el.parent.addClass("expanded") - })) - - const observer = new IntersectionObserver(entries => { - entries.forEach(entry => { - const id = entry.target.getAttribute('id'); - if (entry.intersectionRatio > 0) { - document.querySelector(`#toc li a[href="#${id}"]`).parentElement.classList.add('active'); + 
document.querySelectorAll(".nh").forEach((el) => + el.addEventListener("click", () => { + if ( + el.lastChild.href.replace("#", "") === + window.location.href.replace("#", "") + ) { + el.parentElement.classList.toggle("expanded"); + el.firstChild.classList.toggle("expanded"); } else { - document.querySelector(`#toc li a[href="#${id}"]`).parentElement.classList.remove('active'); + el.lastChild.click(); + } + }), + ); + + const toggleShowAllElem = (element) => { + if (element.textContent == "Show all") { + element.textContent = "Collapse"; + } else { + element.textContent = "Show all"; + } + }; + + document.querySelectorAll(".supertypes").forEach((el) => + el.lastElementChild.addEventListener("click", () => { + el.classList.toggle("collapsed"); + toggleShowAllElem(el.lastElementChild); + }), + ); + + document.querySelectorAll(".subtypes").forEach((el) => + el.lastElementChild.addEventListener("click", () => { + el.classList.toggle("collapsed"); + toggleShowAllElem(el.lastElementChild); + }), + ); + + document.querySelectorAll(".ni").forEach((link) => + link.addEventListener("mouseenter", (_e) => { + sessionStorage.setItem( + "scroll_value", + link.offsetTop - window.innerHeight / 2, + ); + }), + ); + + const getIdOfElement = (element) => element.target.getAttribute("id"); + const getTocListElement = (selector) => + document.querySelector(`#toc li a[href="#${selector}"]`); + + const tocHashes = [...document.querySelectorAll("#toc li a")].reduce( + (acc, link) => { + if (link.hash.length) { + acc.push(link.hash); + } + return acc; + }, + [], + ); + + const removeAllHighlights = () => { + tocHashes.forEach((hash) => { + const element = document.querySelector(`#toc li a[href="${hash}"]`); + if (element.parentElement?.classList?.contains("active")) { + element.parentElement.classList.remove("active"); } }); - }); + }; + + observer = new IntersectionObserver( + (entries) => { + const firstEntry = entries[0]; + const lastEntry = entries[entries.length - 1]; + + const 
currentHash = window.location.hash; - document.querySelectorAll('#content section[id]').forEach((section) => { + const element = document.querySelector( + `#toc li a[href="${currentHash}"]`, + ); + if (element) { + removeAllHighlights(); + element.parentElement?.classList.toggle("active"); + } + + if (entries.length > 3) { + removeAllHighlights(); + const id = getIdOfElement(firstEntry); + + getTocListElement(id).parentElement.classList.toggle("active"); + } + if (lastEntry.isIntersecting) { + window.location.hash = ""; + removeAllHighlights(); + const id = getIdOfElement(lastEntry); + + getTocListElement(id).parentElement.classList.toggle("active"); + } + }, + { + rootMargin: "-10% 0px -50%", + }, + ); + + document.querySelectorAll("#content section[id]").forEach((section) => { observer.observe(section); }); - document.querySelectorAll(".side-menu a").forEach(elem => elem.addEventListener('click', e => e.stopPropagation())) - if (location.hash) { var target = location.hash.substring(1); // setting the 'expand' class on the top-level container causes undesireable styles // to apply to the top-level docs, so we avoid this logic for that element. 
- if (target != 'container') { + if (target != "container") { var selected = document.getElementById(location.hash.substring(1)); if (selected) { selected.classList.toggle("expand"); @@ -105,95 +313,126 @@ window.addEventListener("DOMContentLoaded", () => { } } - var logo = document.getElementById("logo"); - if (logo) { - logo.onclick = function () { - window.location = pathToRoot; // global variable pathToRoot is created by the html renderer - }; - } - - document.querySelectorAll('.documentableAnchor').forEach(elem => { - elem.addEventListener('click', event => { - var $temp = $("") - $("body").append($temp) - var a = document.createElement('a') - a.href = $(elem).attr("link") - $temp.val(a.href).select(); - document.execCommand("copy") - $temp.remove(); - }) - }) - - hljs.registerLanguage("scala", highlightDotty); - hljs.registerAliases(["dotty", "scala3"], "scala"); - hljs.initHighlighting(); + document.querySelectorAll("pre code").forEach((el) => { + hljs.highlightBlock(el); + }); /* listen for the `F` key to be pressed, to focus on the member filter input (if it's present) */ - document.body.addEventListener('keydown', e => { + document.body.addEventListener("keydown", (e) => { if (e.key == "f") { const tag = e.target.tagName; if (tag != "INPUT" && tag != "TEXTAREA") { - const filterInput = findRef('.documentableFilter input.filterableInput'); + const filterInput = findRef( + ".documentableFilter input.filterableInput", + ); if (filterInput != null) { // if we focus during this event handler, the `f` key gets typed into the input setTimeout(() => filterInput.focus(), 1); } } } - }) - - // show/hide side menu on mobile view - const sideMenuToggler = document.getElementById("mobile-sidebar-toggle"); - sideMenuToggler.addEventListener('click', _e => { - document.getElementById("leftColumn").classList.toggle("show") - document.getElementById("content").classList.toggle("sidebar-shown") - const toc = document.getElementById("toc"); - if(toc) { - 
toc.classList.toggle("sidebar-shown") - } - sideMenuToggler.classList.toggle("menu-shown") - }) - - // show/hide mobile menu on mobile view - const mobileMenuOpenIcon = document.getElementById("mobile-menu-toggle"); - const mobileMenuCloseIcon = document.getElementById("mobile-menu-close"); - mobileMenuOpenIcon.addEventListener('click', _e => { - document.getElementById("mobile-menu").classList.add("show") - }) - mobileMenuCloseIcon.addEventListener('click', _e => { - document.getElementById("mobile-menu").classList.remove("show") - }) - + }); // when document is loaded graph needs to be shown +} + +const DYNAMIC_PAGE_LOAD = "dynamicPageLoad"; +window.addEventListener(DYNAMIC_PAGE_LOAD, () => { + attachAllListeners(); +}); + +window.addEventListener("dynamicPageLoad", () => { + const sideMenuOpen = sessionStorage.getItem("sideMenuOpen"); + if (sideMenuOpen) { + if (document.querySelector("#leftColumn").classList.contains("show")) { + document.querySelector("#content").classList.add("sidebar-shown"); + } + sessionStorage.removeItem("sideMenuOpen"); + } else { + const leftColumn = document.querySelector(".show"); + if (leftColumn) leftColumn.classList.remove("show"); + + const mobileSidebarToggleButton = document.querySelector(".menu-shown"); + if (mobileSidebarToggleButton) + mobileSidebarToggleButton.classList.remove("menu-shown"); + + const content = document.querySelector(".sidebar-shown"); + if (content) content.classList.remove("sidebar-shown"); + } +}); + +window.addEventListener("DOMContentLoaded", () => { + hljs.registerLanguage("scala", highlightDotty); + hljs.registerAliases(["dotty", "scala3"], "scala"); + window.dispatchEvent(new Event(DYNAMIC_PAGE_LOAD)); +}); + +const elements = document.querySelectorAll(".documentableElement"); + +// show/hide side menu on mobile view +const sideMenuToggler = document.getElementById("mobile-sidebar-toggle"); +sideMenuToggler.addEventListener("click", (_e) => { + 
document.getElementById("leftColumn").classList.toggle("show"); + document.getElementById("content").classList.toggle("sidebar-shown"); + const toc = document.getElementById("toc"); + if (toc && toc.childElementCount > 0) { + toc.classList.toggle("sidebar-shown"); + } + sideMenuToggler.classList.toggle("menu-shown"); +}); + +// show/hide mobile menu on mobile view +document + .getElementById("mobile-menu-toggle") + .addEventListener("click", (_e) => { + document.getElementById("mobile-menu").classList.add("show"); + }); +document.getElementById("mobile-menu-close").addEventListener("click", (_e) => { + document.getElementById("mobile-menu").classList.remove("show"); +}); + +window.addEventListener("popstate", (e) => { + if (e.state === null) { + return; + } + loadPageState(document, e.state); + window.dispatchEvent(new Event(DYNAMIC_PAGE_LOAD)); }); var zoom; var transform; function showGraph() { - document.getElementById("inheritance-diagram").classList.add("shown") + document.getElementById("inheritance-diagram").classList.add("shown"); if ($("svg#graph").children().length == 0) { - var dotNode = document.querySelector("#dot") + var dotNode = document.querySelector("#dot"); if (dotNode) { var svg = d3.select("#graph"); - var radialGradient = svg.append("defs").append("radialGradient").attr("id", "Gradient"); - radialGradient.append("stop").attr("stop-color", "var(--yellow9)").attr("offset", "30%"); - radialGradient.append("stop").attr("stop-color", "var(--background-default)").attr("offset", "100%"); + var radialGradient = svg + .append("defs") + .append("radialGradient") + .attr("id", "Gradient"); + radialGradient + .append("stop") + .attr("stop-color", "var(--yellow9)") + .attr("offset", "30%"); + radialGradient + .append("stop") + .attr("stop-color", "var(--background-default)") + .attr("offset", "100%"); var inner = svg.append("g"); // Set up zoom support - zoom = d3.zoom() - .on("zoom", function ({ transform }) { - inner.attr("transform", transform); - }); + 
zoom = d3.zoom().on("zoom", function ({ transform }) { + inner.attr("transform", transform); + }); svg.call(zoom); var render = new dagreD3.render(); var g = graphlibDot.read(dotNode.text); - g.graph().rankDir = 'BT'; + g.graph().rankDir = "BT"; g.nodes().forEach(function (v) { g.setNode(v, { labelType: "html", @@ -201,12 +440,12 @@ function showGraph() { class: g.node(v).class, id: g.node(v).id, rx: "4px", - ry: "4px" + ry: "4px", }); }); g.setNode("node0Cluster", { style: "fill: url(#Gradient);", - id: "node0Cluster" + id: "node0Cluster", }); g.setParent("node0", "node0Cluster"); @@ -217,7 +456,8 @@ function showGraph() { }); render.arrows().hollowPoint = function normal(parent, id, edge, type) { - var marker = parent.append("marker") + var marker = parent + .append("marker") .attr("id", id) .attr("viewBox", "0 0 10 10") .attr("refX", 9) @@ -227,7 +467,8 @@ function showGraph() { .attr("markerHeight", 12) .attr("orient", "auto"); - var path = marker.append("path") + var path = marker + .append("path") .attr("d", "M 0 0 L 10 5 L 0 10 z") .style("stroke-width", 1) .style("stroke-dasharray", "1,0") @@ -249,7 +490,10 @@ function showGraph() { midY = bounds.y + height / 2; if (width == 0 || height == 0) return; // nothing to fit var scale = Math.min(fullWidth / width, fullHeight / height) * 0.99; // 0.99 to make a little padding - var translate = [fullWidth / 2 - scale * midX, fullHeight / 2 - scale * midY]; + var translate = [ + fullWidth / 2 - scale * midX, + fullHeight / 2 - scale * midY, + ]; transform = d3.zoomIdentity .translate(translate[0], translate[1]) @@ -263,8 +507,14 @@ function showGraph() { var node0Cluster = d3.select("g#node0Cluster")._groups[0][0]; var node0ClusterRect = node0Cluster.children[0]; node0Cluster.setAttribute("transform", node0.getAttribute("transform")); - node0ClusterRect.setAttribute("width", +node0Rect.getAttribute("width") + 80); - node0ClusterRect.setAttribute("height", +node0Rect.getAttribute("height") + 80); + 
node0ClusterRect.setAttribute( + "width", + +node0Rect.getAttribute("width") + 80, + ); + node0ClusterRect.setAttribute( + "height", + +node0Rect.getAttribute("height") + 80, + ); node0ClusterRect.setAttribute("x", node0Rect.getAttribute("x") - 40); node0ClusterRect.setAttribute("y", node0Rect.getAttribute("y") - 40); } @@ -272,13 +522,48 @@ function showGraph() { } function hideGraph() { - document.getElementById("inheritance-diagram").classList.remove("shown") + document.getElementById("inheritance-diagram").classList.remove("shown"); } function zoomOut() { var svg = d3.select("#graph"); - svg - .transition() - .duration(2000) - .call(zoom.transform, transform); + svg.transition().duration(2000).call(zoom.transform, transform); } + +const members = [...document.querySelectorAll("[id]")]; +members.forEach((member) => { + window.addEventListener("resize", () => { + const navbarHeight = document.querySelector("#header").clientHeight; + const filtersHeight = document.querySelector( + ".documentableFilter", + )?.clientHeight; + if (navbarHeight && filtersHeight) { + member.style.scrollMarginTop = `${navbarHeight + filtersHeight}px`; + } + }); +}); + +members.forEach((member) => { + window.addEventListener("DOMContentLoaded", () => { + const navbarHeight = document.querySelector("#header").clientHeight; + const filtersHeight = document.querySelector( + ".documentableFilter", + )?.clientHeight; + if (navbarHeight && filtersHeight) { + member.style.scrollMarginTop = `${navbarHeight + filtersHeight}px`; + } + }); +}); + +window.addEventListener(DYNAMIC_PAGE_LOAD, () => { + const docsLink = document.querySelector("#docs-nav-button"); + const apiLink = document.querySelector("#api-nav-button"); + + docsLink && + apiLink && + [docsLink, apiLink].forEach((button) => { + button.addEventListener("click", () => { + sessionStorage.setItem("sideMenuOpen", true); + }); + }); +}); \ No newline at end of file diff --git a/scaladoc/resources/dotty_res/styles/apistyles.css 
b/scaladoc/resources/dotty_res/styles/apistyles.css index a1fcd495cbb6..e69de29bb2d1 100644 --- a/scaladoc/resources/dotty_res/styles/apistyles.css +++ b/scaladoc/resources/dotty_res/styles/apistyles.css @@ -1,3 +0,0 @@ -#content > div { - width: 100%; -} diff --git a/scaladoc/resources/dotty_res/styles/scalastyle.css b/scaladoc/resources/dotty_res/styles/scalastyle.css index 546524e6779a..a14af7f7ae2d 100644 --- a/scaladoc/resources/dotty_res/styles/scalastyle.css +++ b/scaladoc/resources/dotty_res/styles/scalastyle.css @@ -33,15 +33,19 @@ input { min-height: 100%; } #leftColumn { - position: fixed; - width: var(--side-width); - height: 100%; - border-right: none; - background: var(--body-bg); - display: flex; - flex-direction: column; - z-index: 5; - border-right: solid 1px var(--leftbar-border); + display: flex; + flex-direction: column; + align-items: center; + position: absolute; + width: calc(39 * var(--base-spacing)); + height: calc(100% - (8 * var(--base-spacing)) - (6 * var(--base-spacing))); + left: 0px; + top: calc(8 * var(--base-spacing)); + background: var(--background-nav); + border: 1px solid var(--border-default); + border-top: none; + border-left: none; + transition: left 0.2s linear; } #main-content { min-height: calc(100vh - var(--footer-height) - 24px); @@ -130,18 +134,12 @@ td, th { border: 1px solid var(--border-medium); padding: 0.5rem; + color: var(--text-primary); } th { border-bottom: 2px solid var(--border-medium); } -/* Left bar toggler, only on small screens */ -#leftToggler { - display: none; - color: var(--icon-color); - cursor: pointer; -} - /* Left bar */ #paneSearch { display: none; @@ -731,30 +729,6 @@ footer .mode { display: flex; } */ -.documentableAnchor:before { - content: "\e901"; /* arrow down */ - font-family: "dotty-icons" !important; - transform: rotate(-45deg); - font-size: 20px; - color: var(--link-fg); - display: none; - flex-direction: row; - align-items: center; - justify-content: center; - position: absolute; - top: 
6px; - left: -32px; -} - -.documentableAnchor:hover:before { - color: var(--link-hover-fg); -} - -.documentableAnchor:active:before { - color: var(--link-hover-fg); - top: 8px; -} - .memberDocumentation { font-size: 15px; line-height: 1.5; @@ -1062,20 +1036,6 @@ footer .socials { display: none; } - #leftToggler { - display: unset; - position: absolute; - top: 5px; - left: 12px; - z-index: 5; - font-size: 30px; - } - #leftColumn.open ~ #main #leftToggler { - position: fixed; - left: unset; - right: 16vw; - color: var(--leftbar-fg); - } .icon-toggler::before { content: "\e90a"; /* menu icon */ } diff --git a/scaladoc/resources/dotty_res/styles/theme/color-tokens.css b/scaladoc/resources/dotty_res/styles/theme/color-tokens.css index 7acfe1fe624b..aa07e9510b7e 100644 --- a/scaladoc/resources/dotty_res/styles/theme/color-tokens.css +++ b/scaladoc/resources/dotty_res/styles/theme/color-tokens.css @@ -13,11 +13,18 @@ /* border */ --border-default: var(--grey5); - --border-background-color: var(--grey1); + --border-strong: var(--grey7); + --border-background-color: var(--mauve1); /* background */ --background-default: var(--grey1); - --background-neutral: var(--grey2); + --background-subtle: var(--grey2); + --background-neutral: var(--grey3); + + /* layout backgrounds */ + --background-header: var(--background-default); + --background-nav: var(--background-default); + --background-main: var(--background-default); /* action */ --action-primary-content-default: var(--grey11); @@ -31,7 +38,7 @@ --action-primary-background-active: var(--grey3); --action-primary-background-selected: var(--grey4); - --action-primary-border-default: var(--grey5); + --action-primary-border-default: var(--grey4); /* semantic */ --semantic-content-grey: var(--grey11); @@ -47,8 +54,9 @@ --semantic-background-red: var(--red3); /* code */ - --code-props-content: var(--crimson11); - --code-props-background: var(--red3); + --code-props-content: var(--grey12); + --code-props-background: var(--grey2); + 
--code-props-border: var(--grey5); --code-syntax-highlighting-comment: var(--grey11); --code-syntax-highlighting-quote: var(--grey11); @@ -113,13 +121,20 @@ --icon-hover: var(--grey12); /* border */ - --border-default: var(--grey6); + --border-default: var(--grey5); + --border-strong: var(--grey7); --border-background-color: var(--grey1); /* background */ --background-default: var(--grey1); + --background-subtle: var(--grey2); --background-neutral: var(--grey3); + /* layout backgrounds */ + --background-header: var(--background-neutral); + --background-nav: var(--background-default); + --background-main: var(--background-subtle); + /* action */ --action-primary-content-default: var(--grey11); --action-primary-content-hover: var(--grey12); @@ -131,11 +146,13 @@ --action-primary-background-hover: var(--grey4); --action-primary-background-active: var(--grey4); --action-primary-background-selected: var(--grey5); - --action-primary-border-default: var(--grey6); + + --action-primary-border-default: var(--grey5); /* code */ - --code-props-content: var(--crimson11); - --code-props-background: var(--red3); + --code-props-content: var(--grey12); + --code-props-background: var(--grey3); + --code-props-border: var(--grey6); --code-syntax-highlighting-comment: var(--grey11); --code-syntax-highlighting-quote: var(--grey11); @@ -197,4 +214,4 @@ --switch-button: var(--grey12); --switch-background-default: var(--grey9); --switch-background-selected: var(--indigo11); -} +} \ No newline at end of file diff --git a/scaladoc/resources/dotty_res/styles/theme/colors.css b/scaladoc/resources/dotty_res/styles/theme/colors.css index 57e426b65d47..62158dcddf96 100644 --- a/scaladoc/resources/dotty_res/styles/theme/colors.css +++ b/scaladoc/resources/dotty_res/styles/theme/colors.css @@ -3,18 +3,32 @@ */ :root { /* grey colors */ - --grey1: #ffffff; - --grey2: #f9f8f9; - --grey3: #f4f2f4; - --grey4: #eeedef; - --grey5: #e9e8ea; - --grey6: #e4e2e4; - --grey7: #dcdbdd; - --grey8: #c8c7cb; - 
--grey9: #908e96; - --grey10: #86848d; - --grey11: #6f6e77; - --grey12: #1a1523; + --grey1: #fcfcfc; + --grey2: #f8f8f8; + --grey3: #f3f3f3; + --grey4: #ededed; + --grey5: #e8e8e8; + --grey6: #e2e2e2; + --grey7: #dbdbdb; + --grey8: #c7c7c7; + --grey9: #8f8f8f; + --grey10: #858585; + --grey11: #6f6f6f; + --grey12: #171717; + + /* mauve colors */ + --mauve1: #ffffff; + --mauve2: #f9f8f9; + --mauve3: #f4f2f4; + --mauve4: #eeedef; + --mauve5: #e9e8ea; + --mauve6: #e4e2e4; + --mauve7: #dcdbdd; + --mauve8: #c8c7cb; + --mauve9: #908e96; + --mauve10: #86848d; + --mauve11: #6f6e77; + --mauve12: #1a1523; /* red colors */ --red1: #fffcfc; @@ -165,18 +179,32 @@ dark theme */ :root.theme-dark { - --grey1: #161618; - --grey2: #1c1c1f; - --grey3: #232326; - --grey4: #28282c; - --grey5: #2e2e32; - --grey6: #34343a; - --grey7: #3e3e44; - --grey8: #504f57; - --grey9: #706f78; - --grey10: #7e7d86; - --grey11: #a09fa6; - --grey12: #ededef; + --grey1: #161616; + --grey2: #1c1c1c; + --grey3: #232323; + --grey4: #282828; + --grey5: #2e2e2e; + --grey6: #343434; + --grey7: #3e3e3e; + --grey8: #505050; + --grey9: #707070; + --grey10: #7e7e7e; + --grey11: #a0a0a0; + --grey12: #ededed; + + /* muave colors */ + --mauve1: #161618; + --mauve2: #1c1c1f; + --mauve3: #232326; + --mauve4: #28282c; + --mauve5: #2e2e32; + --mauve6: #34343a; + --mauve7: #3e3e44; + --mauve8: #504f57; + --mauve9: #706f78; + --mauve10: #7e7d86; + --mauve11: #a09fa6; + --mauve12: #ededef; /* red colors */ --red1: #1f1315; diff --git a/scaladoc/resources/dotty_res/styles/theme/components/api-filters.css b/scaladoc/resources/dotty_res/styles/theme/components/api-filters.css index 6bd64ce4153f..4a98488fa036 100644 --- a/scaladoc/resources/dotty_res/styles/theme/components/api-filters.css +++ b/scaladoc/resources/dotty_res/styles/theme/components/api-filters.css @@ -3,8 +3,13 @@ flex-wrap: wrap; z-index: 1; row-gap: calc(2 * var(--base-spacing)); - position: relative; - margin-bottom: calc(6 * var(--base-spacing)); + position: 
sticky; + top: var(--header-height); + padding: calc(2 * var(--base-spacing)) 0; + padding-left: 28px; + margin-bottom: calc(3 * var(--base-spacing)); + margin-left: -28px; + background-color: var(--background-main); } .filtersContainer { @@ -14,7 +19,7 @@ } .filterableInput { - background-color: var(--background-default); + background-color: var(--background-main); color: var(--action-primary-content-active); background-image: url("../../../images/icon-buttons/search/dark/default.svg"); background-repeat: no-repeat; diff --git a/scaladoc/resources/dotty_res/styles/theme/components/api-member.css b/scaladoc/resources/dotty_res/styles/theme/components/api-member.css index 74ed72423292..c1a491815201 100644 --- a/scaladoc/resources/dotty_res/styles/theme/components/api-member.css +++ b/scaladoc/resources/dotty_res/styles/theme/components/api-member.css @@ -1,14 +1,13 @@ .documentableElement { - background-color: var(--action-primary-background-default-solid); - padding: calc(3 * var(--base-spacing)); - border-radius: 4px; - margin-bottom: calc(3 * var(--base-spacing)); + padding: 13px 0 12px; color: var(--text-primary); position: relative; + border-top: 1px solid var(--border-default); } .documentableElement:last-child { margin-bottom: 0; + border-top: 1px solid var(--border-default); } .documentableElement .signature { @@ -16,7 +15,12 @@ line-height: 1.5; } -.documentableElement:hover { +.documentableElement .signature-long { + display: none; +} + +.documentableElement:not(.expand):hover, +.documentableElement-expander { cursor: pointer; } @@ -24,6 +28,20 @@ color: var(--text-secondary); } +.documentableElement .documentableBrief p:first-of-type, +.documentableElement.expand > div .cover .doc p:first-of-type { + margin-block: calc(.5 * var(--base-spacing)) 0; +} + +.documentableElement .doc p, +.documentableElement .doc .snippet { + margin-block: calc(2 * var(--base-spacing)); +} + +.documentableElement.expand .doc > :last-child { + margin-block-end: 0; +} + 
.documentableElement .annotations { display: none; } @@ -32,10 +50,30 @@ display: none; } +.documentableElement.expand { +color: var(--text-secondary); +} + .documentableElement.expand > div .cover { display: block; } +.attributes.attributes-small > dt { + padding-block: calc(1.5 * var(--base-spacing)); +} + +.attributes.attributes-small > dd { + padding-block: calc(1 * var(--base-spacing)); +} + +.documentableElement.expand { + padding-block-end: calc(4 * var(--base-spacing)); +} + +.documentableElement.expand > div .cover dd { + color: var(--text-primary); +} + .documentableElement.expand .annotations { display: inline-block; } @@ -44,15 +82,38 @@ display: none; } +.documentableElement.expand .signature { + color: var(--text-primary); +} + +.documentableElement.expand .signature-long { + display: inline; +} + +.documentableElement.expand .extender { + display: none; +} + .documentableElement .icon-button { position: absolute; - top: calc(3 * var(--base-spacing)); - right: calc(3 * var(--base-spacing)); - display: none; + left: calc(3.5 * var(--base-spacing) / -1); + top: calc(2 * var(--base-spacing)); +} + +@media (max-width: 480px) { + + .documentableElement .icon-button { + left: calc(3 * var(--base-spacing) / -1); + } + } .documentableElement:hover .icon-button { - display: block; + display: inline; +} + +.documentableElement .attributes { + margin-bottom: 0; } [t="k"] { diff --git a/scaladoc/resources/dotty_res/styles/theme/components/attributes.css b/scaladoc/resources/dotty_res/styles/theme/components/attributes.css index 0735f5c2071d..7bd2e3628a07 100644 --- a/scaladoc/resources/dotty_res/styles/theme/components/attributes.css +++ b/scaladoc/resources/dotty_res/styles/theme/components/attributes.css @@ -1,10 +1,10 @@ .attributes { + display: grid; + grid-template-columns: 184px 1fr; width: 100%; overflow: hidden; padding: 0; margin: 0; - border-bottom: 1px solid var(--border-default); - margin-bottom: calc(6 * var(--base-spacing)); } .attributes 
.attributes { @@ -12,29 +12,60 @@ } .attributes p { - margin: 0; + margin-block: calc(3 * var(--base-spacing)); +} + +.attributes p:has(span:empty) { + display: none; +} + +.membersList .attributes p { + margin-block: calc(1 * var(--base-spacing)); +} + +.attributes p:first-child { + margin-block-start: 0; +} + +.attributes p:last-child { + margin-block-end: 0; } .attributes > dt { - float: left; - width: 30%; - padding: 0; - margin: 0; + padding: calc(2.5 * var(--base-spacing)) calc(6 * var(--base-spacing)) calc(2.5 * var(--base-spacing)) calc(2 * var(--base-spacing)); border-top: 1px solid var(--border-default); - padding-top: calc(2.5 * var(--base-spacing)); - padding-bottom: calc(2.5 * var(--base-spacing)); - text-align: center; + text-align: right; color: var(--text-secondary); + overflow: hidden; } .attributes > dd { - float: left; - width: 70%; - padding: 0; + padding-left: 10%; + margin: 0; + border-top: 1px solid var(--border-default); + overflow: hidden; + padding: calc(2 * var(--base-spacing)) 0; +} + +.attributes .memberList dt { + padding: calc(1.5 * var(--base-spacing)) calc(6 * var(--base-spacing)) calc(1.5 * var(--base-spacing)) calc(2 * var(--base-spacing)); + border-top: 1px solid var(--border-default); + text-align: right; + color: var(--text-secondary); + overflow: hidden; +} + +.attributes .memberList dd { + padding-left: 10%; margin: 0; border-top: 1px solid var(--border-default); - padding-top: calc(2.5 * var(--base-spacing)); - padding-bottom: calc(2.5 * var(--base-spacing)); + overflow: hidden; + padding: var(--base-spacing) 0; +} + +.attributes > dt:first-child, +.attributes > dd:first-of-type { + border-top: none; } .attributes > dd > .attributes > dt { @@ -51,6 +82,24 @@ width: 80%; } -.documentableElement .attributes { - margin-bottom: 0; -} +@media (max-width: 1376px) { + .attributes { + display: flex; + flex-flow: column; + } + + .attributes > dt { + padding-left: 0; + padding-bottom: 0; + border-bottom: 0; + text-align: left; + } + + 
.attributes > dd { + border-top: 0; + } + + .attributes.attributes-small { + padding-left: 40px; + } +} \ No newline at end of file diff --git a/scaladoc/resources/dotty_res/styles/theme/components/button/icon-button.css b/scaladoc/resources/dotty_res/styles/theme/components/button/icon-button.css index 75b2202b680f..a6450984131e 100644 --- a/scaladoc/resources/dotty_res/styles/theme/components/button/icon-button.css +++ b/scaladoc/resources/dotty_res/styles/theme/components/button/icon-button.css @@ -8,6 +8,10 @@ width: 16px; } +.icon-button:hover { + cursor: pointer; +} + .icon-button::after { width: calc(2 * var(--base-spacing)); height: calc(2 * var(--base-spacing)); @@ -277,6 +281,8 @@ content: url("../../../../images/icon-buttons/arrow-down/dark/selected.svg"); } + + /* mobile menu toggler */ #mobile-menu-toggle.icon-button.menu-shown::after { @@ -726,3 +732,102 @@ .theme-dark .icon-button.close.selected::after { content: url("../../../../images/icon-buttons/close/dark/selected.svg"); } + + +/* member list expanding arrow */ + +.documentableElement .ar.icon-button::after { + content: url("../../../../images/icon-buttons/arrow-right/light/default.svg"); +} + +.documentableElement .ar.icon-button:hover::after { + content: url("../../../../images/icon-buttons/arrow-right/light/hover.svg"); +} + +.documentableElement .ar.icon-button:active::after { + content: url("../../../../images/icon-buttons/arrow-right/light/active.svg"); +} + +.documentableElement .ar.icon-button:disabled::after { + content: url("../../../../images/icon-buttons/arrow-right/light/disabled.svg"); +} + +.documentableElement .ar.icon-button:focus::after { + content: url("../../../../images/icon-buttons/arrow-right/light/focus.svg"); +} + +.documentableElement .ar.icon-button.selected::after { + content: url("../../../../images/icon-buttons/arrow-right/light/selected.svg"); +} + +.theme-dark .documentableElement .ar.icon-button::after { + content: 
url("../../../../images/icon-buttons/arrow-right/dark/default.svg"); +} + +.theme-dark .documentableElement .ar.icon-button:hover::after { + content: url("../../../../images/icon-buttons/arrow-right/dark/hover.svg"); +} + +.theme-dark .documentableElement .ar.icon-button:active::after { + content: url("../../../../images/icon-buttons/arrow-right/dark/active.svg"); +} + +.theme-dark .documentableElement .ar.icon-button:disabled::after { + content: url("../../../../images/icon-buttons/arrow-right/dark/disabled.svg"); +} + +.theme-dark .documentableElement .ar.icon-button:focus::after { + content: url("../../../../images/icon-buttons/arrow-right/dark/focus.svg"); +} + +.theme-dark .documentableElement .ar.icon-button.selected::after { + content: url("../../../../images/icon-buttons/arrow-right/dark/selected.svg"); +} + +.documentableElement .ar.icon-button.expanded::after { + content: url("../../../../images/icon-buttons/arrow-down/light/default.svg"); +} + +.documentableElement .ar.icon-button.expanded:hover::after { + content: url("../../../../images/icon-buttons/arrow-down/light/hover.svg"); +} + +.documentableElement .ar.icon-button.expanded:active::after { + content: url("../../../../images/icon-buttons/arrow-down/light/active.svg"); +} + +.documentableElement .ar.icon-button.expanded:disabled::after { + content: url("../../../../images/icon-buttons/arrow-down/light/disabled.svg"); +} + +.documentableElement .ar.icon-button.expanded:focus::after { + content: url("../../../../images/icon-buttons/arrow-down/light/focus.svg"); +} + +.documentableElement .ar.icon-button.expanded.selected::after { + content: url("../../../../images/icon-buttons/arrow-down/light/selected.svg"); +} + +.theme-dark .documentableElement .ar.icon-button.expanded::after { + content: url("../../../../images/icon-buttons/arrow-down/dark/default.svg"); +} + +.theme-dark .documentableElement .ar.icon-button.expanded:hover::after { + content: 
url("../../../../images/icon-buttons/arrow-down/dark/hover.svg"); +} + +.theme-dark .documentableElement .ar.icon-button.expanded:active::after { + content: url("../../../../images/icon-buttons/arrow-down/dark/active.svg"); +} + +.theme-dark .documentableElement .ar.icon-button.expanded:disabled::after { + content: url("../../../../images/icon-buttons/arrow-down/dark/disabled.svg"); +} + +.theme-dark .documentableElement .ar.icon-button.expanded:focus::after { + content: url("../../../../images/icon-buttons/arrow-down/dark/focus.svg"); +} + +.theme-dark .documentableElement .ar.icon-button.expanded.selected::after { + content: url("../../../../images/icon-buttons/arrow-down/dark/selected.svg"); +} \ No newline at end of file diff --git a/scaladoc/resources/dotty_res/styles/theme/components/button/label-only-button.css b/scaladoc/resources/dotty_res/styles/theme/components/button/label-only-button.css index bd44478f900d..aa312cf94a51 100644 --- a/scaladoc/resources/dotty_res/styles/theme/components/button/label-only-button.css +++ b/scaladoc/resources/dotty_res/styles/theme/components/button/label-only-button.css @@ -9,6 +9,7 @@ .label-only-button:hover { background-color: var(--action-primary-background-hover); color: var(--action-primary-content-hover); + cursor: pointer; } .label-only-button:active { diff --git a/scaladoc/resources/dotty_res/styles/theme/components/button/text-button.css b/scaladoc/resources/dotty_res/styles/theme/components/button/text-button.css index 97c52807259b..1e62efa6df41 100644 --- a/scaladoc/resources/dotty_res/styles/theme/components/button/text-button.css +++ b/scaladoc/resources/dotty_res/styles/theme/components/button/text-button.css @@ -23,6 +23,7 @@ a:active { .text-button:hover { color: var(--action-primary-content-hover) !important; + cursor: pointer; } .text-button:active { @@ -43,7 +44,8 @@ a:active { } .text-button::after { - margin-left: calc(1 * var(--base-spacing)); + margin-left: calc(.5 * var(--base-spacing)); + 
vertical-align: bottom; } /* button with arrow */ diff --git a/scaladoc/resources/dotty_res/styles/theme/components/code-snippet.css b/scaladoc/resources/dotty_res/styles/theme/components/code-snippet.css index b2e009b813ac..99010c5db5d1 100644 --- a/scaladoc/resources/dotty_res/styles/theme/components/code-snippet.css +++ b/scaladoc/resources/dotty_res/styles/theme/components/code-snippet.css @@ -10,15 +10,24 @@ overflow-x: scroll; margin-bottom: calc(3 * var(--base-spacing)); margin-top: calc(3 * var(--base-spacing)); + border-radius: 4px; } .snippet pre { margin: 0; + line-height: 20px; } -.snippet:hover { - background-color: var(--action-primary-background-hover); - border: 1px solid var(--border-default); +.snippet pre code { + font-family: "FiraCode-Regular"; +} + +.snippet pre code > span { + padding-right: calc(2* var(--base-spacing)); +} + +dd .snippet { + margin: 0; } .snippet .hidden { @@ -52,10 +61,13 @@ } .snippet > pre > code > span::before { + margin-left: 0; content: attr(line-number); color: var(--code-syntax-highlighting-line-number); margin-right: calc(2 * var(--base-spacing)); - margin-left: calc(2 * var(--base-spacing)); + display: inline-block; + text-align: right; + min-width: calc(3 * var(--base-spacing)); } .snippet-showhide-container { @@ -93,10 +105,13 @@ } .snippet:hover { - background-color: var(--action-primary-background-hover); border: 1px solid var(--border-default); } +.snippet:hover::-webkit-scrollbar-thumb { + border: 3px solid var(--action-primary-background-default-solid); +} + .snippet .copy-button { position: absolute; top: calc(3 * var(--base-spacing)); @@ -256,14 +271,17 @@ .snippet::-webkit-scrollbar-thumb { background-color: var(--code-syntax-highlighting-scrollbar); - border-radius: 3px; + opacity: 0.75; + border-radius: 100px; + border: 3px solid var(--action-primary-background-default-solid); } .snippet::-webkit-scrollbar-thumb:hover { background-color: var(--code-syntax-highlighting-scrollbar-hover); + border: 3px 
solid var(--action-primary-background-default-solid);; } .snippet::-webkit-scrollbar { - width: 8px; - height: 8px; + width: 12px; + height: 12px; } diff --git a/scaladoc/resources/dotty_res/styles/theme/components/dropdown-menu.css b/scaladoc/resources/dotty_res/styles/theme/components/dropdown-menu.css index 17a3d9c4d9c1..40abc4f938c6 100644 --- a/scaladoc/resources/dotty_res/styles/theme/components/dropdown-menu.css +++ b/scaladoc/resources/dotty_res/styles/theme/components/dropdown-menu.css @@ -23,5 +23,5 @@ /* version dropdown */ #version-dropdown { top: calc(6 * var(--base-spacing)); - left: calc(34.25 * var(--base-spacing)); + left: calc(19 * var(--base-spacing)); } diff --git a/scaladoc/resources/dotty_res/styles/theme/components/navigation-item.css b/scaladoc/resources/dotty_res/styles/theme/components/navigation-item.css index cc066a859a13..66549f9ddc90 100644 --- a/scaladoc/resources/dotty_res/styles/theme/components/navigation-item.css +++ b/scaladoc/resources/dotty_res/styles/theme/components/navigation-item.css @@ -33,48 +33,57 @@ } .n0 > .nh > .ar { - left: calc(1.5 * var(--base-spacing)); + left: calc(1 * var(--base-spacing)); } .n0 > .nh > a { - left: calc(4.5 * var(--base-spacing)); + left: calc(3.5 * var(--base-spacing)); max-width: calc(31 * var(--base-spacing)); } .n1 > .nh > .ar { - left: calc(4.5 * var(--base-spacing)); + left: calc(3.5 * var(--base-spacing)); } .n1 > .nh > a { - left: calc(7.5 * var(--base-spacing)); - max-width: calc(28 * var(--base-spacing)); + left: calc(6 * var(--base-spacing)); + max-width: calc(28.5 * var(--base-spacing)); } .n2 > .nh > .ar { - left: calc(7.5 * var(--base-spacing)); + left: calc(6 * var(--base-spacing)); } .n2 > .nh > a { - left: calc(10.5 * var(--base-spacing)); - max-width: calc(25 * var(--base-spacing)); + left: calc(8.5 * var(--base-spacing)); + max-width: calc(26 * var(--base-spacing)); } .n3 > .nh > .ar { - left: calc(10.5 * var(--base-spacing)); + left: calc(7.5 * var(--base-spacing)); } .n3 > 
.nh > a { - left: calc(13.5 * var(--base-spacing)); - max-width: calc(22 * var(--base-spacing)); + left: calc(11 * var(--base-spacing)); + max-width: calc(23.5 * var(--base-spacing)); } .n4 > .nh > .ar { - left: calc(13.5 * var(--base-spacing)); + left: calc(11 * var(--base-spacing)); } .n4 > .nh > a { - left: calc(16.5 * var(--base-spacing)); - max-width: calc(19 * var(--base-spacing)); + left: calc(13.5 * var(--base-spacing)); + max-width: calc(21 * var(--base-spacing)); +} + +.n5 > .nh > .ar { + left: calc(15.5 * var(--base-spacing)); +} + +.n5 > .nh > a { + left: calc(16 * var(--base-spacing)); + max-width: calc(18.5 * var(--base-spacing)); } .nh:hover { diff --git a/scaladoc/resources/dotty_res/styles/theme/components/pill.css b/scaladoc/resources/dotty_res/styles/theme/components/pill.css index a6166d04755b..fa613a0bb2a0 100644 --- a/scaladoc/resources/dotty_res/styles/theme/components/pill.css +++ b/scaladoc/resources/dotty_res/styles/theme/components/pill.css @@ -12,6 +12,12 @@ cursor: pointer; } +@media (max-width: 768px) { + .pill { + display: none; + } +} + .pill::after { content: url(../../../images/icon-buttons/arrow-down/light/default.svg); margin-left: calc(1.5 * var(--base-spacing)); @@ -37,6 +43,7 @@ .pill .filter-name { color: var(--text-secondary); margin-right: calc(0.5 * var(--base-spacing)); + padding-bottom: 2px; } .pill > .close { @@ -48,6 +55,11 @@ display: block; } +.pill-container { + display: flex; + align-items: center; +} + .pill-container ul { display: none; background-color: var(--action-primary-background-default-solid); @@ -58,6 +70,7 @@ border-radius: 4px; padding-top: calc(1 * var(--base-spacing)); padding-bottom: calc(1 * var(--base-spacing)); + top: 54px; } .pill-container ul li { diff --git a/scaladoc/resources/dotty_res/styles/theme/components/subtypes.css b/scaladoc/resources/dotty_res/styles/theme/components/subtypes.css index 4cbf85ba6a89..5adf7960a2ce 100644 --- 
a/scaladoc/resources/dotty_res/styles/theme/components/subtypes.css +++ b/scaladoc/resources/dotty_res/styles/theme/components/subtypes.css @@ -4,24 +4,23 @@ padding-bottom: calc(1.5 * var(--base-spacing)); overflow-x: scroll; position: relative; + border-radius: 4px; } -.subtypes > span { - position: absolute; - top: calc(1 * var(--base-spacing)); - right: calc(1 * var(--base-spacing)); +.subtypes > div { + display: block; } .subtypes > div { display: none; - margin-bottom: 6px; } + +.subtypes > div:nth-child(1), .subtypes > div:nth-child(2), .subtypes > div:nth-child(3), .subtypes > div:nth-child(4), -.subtypes > div:nth-child(5), -.subtypes > div:nth-child(6) { +.subtypes > div:nth-child(5) { display: block; } diff --git a/scaladoc/resources/dotty_res/styles/theme/components/supertypes.css b/scaladoc/resources/dotty_res/styles/theme/components/supertypes.css index b960d9d78b7e..67bc012ea75c 100644 --- a/scaladoc/resources/dotty_res/styles/theme/components/supertypes.css +++ b/scaladoc/resources/dotty_res/styles/theme/components/supertypes.css @@ -4,24 +4,22 @@ padding-bottom: calc(1.5 * var(--base-spacing)); overflow-x: scroll; position: relative; + border-radius: 4px; } .supertypes > span { - position: absolute; - top: calc(1 * var(--base-spacing)); - right: calc(1 * var(--base-spacing)); + display: block; } .supertypes > div { display: none; - margin-bottom: 6px; } +.supertypes > div:nth-child(1), .supertypes > div:nth-child(2), .supertypes > div:nth-child(3), .supertypes > div:nth-child(4), -.supertypes > div:nth-child(5), -.supertypes > div:nth-child(6) { +.supertypes > div:nth-child(5) { display: block; } diff --git a/scaladoc/resources/dotty_res/styles/theme/components/switcher.css b/scaladoc/resources/dotty_res/styles/theme/components/switcher.css index 095b631cf073..2bbc8180737c 100644 --- a/scaladoc/resources/dotty_res/styles/theme/components/switcher.css +++ b/scaladoc/resources/dotty_res/styles/theme/components/switcher.css @@ -2,11 +2,11 @@ 
.switcher-container { display: flex; - flex-flow: row; - width: 100%; - height: calc(4.5 * var(--base-spacing)); - gap: 1px; - margin-bottom: calc(3.75 * var(--base-spacing)); + flex-flow: row; + width: 100%; + height: calc(4.5 * var(--base-spacing)); + gap: 1px; + padding: calc(3 * var(--base-spacing)) 0; } .switcher { diff --git a/scaladoc/resources/dotty_res/styles/theme/components/table-of-content.css b/scaladoc/resources/dotty_res/styles/theme/components/table-of-content.css index 0fec18c2cc26..a872a726e28f 100644 --- a/scaladoc/resources/dotty_res/styles/theme/components/table-of-content.css +++ b/scaladoc/resources/dotty_res/styles/theme/components/table-of-content.css @@ -1,12 +1,26 @@ #toc { display: flex; flex-direction: column; - padding: 0 calc(1 * var(--base-spacing)); - width: calc(29 * var(--base-spacing)); + width: 232px; } #toc-container { - position: absolute; + width: 232px; + position: sticky; + top: calc(18 * var(--base-spacing)); + padding: var(--base-spacing); +} + +@media (max-height: 600px) and (orientation: landscape){ + #toc-container { + position: fixed; + top: 90px; + } +} + +.toc-list { + margin-block-start: calc(2 * var(--base-spacing)); + margin-block-end: 0; } #toc ul { @@ -18,53 +32,20 @@ margin-bottom: calc(2.5 * var(--base-spacing)); } -#toc li > ul { +#toc .toc-list li > ul { margin-top: calc(2.5 * var(--base-spacing)); - margin-left: calc(1.5 * var(--base-spacing)); -} - -@media (min-width: 1920px) { - #toc { - margin-left: calc(8.5 * var(--base-spacing)); - } -} - -@media (max-width: 1920px) { - #toc { - margin-left: calc(8.5 * var(--base-spacing)); - } + padding-left: calc(3 * var(--base-spacing)); } -@media (max-width: 1436px) { - #toc { - margin-left: calc(8.5 * var(--base-spacing)); - margin-right: calc(8.5 * var(--base-spacing)); - } - #toc-container { - margin-right: calc(8.5 * var(--base-spacing)); - } -} - -@media (max-width: 1376px) { - #toc { - margin-left: calc(6 * var(--base-spacing)); - } -} - -@media (max-width: 
1366px) { +/*@media (max-width: 1366px) { #toc { left: calc(102 * var(--base-spacing)); } -} - -@media (max-width: 1024px) { - #toc { - margin-left: calc(6 * var(--base-spacing)); - margin-right: 0; - } - - #toc.sidebar-shown { - display: none; +}*/ +@media (max-height: 820px) and (orientation: landscape){ + .toc-nav { + max-height: calc(100vh - 200px); + overflow: auto; } } @@ -74,6 +55,7 @@ } } + .toc-title { color: var(--text-primary); margin-bottom: calc(2 * var(--base-spacing)); @@ -85,7 +67,7 @@ border-bottom: none; } -#toc li:hover > a { +#toc li a:hover { color: var(--action-primary-content-hover); } diff --git a/scaladoc/resources/dotty_res/styles/theme/layout/arrowNavigation.css b/scaladoc/resources/dotty_res/styles/theme/layout/arrowNavigation.css index 2aa16f2829f6..8ce304bfee6b 100644 --- a/scaladoc/resources/dotty_res/styles/theme/layout/arrowNavigation.css +++ b/scaladoc/resources/dotty_res/styles/theme/layout/arrowNavigation.css @@ -2,14 +2,25 @@ display: flex; margin-bottom: calc(6 * var(--base-spacing)); margin-top: calc(6 * var(--base-spacing)); + gap: 24px; } -.arrow-navigation > div:last-child { - margin-left: auto; +.arrow-navigation > div { + width: 100%; +} + +.arrow-navigation > div .arrow-navigation--next { + text-align: right; +} + +.arrow-navigation > div .arrow-navigation--next ~ a { + display: inline-block; + width: 100%; + text-align: right; } .arrow-navigation > div > span { display: block; margin-bottom: calc(1 * var(--base-spacing)); color: var(--text-secondary); -} +} \ No newline at end of file diff --git a/scaladoc/resources/dotty_res/styles/theme/layout/banners.css b/scaladoc/resources/dotty_res/styles/theme/layout/banners.css new file mode 100644 index 000000000000..578ca0f2d3ab --- /dev/null +++ b/scaladoc/resources/dotty_res/styles/theme/layout/banners.css @@ -0,0 +1,57 @@ +aside { + padding: calc(2* var(--base-spacing)); + font-weight: 400; + font-size: 13px; + line-height: 16px; + margin-bottom: calc(6* 
var(--base-spacing)); + border-radius: 4px; + display: flex; + align-items: center; +} + +aside > .icon { + width: 16px; + height: 16px; + content: url("../../../images/banner-icons/warning.svg"); + padding-right: var(--base-spacing); +} + +.warning { + background-color: var(--semantic-background-yellow); +} + +.warning > .icon { + content: url("../../../images/banner-icons/warning.svg"); +} + +.success { + background-color: var(--semantic-background-grass); +} + +.success > .icon { + content: url("../../../images/banner-icons/success.svg"); +} + +.neutral { + background-color: var(--semantic-background-grey); +} + +.neutral > .icon { + content: url("../../../images/banner-icons/neutral.svg"); +} + +.info { + background-color: var(--semantic-background-sky); +} + +.info > .icon { + content: url("../../../images/banner-icons/info.svg"); +} + +.error { + background-color: var(--semantic-background-red); +} + +.error > .icon { + content: url("../../../images/banner-icons/error.svg"); +} diff --git a/scaladoc/resources/dotty_res/styles/theme/layout/container.css b/scaladoc/resources/dotty_res/styles/theme/layout/container.css index 7335b781c8c3..d71c75e8bda0 100644 --- a/scaladoc/resources/dotty_res/styles/theme/layout/container.css +++ b/scaladoc/resources/dotty_res/styles/theme/layout/container.css @@ -1,13 +1,18 @@ body { margin: 0; padding: 0; - background-color: var(--background-default); + background-color: var(--background-main); height: 100%; overflow: hidden; + scroll-behavior: smooth;; +} + +p { + margin-block: calc(3 * var(--base-spacing)); } [id] { - scroll-margin-top: calc(10 * var(--base-spacing)); + scroll-margin-top: calc(18 * var(--base-spacing)); } #container { diff --git a/scaladoc/resources/dotty_res/styles/theme/layout/content.css b/scaladoc/resources/dotty_res/styles/theme/layout/content.css index 7f899b98e4e7..e09aa2697762 100644 --- a/scaladoc/resources/dotty_res/styles/theme/layout/content.css +++ 
b/scaladoc/resources/dotty_res/styles/theme/layout/content.css @@ -1,142 +1,128 @@ #main { - overflow: scroll; + width: calc(100vw - 313px); height: 100%; + overflow-y: scroll; + overflow-x: hidden; + position: absolute; + right: 0; + scroll-behavior: smooth; } #content { - margin-left: calc(39 * var(--base-spacing)); display: flex; flex-flow: row; color: var(--text-primary); + + padding-top: calc(18 * var(--base-spacing)); + padding-bottom: calc(10 * var(--base-spacing)); + width: calc(100% - 68px * 2); + padding-inline: 68px; + gap: 68px;; + + -webkit-transition: margin-left 0.2s ease-in-out; + -moz-transition: margin-left 0.2s ease-in-out; + -o-transition: margin-left 0.2s ease-in-out; + transition: margin-left 0.2s ease-in-out; } #content > div:first-child { - overflow: hidden; + width: calc(100vw - 232px - 313px - 68px * 3); + max-width: 720px; } + #content.sidebar-shown { - -webkit-transition: width 0.3s ease-in-out; - -moz-transition: width 0.3s ease-in-out; - -o-transition: width 0.3s ease-in-out; - transition: width 0.3s ease-in-out; + -webkit-transition: margin-left 0.2s ease-in-out; + -moz-transition: margin-left 0.2s ease-in-out; + -o-transition: margin-left 0.2s ease-in-out; + transition: margin-left 0.2s ease-in-out; } -@media (min-width: 1920px) { +@media (min-width: 1921px) { + #content { - width: calc(90 * var(--base-spacing)); - padding-left: calc(36.75 * var(--base-spacing)); - padding-top: calc(18 * var(--base-spacing)); + justify-content: left; + padding-left: 294px; } } @media (max-width: 1920px) { + #content { - margin-left: 0; - width: calc(100% - calc(39 * var(--base-spacing))); - padding-top: calc(18 * var(--base-spacing)); - padding-bottom: calc(10 * var(--base-spacing)); justify-content: center; - float: right; } - #content > div:first-child { - width: calc(90 * var(--base-spacing)); - } -} -@media (max-width: 1436px) { - #content { - margin-left: 0; - width: calc(100% - calc(56 * var(--base-spacing))); - padding-top: calc(18 * 
var(--base-spacing)); - padding-bottom: calc(10 * var(--base-spacing)); - margin-right: calc(8.5 * var(--base-spacing)); - margin-left: calc(8.5 * var(--base-spacing)); - float: right; - } #content > div:first-child { - width: auto; + padding: 0; } } -@media (max-width: 1376px) { +@media (max-width: 1467px) { + #content { - margin-left: 0; - width: calc(100% - calc(51 * var(--base-spacing))); - padding-top: calc(18 * var(--base-spacing)); - padding-bottom: calc(10 * var(--base-spacing)); - margin-right: calc(6 * var(--base-spacing)); - margin-left: calc(6 * var(--base-spacing)); - float: right; + width: calc(100% - 48px * 2); + padding-inline: 48px; + gap: 48px; } + #content > div:first-child { - width: auto; + width: calc(100vw - 232px - 313px - 48px * 3); } } @media (max-width: 1024px) { - #content { - width: calc(100% - calc(12 * var(--base-spacing))); - margin-right: calc(6 * var(--base-spacing)); - margin-left: calc(6 * var(--base-spacing)); - padding-right: 0; + + #main { + width: 100vw; + position: absolute; + left: 0; } #content > div:first-child { - width: calc(100% - calc(29 * var(--base-spacing))); + width: calc(100vw - 232px - 48px * 3); } #content.sidebar-shown { - width: calc(100% - calc(51 * var(--base-spacing))); - padding-right: 0; + margin-left: 313px; } - #content.sidebar-shown > div:first-child { - width: auto; - } } @media (max-width: 768px) { + #content { - margin-right: calc(6 * var(--base-spacing)); - margin-left: calc(6 * var(--base-spacing)); - padding-right: 0; - width: calc(100% - calc(12 * var(--base-spacing))); + padding-bottom: calc(6 * var(--base-spacing)); } #content > div:first-child { - width: auto; + width: calc(100vw - 48px * 2); } - #content.sidebar-shown { - width: calc(100% - calc(51 * var(--base-spacing))); - padding-right: 0; - } +} + +@media (max-width: 428px) { - #content.sidebar-shown > div:first-child { - width: auto; + #content { + padding-bottom: 0; } + } @media (max-width: 428px) { + #content { - margin-right: calc(4 
* var(--base-spacing)); - margin-left: calc(4 * var(--base-spacing)); - padding-right: 0; - width: calc(100% - calc(8 * var(--base-spacing))); + width: calc(100% - 64px); + padding-inline: 32px; } #content > div:first-child { - width: auto; + width: calc(100vw - 32px * 2); } #content.sidebar-shown { - width: calc(100% - calc(51 * var(--base-spacing))); - padding-right: 0; + margin-left: 100%; } - #content.sidebar-shown > div:first-child { - width: auto; - } } #content::after { @@ -147,16 +133,42 @@ #content h1:first-of-type { line-height: normal; + word-break: break-word; } -#content h1, -#content h2 { +#content h1 { color: var(--text-primary); margin-block-end: 0; margin-block-start: 0; } +#content h2 { + color: var(--text-primary); + margin-block-start: calc(6* var(--base-spacing)); + margin-block-end: calc(3* var(--base-spacing)); +} + +#content .cover > h2 { + color: var(--text-secondary); + margin: 0; + padding: 12px var(--base-spacing); +} + +#content .cover > h2:first-of-type { + margin: 8px 0 0; +} + +@media (max-width: 1376px) { + #content .cover > h2 { + padding-left: 0; + } +} + /* content first paragraph */ +.first-p { + margin-block-start: calc(2* var(--base-spacing)); +} + #content .first-p { color: var(--text-secondary); } @@ -166,14 +178,59 @@ color: var(--text-primary); } +#content li > ul { + padding-left: calc(4 * var(--base-spacing)); +} + +/* content table */ +#content table { + color: var(--text-primary); + border-collapse: collapse; +} + +#content table { + text-align: left; +} + +#content table * { + padding: calc(2 * var(--base-spacing)) var(--base-spacing) 17px; +} + +#content table tr { + border-bottom: 1px solid var(--border-default); +} + +#content table tr:last-child { + border-bottom: none; +} + + +/* contributors table */ +#contributors table tr td:not(:last-child), +#contributors table th:not(:last-child) { + text-align: right; +} + +#contributors table td { + vertical-align: top; +} + /* content link */ #content a { - color: 
var(--text-primary); - border-bottom: 1px solid var(--grey8); + color: var(--text-primary); + text-decoration: underline solid 1px; + -webkit-text-decoration-line: underline; /* Safari */ + text-decoration-line: underline; + text-underline-offset: 2px; + transition: text-decoration-color .2s ease-in-out; } #content a:hover { - border-bottom: 1px solid var(--text-secondary); + text-decoration-color: transparent; +} + +#content .cover-header { + margin-block-end: calc(2 * var(--base-spacing)); } #content .cover-header a, @@ -184,8 +241,72 @@ #content :not(pre) > code { color: var(--code-props-content); font-family: "FiraCode-Regular"; + border: 1px solid var(--code-props-border); + padding: 3px 5px 1px 5px; + border-radius: 4px; + background-color: var(--code-props-background); + display: inline-block; + line-height: 16px; + font-size: 13px; + word-break: break-word; +} + +#content .body-large code { + font-size: 16px; + line-height: 24px; +} + +#content a code, +#content .body-large a code, +#content :is(h1, h2, h3, h4, h5, h6) code { + font-family: inherit; + font-size: inherit; + background: none; + display: inline; + border: none; + padding: 0; +} + +#content a code:before, +#content .body-large a code:before, +#content :is(h1, h2, h3, h4, h5, h6) code:before { + content: '"'; +} + +#content a code:after, +#content .body-large a code:after, +#content :is(h1, h2, h3, h4, h5, h6) code:after { + content: '"'; +} + +#content main :is(ul, ol) { + margin-block-start: calc(2 * var(--base-spacing)); + margin-block-end: calc(3 * var(--base-spacing)); + padding: 0 0 0 24px; } +#content main :is(ul, ol) li { + margin-block-start: calc(2 * var(--base-spacing)); +} + +#content main :is(ul, ol) li p { + margin: 0; +} + +main ul li::marker { + content: '• '; +} + +main ol li::marker { + content: counters(list-item,'.') ') '; +} + +main :is(ul, ol) li .snippet { + margin-block-start: calc(2 * var(--base-spacing)); + margin-block-end: calc(2 * var(--base-spacing)); +} + + 
.breadcrumbs { display: none; } @@ -331,7 +452,7 @@ } .cover-header .icon { - width: 72px; + max-width: 72px; } .fqname { @@ -362,12 +483,23 @@ background-color: var(--action-primary-background-default-solid); padding: calc(3 * var(--base-spacing)); border-radius: 4px; + padding-left: 40px; + text-indent: -16px; } #content > div > * { margin-bottom: calc(3 * var(--base-spacing)); } +#content > div > .main-signature { + margin-bottom: 0; +} + +#attributes > h2 { + margin-bottom: calc(3*var(--base-spacing)); +} + + .tab { position: relative; } @@ -415,16 +547,29 @@ display: block; } -#content section { - margin-top: calc(6 * var(--base-spacing)); +.member-group-header{ + height: 80px; + display: flex; + align-items: center; +} + + +.documentableList-expander { + display: flex; + cursor: pointer; + flex-direction: row-reverse; + justify-content: space-between; + align-items: center; } + #content section:last-child { margin-bottom: calc(6 * var(--base-spacing)); } .membersList { position: relative; + min-height: 80vh; } #concise-view-switch { @@ -453,3 +598,7 @@ .membersList.concise .documentableElement.expand .modifiers { display: unset; } + +.show-all-code { + margin-top: calc(2* var(--base-spacing)); +} \ No newline at end of file diff --git a/scaladoc/resources/dotty_res/styles/theme/layout/footer.css b/scaladoc/resources/dotty_res/styles/theme/layout/footer.css index 2a5d2f914796..7c169af00591 100644 --- a/scaladoc/resources/dotty_res/styles/theme/layout/footer.css +++ b/scaladoc/resources/dotty_res/styles/theme/layout/footer.css @@ -10,7 +10,7 @@ color: var(--text-primary); box-sizing: border-box; flex-wrap: wrap; - z-index: 3; + z-index: 100; } #footer .left-container { @@ -43,29 +43,13 @@ display: none; } -@media (max-width: 480px) { - #footer { - height: calc(9 * var(--base-spacing)); - } - - #footer .right-container .text { - display: none; - } - - #footer .text-mobile { - display: flex; - width: 100%; - justify-content: center; - margin-top: calc(1 * 
var(--base-spacing)); - } -} - #footer.mobile-footer { display: none; justify-content: center; } -@media (max-width: 390px) { +@media (max-width: 480px) { + #footer { display: none; } @@ -73,13 +57,26 @@ #footer.mobile-footer { display: flex; position: unset; + height: calc(9 * var(--base-spacing)); } #footer .text-mobile { display: none; } + #footer.mobile-footer .text-mobile { + display: flex; + width: 100%; + justify-content: center; + margin-top: calc(1 * var(--base-spacing)); + } + + #footer .right-container .text { + display: none; + } + #footer.mobile-footer > .text-mobile { display: flex; } -} + +} \ No newline at end of file diff --git a/scaladoc/resources/dotty_res/styles/theme/layout/header.css b/scaladoc/resources/dotty_res/styles/theme/layout/header.css index 977d11a2cb17..6447f111bc8f 100644 --- a/scaladoc/resources/dotty_res/styles/theme/layout/header.css +++ b/scaladoc/resources/dotty_res/styles/theme/layout/header.css @@ -11,9 +11,9 @@ left: 0px; right: 0px; top: 0px; - background: var(--background-default); + background: var(--background-header); border-bottom: 1px solid var(--border-default); - z-index: 2; + z-index: 101; } #header .project-name { @@ -68,13 +68,14 @@ overflow: hidden; white-space: nowrap; text-overflow: ellipsis; + width: calc(9 * var(--base-spacing)); } -@media (max-width: 950px) { - .projectVersion { - width: calc(6 * var(--base-spacing)); - } +.single { + padding-left: 16px; +} +@media (max-width: 950px) { .header-container-right .text-button { margin-left: calc(2 * var(--base-spacing)); } @@ -86,10 +87,6 @@ display: block; } - .projectVersion { - width: auto; - } - .header-container-right .text-button { display: none; } @@ -103,8 +100,4 @@ } } -@media (max-width: 500px) { - .projectVersion { - width: calc(6 * var(--base-spacing)); - } -} + diff --git a/scaladoc/resources/dotty_res/styles/theme/layout/leftMenu.css b/scaladoc/resources/dotty_res/styles/theme/layout/leftMenu.css index 9786694119fe..d62a05dc47c9 100644 --- 
a/scaladoc/resources/dotty_res/styles/theme/layout/leftMenu.css +++ b/scaladoc/resources/dotty_res/styles/theme/layout/leftMenu.css @@ -2,7 +2,7 @@ display: flex; flex-direction: column; align-items: center; - padding: calc(3 * var(--base-spacing)) 0px; + /*padding: calc(3 * var(--base-spacing)) 0px;*/ position: absolute; width: calc(39 * var(--base-spacing)); height: calc(100% - (8 * var(--base-spacing)) - (6 * var(--base-spacing))); @@ -13,17 +13,17 @@ border-top: none; border-left: none; transition: left 0.2s linear; -} - -@media (max-width: 480px) { - #leftColumn { - height: calc(100% - (8 * var(--base-spacing)) - (9 * var(--base-spacing))); - } + z-index: 100; } @media (max-width: 1024px) { + #leftColumn { left: calc(-39 * var(--base-spacing)); + transition-property: left; + transition-duration: 0.2s; + transition-timing-function: ease-in-out; + } #leftColumn.show { @@ -31,15 +31,32 @@ } } -@media (max-width: 428px) { +@media (max-width: 768px) { + +} + + +@media (max-width: 480px) { + #leftColumn { height: calc(100% - (8 * var(--base-spacing))); - width: 100%; + } + +} + +@media (max-width: 428px) { + + #leftColumn { + width: 0; left: -100%; z-index: 1; + opacity: 0; + transition-property: left, opacity, width; } #leftColumn.show { + width: 100%; left: 0; + opacity: 1; } -} +} \ No newline at end of file diff --git a/scaladoc/resources/dotty_res/styles/theme/layout/mobileMenu.css b/scaladoc/resources/dotty_res/styles/theme/layout/mobileMenu.css index eb6e9e242044..a7c08eedb4be 100644 --- a/scaladoc/resources/dotty_res/styles/theme/layout/mobileMenu.css +++ b/scaladoc/resources/dotty_res/styles/theme/layout/mobileMenu.css @@ -6,7 +6,7 @@ left: 0; height: 100%; width: 100%; - z-index: 3; + z-index: 103; } #mobile-menu.show { @@ -31,7 +31,7 @@ margin-left: calc(2 * var(--base-spacing)); } -@media (min-width: 768px) { +@media (min-width: 769px) { #mobile-menu { display: none; } diff --git a/scaladoc/resources/dotty_res/styles/theme/layout/noResult.css 
b/scaladoc/resources/dotty_res/styles/theme/layout/noResult.css new file mode 100644 index 000000000000..31fa9cd210b1 --- /dev/null +++ b/scaladoc/resources/dotty_res/styles/theme/layout/noResult.css @@ -0,0 +1,34 @@ +#no-results-container { + display: flex; + flex-direction: column; + justify-content: center; + align-items: center; + text-align: center; + margin: calc(10 * var(--base-spacing)); +} + +#no-results-container .no-result-icon { + height: 30px; + width: 30px; +} + +#no-results-container .no-result-icon::before { + content: url('../../../images/no-results-icon.svg'); +} + +#no-results-container .no-result-header { +margin-block-start: 27px; + margin-block-end: var(--base-spacing); + color: var(--text-primary); +} + +#no-results-container .no-result-content { + margin-top: 0; + margin-bottom: 34px; + color: var(--text-secondary); +} + +#no-results-container .no-result-content p{ + margin: 0; +} + diff --git a/scaladoc/resources/dotty_res/styles/theme/layout/searchBar.css b/scaladoc/resources/dotty_res/styles/theme/layout/searchBar.css index aea08d736d2e..400caaf70ce3 100644 --- a/scaladoc/resources/dotty_res/styles/theme/layout/searchBar.css +++ b/scaladoc/resources/dotty_res/styles/theme/layout/searchBar.css @@ -1,7 +1,7 @@ #searchbar-container { width: 100%; height: 100%; - z-index: 4; + z-index: 104; background-color: rgba(0, 0, 0, 0.569); display: flex; justify-content: center; @@ -10,7 +10,7 @@ } #scaladoc-searchbar { - z-index: 5; + z-index: 104; width: calc(125 * var(--base-spacing)); position: relative; top: calc(3 * var(--base-spacing)); @@ -71,6 +71,7 @@ @media (max-width: 768px) { .scaladoc-searchbar-cancel-button { display: inline; + min-width: 44px; } } @@ -93,7 +94,7 @@ border-radius: 4px; margin-top: calc(1.5 * var(--base-spacing)); border: 1px solid var(--border-default); - max-height: calc(100% - calc(24 * var(--base-spacing))); + max-height: calc(100vh - calc(19.5 * var(--base-spacing))); overflow: scroll; } @@ -103,7 +104,8 @@ 
flex-flow: column; align-items: center; justify-content: center; - height: calc(108.5 * var(--base-spacing)); + height: 80%; + overflow: scroll; } .searchbar-hints h1 { @@ -183,6 +185,7 @@ .scaladoc-searchbar-row { display: flex; + flex-wrap: wrap; color: var(--text-secondary); padding: calc(1.5 * var(--base-spacing)) calc(2 * var(--base-spacing)); } @@ -215,10 +218,8 @@ } .scaladoc-searchbar-row .micon { - margin: calc(0.5 * var(--base-spacing)) calc(1 * var(--base-spacing)) 0px 0px; + margin-right: calc(1 * var(--base-spacing)); color: var(--text-secondary); - position: relative; - top: -2px; } .scaladoc-searchbar-location { @@ -229,6 +230,19 @@ display: block; } +.scaladoc-searchbar-extra-info { + display: none; + width: 100%; + padding-top: 1em; + white-space: nowrap; + overflow-x: hidden; + text-overflow: ellipsis; +} + +.scaladoc-searchbar-row:hover .scaladoc-searchbar-extra-info { + display: block; +} + /* searchbar footer */ #searchbar-footer { position: absolute; @@ -291,6 +305,10 @@ display: none; } + .scaladoc-searchbar-row:hover .scaladoc-searchbar-extra-info { + display: none; + } + #scaladoc-searchbar-results { border: none; margin-left: calc(3 * var(--base-spacing)); diff --git a/scaladoc/resources/dotty_res/styles/theme/layout/sideMenu.css b/scaladoc/resources/dotty_res/styles/theme/layout/sideMenu.css index f3571e76e1d0..364aa133b114 100644 --- a/scaladoc/resources/dotty_res/styles/theme/layout/sideMenu.css +++ b/scaladoc/resources/dotty_res/styles/theme/layout/sideMenu.css @@ -3,8 +3,7 @@ overflow-x: hidden; width: 100%; height: calc(100% - calc(11 * var(--base-spacing))); - padding-left: calc(1.5 * var(--base-spacing)); - padding-right: calc(1.5 * var(--base-spacing)); + padding: 0 calc(1.5 * var(--base-spacing)) calc(3 * var(--base-spacing)) calc(1.5 * var(--base-spacing)); box-sizing: border-box; } diff --git a/scaladoc/resources/dotty_res/styles/theme/spacing.css b/scaladoc/resources/dotty_res/styles/theme/spacing.css index 
bf7e6b96c491..64d7687d5a7a 100644 --- a/scaladoc/resources/dotty_res/styles/theme/spacing.css +++ b/scaladoc/resources/dotty_res/styles/theme/spacing.css @@ -1,3 +1,4 @@ :root { --base-spacing: 8px; + --header-height: 64px; } diff --git a/scaladoc/resources/dotty_res/styles/theme/typography.css b/scaladoc/resources/dotty_res/styles/theme/typography.css index 7d80ee32d037..cd8730f31dc2 100644 --- a/scaladoc/resources/dotty_res/styles/theme/typography.css +++ b/scaladoc/resources/dotty_res/styles/theme/typography.css @@ -1,3 +1,8 @@ +* { + /*text-rendering: geometricPrecision;*/ + font-weight: initial; +} + @font-face { font-family: "Inter-Bold"; src: url("../../fonts/Inter-Bold.ttf") format("truetype"); @@ -26,85 +31,89 @@ .h700 { font-size: 40px; line-height: 40px; - font-family: "Inter-Bold"; + font-family: "Inter-Bold", sans-serif; } .h600 { font-size: 32px; line-height: 40px; - font-family: "Inter-SemiBold"; + font-family: "Inter-SemiBold", sans-serif; +} + +.h600 .single { + padding-left: 16px; } .h500 { font-size: 28px; line-height: 32px; - font-family: "Inter-Medium"; + font-family: "Inter-Medium", sans-serif; } .h400 { font-size: 24px; line-height: 32px; - font-family: "Inter-Medium"; + font-family: "Inter-Medium", sans-serif; } .h300 { font-size: 20px; line-height: 24px; - font-family: "Inter-Bold"; + font-family: "Inter-Bold", sans-serif; } .h200 { font-size: 16px; line-height: 24px; - font-family: "Inter-SemiBold"; + font-family: "Inter-SemiBold", sans-serif; } .h100 { font-size: 13px; line-height: 16px; - font-family: "Inter-SemiBold"; + font-family: "Inter-SemiBold", sans-serif; } .h50 { font-size: 9px; line-height: 12px; - font-family: "Inter-SemiBold"; + font-family: "Inter-SemiBold", sans-serif; } .body-large { font-size: 20px; - line-height: 24px; - font-family: "Inter-Regular"; + line-height: 32px; + font-family: "Inter-Regular", sans-serif; } .body-medium { font-size: 16px; line-height: 24px; - font-family: "Inter-Regular"; + font-family: 
"Inter-Regular", sans-serif; } .body-small { font-size: 13px; line-height: 16px; - font-family: "Inter-Regular"; + font-family: "Inter-Regular", sans-serif; } .mono-medium { font-size: 16px; line-height: 24px; - font-family: "FiraCode-Regular"; + font-family: "FiraCode-Regular", monospace; } .mono-small-inline { font-size: 13px; line-height: 16px; - font-family: "FiraCode-Regular"; + font-family: "FiraCode-Regular", monospace; } .mono-small-block { - font-size: 15px; + font-size: 13px; line-height: 20px; - font-family: "FiraCode-Regular"; + font-family: "FiraCode-Regular", monospace; } :root { diff --git a/scaladoc/src/dotty/tools/scaladoc/ScalaModuleProvider.scala b/scaladoc/src/dotty/tools/scaladoc/ScalaModuleProvider.scala index 22a97633a7a9..c4776f2840c2 100644 --- a/scaladoc/src/dotty/tools/scaladoc/ScalaModuleProvider.scala +++ b/scaladoc/src/dotty/tools/scaladoc/ScalaModuleProvider.scala @@ -10,8 +10,31 @@ object ScalaModuleProvider: def mkModule()(using ctx: DocContext): Module = val (result, rootDoc) = ScaladocTastyInspector().result() val (rootPck, rest) = result.partition(_.name == "API") - val packageMembers = (rest ++ rootPck.flatMap(_.members)) + val (emptyPackages, nonemptyPackages) = (rest ++ rootPck.flatMap(_.members)) .filter(p => p.members.nonEmpty || p.docs.nonEmpty).sortBy(_.name) + .partition(_.name == "") + + val groupedMembers = + def groupMembers(ms: List[Member], n: Int = 0): List[Member] = + ms.groupBy(_.name.split('.')(n)).values.map { + case m :: ms if m.name.count(_ == '.') == n => + m.withMembers(groupMembers(ms, n + 1) ++ m.members) + case ms => + groupMembers(ms, n + 1) match + case m :: Nil => m + case ms => + val name = ms.head.name.split('.').take(n + 1).mkString(".") + Member( + name = name, + fullName = name, + dri = DRI(location = name), + kind = Kind.Package, + members = ms, + ) + }.toList.sortBy(_.name) + groupMembers(nonemptyPackages).reverse + + val packageMembers = groupedMembers ++ emptyPackages.flatMap(_.members) def 
flattenMember(m: Member): Seq[(DRI, Member)] = (m.dri -> m) +: m.members.flatMap(flattenMember) diff --git a/scaladoc/src/dotty/tools/scaladoc/api.scala b/scaladoc/src/dotty/tools/scaladoc/api.scala index 0beb48a8e8c4..90a03658c90e 100644 --- a/scaladoc/src/dotty/tools/scaladoc/api.scala +++ b/scaladoc/src/dotty/tools/scaladoc/api.scala @@ -172,6 +172,7 @@ case class Member( knownChildren: Seq[LinkToType] = Nil, companion: Option[(Kind, DRI)] = None, deprecated: Option[Annotation] = None, + experimental: Option[Annotation] = None ): def needsOwnPage: Boolean = def properKind(kind: Kind): Boolean = kind match diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/DocRenderer.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/DocRenderer.scala index 3afc3fd91e5d..58898339db5d 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/DocRenderer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/DocRenderer.scala @@ -57,12 +57,12 @@ class DocRender(signatureRenderer: SignatureRenderer)(using DocContext): case Title(text, level) => val content = renderElement(text) level match - case 1 => h1(cls := "h500")(content) - case 2 => h2(cls := "h300")(content) - case 3 => h3(cls := "h200")(content) - case 4 => h4(cls := "h100")(content) - case 5 => h5(cls := "h50")(content) - case 6 => h6(cls := "h50")(content) + case 1 => h1(cls := "h600")(content) + case 2 => h2(cls := "h500")(content) + case 3 => h3(cls := "h400")(content) + case 4 => h4(cls := "h300")(content) + case 5 => h5(cls := "h200")(content) + case 6 => h6(cls := "h100")(content) case Paragraph(text) => p(renderElement(text)) case Code(data: String) => raw(SnippetRenderer.renderSnippet(data)) case HorizontalRule => hr diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/HtmlRenderer.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/HtmlRenderer.scala index 8cd4a668f500..719033959b47 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/HtmlRenderer.scala +++ 
b/scaladoc/src/dotty/tools/scaladoc/renderers/HtmlRenderer.scala @@ -19,11 +19,33 @@ class HtmlRenderer(rootPackage: Member, members: Map[DRI, Member])(using ctx: Do extends Renderer(rootPackage, members, extension = "html"): override def pageContent(page: Page, parents: Vector[Link]): AppliedTag = - html( - mkHead(page), + val PageContent(content, toc) = renderContent(page) + val contentStr = + content.toString.stripPrefix("\n").stripPrefix("
").stripSuffix("\n").stripSuffix("
") + val document = Jsoup.parse(contentStr) + val docHead = raw(document.head().html()) + val docBody = raw(document.body().html()) + + val attrs: List[AppliedAttr] = (page.content match + case ResolvedTemplate(loadedTemplate, _) => + val path = loadedTemplate.templateFile.file.toPath + ctx.sourceLinks.repoSummary(path) match + case Some(DefinedRepoSummary("github", org, repo)) => + ctx.sourceLinks.fullPath(relativePath(path)).fold(Nil) { contributorsFilename => + List[AppliedAttr]( + Attr("data-githubContributorsUrl") := s"https://api.github.com/repos/$org/$repo", + Attr("data-githubContributorsFilename") := s"$contributorsFilename", + ) + } + case _ => Nil + case _ => Nil) + :+ (Attr("data-pathToRoot") := pathToRoot(page.link.dri)) + + html(attrs: _*)( + head((mkHead(page) :+ docHead):_*), body( - if !page.hasFrame then renderContent(page).content - else mkFrame(page.link, parents, renderContent(page)) + if !page.hasFrame then docBody + else mkFrame(page.link, parents, docBody, toc) ) ) @@ -56,7 +78,7 @@ class HtmlRenderer(rootPackage: Member, members: Map[DRI, Member])(using ctx: Do val resources = staticSiteResources ++ allResources(allPages) ++ onlyRenderedResources resources.flatMap(renderResource) - def mkHead(page: Page): AppliedTag = + def mkHead(page: Page): Seq[TagArg] = val resources = page.content match case t: ResolvedTemplate => t.resolved.resources ++ (if t.hasFrame then commonResourcesPaths ++ staticSiteOnlyResourcesPaths else Nil) @@ -67,7 +89,7 @@ class HtmlRenderer(rootPackage: Member, members: Map[DRI, Member])(using ctx: Do case t: ResolvedTemplate => if t.hasFrame then earlyCommonResourcePaths else Nil case _ => earlyCommonResourcePaths - head( + Seq( meta(charset := "utf-8"), meta(util.HTML.name := "viewport", content := "width=device-width, initial-scale=1, maximum-scale=1"), title(page.link.name), @@ -80,20 +102,6 @@ class HtmlRenderer(rootPackage: Member, members: Map[DRI, Member])(using ctx: Do linkResources(page.link.dri, 
earlyResources, deferJs = false).toList, linkResources(page.link.dri, resources, deferJs = true).toList, script(raw(s"""var pathToRoot = "${pathToRoot(page.link.dri)}";""")), - (page.content match - case ResolvedTemplate(loadedTemplate, _) => - val path = loadedTemplate.templateFile.file.toPath - ctx.sourceLinks.repoSummary(path) match - case Some(DefinedRepoSummary("github", org, repo)) => - val tag: TagArg = ctx.sourceLinks.fullPath(relativePath(path)).fold("") { githubContributors => - Seq( - script(raw(s"""var githubContributorsUrl = "https://api.github.com/repos/$org/$repo";""")), - script(raw(s"""var githubContributorsFilename = "$githubContributors";""")) - ) - } - tag // for some reason inference fails so had to state the type explicitly - case _ => "" - case _ => ""), ctx.args.versionsDictionaryUrl match case Some(url) => script(raw(s"""var versionsDictionaryUrl = "$url";""")) case None => "" @@ -105,10 +113,16 @@ class HtmlRenderer(rootPackage: Member, members: Map[DRI, Member])(using ctx: Do case _ => Nil } - def renderNested(nav: Page, nestLevel: Int): (Boolean, AppliedTag) = + def renderNested(nav: Page, nestLevel: Int, prefix: String = ""): (Boolean, AppliedTag) = val isApi = nav.content.isInstanceOf[Member] val isSelected = nav.link.dri == pageLink.dri val isTopElement = nestLevel == 0 + val name = nav.content match { + case m: Member if m.kind == Kind.Package => + m.name.stripPrefix(prefix).stripPrefix(".") + case _ => nav.link.name + } + val newPrefix = if prefix == "" then name else s"$prefix.$name" def linkHtml(expanded: Boolean = false, withArrow: Boolean = false) = val attrs: Seq[String] = Seq( @@ -124,14 +138,14 @@ class HtmlRenderer(rootPackage: Member, members: Map[DRI, Member])(using ctx: Do Seq( span(cls := s"nh " + attrs.mkString(" "))( if withArrow then Seq(button(cls := s"ar icon-button ${if isSelected || expanded then "expanded" else ""}")) else Nil, - a(href := pathToPage(pageLink.dri, nav.link.dri))(icon, span(nav.link.name)) + 
a(href := (if isSelected then "#" else pathToPage(pageLink.dri, nav.link.dri)))(icon, span(name)) ) ) nav.children.filterNot(_.hidden) match case Nil => isSelected -> div(cls := s"ni n$nestLevel ${if isSelected then "expanded" else ""}")(linkHtml()) case children => - val nested = children.map(renderNested(_, nestLevel + 1)) + val nested = children.map(renderNested(_, nestLevel + 1, newPrefix)) val expanded = nested.exists(_._1) val attr = if expanded || isSelected then Seq(cls := s"ni n$nestLevel expanded") else Seq(cls := s"ni n$nestLevel") @@ -174,10 +188,13 @@ class HtmlRenderer(rootPackage: Member, members: Map[DRI, Member])(using ctx: Do li(ul(renderTocRec(level + 1, prefix))) +: renderTocRec(level, suffix) } - renderTocRec(1, toc).headOption.map(toc => nav(cls := "toc-nav")(ul(cls := "toc-list")(toc))) + if toc.nonEmpty then + val minLevel = toc.minBy(_.level).level + Some(nav(cls := "toc-nav")(ul(cls := "toc-list")(renderTocRec(minLevel, toc)))) + else None - private def mkFrame(link: Link, parents: Vector[Link], content: => PageContent): AppliedTag = + private def mkFrame(link: Link, parents: Vector[Link], content: AppliedTag, toc: Seq[TocEntry]): AppliedTag = val projectLogoElem = projectLogo.flatMap { case Resource.File(path, _) => @@ -201,12 +218,8 @@ class HtmlRenderer(rootPackage: Member, members: Map[DRI, Member])(using ctx: Do val (apiNavOpt, docsNavOpt): (Option[(Boolean, Seq[AppliedTag])], Option[(Boolean, Seq[AppliedTag])]) = buildNavigation(link) - def textFooter: String | AppliedTag = - args.projectFooter.fold("") { f => - span(id := "footer-text")( - raw(f) - ) - } + def textFooter: String = + args.projectFooter.getOrElse("") def quickLinks(mobile: Boolean = false): TagArg = val className = if mobile then "mobile-menu-item" else "text-button" @@ -277,27 +290,23 @@ class HtmlRenderer(rootPackage: Member, members: Map[DRI, Member])(using ctx: Do ), div(cls := "right-container")( socialLinks, - div(cls := "text")( - "© 2002-2021 · LAMP/EPFL" - ) 
+ div(cls := "text")(textFooter) ), - div(cls := "text-mobile")( - "© 2002-2021 · LAMP/EPFL" - ) + div(cls := "text-mobile")(textFooter) ), div(id := "scaladoc-searchBar"), div(id := "main")( parentsHtml, div(id := "content", cls := "body-medium")( - content.content, - renderTableOfContents(content.toc).fold(Nil) { toc => - div(id := "toc", cls:="body-small")( - div(id := "toc-container") ( - span(cls := "toc-title h200")("In this article"), - toc - ), - ) - }, + div(content), + div(id := "toc", cls:="body-small")( + renderTableOfContents(toc).fold(Nil) { toc => + div(id := "toc-container")( + span(cls := "toc-title h200")("In this article"), + toc, + ) + }, + ), ), div(id := "footer", cls := "body-small mobile-footer")( div(cls := "left-container")( @@ -316,13 +325,9 @@ class HtmlRenderer(rootPackage: Member, members: Map[DRI, Member])(using ctx: Do a(href := "https://gitter.im/scala/scala") ( button(cls := "icon-button gitter"), ), - div(cls := "text")( - "© 2002-2021 · LAMP/EPFL" - ) + div(cls := "text")(textFooter) ), - div(cls := "text-mobile")( - "© 2002-2021 · LAMP/EPFL" - ) + div(cls := "text-mobile")(textFooter) ), ), ) diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/MemberRenderer.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/MemberRenderer.scala index 6316e14a399f..e50d87e99837 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/MemberRenderer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/MemberRenderer.scala @@ -2,14 +2,16 @@ package dotty.tools.scaladoc package renderers import scala.collection.immutable.SortedMap -import scala.util.chaining._ -import util.HTML._ -import scala.jdk.CollectionConverters._ +import scala.util.chaining.* +import util.HTML.{div, *} + +import scala.jdk.CollectionConverters.* import dotty.tools.scaladoc.translators.FilterAttributes import dotty.tools.scaladoc.tasty.comments.markdown.DocFlexmarkRenderer -import com.vladsch.flexmark.util.ast.{Node => MdNode} +import 
com.vladsch.flexmark.util.ast.Node as MdNode import dotty.tools.scaladoc.tasty.comments.wiki.WikiDocElement -import translators._ +import org.jsoup.Jsoup +import translators.* class MemberRenderer(signatureRenderer: SignatureRenderer)(using DocContext) extends DocRender(signatureRenderer): import signatureRenderer._ @@ -37,10 +39,10 @@ class MemberRenderer(signatureRenderer: SignatureRenderer)(using DocContext) ext tableRow("Inherited from:", signatureRenderer.renderLink(name + hiddenNameSuffix, dri)) case _ => Nil - def docAttributes(m: Member): Seq[AppliedTag] = + def flattenedDocPart(on: SortedMap[String, DocPart]): Seq[AppliedTag] = + on.flatMap { case (name, value) => tableRow(name, renderDocPart(value)) }.toSeq - def flattened(on: SortedMap[String, DocPart]): Seq[AppliedTag] = - on.flatMap { case (name, value) => tableRow(name, renderDocPart(value))}.toSeq + def docAttributes(m: Member): Seq[AppliedTag] = def list(name: String, on: List[DocPart]): Seq[AppliedTag] = if on.isEmpty then Nil else tableRow(name, div(on.map(e => div(renderDocPart(e))))) @@ -51,28 +53,26 @@ class MemberRenderer(signatureRenderer: SignatureRenderer)(using DocContext) ext def authors(authors: List[DocPart]) = if summon[DocContext].args.includeAuthors then list("Authors:", authors) else Nil m.docs.fold(Nil)(d => - flattened(d.typeParams) ++ - flattened(d.valueParams) ++ - opt("Returns:", d.result) ++ - list("Throws:", d.throws) ++ - opt("Constructor:", d.constructor) ++ + opt("Returns", d.result) ++ + list("Throws", d.throws) ++ + opt("Constructor", d.constructor) ++ authors(d.authors) ++ - list("See also:", d.see) ++ - opt("Version:", d.version) ++ - opt("Since:", d.since) ++ - list("Todo:", d.todo) ++ - list("Note:", d.note) ++ - list("Example:", d.example) + list("See also", d.see) ++ + opt("Version", d.version) ++ + opt("Since", d.since) ++ + list("Todo", d.todo) ++ + list("Note", d.note) ++ + list("Example", d.example) ) def companion(m: Member): Seq[AppliedTag] = 
m.companion.fold(Nil){ (kind, dri) => val kindName = kind.name - tableRow("Companion:", signatureRenderer.renderLink(kindName, dri)) + tableRow("Companion", signatureRenderer.renderLink(kindName, dri)) } def source(m: Member): Seq[AppliedTag] = summon[DocContext].sourceLinks.pathTo(m).fold(Nil){ link => - tableRow("Source:", a(href := link)(m.sources.fold("(source)")(_.path.getFileName().toString()))) + tableRow("Source", a(href := link)(m.sources.fold("(source)")(_.path.getFileName().toString()))) } def deprecation(m: Member): Seq[AppliedTag] = m.deprecated.fold(Nil){ a => @@ -98,16 +98,24 @@ class MemberRenderer(signatureRenderer: SignatureRenderer)(using DocContext) ext tableRow("Deprecated", content*) } - def memberInfo(m: Member, withBrief: Boolean = false): Seq[AppliedTag] = + def experimental(m: Member) = m.experimental.fold(Nil)(_ => tableRow("Experimental", Seq("true"))) + + def typeParams(m: Member): Seq[AppliedTag] = m.docs.fold(Nil)(d => flattenedDocPart(d.typeParams)) + def valueParams(m: Member): Seq[AppliedTag] = m.docs.fold(Nil)(d => flattenedDocPart(d.valueParams)) + + def memberInfo(m: Member, withBrief: Boolean = false, full: Boolean = false): Seq[AppliedTag] = val comment = m.docs val bodyContents = m.docs.fold(Nil)(e => renderDocPart(e.body) :: Nil) - val classLikeInfo: TagArg = classLikeParts(m) + val classLikeInfo: TagArg = classLikeParts(m, full) + val memberTypeParams = typeParams(m) + val memberValueParams = valueParams(m) val attributes = Seq( docAttributes(m), companion(m), deprecation(m), + experimental(m), defintionClasses(m), inheritedFrom(m), source(m), @@ -122,8 +130,31 @@ class MemberRenderer(signatureRenderer: SignatureRenderer)(using DocContext) ext Option.when(bodyContents.nonEmpty || attributes.nonEmpty)( div(cls := "cover")( div(cls := "doc")(bodyContents), - h2(cls := "h500")("Attributes"), - dl(cls := "attributes")(attributes*) + Option.when(full)( + section(id := "attributes")( + Option.when(memberTypeParams.nonEmpty)(Seq( 
+ h2(cls := "h500")("Type parameters"), + dl(cls := "attributes")(memberTypeParams*) + )).toSeq.flatten, + Option.when(memberValueParams.nonEmpty)(Seq( + h2(cls := "h500")("Value parameters"), + dl(cls := "attributes")(memberValueParams*) + )).toSeq.flatten, + h2(cls := "h500")("Attributes"), + dl(cls := "attributes")(attributes*) + ) + ).getOrElse( + Option.when(memberTypeParams.nonEmpty)(Seq( + h2(cls := "h200")("Type parameters"), + dl(cls := "attributes attributes-small")(memberTypeParams *) + )).toSeq.flatten ++ + Option.when(memberValueParams.nonEmpty)(Seq( + h2(cls := "h200")("Value parameters"), + dl(cls := "attributes attributes-small")(memberValueParams *) + )).toSeq.flatten :+ + h2(cls := "h200")("Attributes") :+ + dl(cls := "attributes attributes-small")(attributes *) + ) ) ) ).flatten @@ -147,14 +178,52 @@ class MemberRenderer(signatureRenderer: SignatureRenderer)(using DocContext) ext ) val signature: MemberSignature = signatureProvider.rawSignature(member)() - Seq( - div(cls := "signature mono-small-inline")( - span(cls := "modifiers")(signature.prefix.map(renderElement(_))), - span(cls := "kind")(signature.kind.map(renderElement(_))), - signature.name.map(renderElement(_, nameClasses*)), - span(signature.suffix.map(renderElement(_))) - ), - ) + val isSubtype = signature.suffix.exists { + case Keyword(keyword) => keyword.contains("extends") + case _ => false + } + if !isSubtype then + Seq( + div(cls := "signature")( + (Seq[TagArg]( + span(cls := "modifiers")(signature.prefix.map(renderElement(_))), + span(cls := "kind")(signature.kind.map(renderElement(_))), + signature.name.map(renderElement(_, nameClasses*)) + ) ++ signature.suffix.map(renderElement(_)))* + ), + ) + else + val (beforeExtends, afterExtends) = signature.suffix.splitAt(signature.suffix.indexOf(Keyword("extends"))) + val (shortSuffix, longSuffix) = splitTypeSuffixSignature(beforeExtends, afterExtends) + Seq( + div(cls := "signature")( + span(cls := "signature-short")( + (Seq[TagArg]( + 
span(cls := "modifiers")(signature.prefix.map(renderElement(_))), + span(cls := "kind")(signature.kind.map(renderElement(_))), + signature.name.map(renderElement(_, nameClasses *)) + ) ++ shortSuffix.map(renderElement(_)))* + ), + span(cls := "signature-long")( + longSuffix.map(renderElement(_))* + ) + ), + ) + end memberSignature + + def splitTypeSuffixSignature(shortAcc: List[SignaturePart], tail: List[SignaturePart], nestedTypeLevel: Int = 0): (List[SignaturePart], List[SignaturePart]) = + tail match + case Nil => + (shortAcc, Nil) + case (head @ Plain("[")) :: rest => + splitTypeSuffixSignature(shortAcc :+ head, rest, nestedTypeLevel + 1) + case (head @ Plain("]")) :: rest => + splitTypeSuffixSignature(shortAcc :+ head, rest, nestedTypeLevel - 1) + case (head @ Keyword(", ")) :: rest if nestedTypeLevel == 0 => + (shortAcc :+ head, rest) + case head :: rest => + splitTypeSuffixSignature(shortAcc :+ head, rest, nestedTypeLevel) + def memberIcon(member: Member) = member.kind match { case _ => @@ -185,9 +254,11 @@ class MemberRenderer(signatureRenderer: SignatureRenderer)(using DocContext) ext val annots = annotations(member) div(topLevelAttr:_*)( - Option.when(annots.nonEmpty || originInf.nonEmpty || memberInf.nonEmpty)(button(cls := "icon-button show-content")).toList, - annots.map(div(_)).toList, - div(cls := "header monospace")(memberSignature(member)), + div(cls := "documentableElement-expander")( + Option.when(annots.nonEmpty || originInf.nonEmpty || memberInf.nonEmpty)(button(cls := "icon-button ar show-content")).toList, + annots.map(div(_)).toList, + div(cls := "header monospace mono-medium")(memberSignature(member)), + ), Option.when(originInf.nonEmpty || memberInf.nonEmpty)( div(cls := "docs")( span(cls := "modifiers"), // just to have padding on left @@ -201,29 +272,45 @@ class MemberRenderer(signatureRenderer: SignatureRenderer)(using DocContext) ext private case class MGroup(header: AppliedTag, members: Seq[Member], groupName: String) - private def 
actualGroup(name: String, members: Seq[Member | MGroup]): Seq[AppliedTag] = + private def makeSubgroupHeader(name: String): AppliedTag = + h4(cls := "groupHeader h200")(name) + + private def actualGroup(name: String, members: Seq[Member | MGroup], headerConstructor: String => AppliedTag = makeSubgroupHeader, wrapInSection: Boolean = true): Seq[AppliedTag] = if members.isEmpty then Nil else - div(cls := "documentableList expand")( - button(cls := "icon-button show-content expand"), - h3(cls := "groupHeader h200")(name), - members.sortBy { - case m: Member => m.name - case MGroup(_, _, name) => name - }.map { - case element: Member => - member(element) - case MGroup(header, members, _) => - div( - header, - members.map(member) - ) - } - ) :: Nil + val groupBody = div(cls := "documentableList expand")( + div(cls := "documentableList-expander")( + button(cls := "icon-button show-content expand"), + headerConstructor(name) + ), + members.sortBy { + case m: Member => m.name + case MGroup(_, _, name) => name + }.map { + case element: Member => + member(element) + case MGroup(header, members, _) => + div( + header, + members.map(member) + ) + } + ) + if wrapInSection then + section(id := name.replace(' ', '-'))( + groupBody + ) :: Nil + else + groupBody :: Nil + private def isDeprecated(m: Member | MGroup): Boolean = m match case m: Member => m.deprecated.nonEmpty case g: MGroup => g.members.exists(isDeprecated) + private def isExperimental(m: Member | MGroup): Boolean = m match + case m: Member => m.experimental.nonEmpty + case g: MGroup => g.members.exists(isExperimental) + private def isInherited(m: Member | MGroup): Boolean = m match case m: Member => m.inheritedFrom.nonEmpty case g: MGroup => g.members.exists(isInherited) @@ -235,7 +322,8 @@ class MemberRenderer(signatureRenderer: SignatureRenderer)(using DocContext) ext private type SubGroup = (String, Seq[Member | MGroup]) private def buildGroup(name: String, subgroups: Seq[SubGroup]): Tab = val all = subgroups.map 
{ case (name, members) => - val (allInherited, allDefined) = members.partition(isInherited) + val (experimental, nonExperimental) = members.partition(isExperimental) + val (allInherited, allDefined) = nonExperimental.partition(isInherited) val (depDefined, defined) = allDefined.partition(isDeprecated) val (depInherited, inherited) = allInherited.partition(isDeprecated) val normalizedName = name.toLowerCase @@ -251,7 +339,8 @@ class MemberRenderer(signatureRenderer: SignatureRenderer)(using DocContext) ext definedWithGroup ++ List( actualGroup(s"Deprecated ${normalizedName}", depDefined), actualGroup(s"Inherited ${normalizedName}", inherited), - actualGroup(s"Deprecated and Inherited ${normalizedName}", depInherited) + actualGroup(s"Deprecated and Inherited ${normalizedName}", depInherited), + actualGroup(name = s"Experimental ${normalizedName}", experimental) ) } @@ -261,8 +350,9 @@ class MemberRenderer(signatureRenderer: SignatureRenderer)(using DocContext) ext name, name, Seq( - button(cls := "icon-button show-content expand"), - h2(tabAttr(name), cls := "h300")(name) + div(cls := "member-group-header")( + h3(tabAttr(name), cls := "h400")(name) + ) ) ++ children, "expand" ) @@ -294,7 +384,13 @@ class MemberRenderer(signatureRenderer: SignatureRenderer)(using DocContext) ext members.map(member) )) } - Tab("Grouped members", "custom_groups", content, "expand") + Tab("Grouped members", "grouped_members", + Seq( + div(cls := "member-group-header")( + h3(tabAttr("grouped_members"), cls := "h400")("Grouped members") + ) + ) ++ content, + "expand") def buildMembers(s: Member): AppliedTag = def partitionIntoGroups(members: Seq[Member]) = @@ -323,19 +419,11 @@ class MemberRenderer(signatureRenderer: SignatureRenderer)(using DocContext) ext }.toSeq div(cls := "membersList expand")( - h2(cls := "h500")("Members list"), - div(cls := "body-small", id := "concise-view-switch")( - span("Concise view"), - label(cls := "switch")( - input(Attr("type") := "checkbox")(), - span(cls 
:= "slider")() - ) - ), renderTabs( singleSelection = false, - buildGroup("Packages", Seq( - ("", rest.filter(m => m.kind == Kind.Package)), - )), + Tab("Packages", "packages", + actualGroup("Packages", rest.filter(m => m.kind == Kind.Package), name => h3(cls := "groupHeader h400")(name), false), + "expand"), grouppedMember(s, membersInGroups), buildGroup("Type members", Seq( ("Classlikes", rest.filter(m => m.kind.isInstanceOf[Classlike])), @@ -370,14 +458,16 @@ class MemberRenderer(signatureRenderer: SignatureRenderer)(using DocContext) ext if tabs.isEmpty then Nil else Seq(div(cls := (if singleSelection then "tabs single" else "tabs"))( div(cls := "contents")(tabs.map(t => - div(tabAttr(t.id), cls := s"tab ${t.cls}")(t.content) + section(id := t.name.replace(' ', '-'))( + div(tabAttr(t.id), cls := s"tab ${t.cls}")(t.content) + ) )) )) - def classLikeParts(m: Member): TagArg = + def classLikeParts(m: Member, full: Boolean = true): TagArg = if !m.kind.isInstanceOf[Classlike] then Nil else val graphHtml = m.graph match - case graph if graph.edges.nonEmpty => + case graph if graph.edges.nonEmpty && full => Seq(div( id := "inheritance-diagram", cls := "diagram-class showGraph")( button(`type` := "button", cls := "label-only-button", onclick := "zoomOut()")("Reset zoom"), button(`type` := "button", cls := "label-only-button", onclick := "hideGraph()")("Hide graph"), @@ -391,16 +481,16 @@ class MemberRenderer(signatureRenderer: SignatureRenderer)(using DocContext) ext def signatureList(list: Seq[LinkToType], className: String = "", expandable: Boolean): Seq[AppliedTag] = if list.isEmpty then Nil - else Seq(div(cls := s"mono-small-inline $className")( - if(expandable) then span(cls := "icon-button show-content") else span(), + else Seq(div(cls := s"mono-small-block $className")( list.map(link => - div(link.kind.name," ", link.signature.map(renderElement(_))) - ))) + div(link.kind.name," ", link.signature.map(renderElement(_)))), + if(expandable) then span(cls := 
"show-all-code show-content text-button h100")("Show all") else span() + )) def selfTypeList(list: List[LinkToType]): Seq[AppliedTag] = if list.isEmpty then Nil else Seq( - div(cls := "mono-small-inline supertypes")( + div(cls := "mono-small-block supertypes")( span(), list.map { link => div(link.signature.map(renderElement(_))) @@ -442,30 +532,49 @@ class MemberRenderer(signatureRenderer: SignatureRenderer)(using DocContext) ext } def fullMember(m: Member): PageContent = + val wideClass = m.companion.map(_ => "multi").getOrElse("single") val intro = m.kind match - case Kind.RootPackage =>Seq(h1(summon[DocContext].args.name)) + case Kind.RootPackage =>Seq(h1(cls := s"h600")(summon[DocContext].args.name)) case _ => Seq( div(cls := "cover-header")( memberIcon(m), - h1(cls := "h600")(m.name) + h1(cls := s"h600 $wideClass")(m.name) ), div(cls := "fqname body-large")( span(m.fullName) ) ) ++ companionBadge(m) ++ Seq( - div(cls := "main-signature mono-medium")( + div(cls := "main-signature mono-small-block")( annotations(m).getOrElse(Nil), memberSignature(m) ) ) + + val memberContent = div( + intro, + memberInfo(m, full = true), + if m.members.length > 0 then + Seq(section(id := "members-list")( + h2(cls := "h500")("Members list"), + buildDocumentableFilter, + buildMembers(m) + )) + else Nil + ) + + val memberDocument = Jsoup.parse(memberContent.toString) + + val toc = memberDocument.select("section[id]").asScala.toSeq + .flatMap { elem => + val header = elem.selectFirst("h1, h2, h3, h4, h5, h6") + Option(header).map { h => + TocEntry(h.tag().getName, h.text(), s"#${elem.id()}") + } + } + PageContent( - div( - intro, - memberInfo(m, withBrief = false), - buildDocumentableFilter, - buildMembers(m) - ), - Seq.empty // For now, we don't support table of contents in members + memberContent, + toc ) diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/Renderer.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/Renderer.scala index 07409944fb1b..dc2157131e0b 100644 --- 
a/scaladoc/src/dotty/tools/scaladoc/renderers/Renderer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/Renderer.scala @@ -45,10 +45,10 @@ abstract class Renderer(rootPackage: Member, val members: Map[DRI, Member], prot // Below code is for walking in order the tree and modifing its nodes basing on its neighbours // We add dummy guards - val allTemplates: Seq[Option[LoadedTemplate]] = None +: siteContext.allTemplates.map(Some(_)) :+ None + val notHidden: Seq[Option[LoadedTemplate]] = None +: siteContext.allTemplates.filterNot(_.hidden).map(Some(_)) :+ None // Let's gather the list of maps for each template with its in-order neighbours - val newSettings: List[Map[String, Object]] = allTemplates.sliding(size = 3, step = 1).map { + val newSettings: List[Map[String, Object]] = notHidden.sliding(size = 3, step = 1).map { case None :: None :: Nil => Map.empty case prev :: mid :: next :: Nil => @@ -84,7 +84,9 @@ abstract class Renderer(rootPackage: Member, val members: Map[DRI, Member], prot def updateSettings(templates: Seq[LoadedTemplate], additionalSettings: ListBuffer[Map[String, Object]]): List[LoadedTemplate] = val updatedTemplates = List.newBuilder[LoadedTemplate] for template <- templates do - val head: Map[String, Object] = additionalSettings.remove(0) + val head: Map[String, Object] = + if template.hidden then Map.empty + else additionalSettings.remove(0) val current: Map[String, Object] = template.templateFile.settings.getOrElse("page", Map.empty).asInstanceOf[Map[String, Object]] val updatedTemplateFile = template.templateFile.copy(settings = template.templateFile.settings.updated("page", head ++ current)) updatedTemplates += template.copy( diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/Resources.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/Resources.scala index 78dd7fb7faf7..d6cd701225ba 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/Resources.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/Resources.scala @@ -108,6 
+108,7 @@ trait Resources(using ctx: DocContext) extends Locations, Writer: "styles/searchbar.css", "styles/social-links.css", "styles/versions-dropdown.css", + "styles/content-contributors.css", "styles/fontawesome.css", "hljs/highlight.pack.js", "hljs/LICENSE", @@ -120,7 +121,8 @@ trait Resources(using ctx: DocContext) extends Locations, Writer: "scripts/components/Input.js", "scripts/components/FilterGroup.js", "scripts/components/Filter.js", - "scripts/scaladoc-scalajs.js" + "scripts/scaladoc-scalajs.js", + "scripts/contributors.js", ).map(dottyRes) val urls = List( @@ -154,14 +156,23 @@ trait Resources(using ctx: DocContext) extends Locations, Writer: val signatureProvider = ScalaSignatureProvider() def flattenToText(signature: Signature): String = signature.getName - def mkEntry(dri: DRI, name: String, text: String, extensionTarget: String, descr: String, kind: String) = jsonObject( + def mkEntry( + dri: DRI, + name: String, + text: String, + extensionTarget: String, + descr: String, + extraDescr: String, + kind: String, + ) = jsonObject( "l" -> jsonString(relativeInternalOrAbsoluteExternalPath(dri)), "e" -> (if dri.externalLink.isDefined then rawJSON("true") else rawJSON("false")), "i" -> jsonString(extensionTarget), "n" -> jsonString(name), "t" -> jsonString(text), "d" -> jsonString(descr), - "k" -> jsonString(kind) + "k" -> jsonString(kind), + "x" -> jsonString(extraDescr), ) def extensionTarget(member: Member): String = @@ -169,25 +180,65 @@ trait Resources(using ctx: DocContext) extends Locations, Writer: case Kind.Extension(on, _) => flattenToText(on.signature) case _ => "" + def docPartRenderPlain(d: DocPart): String = + import dotty.tools.scaladoc.tasty.comments.wiki._ + import com.vladsch.flexmark.util.ast.{Node => MdNode} + def renderPlain(wd: WikiDocElement): String = + wd match + case Paragraph(text) => renderPlain(text) + case Chain(items) => items.map(renderPlain).mkString("") + case Italic(text) => renderPlain(text) + case Bold(text) => 
renderPlain(text) + case Underline(text) => renderPlain(text) + case Superscript(text) => renderPlain(text) + case Subscript(text) => renderPlain(text) + case Link(link, title) => title.map(renderPlain).getOrElse( + link match + case DocLink.ToURL(url) => url + case DocLink.ToDRI(_, name) => name + case _ => "" + ) + case Monospace(text) => renderPlain(text) + case Text(text) => text + case Summary(text) => renderPlain(text) + case _ => "" + d match + case s: Seq[WikiDocElement @unchecked] => + if s.length == 0 then "" + else renderPlain(s.head) + case _ => "" + def processPage(page: Page, pageFQName: List[String]): Seq[(JSON, Seq[String])] = val (res, pageName) = page.content match case m: Member if m.kind != Kind.RootPackage => def processMember(member: Member, fqName: List[String]): Seq[(JSON, Seq[String])] = val signature: MemberSignature = signatureProvider.rawSignature(member)() val sig = Signature(Plain(member.name)) ++ signature.suffix - val descr = fqName.mkString(".") - val entry = mkEntry(member.dri, member.name, flattenToText(sig), extensionTarget(member), descr, member.kind.name) + val descr = if member.kind == Kind.Package then "" else fqName.mkString(".") + val extraDescr = member.docs.map(d => docPartRenderPlain(d.body)).getOrElse("") + val entry = mkEntry( + member.dri, + member.name, + flattenToText(sig), + extensionTarget(member), + descr, + extraDescr, + member.kind.name, + ) val children = member .membersBy(m => m.kind != Kind.Package && !m.kind.isInstanceOf[Classlike]) .filter(m => m.origin == Origin.RegularlyDefined && m.inheritedFrom.fold(true)(_.isSourceSuperclassHidden)) - val updatedFqName = fqName :+ member.name + val updatedFqName = if member.kind == Kind.Package then List(member.name) else fqName :+ member.name Seq((entry, updatedFqName)) ++ children.flatMap(processMember(_, updatedFqName)) (processMember(m, pageFQName), m.name) case _ => - (Seq((mkEntry(page.link.dri, page.link.name, page.link.name, "", "", "static"), pageFQName)), 
"") + (Seq((mkEntry(page.link.dri, page.link.name, page.link.name, "", "", "", "static"), pageFQName)), "") - val updatedFqName = if !pageName.isEmpty then pageFQName :+ pageName else pageFQName + val updatedFqName = page.content match + case m: Member if m.kind == Kind.Package => List(m.name) + case _ if pageName.isEmpty => pageFQName + case _ => pageFQName :+ pageName res ++ page.children.flatMap(processPage(_, updatedFqName)) val entries = pages.flatMap(processPage(_, Nil)) @@ -218,6 +269,7 @@ trait Resources(using ctx: DocContext) extends Locations, Writer: dottyRes("images/class.svg"), dottyRes("images/class_comp.svg"), dottyRes("images/class-dark.svg"), + dottyRes("images/no-results-icon.svg"), dottyRes("images/object.svg"), dottyRes("images/object_comp.svg"), dottyRes("images/object-dark.svg"), @@ -482,6 +534,11 @@ trait Resources(using ctx: DocContext) extends Locations, Writer: dottyRes("images/package-big.svg"), dottyRes("images/thick.svg"), dottyRes("images/thick-dark.svg"), + dottyRes("images/banner-icons/error.svg"), + dottyRes("images/banner-icons/info.svg"), + dottyRes("images/banner-icons/neutral.svg"), + dottyRes("images/banner-icons/success.svg"), + dottyRes("images/banner-icons/warning.svg"), searchData(pages), scastieConfiguration(), ) diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/SignatureRenderer.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/SignatureRenderer.scala index 1e9717fefd48..5c6235b14e7d 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/SignatureRenderer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/SignatureRenderer.scala @@ -15,7 +15,7 @@ trait SignatureRenderer: def currentDri: DRI def link(dri: DRI): Option[String] - def renderElement(e: SignaturePart, modifiers: AppliedAttr*) = renderElementWith(e, modifiers*) + def renderElement(e: SignaturePart, modifiers: AppliedAttr*): AppliedTag = renderElementWith(e, modifiers*) def renderLink(name: String, dri: DRI, modifiers: AppliedAttr*) = 
renderLinkContent(name, dri, modifiers:_*) diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/BasicSupport.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/BasicSupport.scala index 3ceecb5f4fcc..471d338522f0 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/BasicSupport.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/BasicSupport.scala @@ -66,5 +66,10 @@ trait BasicSupport: a.symbol.packageName == "java.lang" && a.symbol.className.contains("Deprecated") }.map(parseAnnotation) + def isExperimental(): Option[Annotation] = + sym.annotations.find { a => + a.symbol.packageName == "scala.annotation" && a.symbol.className.contains("experimental") + }.map(parseAnnotation) + def isLeftAssoc: Boolean = !sym.name.endsWith(":") end extension diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala index 9f87a23dee06..7ecc4827836a 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala @@ -110,6 +110,7 @@ trait ClassLikeSupport: modifiers = modifiers, graph = graph, deprecated = classDef.symbol.isDeprecated(), + experimental = classDef.symbol.isExperimental() ).copy( directParents = classDef.getParentsAsLinkToTypes, parents = supertypes, @@ -382,7 +383,16 @@ trait ClassLikeSupport: .filterNot(m => m == Modifier.Lazy || m == Modifier.Final) case _ => methodSymbol.getExtraModifiers() - mkMember(methodSymbol, methodKind, method.returnTpt.tpe.asSignature)(modifiers = modifiers, origin = origin, deprecated = methodSymbol.isDeprecated()) + mkMember( + methodSymbol, + methodKind, + method.returnTpt.tpe.asSignature + )( + modifiers = modifiers, + origin = origin, + deprecated = methodSymbol.isDeprecated(), + experimental = methodSymbol.isExperimental() + ) def mkParameter( argument: ValDef, @@ -450,7 +460,11 @@ trait ClassLikeSupport: Some(Link(l.tpe.typeSymbol.owner.name, l.tpe.typeSymbol.owner.dri)) case _ 
=> None } - mkMember(typeDef.symbol, Kind.Exported(kind), tpeTree.asSignature)(deprecated = typeDef.symbol.isDeprecated(), origin = Origin.ExportedFrom(origin)) + mkMember(typeDef.symbol, Kind.Exported(kind), tpeTree.asSignature)( + deprecated = typeDef.symbol.isDeprecated(), + origin = Origin.ExportedFrom(origin), + experimental = typeDef.symbol.isExperimental() + ) } else mkMember(typeDef.symbol, kind, tpeTree.asSignature)(deprecated = typeDef.symbol.isDeprecated()) @@ -468,7 +482,11 @@ trait ClassLikeSupport: .filterNot(m => m == Modifier.Lazy || m == Modifier.Final) case _ => valDef.symbol.getExtraModifiers() - mkMember(valDef.symbol, kind, memberInfo.res.asSignature)(modifiers = modifiers, deprecated = valDef.symbol.isDeprecated()) + mkMember(valDef.symbol, kind, memberInfo.res.asSignature)( + modifiers = modifiers, + deprecated = valDef.symbol.isDeprecated(), + experimental = valDef.symbol.isExperimental() + ) def mkMember(symbol: Symbol, kind: Kind, signature: DSignature)( modifiers: Seq[Modifier] = symbol.getExtraModifiers(), @@ -476,6 +494,7 @@ trait ClassLikeSupport: inheritedFrom: Option[InheritedFrom] = None, graph: HierarchyGraph = HierarchyGraph.empty, deprecated: Option[Annotation] = None, + experimental: Option[Annotation] = None ) = Member( name = symbol.normalizedName, fullName = symbol.fullName, @@ -490,7 +509,8 @@ trait ClassLikeSupport: inheritedFrom = inheritedFrom, graph = graph, docs = symbol.documentation, - deprecated = deprecated + deprecated = deprecated, + experimental = experimental ) object EvidenceOnlyParameterList diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Preparser.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Preparser.scala index 89c538d8d32d..95db8983626a 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Preparser.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Preparser.scala @@ -58,7 +58,7 @@ object Preparser { case CodeBlockEndRegex(before, marker, after) :: ls => if 
(!before.trim.isEmpty && !after.trim.isEmpty) go(docBody, tags, lastTagKey, before :: marker :: after :: ls, inCodeBlock = true) - if (!before.trim.isEmpty) + else if (!before.trim.isEmpty) go(docBody, tags, lastTagKey, before :: marker :: ls, inCodeBlock = true) else if (!after.trim.isEmpty) go(docBody, tags, lastTagKey, marker :: after :: ls, inCodeBlock = false) diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SectionRenderingExtension.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SectionRenderingExtension.scala index e6100a3c733d..1fa1a604c85a 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SectionRenderingExtension.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SectionRenderingExtension.scala @@ -1,35 +1,41 @@ package dotty.tools.scaladoc package tasty.comments.markdown -import com.vladsch.flexmark.html._ -import com.vladsch.flexmark.html.renderer._ -import com.vladsch.flexmark.parser._ -import com.vladsch.flexmark.ext.wikilink._ +import com.vladsch.flexmark.html.* +import com.vladsch.flexmark.html.renderer.* +import com.vladsch.flexmark.parser.* +import com.vladsch.flexmark.ext.wikilink.* import com.vladsch.flexmark.ext.wikilink.internal.WikiLinkLinkRefProcessor -import com.vladsch.flexmark.util.ast._ -import com.vladsch.flexmark.util.options._ +import com.vladsch.flexmark.util.ast.* +import com.vladsch.flexmark.util.options.* import com.vladsch.flexmark.util.sequence.BasedSequence -import com.vladsch.flexmark.util.html.{ AttributeImpl, Attributes } -import com.vladsch.flexmark._ +import com.vladsch.flexmark.util.html.{AttributeImpl, Attributes} +import com.vladsch.flexmark.* import com.vladsch.flexmark.ast.FencedCodeBlock +import scala.collection.mutable + object SectionRenderingExtension extends HtmlRenderer.HtmlRendererExtension: def rendererOptions(opt: MutableDataHolder): Unit = () case class AnchorLink(link: String) extends 
BlankLine(BasedSequence.EmptyBasedSequence()) object SectionHandler extends CustomNodeRenderer[Section]: + val repeatedIds: mutable.Map[(NodeRendererContext, BasedSequence), Int] = mutable.Map() val idGenerator = new HeaderIdGenerator.Factory().create() override def render(node: Section, c: NodeRendererContext, html: HtmlWriter): Unit = val Section(header, body) = node - val id = idGenerator.getId(header.getText) + val idSuffix = repeatedIds.getOrElseUpdate((c, header.getText), 0) + val ifSuffixStr = if(idSuffix == 0) then "" else idSuffix.toString + repeatedIds.update((c, header.getText), repeatedIds((c, header.getText)) + 1) + val id = idGenerator.getId(header.getText.append(ifSuffixStr)) val anchor = AnchorLink(s"#$id") val attributes = Attributes() val headerClass: String = header.getLevel match case 1 => "h500" - case 2 => "h300" - case 3 => "h200" - case 4 => "h100" + case 2 => "h500" + case 3 => "h400" + case 4 => "h300" case _ => "h50" attributes.addValue(AttributeImpl.of("class", headerClass)) val embeddedAttributes = EmbeddedAttributeProvider.EmbeddedNodeAttributes(header, attributes) diff --git a/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureUtils.scala b/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureUtils.scala index e9bf20d2a709..acbfe87b5d25 100644 --- a/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureUtils.scala +++ b/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureUtils.scala @@ -63,7 +63,7 @@ case class SignatureBuilder(content: Signature = Nil) extends ScalaSignatureUtil case Nil => this case extendType :: withTypes => val extendPart = keyword(" extends ").signature(extendType.signature) - withTypes.foldLeft(extendPart)((bdr2, tpe) => bdr2.keyword(" with ").signature(tpe.signature)) + withTypes.foldLeft(extendPart)((bdr2, tpe) => bdr2.keyword(", ").signature(tpe.signature)) def modifiersAndVisibility(t: Member) = val (prefixMods, suffixMods) = t.modifiers.partition(_.prefix) diff --git 
a/scaladoc/src/dotty/tools/scaladoc/util/html.scala b/scaladoc/src/dotty/tools/scaladoc/util/html.scala index 790891b95d99..e66ba3a4b706 100644 --- a/scaladoc/src/dotty/tools/scaladoc/util/html.scala +++ b/scaladoc/src/dotty/tools/scaladoc/util/html.scala @@ -88,6 +88,7 @@ object HTML: val th = Tag("th") val tr = Tag("tr") val td = Tag("td") + val section = Tag("section") val cls = Attr("class") val href = Attr("href") diff --git a/scaladoc/test/dotty/tools/scaladoc/site/NavigationTest.scala b/scaladoc/test/dotty/tools/scaladoc/site/NavigationTest.scala index cf36ba320e24..f137e9e6b13e 100644 --- a/scaladoc/test/dotty/tools/scaladoc/site/NavigationTest.scala +++ b/scaladoc/test/dotty/tools/scaladoc/site/NavigationTest.scala @@ -25,11 +25,14 @@ class NavigationTest extends BaseHtmlTest: NavMenuTestEntry("A directory", "dir/index.html", Seq( NavMenuTestEntry("Nested in a directory", "dir/nested.html", Nil) )), - NavMenuTestEntry("Adoc", "Adoc.html", Seq()) + NavMenuTestEntry("Adoc", "#", Seq()) ) val apiNav = Seq( - NavMenuTestEntry("tests.site", "site.html", Seq( + NavMenuTestEntry("tests.site", "#", Seq( + NavMenuTestEntry("some.other", "site/some/other.html", Seq( + NavMenuTestEntry("SomeOtherPackage", "site/some/other/SomeOtherPackage.html", Nil), + )), NavMenuTestEntry("BrokenLink", "site/BrokenLink.html", Nil), NavMenuTestEntry("BrokenLinkWiki", "site/BrokenLinkWiki.html", Nil), NavMenuTestEntry("OtherPackageLink", "site/OtherPackageLink.html", Nil), @@ -38,9 +41,6 @@ class NavigationTest extends BaseHtmlTest: NavMenuTestEntry("SamePackageLinkWiki", "site/SamePackageLinkWiki.html", Nil), NavMenuTestEntry("SomeClass", "site/SomeClass.html", Nil) )), - NavMenuTestEntry("tests.site.some.other", "site/some/other.html", Seq( - NavMenuTestEntry("SomeOtherPackage", "site/some/other/SomeOtherPackage.html", Nil), - )) ) testNavMenu("docs/Adoc.html", docsNav) diff --git a/scaladoc/test/dotty/tools/scaladoc/site/TemplateFileTests.scala 
b/scaladoc/test/dotty/tools/scaladoc/site/TemplateFileTests.scala index 12664498b29b..203ab9cf5ed1 100644 --- a/scaladoc/test/dotty/tools/scaladoc/site/TemplateFileTests.scala +++ b/scaladoc/test/dotty/tools/scaladoc/site/TemplateFileTests.scala @@ -139,7 +139,7 @@ class TemplateFileTests: |

Test page

|

Hello world!!

|
- |

Test page end

+ |

Test page end

|
|
""".stripMargin diff --git a/sjs-compiler-tests/test/scala/dotty/tools/dotc/ScalaJSCompilationTests.scala b/sjs-compiler-tests/test/scala/dotty/tools/dotc/ScalaJSCompilationTests.scala index ca4f292568bb..0f4eb633b770 100644 --- a/sjs-compiler-tests/test/scala/dotty/tools/dotc/ScalaJSCompilationTests.scala +++ b/sjs-compiler-tests/test/scala/dotty/tools/dotc/ScalaJSCompilationTests.scala @@ -6,6 +6,7 @@ import org.junit.{ Test, BeforeClass, AfterClass } import org.junit.experimental.categories.Category import scala.concurrent.duration._ +import reporting.TestReporter import vulpix._ @Category(Array(classOf[ScalaJSCompilationTests])) @@ -23,6 +24,7 @@ class ScalaJSCompilationTests extends ParallelTesting { def isInteractive = SummaryReport.isInteractive def testFilter = Properties.testsFilter def updateCheckFiles: Boolean = Properties.testsUpdateCheckfile + def failedTests = TestReporter.lastRunFailedTests // Negative tests ------------------------------------------------------------ diff --git a/tasty/src/dotty/tools/tasty/TastyFormat.scala b/tasty/src/dotty/tools/tasty/TastyFormat.scala index 98cba90bdccf..66b56d30a6a4 100644 --- a/tasty/src/dotty/tools/tasty/TastyFormat.scala +++ b/tasty/src/dotty/tools/tasty/TastyFormat.scala @@ -288,7 +288,7 @@ object TastyFormat { * compatibility, but remains backwards compatible, with all * preceeding `MinorVersion`. */ - final val MinorVersion: Int = 2 + final val MinorVersion: Int = 3 /** Natural Number. The `ExperimentalVersion` allows for * experimentation with changes to TASTy without committing @@ -304,7 +304,7 @@ object TastyFormat { * is able to read final TASTy documents if the file's * `MinorVersion` is strictly less than the current value. */ - final val ExperimentalVersion: Int = 0 + final val ExperimentalVersion: Int = 1 /**This method implements a binary relation (`<:<`) between two TASTy versions. 
* diff --git a/tests/coverage/pos/Constructor.scoverage.check b/tests/coverage/pos/Constructor.scoverage.check index 1aaf414006cd..678da472fd4c 100644 --- a/tests/coverage/pos/Constructor.scoverage.check +++ b/tests/coverage/pos/Constructor.scoverage.check @@ -59,15 +59,15 @@ C Class covtest.C -62 -63 +60 +64 5 -x -Select +f +Apply false 0 false -x +f(x) 3 Constructor.scala @@ -76,15 +76,15 @@ C Class covtest.C -60 -64 +62 +63 5 -f -Apply +x +Select false 0 false -f(x) +x 4 Constructor.scala @@ -127,15 +127,15 @@ O$ Object covtest.O$ -112 -113 +110 +114 10 -y -Ident +g +Apply false 0 false -y +g(y) 7 Constructor.scala @@ -144,13 +144,13 @@ O$ Object covtest.O$ -110 -114 +112 +113 10 -g -Apply +y +Ident false 0 false -g(y) +y diff --git a/tests/coverage/pos/ContextFunctions.scoverage.check b/tests/coverage/pos/ContextFunctions.scoverage.check index 26391cfad536..5f2a5f8e14f3 100644 --- a/tests/coverage/pos/ContextFunctions.scoverage.check +++ b/tests/coverage/pos/ContextFunctions.scoverage.check @@ -76,6 +76,40 @@ Imperative Class covtest.Imperative readPerson +252 +295 +13 + +Apply +false +0 +false +OnError((e) => readName2(using e)(using s)) + +4 +ContextFunctions.scala +covtest +Imperative +Class +covtest.Imperative +$anonfun +267 +294 +13 +apply +Apply +false +0 +false +readName2(using e)(using s) + +5 +ContextFunctions.scala +covtest +Imperative +Class +covtest.Imperative +readPerson 192 206 11 diff --git a/tests/coverage/pos/Enum.scoverage.check b/tests/coverage/pos/Enum.scoverage.check index b5bf6e7ed6a9..7e9b69be2d31 100644 --- a/tests/coverage/pos/Enum.scoverage.check +++ b/tests/coverage/pos/Enum.scoverage.check @@ -72,142 +72,23 @@ def surfaceWeight 3 Enum.scala covtest -$anon -Class -covtest.$anon - -485 -512 -17 - +EnumTypes$ +Object +covtest.EnumTypes$ +test +1043 +1077 +30 +println Apply false 0 false -Planet(3.303e+23, 2.4397e6) +println("Example 1: \\n"+emptyList) 4 Enum.scala covtest -$anon -Class -covtest.$anon - -538 -565 -18 - -Apply -false -0 
-false -Planet(4.869e+24, 6.0518e6) - -5 -Enum.scala -covtest -$anon -Class -covtest.$anon - -591 -619 -19 - -Apply -false -0 -false -Planet(5.976e+24, 6.37814e6) - -6 -Enum.scala -covtest -$anon -Class -covtest.$anon - -645 -672 -20 - -Apply -false -0 -false -Planet(6.421e+23, 3.3972e6) - -7 -Enum.scala -covtest -$anon -Class -covtest.$anon - -698 -725 -21 - -Apply -false -0 -false -Planet(1.9e+27, 7.1492e7) - -8 -Enum.scala -covtest -$anon -Class -covtest.$anon - -751 -778 -22 - -Apply -false -0 -false -Planet(5.688e+26, 6.0268e7) - -9 -Enum.scala -covtest -$anon -Class -covtest.$anon - -804 -831 -23 - -Apply -false -0 -false -Planet(8.686e+25, 2.5559e7) - -10 -Enum.scala -covtest -$anon -Class -covtest.$anon - -857 -884 -24 - -Apply -false -0 -false -Planet(1.024e+26, 2.4746e7) - -11 -Enum.scala -covtest EnumTypes$ Object covtest.EnumTypes$ @@ -220,26 +101,26 @@ Apply false 0 false -"Example 1: \n"+emptyList +"Example 1: \\n"+emptyList -12 +5 Enum.scala covtest EnumTypes$ Object covtest.EnumTypes$ test -1043 -1077 -30 +1082 +1103 +31 println Apply false 0 false -println("Example 1: \n"+emptyList) +println(s"${list}\\n") -13 +6 Enum.scala covtest EnumTypes$ @@ -254,43 +135,43 @@ Apply false 0 false -s"${list}\n" +s"${list}\\n" -14 +7 Enum.scala covtest EnumTypes$ Object covtest.EnumTypes$ -test -1082 -1103 -31 -println -Apply +calculateEarthWeightOnPlanets +1195 +1222 +34 +surfaceGravity +Select false 0 false -println(s"${list}\n") +Planet.Earth.surfaceGravity -15 +8 Enum.scala covtest EnumTypes$ Object covtest.EnumTypes$ calculateEarthWeightOnPlanets -1195 -1222 -34 -surfaceGravity -Select +1229 +1320 +35 +foreach +Apply false 0 false -Planet.Earth.surfaceGravity +for p <- Planet.values do\n println(s"Your weight on $p is ${p.surfaceWeight(mass)}") -16 +9 Enum.scala covtest EnumTypes$ @@ -307,24 +188,24 @@ false false Planet.values -17 +10 Enum.scala covtest EnumTypes$ Object covtest.EnumTypes$ $anonfun -1296 -1317 +1263 +1320 36 -surfaceWeight +println Apply 
false 0 false -p.surfaceWeight(mass) +println(s"Your weight on $p is ${p.surfaceWeight(mass)}") -18 +11 Enum.scala covtest EnumTypes$ @@ -341,42 +222,24 @@ false false s"Your weight on $p is ${p.surfaceWeight(mass)}" -19 +12 Enum.scala covtest EnumTypes$ Object covtest.EnumTypes$ $anonfun -1263 -1320 +1296 +1317 36 -println -Apply -false -0 -false -println(s"Your weight on $p is ${p.surfaceWeight(mass)}") - -20 -Enum.scala -covtest -EnumTypes$ -Object -covtest.EnumTypes$ -calculateEarthWeightOnPlanets -1229 -1320 -35 -foreach +surfaceWeight Apply false 0 false -for p <- Planet.values do - println(s"Your weight on $p is ${p.surfaceWeight(mass)}") +p.surfaceWeight(mass) -21 +13 Enum.scala covtest EnumTypes$ @@ -393,7 +256,7 @@ false false def calculateEarthWeightOnPlanets -22 +14 Enum.scala covtest EnumTypes$ @@ -410,7 +273,7 @@ false false println("Example 2:") -23 +15 Enum.scala covtest EnumTypes$ @@ -427,7 +290,7 @@ false false calculateEarthWeightOnPlanets(80) -24 +16 Enum.scala covtest EnumTypes$ diff --git a/tests/coverage/pos/EnumJava.scala b/tests/coverage/pos/EnumJava.scala new file mode 100644 index 000000000000..3a37cc096685 --- /dev/null +++ b/tests/coverage/pos/EnumJava.scala @@ -0,0 +1,6 @@ +package covtest + +enum MyLogLevel extends java.lang.Enum[MyLogLevel]: + case Warn extends MyLogLevel + case Error extends MyLogLevel + case Fatal extends MyLogLevel diff --git a/tests/coverage/pos/EnumJava.scoverage.check b/tests/coverage/pos/EnumJava.scoverage.check new file mode 100644 index 000000000000..1bdba951d6ae --- /dev/null +++ b/tests/coverage/pos/EnumJava.scoverage.check @@ -0,0 +1,20 @@ +# Coverage data, format version: 3.0 +# Statement data: +# - id +# - source path +# - package name +# - class name +# - class type (Class, Object or Trait) +# - full class name +# - method name +# - start offset +# - end offset +# - line number +# - symbol name +# - tree name +# - is branch +# - invocations count +# - is ignored +# - description (can be multi-line) +# 
' ' sign +# ------------------------------------------ diff --git a/tests/coverage/pos/Escaping.scala b/tests/coverage/pos/Escaping.scala new file mode 100644 index 000000000000..bdde94290251 --- /dev/null +++ b/tests/coverage/pos/Escaping.scala @@ -0,0 +1,4 @@ +package covtest.`\n` + +class `\r\n\f`: + def `\r\n\f`(`\\`: String) = `\\`.length diff --git a/tests/coverage/pos/Escaping.scoverage.check b/tests/coverage/pos/Escaping.scoverage.check new file mode 100644 index 000000000000..ecb907a9d222 --- /dev/null +++ b/tests/coverage/pos/Escaping.scoverage.check @@ -0,0 +1,54 @@ +# Coverage data, format version: 3.0 +# Statement data: +# - id +# - source path +# - package name +# - class name +# - class type (Class, Object or Trait) +# - full class name +# - method name +# - start offset +# - end offset +# - line number +# - symbol name +# - tree name +# - is branch +# - invocations count +# - is ignored +# - description (can be multi-line) +# ' ' sign +# ------------------------------------------ +0 +Escaping.scala +covtest.\n +\r\n\f +Class +covtest.\n.\r\n\f +\r\n\f +69 +80 +3 +length +Apply +false +0 +false +`\\\\`.length + +1 +Escaping.scala +covtest.\n +\r\n\f +Class +covtest.\n.\r\n\f +\r\n\f +40 +48 +3 +\r\n\f +DefDef +false +0 +false +def `\\r\\ + diff --git a/tests/coverage/pos/For.scala b/tests/coverage/pos/For.scala new file mode 100644 index 000000000000..21ef5407c60f --- /dev/null +++ b/tests/coverage/pos/For.scala @@ -0,0 +1,14 @@ +package covtest + +def testForLoop: Unit = + for i <- 1 to 10 do + println(i) + +def testForAdvanced: Unit = + def f(x: Int): Boolean = true + for j <- 1 to 10 if f(j) do + println(j) + +def testForeach: Unit = + // An anonymous function is created here, but the user code must still be instrumented! 
+ Nil.foreach(_ => println("user code here")) diff --git a/tests/coverage/pos/For.scoverage.check b/tests/coverage/pos/For.scoverage.check new file mode 100644 index 000000000000..9fdc9a7e9d80 --- /dev/null +++ b/tests/coverage/pos/For.scoverage.check @@ -0,0 +1,309 @@ +# Coverage data, format version: 3.0 +# Statement data: +# - id +# - source path +# - package name +# - class name +# - class type (Class, Object or Trait) +# - full class name +# - method name +# - start offset +# - end offset +# - line number +# - symbol name +# - tree name +# - is branch +# - invocations count +# - is ignored +# - description (can be multi-line) +# ' ' sign +# ------------------------------------------ +0 +For.scala +covtest +For$package$ +Object +covtest.For$package$ +testForLoop +43 +77 +3 +foreach +Apply +false +0 +false +for i <- 1 to 10 do\n println(i) + +1 +For.scala +covtest +For$package$ +Object +covtest.For$package$ +testForLoop +52 +59 +3 +to +Apply +false +0 +false +1 to 10 + +2 +For.scala +covtest +For$package$ +Object +covtest.For$package$ +testForLoop +52 +53 +3 +intWrapper +Apply +false +0 +false +1 + +3 +For.scala +covtest +For$package$ +Object +covtest.For$package$ +$anonfun +67 +77 +4 +println +Apply +false +0 +false +println(i) + +4 +For.scala +covtest +For$package$ +Object +covtest.For$package$ +testForLoop +17 +32 +2 +testForLoop +DefDef +false +0 +false +def testForLoop + +5 +For.scala +covtest +For$package$ +Object +covtest.For$package$ +f +109 +114 +7 +f +DefDef +false +0 +false +def f + +6 +For.scala +covtest +For$package$ +Object +covtest.For$package$ +testForAdvanced +141 +183 +8 +foreach +Apply +false +0 +false +for j <- 1 to 10 if f(j) do\n println(j) + +7 +For.scala +covtest +For$package$ +Object +covtest.For$package$ +testForAdvanced +145 +165 +8 +withFilter +Apply +false +0 +false +j <- 1 to 10 if f(j) + +8 +For.scala +covtest +For$package$ +Object +covtest.For$package$ +testForAdvanced +150 +157 +8 +to +Apply +false +0 +false +1 to 10 + +9 
+For.scala +covtest +For$package$ +Object +covtest.For$package$ +testForAdvanced +150 +151 +8 +intWrapper +Apply +false +0 +false +1 + +10 +For.scala +covtest +For$package$ +Object +covtest.For$package$ +$anonfun +161 +165 +8 +f +Apply +false +0 +false +f(j) + +11 +For.scala +covtest +For$package$ +Object +covtest.For$package$ +$anonfun +173 +183 +9 +println +Apply +false +0 +false +println(j) + +12 +For.scala +covtest +For$package$ +Object +covtest.For$package$ +testForAdvanced +79 +98 +6 +testForAdvanced +DefDef +false +0 +false +def testForAdvanced + +13 +For.scala +covtest +For$package$ +Object +covtest.For$package$ +testForeach +301 +344 +13 +foreach +Apply +false +0 +false +Nil.foreach(_ => println("user code here")) + +14 +For.scala +covtest +For$package$ +Object +covtest.For$package$ +testForeach +301 +304 +13 +Nil +Ident +false +0 +false +Nil + +15 +For.scala +covtest +For$package$ +Object +covtest.For$package$ +$anonfun +318 +343 +13 +println +Apply +false +0 +false +println("user code here") + +16 +For.scala +covtest +For$package$ +Object +covtest.For$package$ +testForeach +185 +200 +11 +testForeach +DefDef +false +0 +false +def testForeach + diff --git a/tests/coverage/pos/Givens.scoverage.check b/tests/coverage/pos/Givens.scoverage.check index ac8fcd9e379e..15f8f02c378f 100644 --- a/tests/coverage/pos/Givens.scoverage.check +++ b/tests/coverage/pos/Givens.scoverage.check @@ -178,15 +178,15 @@ Givens Class covtest.Givens test2 -461 -476 +448 +477 22 -getMessage +printContext Apply false 0 false -getMessage(123) +printContext(getMessage(123)) 10 Givens.scala @@ -195,15 +195,15 @@ Givens Class covtest.Givens test2 -448 -477 +461 +476 22 -printContext +getMessage Apply false 0 false -printContext(getMessage(123)) +getMessage(123) 11 Givens.scala diff --git a/tests/coverage/pos/Inlined.scoverage.check b/tests/coverage/pos/Inlined.scoverage.check index 6a4aac8ced8d..85393db11a8f 100644 --- a/tests/coverage/pos/Inlined.scoverage.check +++ 
b/tests/coverage/pos/Inlined.scoverage.check @@ -45,8 +45,8 @@ testInlined 288 330 10 - -Block +assertFailed +Apply true 0 false @@ -77,6 +77,23 @@ Object covtest.Inlined$package$ testInlined 155 +159 +6 +List +Ident +false +0 +false +List + +4 +Inlined.scala +covtest +Inlined$package$ +Object +covtest.Inlined$package$ +testInlined +155 169 6 length @@ -86,7 +103,7 @@ false false List(l).length -4 +5 Inlined.scala covtest Inlined$package$ @@ -103,7 +120,7 @@ false false scala.runtime.Scala3RunTime.assertFailed() -5 +6 Inlined.scala covtest Inlined$package$ @@ -113,14 +130,14 @@ testInlined 288 330 10 - -Block +assertFailed +Apply true 0 false scala.runtime.Scala3RunTime.assertFailed() -6 +7 Inlined.scala covtest Inlined$package$ @@ -137,7 +154,24 @@ false false List(l) +8 +Inlined.scala +covtest +Inlined$package$ +Object +covtest.Inlined$package$ +testInlined +180 +184 7 +List +Ident +false +0 +false +List + +9 Inlined.scala covtest Inlined$package$ @@ -154,7 +188,7 @@ false false List(l).length -8 +10 Inlined.scala covtest Inlined$package$ @@ -171,7 +205,7 @@ false false scala.runtime.Scala3RunTime.assertFailed() -9 +11 Inlined.scala covtest Inlined$package$ @@ -181,14 +215,14 @@ testInlined 288 330 10 - -Block +assertFailed +Apply true 0 false scala.runtime.Scala3RunTime.assertFailed() -10 +12 Inlined.scala covtest Inlined$package$ diff --git a/tests/coverage/pos/InlinedFromLib.scala b/tests/coverage/pos/InlinedFromLib.scala new file mode 100644 index 000000000000..1b05e11b7558 --- /dev/null +++ b/tests/coverage/pos/InlinedFromLib.scala @@ -0,0 +1,9 @@ +package covtest + +// assert is a `transparent inline` in Predef, +// but its source path should not appear in the coverage report. 
+def testInlined(): Unit = + val l = 1 + assert(l == 1) + assert(l == List(l).length) + assert(List(l).length == 1) diff --git a/tests/coverage/pos/InlinedFromLib.scoverage.check b/tests/coverage/pos/InlinedFromLib.scoverage.check new file mode 100644 index 000000000000..d7b2a42cd3b3 --- /dev/null +++ b/tests/coverage/pos/InlinedFromLib.scoverage.check @@ -0,0 +1,292 @@ +# Coverage data, format version: 3.0 +# Statement data: +# - id +# - source path +# - package name +# - class name +# - class type (Class, Object or Trait) +# - full class name +# - method name +# - start offset +# - end offset +# - line number +# - symbol name +# - tree name +# - is branch +# - invocations count +# - is ignored +# - description (can be multi-line) +# ' ' sign +# ------------------------------------------ +0 +InlinedFromLib.scala +covtest +InlinedFromLib$package$ +Object +covtest.InlinedFromLib$package$ +testInlined +169 +183 +6 +assertFailed +Apply +false +0 +false +assert(l == 1) + +1 +InlinedFromLib.scala +covtest +InlinedFromLib$package$ +Object +covtest.InlinedFromLib$package$ +testInlined +169 +183 +6 +assertFailed +Apply +true +0 +false +assert(l == 1) + +2 +InlinedFromLib.scala +covtest +InlinedFromLib$package$ +Object +covtest.InlinedFromLib$package$ +testInlined +169 +183 +6 + +Literal +true +0 +false +assert(l == 1) + +3 +InlinedFromLib.scala +covtest +InlinedFromLib$package$ +Object +covtest.InlinedFromLib$package$ +testInlined +198 +205 +7 +apply +Apply +false +0 +false +List(l) + +4 +InlinedFromLib.scala +covtest +InlinedFromLib$package$ +Object +covtest.InlinedFromLib$package$ +testInlined +198 +202 +7 +List +Ident +false +0 +false +List + +5 +InlinedFromLib.scala +covtest +InlinedFromLib$package$ +Object +covtest.InlinedFromLib$package$ +testInlined +198 +212 +7 +length +Select +false +0 +false +List(l).length + +6 +InlinedFromLib.scala +covtest +InlinedFromLib$package$ +Object +covtest.InlinedFromLib$package$ +testInlined +186 +213 +7 +assertFailed +Apply +false +0 
+false +assert(l == List(l).length) + +7 +InlinedFromLib.scala +covtest +InlinedFromLib$package$ +Object +covtest.InlinedFromLib$package$ +testInlined +186 +213 +7 +assertFailed +Apply +true +0 +false +assert(l == List(l).length) + +8 +InlinedFromLib.scala +covtest +InlinedFromLib$package$ +Object +covtest.InlinedFromLib$package$ +testInlined +186 +213 +7 + +Literal +true +0 +false +assert(l == List(l).length) + +9 +InlinedFromLib.scala +covtest +InlinedFromLib$package$ +Object +covtest.InlinedFromLib$package$ +testInlined +223 +230 +8 +apply +Apply +false +0 +false +List(l) + +10 +InlinedFromLib.scala +covtest +InlinedFromLib$package$ +Object +covtest.InlinedFromLib$package$ +testInlined +223 +227 +8 +List +Ident +false +0 +false +List + +11 +InlinedFromLib.scala +covtest +InlinedFromLib$package$ +Object +covtest.InlinedFromLib$package$ +testInlined +223 +237 +8 +length +Select +false +0 +false +List(l).length + +12 +InlinedFromLib.scala +covtest +InlinedFromLib$package$ +Object +covtest.InlinedFromLib$package$ +testInlined +216 +243 +8 +assertFailed +Apply +false +0 +false +assert(List(l).length == 1) + +13 +InlinedFromLib.scala +covtest +InlinedFromLib$package$ +Object +covtest.InlinedFromLib$package$ +testInlined +216 +243 +8 +assertFailed +Apply +true +0 +false +assert(List(l).length == 1) + +14 +InlinedFromLib.scala +covtest +InlinedFromLib$package$ +Object +covtest.InlinedFromLib$package$ +testInlined +216 +243 +8 + +Literal +true +0 +false +assert(List(l).length == 1) + +15 +InlinedFromLib.scala +covtest +InlinedFromLib$package$ +Object +covtest.InlinedFromLib$package$ +testInlined +129 +144 +4 +testInlined +DefDef +false +0 +false +def testInlined + diff --git a/tests/coverage/pos/Lift.scala b/tests/coverage/pos/Lift.scala new file mode 100644 index 000000000000..86f8e4e1fe0e --- /dev/null +++ b/tests/coverage/pos/Lift.scala @@ -0,0 +1,8 @@ +package covtest + +class SomeFunctions: + def f(x: Int) = () + def g() = 0 + def c = SomeFunctions() + + def test = 
c.f(g()) diff --git a/tests/coverage/pos/Lift.scoverage.check b/tests/coverage/pos/Lift.scoverage.check new file mode 100644 index 000000000000..a1c656cbdb67 --- /dev/null +++ b/tests/coverage/pos/Lift.scoverage.check @@ -0,0 +1,156 @@ +# Coverage data, format version: 3.0 +# Statement data: +# - id +# - source path +# - package name +# - class name +# - class type (Class, Object or Trait) +# - full class name +# - method name +# - start offset +# - end offset +# - line number +# - symbol name +# - tree name +# - is branch +# - invocations count +# - is ignored +# - description (can be multi-line) +# ' ' sign +# ------------------------------------------ +0 +Lift.scala +covtest +SomeFunctions +Class +covtest.SomeFunctions +f +40 +45 +3 +f +DefDef +false +0 +false +def f + +1 +Lift.scala +covtest +SomeFunctions +Class +covtest.SomeFunctions +g +61 +66 +4 +g +DefDef +false +0 +false +def g + +2 +Lift.scala +covtest +SomeFunctions +Class +covtest.SomeFunctions +c +83 +98 +5 + +Apply +false +0 +false +SomeFunctions() + +3 +Lift.scala +covtest +SomeFunctions +Class +covtest.SomeFunctions +c +75 +80 +5 +c +DefDef +false +0 +false +def c + +4 +Lift.scala +covtest +SomeFunctions +Class +covtest.SomeFunctions +test +113 +121 +7 +f +Apply +false +0 +false +c.f(g()) + +5 +Lift.scala +covtest +SomeFunctions +Class +covtest.SomeFunctions +test +113 +114 +7 +c +Select +false +0 +false +c + +6 +Lift.scala +covtest +SomeFunctions +Class +covtest.SomeFunctions +test +117 +120 +7 +g +Apply +false +0 +false +g() + +7 +Lift.scala +covtest +SomeFunctions +Class +covtest.SomeFunctions +test +102 +110 +7 +test +DefDef +false +0 +false +def test + diff --git a/tests/coverage/pos/MatchCaseClasses.scoverage.check b/tests/coverage/pos/MatchCaseClasses.scoverage.check index 0e469fc86b7c..5440c2e3098d 100644 --- a/tests/coverage/pos/MatchCaseClasses.scoverage.check +++ b/tests/coverage/pos/MatchCaseClasses.scoverage.check @@ -25,6 +25,23 @@ MatchCaseClasses$ Object covtest.MatchCaseClasses$ f 
+135 +147 +7 + +Block +false +0 +false +case Pat1(0) + +1 +MatchCaseClasses.scala +covtest +MatchCaseClasses$ +Object +covtest.MatchCaseClasses$ +f 151 163 7 @@ -35,24 +52,24 @@ false false println("a") -1 +2 MatchCaseClasses.scala covtest MatchCaseClasses$ Object covtest.MatchCaseClasses$ f -135 -147 -7 +168 +180 +8 Block false 0 false -case Pat1(0) +case Pat1(_) -2 +3 MatchCaseClasses.scala covtest MatchCaseClasses$ @@ -69,24 +86,24 @@ false false println("b") -3 +4 MatchCaseClasses.scala covtest MatchCaseClasses$ Object covtest.MatchCaseClasses$ f -168 -180 -8 +201 +221 +9 Block false 0 false -case Pat1(_) +case p @ Pat2(1, -1) -4 +5 MatchCaseClasses.scala covtest MatchCaseClasses$ @@ -103,24 +120,24 @@ false false println("c") -5 +6 MatchCaseClasses.scala covtest MatchCaseClasses$ Object covtest.MatchCaseClasses$ f -201 -221 -9 +242 +265 +10 Block false 0 false -case p @ Pat2(1, -1) +case Pat2(_, y: String) -6 +7 MatchCaseClasses.scala covtest MatchCaseClasses$ @@ -137,7 +154,7 @@ false false println(y) -7 +8 MatchCaseClasses.scala covtest MatchCaseClasses$ @@ -154,24 +171,24 @@ false false println("d") -8 +9 MatchCaseClasses.scala covtest MatchCaseClasses$ Object covtest.MatchCaseClasses$ f -242 -265 -10 +309 +321 +13 Block false 0 false -case Pat2(_, y: String) +case p: Pat2 -9 +10 MatchCaseClasses.scala covtest MatchCaseClasses$ @@ -188,24 +205,24 @@ false false println("e") -10 +11 MatchCaseClasses.scala covtest MatchCaseClasses$ Object covtest.MatchCaseClasses$ f -309 -321 -13 +342 +348 +14 Block false 0 false -case p: Pat2 +case _ -11 +12 MatchCaseClasses.scala covtest MatchCaseClasses$ @@ -222,23 +239,6 @@ false false println("other") -12 -MatchCaseClasses.scala -covtest -MatchCaseClasses$ -Object -covtest.MatchCaseClasses$ -f -342 -348 -14 - -Block -false -0 -false -case _ - 13 MatchCaseClasses.scala covtest diff --git a/tests/coverage/pos/PolymorphicExtensions.scala b/tests/coverage/pos/PolymorphicExtensions.scala index a2bafc451606..51c55f424ac9 
100644 --- a/tests/coverage/pos/PolymorphicExtensions.scala +++ b/tests/coverage/pos/PolymorphicExtensions.scala @@ -6,6 +6,10 @@ object PolyExt: extension [A](i: Int) def get(x: A): A = x + def tap[U](f: Int => U): Int = ??? "str".foo(0) // ({foo("str")}[type])(0) i.e. Apply(TypeApply( Apply(foo, "str"), type ), List(0)) 123.get(0) // {(get[type])(123)}(0) i.e. Apply(Apply(TypeApply(...), List(123)), List(0)) + + def foo: Int = 42 + def bar: Int = foo.tap(println) diff --git a/tests/coverage/pos/PolymorphicExtensions.scoverage.check b/tests/coverage/pos/PolymorphicExtensions.scoverage.check index e244b529ce0a..495ddf037d3e 100644 --- a/tests/coverage/pos/PolymorphicExtensions.scoverage.check +++ b/tests/coverage/pos/PolymorphicExtensions.scoverage.check @@ -58,16 +58,16 @@ covtest PolyExt$ Object covtest.PolyExt$ - -138 -147 -9 -foo -Apply +tap +170 +173 +8 +??? +Ident false 0 false -"str".foo +??? 3 PolymorphicExtensions.scala @@ -75,27 +75,61 @@ covtest PolyExt$ Object covtest.PolyExt$ +tap +139 +146 +8 +tap +DefDef +false +0 +false +def tap + +4 +PolymorphicExtensions.scala +covtest +PolyExt$ +Object +covtest.PolyExt$ -138 -150 -9 - +177 +189 +10 +foo Apply false 0 false "str".foo(0) -4 +5 PolymorphicExtensions.scala covtest PolyExt$ Object covtest.PolyExt$ -238 -248 +177 +186 10 +foo +Apply +false +0 +false +"str".foo + +6 +PolymorphicExtensions.scala +covtest +PolyExt$ +Object +covtest.PolyExt$ + +277 +287 +11 get Apply false @@ -103,3 +137,105 @@ false false 123.get(0) +7 +PolymorphicExtensions.scala +covtest +PolyExt$ +Object +covtest.PolyExt$ +foo +370 +377 +13 +foo +DefDef +false +0 +false +def foo + +8 +PolymorphicExtensions.scala +covtest +PolyExt$ +Object +covtest.PolyExt$ +bar +405 +421 +14 +tap +Apply +false +0 +false +foo.tap(println) + +9 +PolymorphicExtensions.scala +covtest +PolyExt$ +Object +covtest.PolyExt$ +bar +405 +412 +14 +tap +Apply +false +0 +false +foo.tap + +10 +PolymorphicExtensions.scala +covtest +PolyExt$ +Object +covtest.PolyExt$ 
+bar +405 +408 +14 +foo +Ident +false +0 +false +foo + +11 +PolymorphicExtensions.scala +covtest +PolyExt$ +Object +covtest.PolyExt$ +$anonfun +413 +420 +14 +println +Apply +false +0 +false +println + +12 +PolymorphicExtensions.scala +covtest +PolyExt$ +Object +covtest.PolyExt$ +bar +390 +397 +14 +bar +DefDef +false +0 +false +def bar + diff --git a/tests/coverage/pos/PolymorphicMethods.scoverage.check b/tests/coverage/pos/PolymorphicMethods.scoverage.check index 77615cc6f0bc..5bcfe254ffe2 100644 --- a/tests/coverage/pos/PolymorphicMethods.scoverage.check +++ b/tests/coverage/pos/PolymorphicMethods.scoverage.check @@ -60,14 +60,14 @@ Object covtest.PolyMeth$ 147 -158 +170 6 - +f Apply false 0 false -C[String]() +C[String]().f("str", 0) 3 PolymorphicMethods.scala @@ -77,14 +77,14 @@ Object covtest.PolyMeth$ 147 -170 +158 6 -f + Apply false 0 false -C[String]().f("str", 0) +C[String]() 4 PolymorphicMethods.scala diff --git a/tests/coverage/pos/Select.scoverage.check b/tests/coverage/pos/Select.scoverage.check index 94b693f0e639..183ba7395de0 100644 --- a/tests/coverage/pos/Select.scoverage.check +++ b/tests/coverage/pos/Select.scoverage.check @@ -75,16 +75,16 @@ covtest B Class covtest.B - -134 -135 -9 - +print +166 +179 +11 +print Apply false 0 false -A +super.print() 4 Select.scala @@ -93,15 +93,15 @@ B Class covtest.B print -166 -179 -11 -print +184 +206 +12 +println Apply false 0 false -super.print() +println(this.instance) 5 Select.scala @@ -127,23 +127,6 @@ B Class covtest.B print -184 -206 -12 -println -Apply -false -0 -false -println(this.instance) - -7 -Select.scala -covtest -B -Class -covtest.B -print 139 157 10 @@ -154,7 +137,7 @@ false false override def print -8 +7 Select.scala covtest Select$package$ @@ -171,7 +154,7 @@ false false A() -9 +8 Select.scala covtest Select$package$ @@ -188,7 +171,7 @@ false false new A -10 +9 Select.scala covtest Select$package$ @@ -196,16 +179,16 @@ Object covtest.Select$package$ test 263 -273 +281 18 -instance -Select 
+print +Apply false 0 false -a.instance +a.instance.print() -11 +10 Select.scala covtest Select$package$ @@ -213,16 +196,16 @@ Object covtest.Select$package$ test 263 -281 +273 18 -print -Apply +instance +Select false 0 false -a.instance.print() +a.instance -12 +11 Select.scala covtest Select$package$ @@ -239,7 +222,7 @@ false false a.print() -13 +12 Select.scala covtest Select$package$ diff --git a/tests/coverage/pos/SimpleMethods.scala b/tests/coverage/pos/SimpleMethods.scala index 510a86799b32..876a952f63be 100644 --- a/tests/coverage/pos/SimpleMethods.scala +++ b/tests/coverage/pos/SimpleMethods.scala @@ -15,6 +15,9 @@ class C: if false then true else false + def partialCond: Unit = + if false then () + def new1: C = new {} def tryCatch: Unit = diff --git a/tests/coverage/pos/SimpleMethods.scoverage.check b/tests/coverage/pos/SimpleMethods.scoverage.check index f2d9d61a1cd4..dc68258f9a66 100644 --- a/tests/coverage/pos/SimpleMethods.scoverage.check +++ b/tests/coverage/pos/SimpleMethods.scoverage.check @@ -177,10 +177,44 @@ covtest C Class covtest.C -new1 +partialCond +271 +273 +18 + +Literal +true +0 +false +() + +10 +SimpleMethods.scala +covtest +C +Class +covtest.C +partialCond 229 -237 +244 17 +partialCond +DefDef +false +0 +false +def partialCond + +11 +SimpleMethods.scala +covtest +C +Class +covtest.C +new1 +277 +285 +20 new1 DefDef false @@ -188,16 +222,16 @@ false false def new1 -10 +12 SimpleMethods.scala covtest C Class covtest.C tryCatch -282 -284 -20 +330 +332 +23 Literal true @@ -205,16 +239,16 @@ true false () -11 +13 SimpleMethods.scala covtest C Class covtest.C tryCatch -301 -318 -22 +349 +366 +25 Block false @@ -222,16 +256,16 @@ false false case e: Exception -12 +14 SimpleMethods.scala covtest C Class covtest.C tryCatch -253 -265 -19 +301 +313 +22 tryCatch DefDef false diff --git a/tests/coverage/pos/StructuralTypes.scoverage.check b/tests/coverage/pos/StructuralTypes.scoverage.check index 1e9650ce17a3..6108c83b08d6 100644 --- 
a/tests/coverage/pos/StructuralTypes.scoverage.check +++ b/tests/coverage/pos/StructuralTypes.scoverage.check @@ -24,16 +24,16 @@ covtest Record Class covtest.Record -$anonfun -159 -163 +selectDynamic +148 +172 5 -_1 -Select +find +Apply false 0 false -_._1 +elems.find(_._1 == name) 1 StructuralTypes.scala @@ -41,16 +41,16 @@ covtest Record Class covtest.Record -selectDynamic -148 -172 +$anonfun +159 +163 5 -find -Apply +_1 +Select false 0 false -elems.find(_._1 == name) +_._1 2 StructuralTypes.scala diff --git a/tests/coverage/pos/TypeLambdas.scoverage.check b/tests/coverage/pos/TypeLambdas.scoverage.check index f97d5285cf06..4085c3e41f18 100644 --- a/tests/coverage/pos/TypeLambdas.scoverage.check +++ b/tests/coverage/pos/TypeLambdas.scoverage.check @@ -25,15 +25,15 @@ TypeLambdas$ Object covtest.TypeLambdas$ test -310 -318 +306 +319 13 --> +apply Apply false 0 false -1 -> "1" +Map(1 -> "1") 1 TypeLambdas.scala @@ -43,14 +43,14 @@ Object covtest.TypeLambdas$ test 306 -319 +309 13 -apply -Apply +Map +Ident false 0 false -Map(1 -> "1") +Map 2 TypeLambdas.scala @@ -59,6 +59,23 @@ TypeLambdas$ Object covtest.TypeLambdas$ test +310 +318 +13 +-> +Apply +false +0 +false +1 -> "1" + +3 +TypeLambdas.scala +covtest +TypeLambdas$ +Object +covtest.TypeLambdas$ +test 324 334 14 @@ -69,7 +86,7 @@ false false println(m) -3 +4 TypeLambdas.scala covtest TypeLambdas$ @@ -86,7 +103,7 @@ false false println(tuple) -4 +5 TypeLambdas.scala covtest TypeLambdas$ diff --git a/tests/coverage/run/currying/test.scoverage.check b/tests/coverage/run/currying/test.scoverage.check index d60d2f088a78..591bf44c17fd 100644 --- a/tests/coverage/run/currying/test.scoverage.check +++ b/tests/coverage/run/currying/test.scoverage.check @@ -93,15 +93,15 @@ Test$ Object .Test$ main -285 -296 +277 +297 11 -f1 +println Apply false 0 false -f1(0)(1)(2) +println(f1(0)(1)(2)) 5 currying/test.scala @@ -110,15 +110,15 @@ Test$ Object .Test$ main -277 -297 +285 +296 11 -println +f1 Apply false 0 false 
-println(f1(0)(1)(2)) +f1(0)(1)(2) 6 currying/test.scala @@ -127,15 +127,15 @@ Test$ Object .Test$ main -310 -312 +302 +322 12 -f2 -Ident +println +Apply false 0 false -f2 +println(f2(0)(1)(2)) 7 currying/test.scala @@ -145,14 +145,14 @@ Object .Test$ main 310 -315 +321 12 apply Apply false 0 false -f2(0) +f2(0)(1)(2) 8 currying/test.scala @@ -179,14 +179,14 @@ Object .Test$ main 310 -321 +315 12 apply Apply false 0 false -f2(0)(1)(2) +f2(0) 10 currying/test.scala @@ -195,15 +195,15 @@ Test$ Object .Test$ main -302 -322 +310 +312 12 -println -Apply +f2 +Ident false 0 false -println(f2(0)(1)(2)) +f2 11 currying/test.scala @@ -212,15 +212,15 @@ Test$ Object .Test$ main -335 -337 +327 +365 13 -g1 -Ident +println +Apply false 0 false -g1 +println(g1(using 0)(using 1)(using 2)) 12 currying/test.scala @@ -246,15 +246,15 @@ Test$ Object .Test$ main -327 -365 -13 +370 +408 +14 println Apply false 0 false -println(g1(using 0)(using 1)(using 2)) +println(g2(using 0)(using 1)(using 2)) 14 currying/test.scala @@ -280,23 +280,6 @@ Test$ Object .Test$ main -370 -408 -14 -println -Apply -false -0 -false -println(g2(using 0)(using 1)(using 2)) - -16 -currying/test.scala - -Test$ -Object -.Test$ -main 235 243 10 diff --git a/tests/coverage/run/erased/test.check b/tests/coverage/run/erased/test.check index 3e287ad0ce91..8a87e0e34c16 100644 --- a/tests/coverage/run/erased/test.check +++ b/tests/coverage/run/erased/test.check @@ -1,3 +1,4 @@ foo(a)(b) +foo(a)(b) identity(idem) -foo(a)(idem) \ No newline at end of file +foo(a)(idem) diff --git a/tests/coverage/run/erased/test.scala b/tests/coverage/run/erased/test.scala index 9419d68f955c..15a067e9ed50 100644 --- a/tests/coverage/run/erased/test.scala +++ b/tests/coverage/run/erased/test.scala @@ -1,5 +1,7 @@ import scala.language.experimental.erasedDefinitions +erased def parameterless: String = "y" + erased def e(x: String): String = "x" def foo(erased a: String)(b: String): String = println(s"foo(a)($b)") @@ -11,5 +13,6 @@ def 
identity(s: String): String = @main def Test: Unit = + foo(parameterless)("b") foo(e("a"))("b") foo(e("a"))(identity("idem")) diff --git a/tests/coverage/run/erased/test.scoverage.check b/tests/coverage/run/erased/test.scoverage.check index 3cd9ff86c40a..f31c1a2418a9 100644 --- a/tests/coverage/run/erased/test.scoverage.check +++ b/tests/coverage/run/erased/test.scoverage.check @@ -25,15 +25,15 @@ test$package$ Object .test$package$ foo -149 -162 -4 -s +181 +203 +6 +println Apply false 0 false -s"foo(a)($b)" +println(s"foo(a)($b)") 1 erased/test.scala @@ -42,15 +42,15 @@ test$package$ Object .test$package$ foo -141 -163 -4 -println +189 +202 +6 +s Apply false 0 false -println(s"foo(a)($b)") +s"foo(a)($b)" 2 erased/test.scala @@ -59,9 +59,9 @@ test$package$ Object .test$package$ foo -92 -99 -3 +132 +139 +5 foo DefDef false @@ -76,15 +76,15 @@ test$package$ Object .test$package$ identity -213 -228 -8 -s +245 +269 +10 +println Apply false 0 false -s"identity($s)" +println(s"identity($s)") 4 erased/test.scala @@ -93,15 +93,15 @@ test$package$ Object .test$package$ identity -205 -229 -8 -println +253 +268 +10 +s Apply false 0 false -println(s"identity($s)") +s"identity($s)" 5 erased/test.scala @@ -110,9 +110,9 @@ test$package$ Object .test$package$ identity -169 -181 -7 +209 +221 +9 identity DefDef false @@ -127,15 +127,15 @@ test$package$ Object .test$package$ Test -264 -270 -13 -e +300 +323 +15 +foo Apply false 0 false -e("a") +foo(parameterless)("b") 7 erased/test.scala @@ -144,9 +144,9 @@ test$package$ Object .test$package$ Test -260 -276 -13 +326 +342 +16 foo Apply false @@ -161,15 +161,15 @@ test$package$ Object .test$package$ Test -291 -307 -14 -identity +345 +374 +17 +foo Apply false 0 false -identity("idem") +foo(e("a"))(identity("idem")) 9 erased/test.scala @@ -178,15 +178,15 @@ test$package$ Object .test$package$ Test -279 -308 -14 -foo +357 +373 +17 +identity Apply false 0 false -foo(e("a"))(identity("idem")) +identity("idem") 10 erased/test.scala @@ -195,14 
+195,13 @@ test$package$ Object .test$package$ Test -235 -249 -12 +275 +289 +14 Test DefDef false 0 false -@main -def Test +@main\ndef Test diff --git a/tests/coverage/run/extend-case-class/test.check b/tests/coverage/run/extend-case-class/test.check new file mode 100644 index 000000000000..aa960ae5a323 --- /dev/null +++ b/tests/coverage/run/extend-case-class/test.check @@ -0,0 +1,2 @@ +1 +6178 \ No newline at end of file diff --git a/tests/coverage/run/extend-case-class/test.scala b/tests/coverage/run/extend-case-class/test.scala new file mode 100644 index 000000000000..be9c666b4f55 --- /dev/null +++ b/tests/coverage/run/extend-case-class/test.scala @@ -0,0 +1,10 @@ +// see issue 15835 +import java.math.MathContext +case class DecimalConf(mathContext: MathContext, scaleLimit: Int, digitsLimit: Int) +object DecimalConf extends DecimalConf(MathContext.UNLIMITED, 6178, 308) + +@main +def Test: Unit = + val c = DecimalConf(MathContext.DECIMAL32, 1, 0) + println(c.scaleLimit) + println(DecimalConf.scaleLimit) diff --git a/tests/coverage/run/extend-case-class/test.scoverage.check b/tests/coverage/run/extend-case-class/test.scoverage.check new file mode 100644 index 000000000000..69da960e4f6a --- /dev/null +++ b/tests/coverage/run/extend-case-class/test.scoverage.check @@ -0,0 +1,71 @@ +# Coverage data, format version: 3.0 +# Statement data: +# - id +# - source path +# - package name +# - class name +# - class type (Class, Object or Trait) +# - full class name +# - method name +# - start offset +# - end offset +# - line number +# - symbol name +# - tree name +# - is branch +# - invocations count +# - is ignored +# - description (can be multi-line) +# ' ' sign +# ------------------------------------------ +0 +extend-case-class/test.scala + +test$package$ +Object +.test$package$ +Test +282 +303 +8 +println +Apply +false +0 +false +println(c.scaleLimit) + +1 +extend-case-class/test.scala + +test$package$ +Object +.test$package$ +Test +306 +337 +9 +println +Apply +false +0 
+false +println(DecimalConf.scaleLimit) + +2 +extend-case-class/test.scala + +test$package$ +Object +.test$package$ +Test +206 +220 +6 +Test +DefDef +false +0 +false +@main\ndef Test + diff --git a/tests/coverage/run/inheritance/test.scoverage.check b/tests/coverage/run/inheritance/test.scoverage.check index 5744f4b5eb3b..4b75764fcef2 100644 --- a/tests/coverage/run/inheritance/test.scoverage.check +++ b/tests/coverage/run/inheritance/test.scoverage.check @@ -21,23 +21,6 @@ 0 inheritance/test.scala -B -Class -.B - -56 -63 -1 - -Apply -false -0 -false -A(x, 0) - -1 -inheritance/test.scala - C1 Class .C1 @@ -52,24 +35,7 @@ false false println("block") -2 -inheritance/test.scala - -C1 -Class -.C1 - -81 -105 -2 - -Apply -false -0 -false -B({println("block"); 1}) - -3 +1 inheritance/test.scala C2 @@ -86,24 +52,24 @@ false false A(2,2) -4 +2 inheritance/test.scala -C2 -Class -.C2 - -123 -134 -3 - +test$package$ +Object +.test$package$ +Test +161 +176 +7 +println Apply false 0 false -B(A(2,2).x) +println(C1().x) -5 +3 inheritance/test.scala test$package$ @@ -120,24 +86,24 @@ false false C1() -6 +4 inheritance/test.scala test$package$ Object .test$package$ Test -161 -176 -7 +211 +226 +9 println Apply false 0 false -println(C1().x) +println(C2().x) -7 +5 inheritance/test.scala test$package$ @@ -154,24 +120,7 @@ false false C2() -8 -inheritance/test.scala - -test$package$ -Object -.test$package$ -Test -211 -226 -9 -println -Apply -false -0 -false -println(C2().x) - -9 +6 inheritance/test.scala test$package$ @@ -186,6 +135,5 @@ DefDef false 0 false -@main -def Test +@main\ndef Test diff --git a/tests/coverage/run/inline-def/test.scoverage.check b/tests/coverage/run/inline-def/test.scoverage.check index ec336127385a..784c0a00b62b 100644 --- a/tests/coverage/run/inline-def/test.scoverage.check +++ b/tests/coverage/run/inline-def/test.scoverage.check @@ -21,57 +21,6 @@ 0 inline-def/test.scala -A -Class -.A - -66 -67 -4 - -Apply -false -0 -false -B - -1 -inline-def/test.scala - 
-A -Class -.A -foo$retainedBody -134 -148 -7 -toString -Apply -false -0 -false -"foo".toString - -2 -inline-def/test.scala - -A -Class -.A -foo$retainedBody -134 -134 -7 -foo$retainedBody -DefDef -false -0 -false - - -3 -inline-def/test.scala - test$package$ Object .test$package$ @@ -86,7 +35,7 @@ false false A() -4 +1 inline-def/test.scala test$package$ @@ -103,7 +52,24 @@ false false println(a.x) -5 +2 +inline-def/test.scala + +test$package$ +Object +.test$package$ +Test +246 +260 +14 +println +Apply +false +0 +false +println(a.foo) + +3 inline-def/test.scala test$package$ @@ -120,24 +86,24 @@ false false "foo".toString -6 +4 inline-def/test.scala test$package$ Object .test$package$ Test -246 -260 -14 +263 +277 +15 println Apply false 0 false -println(a.foo) +println(a.bar) -7 +5 inline-def/test.scala test$package$ @@ -154,24 +120,24 @@ false false "bar".toString -8 +6 inline-def/test.scala test$package$ Object .test$package$ Test -263 -277 -15 +295 +309 +17 println Apply false 0 false -println(a.bar) +println(b.foo) -9 +7 inline-def/test.scala test$package$ @@ -188,24 +154,7 @@ false false b.foo -10 -inline-def/test.scala - -test$package$ -Object -.test$package$ -Test -295 -309 -17 -println -Apply -false -0 -false -println(b.foo) - -11 +8 inline-def/test.scala test$package$ @@ -220,6 +169,5 @@ DefDef false 0 false -@main -def Test +@main\ndef Test diff --git a/tests/coverage/run/interpolation/test.scoverage.check b/tests/coverage/run/interpolation/test.scoverage.check index 1f16d03cc7df..6b38152cdcc1 100644 --- a/tests/coverage/run/interpolation/test.scoverage.check +++ b/tests/coverage/run/interpolation/test.scoverage.check @@ -25,15 +25,15 @@ Test$ Object .Test$ simple -68 -76 +60 +78 3 -length +s Apply false 0 false -b.length +s"$a, ${b.length}" 1 interpolation/test.scala @@ -42,15 +42,15 @@ Test$ Object .Test$ simple -60 -78 +68 +76 3 -s +length Apply false 0 false -s"$a, ${b.length}" +b.length 2 interpolation/test.scala @@ -127,15 +127,15 @@ Test$ Object 
.Test$ main -229 -244 -10 -zipWithIndex -Select +195 +199 +9 +List +Ident false 0 false -xs.zipWithIndex +List 7 interpolation/test.scala @@ -143,16 +143,16 @@ interpolation/test.scala Test$ Object .Test$ -$anonfun -267 -276 +main +229 +278 10 -s +map Apply false 0 false -s"$i: $s" +xs.zipWithIndex.map((s, i) => println(s"$i: $s")) 8 interpolation/test.scala @@ -160,16 +160,16 @@ interpolation/test.scala Test$ Object .Test$ -$anonfun -259 -277 +main +229 +244 10 -println -Apply +zipWithIndex +Select false 0 false -println(s"$i: $s") +xs.zipWithIndex 9 interpolation/test.scala @@ -177,16 +177,16 @@ interpolation/test.scala Test$ Object .Test$ -main -229 -278 +$anonfun +259 +277 10 -map +println Apply false 0 false -xs.zipWithIndex.map((s, i) => println(s"$i: $s")) +println(s"$i: $s") 10 interpolation/test.scala @@ -194,16 +194,16 @@ interpolation/test.scala Test$ Object .Test$ -main -292 -308 -12 -simple +$anonfun +267 +276 +10 +s Apply false 0 false -simple(1, "abc") +s"$i: $s" 11 interpolation/test.scala @@ -229,15 +229,15 @@ Test$ Object .Test$ main -322 -331 -13 -hexa +292 +308 +12 +simple Apply false 0 false -hexa(127) +simple(1, "abc") 13 interpolation/test.scala @@ -263,15 +263,15 @@ Test$ Object .Test$ main -345 -354 -14 -raw +322 +331 +13 +hexa Apply false 0 false -raw"a\nb" +hexa(127) 15 interpolation/test.scala @@ -288,7 +288,7 @@ Apply false 0 false -println(raw"a\nb") +println(raw"a\\nb") 16 interpolation/test.scala @@ -297,6 +297,23 @@ Test$ Object .Test$ main +345 +354 +14 +raw +Apply +false +0 +false +raw"a\\nb" + +17 +interpolation/test.scala + +Test$ +Object +.Test$ +main 130 138 8 diff --git a/tests/coverage/run/java-methods/test.scoverage.check b/tests/coverage/run/java-methods/test.scoverage.check index c1038d4a4dad..7d3752c8db20 100644 --- a/tests/coverage/run/java-methods/test.scoverage.check +++ b/tests/coverage/run/java-methods/test.scoverage.check @@ -93,15 +93,15 @@ test$package$ Object .test$package$ Test -173 -193 +165 +194 8 -identity 
+println Apply false 0 false -obj.identity[Int](0) +println(obj.identity[Int](0)) 5 java-methods/test.scala @@ -110,15 +110,15 @@ test$package$ Object .test$package$ Test -165 -194 +173 +193 8 -println +identity Apply false 0 false -println(obj.identity[Int](0)) +obj.identity[Int](0) 6 java-methods/test.scala @@ -152,6 +152,5 @@ DefDef false 0 false -@main -def Test +@main\ndef Test diff --git a/tests/coverage/run/lifting-bool/test.scoverage.check b/tests/coverage/run/lifting-bool/test.scoverage.check index 93321474a6a7..9d2a3d0f0162 100644 --- a/tests/coverage/run/lifting-bool/test.scoverage.check +++ b/tests/coverage/run/lifting-bool/test.scoverage.check @@ -161,15 +161,15 @@ test$package$ Object .test$package$ Test -349 -366 +341 +367 12 -s +println Apply false 0 false -s"$a $b $c $d $e" +println(s"$a $b $c $d $e") 9 lifting-bool/test.scala @@ -178,15 +178,15 @@ test$package$ Object .test$package$ Test -341 -367 +349 +366 12 -println +s Apply false 0 false -println(s"$a $b $c $d $e") +s"$a $b $c $d $e" 10 lifting-bool/test.scala @@ -229,15 +229,15 @@ test$package$ Object .test$package$ Test -432 -443 +422 +466 17 -notCalled +f Apply false 0 false -notCalled() +f(true || notCalled(), false && notCalled()) 13 lifting-bool/test.scala @@ -246,8 +246,8 @@ test$package$ Object .test$package$ Test -454 -465 +432 +443 17 notCalled Apply @@ -263,15 +263,15 @@ test$package$ Object .test$package$ Test -422 -466 +454 +465 17 -f +notCalled Apply false 0 false -f(true || notCalled(), false && notCalled()) +notCalled() 15 lifting-bool/test.scala @@ -305,6 +305,5 @@ DefDef false 0 false -@main -def Test +@main\ndef Test diff --git a/tests/coverage/run/lifting/test.scoverage.check b/tests/coverage/run/lifting/test.scoverage.check index e8b470202d40..536c5ab0cf1b 100644 --- a/tests/coverage/run/lifting/test.scoverage.check +++ b/tests/coverage/run/lifting/test.scoverage.check @@ -42,15 +42,15 @@ Vals Class .Vals -46 -57 -2 -apply -Apply +22 +26 +1 +List +Ident false 0 false 
-List(1,2,3) +List 2 lifting/test.scala @@ -72,21 +72,55 @@ l :: List(1,2,3) 3 lifting/test.scala +Vals +Class +.Vals + +46 +57 +2 +apply +Apply +false +0 +false +List(1,2,3) + +4 +lifting/test.scala + +Vals +Class +.Vals + +46 +50 +2 +List +Ident +false +0 +false +List + +5 +lifting/test.scala + A Class .A msg 104 -116 +136 5 + Apply false 0 false -"string" + a +"string" + a + "." + b + "." + c -4 +6 lifting/test.scala A @@ -94,16 +128,16 @@ Class .A msg 104 -122 +132 5 + Apply false 0 false -"string" + a + "." +"string" + a + "." + b + "." -5 +7 lifting/test.scala A @@ -120,7 +154,7 @@ false false "string" + a + "." + b -6 +8 lifting/test.scala A @@ -128,16 +162,16 @@ Class .A msg 104 -132 +122 5 + Apply false 0 false -"string" + a + "." + b + "." +"string" + a + "." -7 +9 lifting/test.scala A @@ -145,16 +179,16 @@ Class .A msg 104 -136 +116 5 + Apply false 0 false -"string" + a + "." + b + "." + c +"string" + a -8 +10 lifting/test.scala A @@ -171,7 +205,7 @@ false false def msg -9 +11 lifting/test.scala A @@ -188,7 +222,7 @@ false false def integer -10 +12 lifting/test.scala A @@ -205,7 +239,7 @@ false false def ex -11 +13 lifting/test.scala test$package$ @@ -222,7 +256,7 @@ false false A() -12 +14 lifting/test.scala test$package$ @@ -239,41 +273,41 @@ false false def f -13 +15 lifting/test.scala test$package$ Object .test$package$ Test -276 -285 +264 +286 14 -integer -Select +msg +Apply false 0 false -a.integer +a.msg(i, 0, a.integer) -14 +16 lifting/test.scala test$package$ Object .test$package$ Test -264 -286 +276 +285 14 -msg -Apply +integer +Select false 0 false -a.msg(i, 0, a.integer) +a.integer -15 +17 lifting/test.scala test$package$ @@ -290,7 +324,7 @@ false false println(x) -16 +18 lifting/test.scala test$package$ @@ -298,24 +332,24 @@ Object .test$package$ Test 306 -310 +334 16 -ex -Select +msg +Apply false 0 false -a.ex +a.ex.msg(i, 0, a.ex.integer) -17 +19 lifting/test.scala test$package$ Object .test$package$ Test -321 -325 +306 +310 16 ex Select 
@@ -324,7 +358,7 @@ false false a.ex -18 +20 lifting/test.scala test$package$ @@ -332,33 +366,33 @@ Object .test$package$ Test 321 -333 +325 16 -integer +ex Select false 0 false -a.ex.integer +a.ex -19 +21 lifting/test.scala test$package$ Object .test$package$ Test -306 -334 +321 +333 16 -msg -Apply +integer +Select false 0 false -a.ex.msg(i, 0, a.ex.integer) +a.ex.integer -20 +22 lifting/test.scala test$package$ @@ -375,41 +409,41 @@ false false println(x) -21 +23 lifting/test.scala test$package$ Object .test$package$ Test -360 -363 +354 +370 18 -f +msg Apply false 0 false -f() +a.msg(f(), 0, i) -22 +24 lifting/test.scala test$package$ Object .test$package$ Test -354 -370 +360 +363 18 -msg +f Apply false 0 false -a.msg(f(), 0, i) +f() -23 +25 lifting/test.scala test$package$ @@ -426,7 +460,7 @@ false false println(x) -24 +26 lifting/test.scala test$package$ @@ -441,6 +475,5 @@ DefDef false 0 false -@main -def Test +@main\ndef Test diff --git a/tests/coverage/run/parameterless/test.scoverage.check b/tests/coverage/run/parameterless/test.scoverage.check index dae3f6775e60..91a9b1d6597f 100644 --- a/tests/coverage/run/parameterless/test.scoverage.check +++ b/tests/coverage/run/parameterless/test.scoverage.check @@ -195,6 +195,23 @@ test$package$ Object .test$package$ Test +265 +275 +21 +println +Apply +false +0 +false +println(f) + +11 +parameterless/test.scala + +test$package$ +Object +.test$package$ +Test 273 274 21 @@ -205,24 +222,24 @@ false false f -11 +12 parameterless/test.scala test$package$ Object .test$package$ Test -265 -275 -21 +278 +288 +22 println Apply false 0 false -println(f) +println(g) -12 +13 parameterless/test.scala test$package$ @@ -239,24 +256,24 @@ false false g -13 +14 parameterless/test.scala test$package$ Object .test$package$ Test -278 -288 -22 +291 +303 +23 println Apply false 0 false -println(g) +println(O.f) -14 +15 parameterless/test.scala test$package$ @@ -273,24 +290,24 @@ false false O.f -15 +16 parameterless/test.scala 
test$package$ Object .test$package$ Test -291 -303 -23 +306 +318 +24 println Apply false 0 false -println(O.f) +println(O.g) -16 +17 parameterless/test.scala test$package$ @@ -307,23 +324,6 @@ false false O.g -17 -parameterless/test.scala - -test$package$ -Object -.test$package$ -Test -306 -318 -24 -println -Apply -false -0 -false -println(O.g) - 18 parameterless/test.scala @@ -339,6 +339,5 @@ DefDef false 0 false -@main -def Test +@main\ndef Test diff --git a/tests/coverage/run/trait/test.scoverage.check b/tests/coverage/run/trait/test.scoverage.check index d48e14eacf08..8dbf64238cfa 100644 --- a/tests/coverage/run/trait/test.scoverage.check +++ b/tests/coverage/run/trait/test.scoverage.check @@ -38,23 +38,6 @@ def x 1 trait/test.scala -Impl2 -Class -.Impl2 - -91 -101 -6 - -Apply -false -0 -false -T2("test") - -2 -trait/test.scala - Impl3 Class .Impl3 @@ -69,24 +52,24 @@ false false Impl2() -3 +2 trait/test.scala -Impl3 -Class -.Impl3 - -130 -143 -7 - +test$package$ +Object +.test$package$ +Test +170 +188 +11 +println Apply false 0 false -T2(Impl2().p) +println(Impl1().x) -4 +3 trait/test.scala test$package$ @@ -103,7 +86,7 @@ false false Impl1() -5 +4 trait/test.scala test$package$ @@ -120,24 +103,24 @@ false false Impl1().x -6 +5 trait/test.scala test$package$ Object .test$package$ Test -170 -188 -11 +196 +214 +12 println Apply false 0 false -println(Impl1().x) +println(Impl2().p) -7 +6 trait/test.scala test$package$ @@ -154,24 +137,24 @@ false false Impl2() -8 +7 trait/test.scala test$package$ Object .test$package$ Test -196 -214 -12 +225 +243 +13 println Apply false 0 false -println(Impl2().p) +println(Impl3().p) -9 +8 trait/test.scala test$package$ @@ -188,24 +171,7 @@ false false Impl3() -10 -trait/test.scala - -test$package$ -Object -.test$package$ -Test -225 -243 -13 -println -Apply -false -0 -false -println(Impl3().p) - -11 +9 trait/test.scala test$package$ @@ -220,6 +186,5 @@ DefDef false 0 false -@main -def Test +@main\ndef Test diff --git 
a/tests/coverage/run/varargs/test_1.scoverage.check b/tests/coverage/run/varargs/test_1.scoverage.check index 3a242f7a97a4..2c4edea68fcc 100644 --- a/tests/coverage/run/varargs/test_1.scoverage.check +++ b/tests/coverage/run/varargs/test_1.scoverage.check @@ -76,15 +76,15 @@ test_1$package$ Object .test_1$package$ Test -142 -147 +133 +153 10 -f +repeated Apply false 0 false -f("") +repeated(f(""), "b") 4 varargs/test_1.scala @@ -93,15 +93,15 @@ test_1$package$ Object .test_1$package$ Test -133 -153 +142 +147 10 -repeated +f Apply false 0 false -repeated(f(""), "b") +f("") 5 varargs/test_1.scala @@ -178,15 +178,15 @@ test_1$package$ Object .test_1$package$ Test -291 -301 +268 +302 16 -f +multiple Apply false 0 false -f("first") +JavaVarargs_1.multiple(f("first")) 10 varargs/test_1.scala @@ -195,15 +195,15 @@ test_1$package$ Object .test_1$package$ Test -268 -302 +291 +301 16 -multiple +f Apply false 0 false -JavaVarargs_1.multiple(f("first")) +f("first") 11 varargs/test_1.scala @@ -229,15 +229,15 @@ test_1$package$ Object .test_1$package$ Test -345 -355 +322 +371 18 -f +multiple Apply false 0 false -f("first") +JavaVarargs_1.multiple(f("first"), "a", "b", "c") 13 varargs/test_1.scala @@ -246,15 +246,15 @@ test_1$package$ Object .test_1$package$ Test -322 -371 +345 +355 18 -multiple +f Apply false 0 false -JavaVarargs_1.multiple(f("first"), "a", "b", "c") +f("first") 14 varargs/test_1.scala @@ -288,6 +288,5 @@ DefDef false 0 false -@main -def Test +@main\ndef Test diff --git a/tests/disabled/neg-custom-args/captures/capt-wf.scala b/tests/disabled/neg-custom-args/captures/capt-wf.scala index 54fe545f443b..bfe349747776 100644 --- a/tests/disabled/neg-custom-args/captures/capt-wf.scala +++ b/tests/disabled/neg-custom-args/captures/capt-wf.scala @@ -1,7 +1,7 @@ // No longer valid class C -type Cap = C @retains(*) -type Top = Any @retains(*) +type Cap = C @retains(caps.*) +type Top = Any @retains(caps.*) type T = (x: Cap) => List[String @retains(x)] => Unit // error val 
x: (x: Cap) => Array[String @retains(x)] = ??? // error @@ -17,3 +17,5 @@ def test: Unit = val y = f(C()) // ok val y2 = f2(C()) // ok () + var x11 = f + val x12: {x11} Any = x11 diff --git a/tests/disabled/neg-custom-args/captures/try2.scala b/tests/disabled/neg-custom-args/captures/try2.scala index dd3cc890a197..876dc1ec12f1 100644 --- a/tests/disabled/neg-custom-args/captures/try2.scala +++ b/tests/disabled/neg-custom-args/captures/try2.scala @@ -5,7 +5,7 @@ import annotation.ability @ability erased val canThrow: * = ??? class CanThrow[E <: Exception] extends Retains[canThrow.type] -type Top = Any @retains(*) +type Top = Any @retains(caps.*) infix type throws[R, E <: Exception] = (erased CanThrow[E]) ?=> R diff --git a/tests/explicit-nulls/pos/i16236.scala b/tests/explicit-nulls/pos/i16236.scala new file mode 100644 index 000000000000..a64f5bc176ce --- /dev/null +++ b/tests/explicit-nulls/pos/i16236.scala @@ -0,0 +1,10 @@ +// Copy of tests/pos/i16236.scala +trait A + +def consume[T](t: T): Unit = () + +def fails(p: (Double & A) | Null): Unit = consume(p) // was: assertion failed: & A + +def switchedOrder(p: (A & Double) | Null): Unit = consume(p) // ok +def nonPrimitive(p: (String & A) | Null): Unit = consume(p) // ok +def notNull(p: (Double & A)): Unit = consume(p) // ok diff --git a/tests/init/neg/closureLeak.check b/tests/init/neg/closureLeak.check index db3ed8eea37b..7019f2274ab6 100644 --- a/tests/init/neg/closureLeak.check +++ b/tests/init/neg/closureLeak.check @@ -8,7 +8,7 @@ | -> l.foreach(a => a.addX(this)) // error [ closureLeak.scala:11 ] | ^^^^^^^^^^^^^^^^^ | - | Promoting the value to hot failed due to the following problem: + | Promoting the value to hot (transitively initialized) failed due to the following problem: | Cannot prove the method argument is hot. Only hot values are safe to leak. | Found = ThisRef[class Outer]. | Non initialized field(s): value p. 
Promotion trace: diff --git a/tests/init/neg/cycle-structure.check b/tests/init/neg/cycle-structure.check index 79eab40be867..fb7b54c7cac2 100644 --- a/tests/init/neg/cycle-structure.check +++ b/tests/init/neg/cycle-structure.check @@ -1,7 +1,7 @@ -- Error: tests/init/neg/cycle-structure.scala:3:13 -------------------------------------------------------------------- 3 | val x = B(this) // error | ^^^^^^^ - | Problematic object instantiation: arg 1 is not hot. Calling trace: + | Problematic object instantiation: arg 1 is not hot (transitively initialized). Calling trace: | -> case class A(b: B) { [ cycle-structure.scala:1 ] | ^ | -> val x = B(this) // error [ cycle-structure.scala:3 ] @@ -16,7 +16,7 @@ -- Error: tests/init/neg/cycle-structure.scala:9:13 -------------------------------------------------------------------- 9 | val x = A(this) // error | ^^^^^^^ - | Problematic object instantiation: arg 1 is not hot. Calling trace: + | Problematic object instantiation: arg 1 is not hot (transitively initialized). Calling trace: | -> case class B(a: A) { [ cycle-structure.scala:7 ] | ^ | -> val x = A(this) // error [ cycle-structure.scala:9 ] diff --git a/tests/init/neg/i15363.check b/tests/init/neg/i15363.check index e6d0d74e9618..84cf268ef8a1 100644 --- a/tests/init/neg/i15363.check +++ b/tests/init/neg/i15363.check @@ -1,7 +1,7 @@ -- Error: tests/init/neg/i15363.scala:3:10 ----------------------------------------------------------------------------- 3 | val b = new B(this) // error | ^^^^^^^^^^^ - | Problematic object instantiation: arg 1 is not hot. Calling trace: + | Problematic object instantiation: arg 1 is not hot (transitively initialized). 
Calling trace: | -> class A: [ i15363.scala:1 ] | ^ | -> val b = new B(this) // error [ i15363.scala:3 ] diff --git a/tests/init/neg/i15883.scala b/tests/init/neg/i15883.scala new file mode 100644 index 000000000000..6f6e3066a878 --- /dev/null +++ b/tests/init/neg/i15883.scala @@ -0,0 +1,2 @@ +val a = b +val b = 1 // error diff --git a/tests/init/neg/inherit-non-hot.check b/tests/init/neg/inherit-non-hot.check index fd25876cb38e..408196333a27 100644 --- a/tests/init/neg/inherit-non-hot.check +++ b/tests/init/neg/inherit-non-hot.check @@ -11,7 +11,7 @@ | -> if b == null then b = new B(this) // error [ inherit-non-hot.scala:6 ] | ^^^^^^^^^^^^^^^ | - | Promoting the value to hot failed due to the following problem: + | Promoting the value to hot (transitively initialized) failed due to the following problem: | Cannot prove that the field value a is hot. Found = Cold. Promotion trace: | -> class B(a: A) { [ inherit-non-hot.scala:10 ] | ^^^^ diff --git a/tests/init/neg/override13.scala b/tests/init/neg/override13.scala deleted file mode 100644 index 172fdc2709c8..000000000000 --- a/tests/init/neg/override13.scala +++ /dev/null @@ -1,13 +0,0 @@ -abstract class A { - val x = f - - def f: Int -} - -class B(val y: Int) extends A { - def f: Int = y -} - -class C extends B(5) { - override val y: Int = 10 // error -} diff --git a/tests/init/neg/override16.scala b/tests/init/neg/override16.scala deleted file mode 100644 index 6e674faf57b1..000000000000 --- a/tests/init/neg/override16.scala +++ /dev/null @@ -1,23 +0,0 @@ -class A(n: Int) { - val x = n - - def f: Int = x * x -} - -class B(val a: A) { - val b = a.f -} - -class C(override val a: A) extends B(new A(10)) // ok - -class M(val a: A) - -class N(override val a: A) extends M(new A(10)) - -class X(val a: A) { - a.f -} - -class Y extends X(new A(10)) { - override val a: A = ??? 
// error -} diff --git a/tests/init/neg/override5.scala b/tests/init/neg/override5.scala index 8160793c5e35..061a41dc3fc2 100644 --- a/tests/init/neg/override5.scala +++ b/tests/init/neg/override5.scala @@ -25,9 +25,3 @@ trait Base { val message = "hello, " + name } - -class Derived(val name: String) extends Base - -class Derived2 extends Derived("hello") { - override val name: String = "ok" // error -} diff --git a/tests/init/neg/promotion-loop.check b/tests/init/neg/promotion-loop.check index 5d23841192b8..3d1eb7e74aec 100644 --- a/tests/init/neg/promotion-loop.check +++ b/tests/init/neg/promotion-loop.check @@ -8,7 +8,7 @@ | -> println(b) // error [ promotion-loop.scala:16 ] | ^ | - | Promoting the value to hot failed due to the following problem: + | Promoting the value to hot (transitively initialized) failed due to the following problem: | Cannot prove that the field value outer is hot. Found = ThisRef[class Test]. | Non initialized field(s): value n. Promotion trace: | -> val outer = test [ promotion-loop.scala:12 ] diff --git a/tests/init/neg/promotion-segment3.check b/tests/init/neg/promotion-segment3.check index 97b20022a2b2..220af18bd29a 100644 --- a/tests/init/neg/promotion-segment3.check +++ b/tests/init/neg/promotion-segment3.check @@ -8,5 +8,5 @@ | -> bar(new B) // error [ promotion-segment3.scala:9 ] | ^^^^^ | - | Promoting the value to hot failed due to the following problem: + | Promoting the value to hot (transitively initialized) failed due to the following problem: | Promotion cancelled as the value contains inner class C. 
diff --git a/tests/init/neg/secondary-ctor4.check b/tests/init/neg/secondary-ctor4.check index ce3dc3e6886d..1bf1a7286357 100644 --- a/tests/init/neg/secondary-ctor4.check +++ b/tests/init/neg/secondary-ctor4.check @@ -1,7 +1,7 @@ -- Error: tests/init/neg/secondary-ctor4.scala:54:14 ------------------------------------------------------------------- 54 | val c = new C(b, 5) // error | ^^^^^^^^^^^ - | Problematic object instantiation: arg 1 is not hot. Calling trace: + | Problematic object instantiation: arg 1 is not hot (transitively initialized). Calling trace: | -> class D { [ secondary-ctor4.scala:52 ] | ^ | -> val c = new C(b, 5) // error [ secondary-ctor4.scala:54 ] @@ -24,21 +24,21 @@ -- Error: tests/init/neg/secondary-ctor4.scala:42:4 -------------------------------------------------------------------- 42 | new A(new B(new D)) // error | ^^^^^^^^^^^^^^^^^^^ - | Problematic object instantiation: the outer M.this and arg 1 are not hot. Calling trace: - | -> class N(d: D) extends M(d) { [ secondary-ctor4.scala:59 ] - | ^ - | -> def this(d: D) = { [ secondary-ctor4.scala:7 ] - | ^ - | -> new A(new B(new D)) // error [ secondary-ctor4.scala:42 ] - | ^^^^^^^^^^^^^^^^^^^ + |Problematic object instantiation: the outer M.this and arg 1 are not hot (transitively initialized). Calling trace: + |-> class N(d: D) extends M(d) { [ secondary-ctor4.scala:59 ] + | ^ + |-> def this(d: D) = { [ secondary-ctor4.scala:7 ] + | ^ + |-> new A(new B(new D)) // error [ secondary-ctor4.scala:42 ] + | ^^^^^^^^^^^^^^^^^^^ | - | It leads to the following error during object initialization: - | Access field value n on a cold object. Calling trace: - | -> def this(b: B) = { [ secondary-ctor4.scala:17 ] - | ^ - | -> Inner().foo() [ secondary-ctor4.scala:26 ] - | ^^^^^^^ - | -> class Inner() { [ secondary-ctor4.scala:21 ] - | ^ - | -> println(b.n) [ secondary-ctor4.scala:23 ] - | ^^^ + |It leads to the following error during object initialization: + |Access field value n on a cold object. 
Calling trace: + |-> def this(b: B) = { [ secondary-ctor4.scala:17 ] + | ^ + |-> Inner().foo() [ secondary-ctor4.scala:26 ] + | ^^^^^^^ + |-> class Inner() { [ secondary-ctor4.scala:21 ] + | ^ + |-> println(b.n) [ secondary-ctor4.scala:23 ] + | ^^^ diff --git a/tests/init/neg/t3273.check b/tests/init/neg/t3273.check index 4ca79220c550..e548a5964cac 100644 --- a/tests/init/neg/t3273.check +++ b/tests/init/neg/t3273.check @@ -8,7 +8,7 @@ | -> val num1: LazyList[Int] = 1 #:: num1.map(_ + 1) // error [ t3273.scala:4 ] | ^^^^^^^^^^^^^^^ | - | Promoting the value to hot failed due to the following problem: + | Promoting the value to hot (transitively initialized) failed due to the following problem: | Access non-initialized value num1. Promotion trace: | -> val num1: LazyList[Int] = 1 #:: num1.map(_ + 1) // error [ t3273.scala:4 ] | ^^^^ @@ -22,7 +22,7 @@ | -> val num2: LazyList[Int] = 1 #:: num2.iterator.map(_ + 1).to(LazyList) // error [ t3273.scala:5 ] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | - | Promoting the value to hot failed due to the following problem: + | Promoting the value to hot (transitively initialized) failed due to the following problem: | Access non-initialized value num2. Promotion trace: | -> val num2: LazyList[Int] = 1 #:: num2.iterator.map(_ + 1).to(LazyList) // error [ t3273.scala:5 ] | ^^^^ diff --git a/tests/neg-custom-args/captures/boundschecks.scala b/tests/neg-custom-args/captures/boundschecks.scala new file mode 100644 index 000000000000..cf4eab28f19d --- /dev/null +++ b/tests/neg-custom-args/captures/boundschecks.scala @@ -0,0 +1,18 @@ +object test { + + class Tree + + def f[X <: Tree](x: X): Unit = () + + class C[X <: Tree](x: X) + + def foo(t: {*} Tree) = + f(t) // error + f[{*} Tree](t) // error + f[Tree](t) // error + val c1 = C(t) // error + val c2 = C[{*} Tree](t) // error + val c3 = C[Tree](t) // error + + val foo: C[{*} Tree] = ??? 
+} diff --git a/tests/neg-custom-args/captures/boundschecks2.scala b/tests/neg-custom-args/captures/boundschecks2.scala new file mode 100644 index 000000000000..f6927b04931b --- /dev/null +++ b/tests/neg-custom-args/captures/boundschecks2.scala @@ -0,0 +1,13 @@ +object test { + + class Tree + + def f[X <: Tree](x: X): Unit = () + + class C[X <: Tree](x: X) + + val foo: C[{*} Tree] = ??? // error + type T = C[{*} Tree] // error + val bar: T -> T = ??? + val baz: C[{*} Tree] -> Unit = ??? // error +} diff --git a/tests/neg-custom-args/captures/box-adapt-boxing.scala b/tests/neg-custom-args/captures/box-adapt-boxing.scala new file mode 100644 index 000000000000..7a624d4225fc --- /dev/null +++ b/tests/neg-custom-args/captures/box-adapt-boxing.scala @@ -0,0 +1,38 @@ +trait Cap + +def main(io: {*} Cap, fs: {*} Cap): Unit = { + val test1: {} Unit -> Unit = _ => { // error + type Op = [T] -> ({io} T -> Unit) -> Unit + val f: ({io} Cap) -> Unit = ??? + val op: Op = ??? + op[{io} Cap](f) + // expected type of f: {io} (box {io} Cap) -> Unit + // actual type: ({io} Cap) -> Unit + // adapting f to the expected type will also + // charge the environment with {io} + } + + val test2: {} Unit -> Unit = _ => { + type Box[X] = X + type Op0[X] = Box[X] -> Unit + type Op1[X] = Unit -> Box[X] + val f: Unit -> ({io} Cap) -> Unit = ??? + val test: {} Op1[{io} Op0[{io} Cap]] = f + // expected: {} Unit -> box {io} (box {io} Cap) -> Unit + // actual: Unit -> ({io} Cap) -> Unit + // + // although adapting `({io} Cap) -> Unit` to + // `box {io} (box {io} Cap) -> Unit` will leak the + // captured variables {io}, but since it is inside a box, + // we will charge neither the outer type nor the environment + } + + val test3 = { + type Box[X] = X + type Id[X] = Box[X] -> Unit + type Op[X] = Unit -> Box[X] + val f: Unit -> ({io} Cap) -> Unit = ??? 
+ val g: Op[{fs} Id[{io} Cap]] = f // error + val h: {} Op[{io} Id[{io} Cap]] = f + } +} diff --git a/tests/neg-custom-args/captures/box-adapt-cases.scala b/tests/neg-custom-args/captures/box-adapt-cases.scala new file mode 100644 index 000000000000..049ff385d73c --- /dev/null +++ b/tests/neg-custom-args/captures/box-adapt-cases.scala @@ -0,0 +1,29 @@ +trait Cap { def use(): Int } + +def test1(): Unit = { + type Id[X] = [T] -> (op: X => T) -> T + + val x: Id[{*} Cap] = ??? + x(cap => cap.use()) // error +} + +def test2(io: {*} Cap): Unit = { + type Id[X] = [T] -> (op: X -> T) -> T + + val x: Id[{io} Cap] = ??? + x(cap => cap.use()) // error +} + +def test3(io: {*} Cap): Unit = { + type Id[X] = [T] -> (op: {io} X -> T) -> T + + val x: Id[{io} Cap] = ??? + x(cap => cap.use()) // ok +} + +def test4(io: {*} Cap, fs: {*} Cap): Unit = { + type Id[X] = [T] -> (op: {io} X -> T) -> T + + val x: Id[{io, fs} Cap] = ??? + x(cap => cap.use()) // error +} diff --git a/tests/neg-custom-args/captures/box-adapt-cov.scala b/tests/neg-custom-args/captures/box-adapt-cov.scala new file mode 100644 index 000000000000..2040a1c4654d --- /dev/null +++ b/tests/neg-custom-args/captures/box-adapt-cov.scala @@ -0,0 +1,14 @@ +trait Cap + +def test1(io: {*} Cap) = { + type Op[X] = [T] -> Unit -> X + val f: Op[{io} Cap] = ??? + val x: [T] -> Unit -> ({io} Cap) = f // error +} + +def test2(io: {*} Cap) = { + type Op[X] = [T] -> Unit -> {io} X + val f: Op[{io} Cap] = ??? + val x: Unit -> ({io} Cap) = f[Unit] // error + val x1: {io} Unit -> ({io} Cap) = f[Unit] // ok +} diff --git a/tests/neg-custom-args/captures/box-adapt-cs.scala b/tests/neg-custom-args/captures/box-adapt-cs.scala new file mode 100644 index 000000000000..e35388efd203 --- /dev/null +++ b/tests/neg-custom-args/captures/box-adapt-cs.scala @@ -0,0 +1,19 @@ +trait Cap { def use(): Int } + +def test1(io: {*} Cap): Unit = { + type Id[X] = [T] -> (op: {io} X -> T) -> T + + val x: Id[{io} Cap] = ??? + val f: ({*} Cap) -> Unit = ??? 
+ x(f) // ok + // actual: {*} Cap -> Unit + // expected: {io} box {io} Cap -> Unit +} + +def test2(io: {*} Cap): Unit = { + type Id[X] = [T] -> (op: {*} X -> T) -> T + + val x: Id[{*} Cap] = ??? + val f: ({io} Cap) -> Unit = ??? + x(f) // error +} diff --git a/tests/neg-custom-args/captures/box-adapt-depfun.scala b/tests/neg-custom-args/captures/box-adapt-depfun.scala new file mode 100644 index 000000000000..294e2c33f7fa --- /dev/null +++ b/tests/neg-custom-args/captures/box-adapt-depfun.scala @@ -0,0 +1,23 @@ +trait Cap { def use(): Int } + +def test1(io: {*} Cap): Unit = { + type Id[X] = [T] -> (op: {io} X -> T) -> T + + val x: Id[{io} Cap] = ??? + x(cap => cap.use()) // ok +} + +def test2(io: {*} Cap): Unit = { + type Id[X] = [T] -> (op: {io} (x: X) -> T) -> T + + val x: Id[{io} Cap] = ??? + x(cap => cap.use()) + // should work when the expected type is a dependent function +} + +def test3(io: {*} Cap): Unit = { + type Id[X] = [T] -> (op: {} (x: X) -> T) -> T + + val x: Id[{io} Cap] = ??? + x(cap => cap.use()) // error +} diff --git a/tests/neg-custom-args/captures/box-adapt-typefun.scala b/tests/neg-custom-args/captures/box-adapt-typefun.scala new file mode 100644 index 000000000000..b14b07e72e9b --- /dev/null +++ b/tests/neg-custom-args/captures/box-adapt-typefun.scala @@ -0,0 +1,13 @@ +trait Cap { def use(): Int } + +def test1(io: {*} Cap): Unit = { + type Op[X] = [T] -> X -> Unit + val f: [T] -> ({io} Cap) -> Unit = ??? + val op: Op[{io} Cap] = f // error +} + +def test2(io: {*} Cap): Unit = { + type Lazy[X] = [T] -> Unit -> X + val f: Lazy[{io} Cap] = ??? 
+ val test: [T] -> Unit -> ({io} Cap) = f // error +} diff --git a/tests/neg-custom-args/captures/capt-depfun.scala b/tests/neg-custom-args/captures/capt-depfun.scala index 14f08f569725..c01eed7c4b25 100644 --- a/tests/neg-custom-args/captures/capt-depfun.scala +++ b/tests/neg-custom-args/captures/capt-depfun.scala @@ -1,8 +1,9 @@ import annotation.retains class C -type Cap = C @retains(*) +type Cap = C @retains(caps.*) +class Str def f(y: Cap, z: Cap) = def g(): C @retains(y, z) = ??? - val ac: ((x: Cap) => String @retains(x) => String @retains(x)) = ??? - val dc: (({y, z} String) => {y, z} String) = ac(g()) // error + val ac: ((x: Cap) => Str @retains(x) => Str @retains(x)) = ??? + val dc: (({y, z} Str) => {y, z} Str) = ac(g()) // error diff --git a/tests/neg-custom-args/captures/capt-depfun2.scala b/tests/neg-custom-args/captures/capt-depfun2.scala index 62c2381e01ad..52dd74aabf9f 100644 --- a/tests/neg-custom-args/captures/capt-depfun2.scala +++ b/tests/neg-custom-args/captures/capt-depfun2.scala @@ -1,11 +1,12 @@ import annotation.retains class C -type Cap = C @retains(*) +type Cap = C @retains(caps.*) +class Str def f(y: Cap, z: Cap) = def g(): C @retains(y, z) = ??? - val ac: ((x: Cap) => Array[String @retains(x)]) = ??? - val dc = ac(g()) // error: Needs explicit type Array[? >: String <: {y, z} String] + val ac: ((x: Cap) => Array[Str @retains(x)]) = ??? + val dc = ac(g()) // error: Needs explicit type Array[? 
>: Str <: {y, z} Str] // This is a shortcoming of rechecking since the originally inferred - // type is `Array[String]` and the actual type after rechecking - // cannot be expressed as `Array[C String]` for any capture set C \ No newline at end of file + // type is `Array[Str]` and the actual type after rechecking + // cannot be expressed as `Array[C Str]` for any capture set C \ No newline at end of file diff --git a/tests/neg-custom-args/captures/capt-test.scala b/tests/neg-custom-args/captures/capt-test.scala index 7080d6da67c6..1799fc5073ca 100644 --- a/tests/neg-custom-args/captures/capt-test.scala +++ b/tests/neg-custom-args/captures/capt-test.scala @@ -2,8 +2,8 @@ import annotation.retains import language.experimental.erasedDefinitions class CT[E <: Exception] -type CanThrow[E <: Exception] = CT[E] @retains(*) -type Top = Any @retains(*) +type CanThrow[E <: Exception] = CT[E] @retains(caps.*) +type Top = Any @retains(caps.*) infix type throws[R, E <: Exception] = (erased CanThrow[E]) ?=> R diff --git a/tests/neg-custom-args/captures/capt-wf2.scala b/tests/neg-custom-args/captures/capt-wf2.scala new file mode 100644 index 000000000000..ddde535fcab0 --- /dev/null +++ b/tests/neg-custom-args/captures/capt-wf2.scala @@ -0,0 +1,5 @@ +@annotation.capability class C + +def test(c: C) = + var x: {c} Any = ??? 
+ val y: {x} Any = x // error diff --git a/tests/neg-custom-args/captures/capt1.check b/tests/neg-custom-args/captures/capt1.check index 4a9e3999f731..51ed3e6736cf 100644 --- a/tests/neg-custom-args/captures/capt1.check +++ b/tests/neg-custom-args/captures/capt1.check @@ -40,14 +40,14 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:32:24 ---------------------------------------- 32 | val z2 = h[() -> Cap](() => x) // error | ^^^^^^^ - | Found: {x} () -> {*} C + | Found: {x} () -> Cap | Required: () -> box {*} C | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:33:5 ----------------------------------------- 33 | (() => C()) // error | ^^^^^^^^^ - | Found: ? () -> {*} C + | Found: ? () -> Cap | Required: () -> box {*} C | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/capt1.scala b/tests/neg-custom-args/captures/capt1.scala index ce69e77057e0..59ba874b02f5 100644 --- a/tests/neg-custom-args/captures/capt1.scala +++ b/tests/neg-custom-args/captures/capt1.scala @@ -1,21 +1,21 @@ import annotation.retains class C -def f(x: C @retains(*), y: C): () -> C = +def f(x: C @retains(caps.*), y: C): () -> C = () => if x == null then y else y // error -def g(x: C @retains(*), y: C): Matchable = +def g(x: C @retains(caps.*), y: C): Matchable = () => if x == null then y else y // error -def h1(x: C @retains(*), y: C): Any = +def h1(x: C @retains(caps.*), y: C): Any = def f() = if x == null then y else y () => f() // ok -def h2(x: C @retains(*)): Matchable = +def h2(x: C @retains(caps.*)): Matchable = def f(y: Int) = if x == null then y else y // error f class A -type Cap = C @retains(*) +type Cap = C @retains(caps.*) def h3(x: Cap): A = class F(y: Int) extends A: // error @@ -27,7 +27,7 @@ def h4(x: Cap, y: Int): A = def m() = if x == null then y else y def foo() = - val x: C @retains(*) = ??? 
+ val x: C @retains(caps.*) = ??? def h[X](a: X)(b: X) = a val z2 = h[() -> Cap](() => x) // error (() => C()) // error diff --git a/tests/neg-custom-args/captures/capt3.scala b/tests/neg-custom-args/captures/capt3.scala index 4ffaf4a73c06..84164d433029 100644 --- a/tests/neg-custom-args/captures/capt3.scala +++ b/tests/neg-custom-args/captures/capt3.scala @@ -1,6 +1,6 @@ import annotation.retains class C -type Cap = C @retains(*) +type Cap = C @retains(caps.*) def test1() = val x: Cap = C() diff --git a/tests/neg-custom-args/captures/cc-this.check b/tests/neg-custom-args/captures/cc-this.check index c492df15078f..0049f42a5db5 100644 --- a/tests/neg-custom-args/captures/cc-this.check +++ b/tests/neg-custom-args/captures/cc-this.check @@ -9,10 +9,7 @@ 10 | class C2(val x: () => Int): // error | ^ | reference (C2.this.x : () => Int) is not included in allowed capture set {} of the self type of class C2 --- [E058] Type Mismatch Error: tests/neg-custom-args/captures/cc-this.scala:17:8 --------------------------------------- +-- Error: tests/neg-custom-args/captures/cc-this.scala:17:8 ------------------------------------------------------------ 17 | class C4(val f: () => Int) extends C3 // error - | ^ - | illegal inheritance: self type {C4.this.f} C4 of class C4 does not conform to self type C3 - | of parent class C3 - | - | longer explanation available when compiling with `-explain` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | reference (C4.this.f : () => Int) is not included in allowed capture set {} of pure base class class C3 diff --git a/tests/neg-custom-args/captures/cc-this2.check b/tests/neg-custom-args/captures/cc-this2.check index d10519636ca8..086524d307a2 100644 --- a/tests/neg-custom-args/captures/cc-this2.check +++ b/tests/neg-custom-args/captures/cc-this2.check @@ -1,8 +1,6 @@ --- [E058] Type Mismatch Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:2:6 ----------------------------------- +-- Error: 
tests/neg-custom-args/captures/cc-this2/D_2.scala:2:6 -------------------------------------------------------- 2 |class D extends C: // error - | ^ - | illegal inheritance: self type {*} D of class D does not conform to self type C - | of parent class C - | - | longer explanation available when compiling with `-explain` + |^ + |reference (scala.caps.* : Any) is not included in allowed capture set {} of pure base class class C +3 | this: {*} D => diff --git a/tests/neg-custom-args/captures/cc1.scala b/tests/neg-custom-args/captures/cc1.scala index 7f3cd784ef84..10a9793eabe8 100644 --- a/tests/neg-custom-args/captures/cc1.scala +++ b/tests/neg-custom-args/captures/cc1.scala @@ -1,5 +1,5 @@ import annotation.retains object Test: - def f[A <: Matchable @retains(*)](x: A): Matchable = x // error + def f[A <: Matchable @retains(caps.*)](x: A): Matchable = x // error diff --git a/tests/neg-custom-args/captures/curried-simplified.check b/tests/neg-custom-args/captures/curried-simplified.check index b91914f72404..5d23a7a4955e 100644 --- a/tests/neg-custom-args/captures/curried-simplified.check +++ b/tests/neg-custom-args/captures/curried-simplified.check @@ -15,28 +15,28 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:11:39 --------------------------- 11 | def y3: Cap -> Protect[Int -> Int] = x3 // error | ^^ - | Found: (x$0: Cap) -> {x$0} Int -> Int + | Found: ? (x$0: Cap) -> {x$0} Int -> Int | Required: Cap -> Protect[Int -> Int] | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:15:33 --------------------------- 15 | def y5: Cap -> {} Int -> Int = x5 // error | ^^ - | Found: Cap -> {x} Int -> Int + | Found: ? 
Cap -> {x} Int -> Int | Required: Cap -> {} Int -> Int | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:17:49 --------------------------- 17 | def y6: Cap -> {} Cap -> Protect[Int -> Int] = x6 // error | ^^ - | Found: (x$0: Cap) -> {x$0} (x$0: Cap) -> {x$0, x$0} Int -> Int - | Required: Cap -> {} Cap -> Protect[Int -> Int] + | Found: ? (x$0: Cap) -> {x$0} (x$0: Cap) -> {x$0, x$0} Int -> Int + | Required: Cap -> {} Cap -> Protect[Int -> Int] | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:19:49 --------------------------- 19 | def y7: Cap -> Protect[Cap -> {} Int -> Int] = x7 // error | ^^ - | Found: (x$0: Cap) -> {x$0} (x: Cap) -> {x$0, x} Int -> Int + | Found: ? (x$0: Cap) -> {x$0} (x: Cap) -> {x$0, x} Int -> Int | Required: Cap -> Protect[Cap -> {} Int -> Int] | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/eta.check b/tests/neg-custom-args/captures/eta.check index da3609a86771..ebd63855181b 100644 --- a/tests/neg-custom-args/captures/eta.check +++ b/tests/neg-custom-args/captures/eta.check @@ -1,7 +1,7 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/eta.scala:4:9 -------------------------------------------- 4 | g // error | ^ - | Found: (g : () -> A) + | Found: ? 
() -> A | Required: () -> {f} Proc | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/exception-definitions.check b/tests/neg-custom-args/captures/exception-definitions.check new file mode 100644 index 000000000000..aca5d9217d64 --- /dev/null +++ b/tests/neg-custom-args/captures/exception-definitions.check @@ -0,0 +1,17 @@ +-- Error: tests/neg-custom-args/captures/exception-definitions.scala:2:6 ----------------------------------------------- +2 |class Err extends Exception: // error + |^ + |reference (scala.caps.* : Any) is not included in allowed capture set {} of pure base class class Throwable +3 | self: {*} Err => +-- Error: tests/neg-custom-args/captures/exception-definitions.scala:10:6 ---------------------------------------------- +10 |class Err4(c: {*} Any) extends AnyVal // error + |^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + |reference (Err4.this.c : {*} Any) is not included in allowed capture set {} of pure base class class AnyVal +-- Error: tests/neg-custom-args/captures/exception-definitions.scala:7:12 ---------------------------------------------- +7 | val x = c // error + | ^ + |(c : {*} Any) cannot be referenced here; it is not included in the allowed capture set {} of pure base class class Throwable +-- Error: tests/neg-custom-args/captures/exception-definitions.scala:8:8 ----------------------------------------------- +8 | class Err3(c: {*} Any) extends Exception // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | reference (Err3.this.c : {*} Any) is not included in allowed capture set {} of pure base class class Throwable diff --git a/tests/neg-custom-args/captures/exception-definitions.scala b/tests/neg-custom-args/captures/exception-definitions.scala new file mode 100644 index 000000000000..9f3539b7febf --- /dev/null +++ b/tests/neg-custom-args/captures/exception-definitions.scala @@ -0,0 +1,12 @@ + +class Err extends Exception: // error + self: {*} Err => + +def test(c: {*} Any) = + 
class Err2 extends Exception: + val x = c // error + class Err3(c: {*} Any) extends Exception // error + +class Err4(c: {*} Any) extends AnyVal // error + + diff --git a/tests/neg-custom-args/captures/i15116.check b/tests/neg-custom-args/captures/i15116.check index 83c552087646..7c73a7ff52ff 100644 --- a/tests/neg-custom-args/captures/i15116.check +++ b/tests/neg-custom-args/captures/i15116.check @@ -2,27 +2,27 @@ 3 | val x = Foo(m) // error | ^^^^^^^^^^^^^^ | Non-local value x cannot have an inferred type - | {Bar.this.m} Foo{m: {Bar.this.m} String} + | {Bar.this.m} Foo{val m: {Bar.this.m} String} | with non-empty capture set {Bar.this.m}. | The type needs to be declared explicitly. -- Error: tests/neg-custom-args/captures/i15116.scala:5:6 -------------------------------------------------------------- 5 | val x = Foo(m) // error | ^^^^^^^^^^^^^^ | Non-local value x cannot have an inferred type - | {Baz.this} Foo{m: {Baz.this} String} + | {Baz.this} Foo{val m: {*} String} | with non-empty capture set {Baz.this}. | The type needs to be declared explicitly. -- Error: tests/neg-custom-args/captures/i15116.scala:7:6 -------------------------------------------------------------- 7 | val x = Foo(m) // error | ^^^^^^^^^^^^^^ | Non-local value x cannot have an inferred type - | {Bar1.this.m} Foo{m: {Bar1.this.m} String} + | {Bar1.this.m} Foo{val m: {Bar1.this.m} String} | with non-empty capture set {Bar1.this.m}. | The type needs to be declared explicitly. -- Error: tests/neg-custom-args/captures/i15116.scala:9:6 -------------------------------------------------------------- 9 | val x = Foo(m) // error | ^^^^^^^^^^^^^^ | Non-local value x cannot have an inferred type - | {Baz2.this} Foo{m: {Baz2.this} String} + | {Baz2.this} Foo{val m: {*} String} | with non-empty capture set {Baz2.this}. | The type needs to be declared explicitly. 
diff --git a/tests/neg-custom-args/captures/i15772.check b/tests/neg-custom-args/captures/i15772.check index 0c59e81742f1..a587f2d262ed 100644 --- a/tests/neg-custom-args/captures/i15772.check +++ b/tests/neg-custom-args/captures/i15772.check @@ -1,36 +1,28 @@ --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:18:2 ---------------------------------------- -18 | () => // error - | ^ - | Found: {x} () -> Int - | Required: () -> Int -19 | val c : {x} C = new C(x) -20 | val boxed1 : (({*} C) => Unit) -> Unit = box1(c) -21 | boxed1((cap: {*} C) => unsafe(c)) -22 | 0 +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:20:49 --------------------------------------- +20 | val boxed1 : (({*} C) => Unit) -> Unit = box1(c) // error + | ^^^^^^^ + | Found: {c} ({*} ({c} C{val arg: {*} C}) -> Unit) -> Unit + | Required: (({*} C) => Unit) -> Unit | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:25:2 ---------------------------------------- -25 | () => // error - | ^ - | Found: {x} () -> Int - | Required: () -> Int -26 | val c : {x} C = new C(x) -27 | val boxed2 : Observe[{*} C] = box2(c) -28 | boxed2((cap: {*} C) => unsafe(c)) -29 | 0 +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:27:38 --------------------------------------- +27 | val boxed2 : Observe[{*} C] = box2(c) // error + | ^^^^^^^ + | Found: {c} ({*} ({c} C{val arg: {*} C}) -> Unit) -> Unit + | Required: Observe[{*} C] | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:33:37 --------------------------------------- 33 | val boxed2 : Observe[{*} C] = box2(c) // error | ^ | Found: {*} C - | Required: box {*} C{arg: ? C} + | Required: box {*} C{val arg: ? 
C} | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:44:2 ---------------------------------------- 44 | x: (() -> Unit) // error | ^ - | Found: (x : {sayHello, io} () -> Unit) + | Found: {x} () -> Unit | Required: () -> Unit | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/i15772.scala b/tests/neg-custom-args/captures/i15772.scala index 29794443c297..d3afdb6c63f1 100644 --- a/tests/neg-custom-args/captures/i15772.scala +++ b/tests/neg-custom-args/captures/i15772.scala @@ -15,16 +15,16 @@ class C(val arg: {*} C) { } def main1(x: {*} C) : () -> Int = - () => // error + () => val c : {x} C = new C(x) - val boxed1 : (({*} C) => Unit) -> Unit = box1(c) + val boxed1 : (({*} C) => Unit) -> Unit = box1(c) // error boxed1((cap: {*} C) => unsafe(c)) 0 def main2(x: {*} C) : () -> Int = - () => // error + () => val c : {x} C = new C(x) - val boxed2 : Observe[{*} C] = box2(c) + val boxed2 : Observe[{*} C] = box2(c) // error boxed2((cap: {*} C) => unsafe(c)) 0 @@ -41,4 +41,4 @@ def main(io: {*} Any) = val sayHello: (({io} File) => Unit) = (file: {io} File) => file.write("Hello World!\r\n") val filesList : List[{io} File] = ??? 
val x = () => filesList.foreach(sayHello) - x: (() -> Unit) // error \ No newline at end of file + x: (() -> Unit) // error diff --git a/tests/neg-custom-args/captures/i15923-cases.scala b/tests/neg-custom-args/captures/i15923-cases.scala new file mode 100644 index 000000000000..5fbb95355a60 --- /dev/null +++ b/tests/neg-custom-args/captures/i15923-cases.scala @@ -0,0 +1,15 @@ +trait Cap { def use(): Int } +type Id[X] = [T] -> (op: X => T) -> T +def mkId[X](x: X): Id[X] = [T] => (op: X => T) => op(x) + +def foo(x: Id[{*} Cap]) = { + x(_.use()) // error +} + +def bar(io: {*} Cap, x: Id[{io} Cap]) = { + x(_.use()) +} + +def barAlt(a: {*} Cap, b: {*} Cap, x: Id[{a, b} Cap]) = { + x(_.use()) +} diff --git a/tests/neg-custom-args/captures/i15923.scala b/tests/neg-custom-args/captures/i15923.scala new file mode 100644 index 000000000000..ac7ee995150e --- /dev/null +++ b/tests/neg-custom-args/captures/i15923.scala @@ -0,0 +1,14 @@ +trait Cap { def use(): Int } +type Id[X] = [T] -> (op: X => T) -> T +def mkId[X](x: X): Id[X] = [T] => (op: X => T) => op(x) + +def bar() = { + def withCap[X](op: ({*} Cap) => X): X = { + val cap: {*} Cap = new Cap { def use() = { println("cap is used"); 0 } } + val result = op(cap) + result + } + + val leak = withCap(cap => mkId(cap)) + leak { cap => cap.use() } // error +} diff --git a/tests/neg-custom-args/captures/i16114.scala b/tests/neg-custom-args/captures/i16114.scala new file mode 100644 index 000000000000..cc491226f9df --- /dev/null +++ b/tests/neg-custom-args/captures/i16114.scala @@ -0,0 +1,46 @@ +trait Cap { def use(): Int; def close(): Unit } +def mkCap(): {*} Cap = ??? 
+ +def expect[T](x: T): x.type = x + +def withCap[T](op: ({*} Cap) => T): T = { + val cap: {*} Cap = mkCap() + val result = op(cap) + cap.close() + result +} + +def main(fs: {*} Cap): Unit = { + def badOp(io: {*} Cap): {} Unit -> Unit = { + val op1: {io} Unit -> Unit = (x: Unit) => // error // limitation + expect[{*} Cap] { + io.use() + fs + } + + val op2: {fs} Unit -> Unit = (x: Unit) => // error // limitation + expect[{*} Cap] { + fs.use() + io + } + + val op3: {io} Unit -> Unit = (x: Unit) => // ok + expect[{*} Cap] { + io.use() + io + } + + val op4: {} Unit -> Unit = (x: Unit) => // ok + expect[{*} Cap](io) + + val op: {} Unit -> Unit = (x: Unit) => // error + expect[{*} Cap] { + io.use() + io + } + op + } + + val leaked: {} Unit -> Unit = withCap(badOp) + leaked(()) +} diff --git a/tests/neg-custom-args/captures/io.scala b/tests/neg-custom-args/captures/io.scala index 91af0167c9f9..ae686d6b154e 100644 --- a/tests/neg-custom-args/captures/io.scala +++ b/tests/neg-custom-args/captures/io.scala @@ -3,17 +3,17 @@ sealed trait IO: def puts(msg: Any): Unit = println(msg) def test1 = - val IO : IO @retains(*) = new IO {} + val IO : IO @retains(caps.*) = new IO {} def foo = {IO; IO.puts("hello") } val x : () -> Unit = () => foo // error: Found: (() -> Unit) retains IO; Required: () -> Unit def test2 = - val IO : IO @retains(*) = new IO {} - def puts(msg: Any, io: IO @retains(*)) = println(msg) + val IO : IO @retains(caps.*) = new IO {} + def puts(msg: Any, io: IO @retains(caps.*)) = println(msg) def foo() = puts("hello", IO) val x : () -> Unit = () => foo() // error: Found: (() -> Unit) retains IO; Required: () -> Unit -type Capability[T] = T @retains(*) +type Capability[T] = T @retains(caps.*) def test3 = val IO : Capability[IO] = new IO {} diff --git a/tests/neg-custom-args/captures/lazylist.check b/tests/neg-custom-args/captures/lazylist.check index e43538ad97f7..ec620b39da08 100644 --- a/tests/neg-custom-args/captures/lazylist.check +++ 
b/tests/neg-custom-args/captures/lazylist.check @@ -5,11 +5,18 @@ | method tail of type -> {*} lazylists.LazyList[Nothing] has incompatible type | | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylist.scala:17:15 ------------------------------------- +17 | def tail = xs() // error + | ^^^^ + | Found: {LazyCons.this.xs} lazylists.LazyList[T] + | Required: lazylists.LazyList[T] + | + | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylist.scala:35:29 ------------------------------------- 35 | val ref1c: LazyList[Int] = ref1 // error | ^^^^ - | Found: (ref1 : {cap1} lazylists.LazyCons[Int]{xs: {cap1} () -> {*} lazylists.LazyList[Int]}) - | Required: lazylists.LazyList[Int] + | Found: (ref1 : {cap1} lazylists.LazyCons[Int]{val xs: {cap1} () -> {*} lazylists.LazyList[Int]}) + | Required: lazylists.LazyList[Int] | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylist.scala:37:36 ------------------------------------- @@ -33,10 +40,3 @@ | Required: {cap1, ref3, cap3} lazylists.LazyList[Int] | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/lazylist.scala:17:6 ----------------------------------------------------------- -17 | def tail = xs() // error: cannot have an inferred type - | ^^^^^^^^^^^^^^^ - | Non-local method tail cannot have an inferred result type - | {LazyCons.this.xs} lazylists.LazyList[? T] - | with non-empty capture set {LazyCons.this.xs}. - | The type needs to be declared explicitly. 
diff --git a/tests/neg-custom-args/captures/lazylist.scala b/tests/neg-custom-args/captures/lazylist.scala index 56bfc3ea6da2..2674f15a0ee3 100644 --- a/tests/neg-custom-args/captures/lazylist.scala +++ b/tests/neg-custom-args/captures/lazylist.scala @@ -14,7 +14,7 @@ abstract class LazyList[+T]: class LazyCons[+T](val x: T, val xs: () => {*} LazyList[T]) extends LazyList[T]: def isEmpty = false def head = x - def tail = xs() // error: cannot have an inferred type + def tail = xs() // error object LazyNil extends LazyList[Nothing]: def isEmpty = true diff --git a/tests/neg-custom-args/captures/lazyref.check b/tests/neg-custom-args/captures/lazyref.check index fcd98d0d67bd..7471f8f4f686 100644 --- a/tests/neg-custom-args/captures/lazyref.check +++ b/tests/neg-custom-args/captures/lazyref.check @@ -1,28 +1,28 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazyref.scala:19:28 -------------------------------------- 19 | val ref1c: LazyRef[Int] = ref1 // error | ^^^^ - | Found: (ref1 : {cap1} LazyRef[Int]{elem: {cap1} () -> Int}) + | Found: (ref1 : {cap1} LazyRef[Int]{val elem: {cap1} () -> Int}) | Required: LazyRef[Int] | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazyref.scala:21:35 -------------------------------------- 21 | val ref2c: {cap2} LazyRef[Int] = ref2 // error | ^^^^ - | Found: (ref2 : {cap2, ref1} LazyRef[Int]{elem: {*} () -> Int}) + | Found: (ref2 : {cap2, ref1} LazyRef[Int]{val elem: {*} () -> Int}) | Required: {cap2} LazyRef[Int] | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazyref.scala:23:35 -------------------------------------- 23 | val ref3c: {ref1} LazyRef[Int] = ref3 // error | ^^^^ - | Found: (ref3 : {cap2, ref1} LazyRef[Int]{elem: {*} () -> Int}) + | Found: (ref3 : {cap2, ref1} LazyRef[Int]{val elem: {*} () -> Int}) | Required: {ref1} LazyRef[Int] | | longer 
explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazyref.scala:25:35 -------------------------------------- 25 | val ref4c: {cap1} LazyRef[Int] = ref4 // error | ^^^^ - | Found: (ref4 : {cap2, cap1} LazyRef[Int]{elem: {*} () -> Int}) + | Found: (ref4 : {cap2, cap1} LazyRef[Int]{val elem: {*} () -> Int}) | Required: {cap1} LazyRef[Int] | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/selftype.scala b/tests/neg-custom-args/captures/selftype.scala new file mode 100644 index 000000000000..21148f625a7a --- /dev/null +++ b/tests/neg-custom-args/captures/selftype.scala @@ -0,0 +1,4 @@ +@annotation.experimental class C(x: () => Unit) extends caps.Pure // error + +@annotation.experimental class D(@annotation.constructorOnly x: () => Unit) extends caps.Pure // ok + diff --git a/tests/neg-custom-args/captures/try.check b/tests/neg-custom-args/captures/try.check index 30ebb910d34d..c9cc7f7c1b56 100644 --- a/tests/neg-custom-args/captures/try.check +++ b/tests/neg-custom-args/captures/try.check @@ -1,8 +1,8 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/try.scala:23:49 ------------------------------------------ 23 | val a = handle[Exception, CanThrow[Exception]] { // error | ^ - | Found: ? ({*} CT[Exception]) -> {*} CT[? >: ? Exception <: ? Exception] - | Required: CanThrow[Exception] => box {*} CT[Exception] + | Found: ? ({*} CT[Exception]) -> CanThrow[Exception] + | Required: CanThrow[Exception] => box {*} CT[Exception] 24 | (x: CanThrow[Exception]) => x 25 | }{ | @@ -10,7 +10,7 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/try.scala:29:43 ------------------------------------------ 29 | val b = handle[Exception, () -> Nothing] { // error | ^ - | Found: ? (x: {*} CT[Exception]) -> {x} () -> ? Nothing + | Found: ? 
(x: {*} CT[Exception]) -> {x} () -> Nothing | Required: CanThrow[Exception] => () -> Nothing 30 | (x: CanThrow[Exception]) => () => raise(new Exception)(using x) 31 | } { diff --git a/tests/neg-custom-args/captures/try.scala b/tests/neg-custom-args/captures/try.scala index 35c7ea4829f2..df7930f76af8 100644 --- a/tests/neg-custom-args/captures/try.scala +++ b/tests/neg-custom-args/captures/try.scala @@ -2,8 +2,8 @@ import annotation.retains import language.experimental.erasedDefinitions class CT[E <: Exception] -type CanThrow[E <: Exception] = CT[E] @retains(*) -type Top = Any @retains(*) +type CanThrow[E <: Exception] = CT[E] @retains(caps.*) +type Top = Any @retains(caps.*) infix type throws[R, E <: Exception] = (erased CanThrow[E]) ?=> R diff --git a/tests/neg-custom-args/captures/usingLogFile.check b/tests/neg-custom-args/captures/usingLogFile.check index 03c413cbb3ef..beb7ac23ed44 100644 --- a/tests/neg-custom-args/captures/usingLogFile.check +++ b/tests/neg-custom-args/captures/usingLogFile.check @@ -1,18 +1,18 @@ -- Error: tests/neg-custom-args/captures/usingLogFile.scala:23:27 ------------------------------------------------------ 23 | val later = usingLogFile { f => () => f.write(0) } // error | ^^^^^^^^^^^^^^^^^^^^^^^^^ - | {f, *} () -> Unit cannot be box-converted to box ? () -> Unit + | {f} () -> Unit cannot be box-converted to box ? () -> Unit | since one of their capture sets contains the root capability `*` -- Error: tests/neg-custom-args/captures/usingLogFile.scala:29:9 ------------------------------------------------------- 29 | later2.x() // error | ^^^^^^^^ | The expression's type box {*} () -> Unit is not allowed to capture the root capability `*`. | This usually means that a capability persists longer than its allowed lifetime. 
--- Error: tests/neg-custom-args/captures/usingLogFile.scala:31:6 ------------------------------------------------------- -31 | var later3: () => Unit = () => () // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | The mutable variable's type box {*} () -> Unit is not allowed to capture the root capability `*`. - | This usually means that a capability persists longer than its allowed lifetime. +-- Error: tests/neg-custom-args/captures/usingLogFile.scala:33:2 ------------------------------------------------------- +33 | later3() // error + | ^^^^^^ + | box {*} () -> Unit cannot be box-converted to a type that can be selected or applied + | since one of their capture sets contains the root capability `*` -- Error: tests/neg-custom-args/captures/usingLogFile.scala:37:9 ------------------------------------------------------- 37 | later4.x() // error | ^^^^^^^^ @@ -21,15 +21,15 @@ -- Error: tests/neg-custom-args/captures/usingLogFile.scala:47:27 ------------------------------------------------------ 47 | val later = usingLogFile { f => () => f.write(0) } // error | ^^^^^^^^^^^^^^^^^^^^^^^^^ - | {f, *} () -> Unit cannot be box-converted to box ? () -> Unit + | {f} () -> Unit cannot be box-converted to box ? () -> Unit | since one of their capture sets contains the root capability `*` -- Error: tests/neg-custom-args/captures/usingLogFile.scala:62:33 ------------------------------------------------------ 62 | val later = usingFile("out", f => (y: Int) => xs.foreach(x => f.write(x + y))) // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | {f, *} (x$0: Int) -> Unit cannot be box-converted to box ? (x$0: Int) -> Unit + | {f} (x$0: Int) -> Unit cannot be box-converted to box ? 
(x$0: Int) -> Unit | since one of their capture sets contains the root capability `*` -- Error: tests/neg-custom-args/captures/usingLogFile.scala:71:37 ------------------------------------------------------ 71 | val later = usingFile("logfile", usingLogger(_, l => () => l.log("test"))) // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | {_$1, *} () -> Unit cannot be box-converted to box ? () -> Unit + | {_$1} () -> Unit cannot be box-converted to box ? () -> Unit | since one of their capture sets contains the root capability `*` diff --git a/tests/neg-custom-args/captures/usingLogFile.scala b/tests/neg-custom-args/captures/usingLogFile.scala index fb35b673d46e..8b367239050d 100644 --- a/tests/neg-custom-args/captures/usingLogFile.scala +++ b/tests/neg-custom-args/captures/usingLogFile.scala @@ -28,9 +28,9 @@ object Test2: private val later2 = usingLogFile { f => Cell(() => f.write(0)) } later2.x() // error - var later3: () => Unit = () => () // error + var later3: () => Unit = () => () usingLogFile { f => later3 = () => f.write(0) } - later3() + later3() // error var later4: Cell[() => Unit] = Cell(() => ()) usingLogFile { f => later4 = Cell(() => f.write(0)) } diff --git a/tests/neg-custom-args/captures/vars.check b/tests/neg-custom-args/captures/vars.check index b5a01558cd2c..8fe72a76493a 100644 --- a/tests/neg-custom-args/captures/vars.check +++ b/tests/neg-custom-args/captures/vars.check @@ -1,25 +1,32 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars.scala:11:24 ----------------------------------------- 11 | val z2c: () -> Unit = z2 // error | ^^ - | Found: (z2 : {x, cap1} () -> Unit) + | Found: {z2} () -> Unit | Required: () -> Unit | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/vars.scala:13:6 --------------------------------------------------------------- -13 | var a: String => String = f // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | The mutable variable's type box {*} String 
-> String is not allowed to capture the root capability `*`. - | This usually means that a capability persists longer than its allowed lifetime. --- Error: tests/neg-custom-args/captures/vars.scala:15:4 --------------------------------------------------------------- -15 | b.head // error +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars.scala:15:10 ----------------------------------------- +15 | val u = a // error + | ^ + | Found: (a : box {*} String -> String) + | Required: {*} (x$0: String) -> String + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/vars.scala:16:2 --------------------------------------------------------------- +16 | a("") // error + | ^ + | box {*} String -> String cannot be box-converted to a type that can be selected or applied + | since one of their capture sets contains the root capability `*` +-- Error: tests/neg-custom-args/captures/vars.scala:17:4 --------------------------------------------------------------- +17 | b.head // error | ^^^^^^ | The expression's type box {*} String -> String is not allowed to capture the root capability `*`. | This usually means that a capability persists longer than its allowed lifetime. --- Error: tests/neg-custom-args/captures/vars.scala:30:8 --------------------------------------------------------------- -30 | local { cap3 => // error +-- Error: tests/neg-custom-args/captures/vars.scala:32:8 --------------------------------------------------------------- +32 | local { cap3 => // error | ^ - | The expression's type box {*} (x$0: ? String) -> ? String is not allowed to capture the root capability `*`. + | The expression's type box {*} (x$0: String) -> String is not allowed to capture the root capability `*`. | This usually means that a capability persists longer than its allowed lifetime. 
-31 | def g(x: String): String = if cap3 == cap3 then "" else "a" -32 | g -33 | } +33 | def g(x: String): String = if cap3 == cap3 then "" else "a" +34 | g +35 | } diff --git a/tests/neg-custom-args/captures/vars.scala b/tests/neg-custom-args/captures/vars.scala index 5e413b7ea3fb..2ad8fec53619 100644 --- a/tests/neg-custom-args/captures/vars.scala +++ b/tests/neg-custom-args/captures/vars.scala @@ -10,8 +10,10 @@ def test(cap1: Cap, cap2: Cap) = val z2 = () => { x = identity } val z2c: () -> Unit = z2 // error - var a: String => String = f // error + var a: String => String = f // was error, now OK var b: List[String => String] = Nil // was error, now OK + val u = a // error + a("") // error b.head // error def scope = diff --git a/tests/neg-custom-args/deprecation/i11344.scala b/tests/neg-custom-args/deprecation/i11344.scala new file mode 100644 index 000000000000..4829b9fcef6b --- /dev/null +++ b/tests/neg-custom-args/deprecation/i11344.scala @@ -0,0 +1,6 @@ +trait Pet(val name: String, rest: Int): + def f(suffix: String) = s"$name$suffix$rest" + +class Birdie(override val name: String) extends Pet("huh", 1) // error + + diff --git a/tests/neg-custom-args/erased/i4060.scala b/tests/neg-custom-args/erased/i4060.scala new file mode 100644 index 000000000000..a1a2eee68dc0 --- /dev/null +++ b/tests/neg-custom-args/erased/i4060.scala @@ -0,0 +1,21 @@ +// See https://github.com/lampepfl/dotty/issues/4060#issuecomment-445808377 + +object App { + trait A { type L >: Any} + def upcast(erased a: A)(x: Any): a.L = x + erased val p: A { type L <: Nothing } = p + def coerce(x: Any): Int = upcast(p)(x) // error + + def coerceInline(x: Any): Int = upcast(compiletime.erasedValue[A {type L <: Nothing}])(x) // error + + trait B { type L <: Nothing } + def upcast_dep_parameter(erased a: B)(x: a.L) : Int = x + erased val q : B { type L >: Any } = compiletime.erasedValue + + def coerceInlineWithB(x: Any): Int = upcast_dep_parameter(q)(x) // error + + def main(args: 
Array[String]): Unit = { + println(coerce("Uh oh!")) + println(coerceInlineWithB("Uh oh!")) + } +} diff --git a/tests/neg-custom-args/erased/i5525.scala b/tests/neg-custom-args/erased/i5525.scala index 108de00bf1a2..abf8488bd38b 100644 --- a/tests/neg-custom-args/erased/i5525.scala +++ b/tests/neg-custom-args/erased/i5525.scala @@ -1,10 +1,9 @@ - erased enum Foo6 {} // error: only access modifiers allowed -enum Foo10 { - erased case C6() // error: only access modifiers allowed +enum Foo10 { // error: Enumerations must contain at least one case + erased case C6() // error // error } -enum Foo11 { - erased case C6 // error: only access modifiers allowed +enum Foo11 { // error: Enumerations must contain at least one case + erased case C6 // error // error } diff --git a/tests/neg-custom-args/fatal-warnings/i12253.check b/tests/neg-custom-args/fatal-warnings/i12253.check index 9a88c111f734..654ea9fc8247 100644 --- a/tests/neg-custom-args/fatal-warnings/i12253.check +++ b/tests/neg-custom-args/fatal-warnings/i12253.check @@ -1,9 +1,9 @@ -- Error: tests/neg-custom-args/fatal-warnings/i12253.scala:11:10 ------------------------------------------------------ 11 | case extractors.InlinedLambda(_, Select(_, name)) => Expr(name) // error // error | ^ - | the type test for extractors.q2.reflect.Term cannot be checked at runtime + |the type test for extractors.q2.reflect.Term cannot be checked at runtime because it refers to an abstract type member or type parameter -- Error: tests/neg-custom-args/fatal-warnings/i12253.scala:11:38 ------------------------------------------------------ 11 | case extractors.InlinedLambda(_, Select(_, name)) => Expr(name) // error // error | ^ - | the type test for q1.reflect.Select cannot be checked at runtime + |the type test for q1.reflect.Select cannot be checked at runtime because it refers to an abstract type member or type parameter there was 1 deprecation warning; re-run with -deprecation for details diff --git 
a/tests/neg-custom-args/i4060.scala b/tests/neg-custom-args/i4060.scala deleted file mode 100644 index 3d5c180b5d7b..000000000000 --- a/tests/neg-custom-args/i4060.scala +++ /dev/null @@ -1,22 +0,0 @@ -class X { type R } -class T(erased val a: X)(val value: a.R) - -object App { - def coerce[U, V](u: U): V = { - trait X { type R >: U } - trait Y { type R = V } - - class T[A <: X](erased val a: A)(val value: a.R) // error - - object O { lazy val x : Y & X = ??? } - - val a = new T[Y & X](O.x)(u) - a.value - } - - def main(args: Array[String]): Unit = { - val x: Int = coerce[String, Int]("a") - println(x + 1) - - } -} diff --git a/tests/neg-custom-args/no-experimental/experimental-nested-imports-2.scala b/tests/neg-custom-args/no-experimental/experimental-nested-imports-2.scala index 85076cca723a..a4962c6153a0 100644 --- a/tests/neg-custom-args/no-experimental/experimental-nested-imports-2.scala +++ b/tests/neg-custom-args/no-experimental/experimental-nested-imports-2.scala @@ -1,7 +1,6 @@ import annotation.experimental class Class1: - import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error import language.experimental.erasedDefinitions // ok: only check at erased definition @@ -9,7 +8,6 @@ class Class1: def g = 1 object Object1: - import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error import language.experimental.erasedDefinitions // ok: only check at erased definition @@ -17,7 +15,6 @@ object Object1: def g = 1 def fun1 = - import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error import language.experimental.erasedDefinitions // ok: only check at erased definition @@ -25,7 +22,6 @@ def fun1 = def g = 1 val value1 = - import 
language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error import language.experimental.erasedDefinitions // ok: only check at erased definition diff --git a/tests/neg-custom-args/no-experimental/experimental-nested-imports-3.scala b/tests/neg-custom-args/no-experimental/experimental-nested-imports-3.scala index 1af04918b1d9..77fbe41479d2 100644 --- a/tests/neg-custom-args/no-experimental/experimental-nested-imports-3.scala +++ b/tests/neg-custom-args/no-experimental/experimental-nested-imports-3.scala @@ -1,25 +1,21 @@ import annotation.experimental class Class1: - import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error import language.experimental.erasedDefinitions // ok: only check at erased definition object Object1: - import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error import language.experimental.erasedDefinitions // ok: only check at erased definition def fun1 = - import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error import language.experimental.erasedDefinitions // ok: only check at erased definition val value1 = - import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error import language.experimental.erasedDefinitions // ok: only check at erased definition diff --git a/tests/neg-custom-args/no-experimental/experimental-nested-imports.scala b/tests/neg-custom-args/no-experimental/experimental-nested-imports.scala index b9fc38dc4915..180c43b9f671 100644 --- 
a/tests/neg-custom-args/no-experimental/experimental-nested-imports.scala +++ b/tests/neg-custom-args/no-experimental/experimental-nested-imports.scala @@ -1,28 +1,24 @@ import annotation.experimental class Class1: - import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error import language.experimental.erasedDefinitions // ok: only check at erased definition @experimental def f = 1 object Object1: - import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error import language.experimental.erasedDefinitions // ok: only check at erased definition @experimental def f = 1 def fun1 = - import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error import language.experimental.erasedDefinitions // ok: only check at erased definition @experimental def f = 1 val value1 = - import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error import language.experimental.erasedDefinitions // ok: only check at erased definition diff --git a/tests/neg-custom-args/no-experimental/experimental-package-imports.scala b/tests/neg-custom-args/no-experimental/experimental-package-imports.scala index 90ec387b1036..047b3eb61e82 100644 --- a/tests/neg-custom-args/no-experimental/experimental-package-imports.scala +++ b/tests/neg-custom-args/no-experimental/experimental-package-imports.scala @@ -1,7 +1,6 @@ import annotation.experimental package foo { - import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error import language.experimental.erasedDefinitions // ok: only 
check at erased definition @@ -13,7 +12,6 @@ package foo { package foo2 { // ok: all definitions are top-level @experimental - import language.experimental.fewerBraces import language.experimental.namedTypeArguments import language.experimental.genericNumberLiterals import language.experimental.erasedDefinitions diff --git a/tests/neg-custom-args/no-experimental/experimentalInheritance.scala b/tests/neg-custom-args/no-experimental/experimentalInheritance.scala deleted file mode 100644 index f6eab1224310..000000000000 --- a/tests/neg-custom-args/no-experimental/experimentalInheritance.scala +++ /dev/null @@ -1,14 +0,0 @@ -import scala.annotation.experimental - -@experimental def x = 2 - -@experimental class A1(x: Any) -class A2(x: Any) - - -@experimental class B1 extends A1(1) -class B2 // error: extension of experimental class A1 must have @experimental annotation -extends A1(1) // error: class A1 is marked @experimental ... - -@experimental class C1 extends A2(x) -class C2 extends A2(x) // error def x is marked @experimental and therefore diff --git a/tests/neg-custom-args/no-experimental/experimentalInline.scala b/tests/neg-custom-args/no-experimental/experimentalInline.scala index 8827fd42e36a..eb49bf15d11a 100644 --- a/tests/neg-custom-args/no-experimental/experimentalInline.scala +++ b/tests/neg-custom-args/no-experimental/experimentalInline.scala @@ -4,5 +4,5 @@ import scala.annotation.experimental inline def g() = () def test: Unit = - g() // errors + g() // error () diff --git a/tests/neg-custom-args/no-experimental/experimentalInline2.scala b/tests/neg-custom-args/no-experimental/experimentalInline2.scala new file mode 100644 index 000000000000..c40eb050a832 --- /dev/null +++ b/tests/neg-custom-args/no-experimental/experimentalInline2.scala @@ -0,0 +1,8 @@ +import scala.annotation.experimental + +@experimental +transparent inline def g() = () + +def test: Unit = + g() // error + () diff --git a/tests/neg-custom-args/nowarn/nowarn.check 
b/tests/neg-custom-args/nowarn/nowarn.check index 232ea1a3a05f..855f741a15bf 100644 --- a/tests/neg-custom-args/nowarn/nowarn.check +++ b/tests/neg-custom-args/nowarn/nowarn.check @@ -69,7 +69,7 @@ Matching filters for @nowarn or -Wconf: -- Unchecked Warning: tests/neg-custom-args/nowarn/nowarn.scala:53:7 --------------------------------------------------- 53 | case _: List[Int] => 0 // warning (patmat, unchecked) | ^ - | the type test for List[Int] cannot be checked at runtime + |the type test for List[Int] cannot be checked at runtime because its type arguments can't be determined from Any -- Error: tests/neg-custom-args/nowarn/nowarn.scala:31:1 --------------------------------------------------------------- 31 |@nowarn("id=1") def t4d = try 1 // error and warning (unused nowarn, wrong id) |^^^^^^^^^^^^^^^ diff --git a/tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.check b/tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.check index 301111860aa7..7687543ea75f 100644 --- a/tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.check +++ b/tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.check @@ -13,7 +13,7 @@ -- [E170] Type Error: tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.scala:17:27 ----------------------------- 17 | val d = js.constructorOf[NativeJSClass { def bar: Int }] // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | NativeJSClass{bar: Int} is not a class type + | NativeJSClass{def bar: Int} is not a class type -- Error: tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.scala:19:27 ----------------------------------------- 19 | val e = js.constructorOf[JSTrait] // error | ^^^^^^^ @@ -29,7 +29,7 @@ -- [E170] Type Error: tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.scala:23:27 ----------------------------- 23 | val h = js.constructorOf[JSClass { def bar: Int }] // error | ^^^^^^^^^^^^^^^^^^^^^^^^ - | JSClass{bar: Int} is not a class type + | JSClass{def bar: Int} is not a class type -- [E170] Type 
Error: tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.scala:25:42 ----------------------------- 25 | def foo[A <: js.Any] = js.constructorOf[A] // error | ^ diff --git a/tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.check b/tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.check index c4ce18b2e57c..142de318efd3 100644 --- a/tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.check +++ b/tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.check @@ -9,11 +9,11 @@ -- [E170] Type Error: tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.scala:16:61 ---------------------------- 16 | val c = js.constructorTag[NativeJSClass with NativeJSTrait] // error | ^ - | (NativeJSClass & NativeJSTrait) is not a class type + | NativeJSClass & NativeJSTrait is not a class type -- [E170] Type Error: tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.scala:17:59 ---------------------------- 17 | val d = js.constructorTag[NativeJSClass { def bar: Int }] // error | ^ - | NativeJSClass{bar: Int} is not a class type + | NativeJSClass{def bar: Int} is not a class type -- Error: tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.scala:19:36 ---------------------------------------- 19 | val e = js.constructorTag[JSTrait] // error | ^ @@ -25,11 +25,11 @@ -- [E170] Type Error: tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.scala:22:49 ---------------------------- 22 | val g = js.constructorTag[JSClass with JSTrait] // error | ^ - | (JSClass & JSTrait) is not a class type + | JSClass & JSTrait is not a class type -- [E170] Type Error: tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.scala:23:53 ---------------------------- 23 | val h = js.constructorTag[JSClass { def bar: Int }] // error | ^ - | JSClass{bar: Int} is not a class type + | JSClass{def bar: Int} is not a class type -- [E170] Type Error: tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.scala:25:45 ---------------------------- 25 | def foo[A <: 
js.Any] = js.constructorTag[A] // error | ^ diff --git a/tests/neg-strict/i16092.scala b/tests/neg-strict/i16092.scala new file mode 100644 index 000000000000..b86c034c815b --- /dev/null +++ b/tests/neg-strict/i16092.scala @@ -0,0 +1,24 @@ +trait X { + type T + def process(t: T): Unit +} + +class Z(val x: X, val t: x.T) { + def process(): Unit = x.process(t) +} +class Evil(x1: X, x2: X, t: x1.T) extends Z(x1, t) { + override val x: X = x2 // error breaks connection between x and t +} +// alarm bells should be ringing by now + +// taking it to its conclusion... +object x1 extends X { + override type T = Int + override def process(t: T): Unit = println("Int: " + t) +} +object x2 extends X { + override type T = String + override def process(t: T): Unit = println("String: " + t) +} + +@main def Test = new Evil(x1, x2, 42).process() // BOOM: basically did x2.process(42) diff --git a/tests/neg/15981.check b/tests/neg/15981.check new file mode 100644 index 000000000000..c4d677b486e9 --- /dev/null +++ b/tests/neg/15981.check @@ -0,0 +1,4 @@ +-- Error: tests/neg/15981.scala:4:45 ----------------------------------------------------------------------------------- +4 | override def equals(any: Any): Boolean = any.isInstanceOf[PosInt] // error + | ^^^ + | the type test for PosInt cannot be checked at runtime because it's a local class diff --git a/tests/neg/15981.scala b/tests/neg/15981.scala new file mode 100644 index 000000000000..efbad2570e7b --- /dev/null +++ b/tests/neg/15981.scala @@ -0,0 +1,6 @@ +// scalac: -Werror +val _ = locally{ + sealed abstract class PosInt(val value: Int) { + override def equals(any: Any): Boolean = any.isInstanceOf[PosInt] // error + } +} diff --git a/tests/neg/cc-only-defs.scala b/tests/neg/cc-only-defs.scala index a9b480f9f590..43ac025f203a 100644 --- a/tests/neg/cc-only-defs.scala +++ b/tests/neg/cc-only-defs.scala @@ -5,7 +5,7 @@ trait Test { val z: *.type // error - val b: ImpureFunction1[Int, Int] // error + val b: ImpureFunction1[Int, Int] 
// now OK val a: {z} String // error } // error diff --git a/tests/neg/classOf.check b/tests/neg/classOf.check index c3873aff7391..e3be3ca17026 100644 --- a/tests/neg/classOf.check +++ b/tests/neg/classOf.check @@ -11,4 +11,4 @@ -- [E170] Type Error: tests/neg/classOf.scala:9:18 --------------------------------------------------------------------- 9 | val y = classOf[C { type I = String }] // error | ^^^^^^^^^^^^^^^^^^^^^ - | Test.C{I = String} is not a class type + | Test.C{type I = String} is not a class type diff --git a/tests/neg/closure-args.scala b/tests/neg/closure-args.scala index 3b166c81c61c..76e590ad28b9 100644 --- a/tests/neg/closure-args.scala +++ b/tests/neg/closure-args.scala @@ -1,4 +1,4 @@ -import language.experimental.fewerBraces +import language.`3.3` val x = List(1).map: (x: => Int) => // error ??? diff --git a/tests/neg/enum-values.check b/tests/neg/enum-values.check index 84df5889b500..37990e8f312e 100644 --- a/tests/neg/enum-values.check +++ b/tests/neg/enum-values.check @@ -6,7 +6,9 @@ | meaning a values array is not defined. | An extension method was tried, but could not be fully constructed: | - | example.Extensions.values(Tag) failed with + | example.Extensions.values(Tag) + | + | failed with: | | Found: example.Tag.type | Required: Nothing @@ -18,7 +20,9 @@ | meaning a values array is not defined. | An extension method was tried, but could not be fully constructed: | - | example.Extensions.values(ListLike) failed with + | example.Extensions.values(ListLike) + | + | failed with: | | Found: Array[example.Tag[?]] | Required: Array[example.ListLike[?]] @@ -30,7 +34,9 @@ | meaning a values array is not defined. 
| An extension method was tried, but could not be fully constructed: | - | example.Extensions.values(TypeCtorsK) failed with + | example.Extensions.values(TypeCtorsK) + | + | failed with: | | Found: Array[example.Tag[?]] | Required: Array[example.TypeCtorsK[?[_$1]]] @@ -63,7 +69,9 @@ | value values is not a member of object example.NotAnEnum. | An extension method was tried, but could not be fully constructed: | - | example.Extensions.values(NotAnEnum) failed with + | example.Extensions.values(NotAnEnum) + | + | failed with: | | Found: example.NotAnEnum.type | Required: Nothing diff --git a/tests/neg/experimentalInheritance.scala b/tests/neg/experimentalInheritance.scala deleted file mode 100644 index 8b6c0b11afa3..000000000000 --- a/tests/neg/experimentalInheritance.scala +++ /dev/null @@ -1,44 +0,0 @@ -import scala.annotation.experimental - -@experimental -class A - -@experimental -trait T - -class B extends A // error - -@experimental -class B2 extends A - -class C extends T // error - -@experimental -class C2 extends T - -@experimental -class O: - class X - - @experimental - class Y - - object Z - -@experimental -object O: - class A - - @experimental - class B - - object C - -class OA extends O.A // error -class OB extends O.B // error - -@experimental -class OA2 extends O.A - -@experimental -class OB2 extends O.B diff --git a/tests/neg/experimentalInheritance2.scala b/tests/neg/experimentalInheritance2.scala deleted file mode 100644 index 84668ac5850f..000000000000 --- a/tests/neg/experimentalInheritance2.scala +++ /dev/null @@ -1,6 +0,0 @@ -import scala.annotation.experimental - -@experimental class A - -class B // // error: extension of experimental class A1 must have @experimental annotation - extends A diff --git a/tests/neg/harmonize.scala b/tests/neg/harmonize.scala index 0fe03d2d7600..72275a8f68fc 100644 --- a/tests/neg/harmonize.scala +++ b/tests/neg/harmonize.scala @@ -79,9 +79,9 @@ object Test { val a4 = ArrayBuffer(1.0f, 1L) val b4: 
ArrayBuffer[Double] = a4 // error: no widening val a5 = ArrayBuffer(1.0f, 1L, f()) - val b5: ArrayBuffer[AnyVal] = a5 + val b5: ArrayBuffer[Float | Long | Int] = a5 val a6 = ArrayBuffer(1.0f, 1234567890) - val b6: ArrayBuffer[AnyVal] = a6 + val b6: ArrayBuffer[Float | Int] = a6 def totalDuration(results: List[Long], cond: Boolean): Long = results.map(r => if (cond) r else 0).sum diff --git a/tests/neg/i10715a.scala b/tests/neg/i10715a.scala new file mode 100644 index 000000000000..b5794c46d22c --- /dev/null +++ b/tests/neg/i10715a.scala @@ -0,0 +1,22 @@ +class Parent: + def f(x: Int): Parent = ??? + def f: Int = 0 + + def g[A](x: Int): Parent = ??? + def g[A]: Int = 0 + +class Sub extends Parent: + override def f(x: Int): Parent = ??? + override def g[A](x: Int): Parent = ??? + +def bad(c: Sub): Unit = + c.f: String // error + c.g: String // error + c.f.bad // error + c.g.bad // error + + c.f("") // error + c.g("") // error + c.g[Int]("") // error + c.g[Int]: (String => String) // error + c.g[Int]: (Int => Parent) // ok diff --git a/tests/neg/i10715b.scala b/tests/neg/i10715b.scala new file mode 100644 index 000000000000..922b80cf727b --- /dev/null +++ b/tests/neg/i10715b.scala @@ -0,0 +1,10 @@ +class Parent: + def f(x: Int): Unit = () + def f: Int = 0 + +class Sub extends Parent: + override def f(x: Int): Unit = () + def f(x: Int)(using String): Unit = () + +def bad(c: Sub): Unit = + c.f(1) // error: ambiguous overload diff --git a/tests/neg/i10901.check b/tests/neg/i10901.check index 7f506628798e..e055bed7dd3a 100644 --- a/tests/neg/i10901.check +++ b/tests/neg/i10901.check @@ -4,7 +4,9 @@ | value º is not a member of object BugExp4Point2D.IntT. | An extension method was tried, but could not be fully constructed: | - | º(x) failed with + | º(x) + | + | failed with: | | Ambiguous overload. 
The overloaded alternatives of method º in object dsl with types | [T1, T2] @@ -15,27 +17,31 @@ | (x: T1) | (y: BugExp4Point2D.ColumnType[T2]) | (implicit evidence$5: Numeric[T1], evidence$6: Numeric[T2]): BugExp4Point2D.Point2D[T1, T2] - | both match arguments ((x : BugExp4Point2D.IntT.type)) + | both match arguments ((x : BugExp4Point2D.IntT.type))((y : BugExp4Point2D.DoubleT.type)) -- [E008] Not Found Error: tests/neg/i10901.scala:48:38 ---------------------------------------------------------------- 48 | val pos4: Point2D[Int,Double] = x º 201.1 // error | ^^^ |value º is not a member of object BugExp4Point2D.IntT. |An extension method was tried, but could not be fully constructed: | - | º(x) failed with + | º(x) + | + | failed with: | | Ambiguous overload. The overloaded alternatives of method º in object dsl with types | [T1, T2] | (x: BugExp4Point2D.ColumnType[T1]) | (y: T2)(implicit evidence$9: Numeric[T1], evidence$10: Numeric[T2]): BugExp4Point2D.Point2D[T1, T2] | [T1, T2](x: T1)(y: T2)(implicit evidence$3: Numeric[T1], evidence$4: Numeric[T2]): BugExp4Point2D.Point2D[T1, T2] - | both match arguments ((x : BugExp4Point2D.IntT.type)) + | both match arguments ((x : BugExp4Point2D.IntT.type))((201.1d : Double)) -- [E008] Not Found Error: tests/neg/i10901.scala:62:16 ---------------------------------------------------------------- 62 | val y = "abc".foo // error | ^^^^^^^^^ | value foo is not a member of String. 
| An extension method was tried, but could not be fully constructed: | - | Test.foo("abc")(/* missing */summon[C]) failed with + | Test.foo("abc")(/* missing */summon[C]) + | + | failed with: | | No given instance of type C was found for parameter x$2 of method foo in object Test diff --git a/tests/neg/i10943.scala b/tests/neg/i10943.scala index 4a9697c31874..09a42ce66cc4 100644 --- a/tests/neg/i10943.scala +++ b/tests/neg/i10943.scala @@ -1,4 +1,4 @@ -import language.experimental.fewerBraces +import language.`3.3` object T: class A diff --git a/tests/neg/i10994.scala b/tests/neg/i10994.scala new file mode 100644 index 000000000000..ce5cb2cf3df9 --- /dev/null +++ b/tests/neg/i10994.scala @@ -0,0 +1,2 @@ +def foo = true match + case (b: Boolean): Boolean => () // error diff --git a/tests/neg/i13558.check b/tests/neg/i13558.check index 4c468a854781..ab10a42cdd32 100644 --- a/tests/neg/i13558.check +++ b/tests/neg/i13558.check @@ -4,7 +4,9 @@ | value id is not a member of testcode.A. | An extension method was tried, but could not be fully constructed: | - | testcode.ExtensionA.id(a) failed with + | testcode.ExtensionA.id(a) + | + | failed with: | | Reference to id is ambiguous, | it is both imported by import testcode.ExtensionB._ @@ -15,7 +17,9 @@ | value id is not a member of testcode.A. 
| An extension method was tried, but could not be fully constructed: | - | testcode.ExtensionB.id(a) failed with + | testcode.ExtensionB.id(a) + | + | failed with: | | Reference to id is ambiguous, | it is both imported by import testcode.ExtensionA._ diff --git a/tests/neg/i14025.check b/tests/neg/i14025.check index 3c67b954297b..804d45523deb 100644 --- a/tests/neg/i14025.check +++ b/tests/neg/i14025.check @@ -1,8 +1,8 @@ -- Error: tests/neg/i14025.scala:1:88 ---------------------------------------------------------------------------------- 1 |val foo = summon[deriving.Mirror.Product { type MirroredType = [X] =>> [Y] =>> (X, Y) }] // error | ^ - |No given instance of type deriving.Mirror.Product{MirroredType[X] = [Y] =>> (X, Y)} was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Product{MirroredType[X] = [Y] =>> (X, Y)}: type `[X] =>> [Y] =>> (X, Y)` is not a generic product because its subpart `[X] =>> [Y] =>> (X, Y)` is not a supported kind (either `*` or `* -> *`) + |No given instance of type deriving.Mirror.Product{type MirroredType[X] = [Y] =>> (X, Y)} was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Product{type MirroredType[X] = [Y] =>> (X, Y)}: type `[X] =>> [Y] =>> (X, Y)` is not a generic product because its subpart `[X] =>> [Y] =>> (X, Y)` is not a supported kind (either `*` or `* -> *`) -- Error: tests/neg/i14025.scala:2:90 ---------------------------------------------------------------------------------- 2 |val bar = summon[deriving.Mirror.Sum { type MirroredType = [X] =>> [Y] =>> List[(X, Y)] }] // error | ^ - |No given instance of type deriving.Mirror.Sum{MirroredType[X] = [Y] =>> List[(X, Y)]} was found for parameter x of method summon in object Predef. 
Failed to synthesize an instance of type deriving.Mirror.Sum{MirroredType[X] = [Y] =>> List[(X, Y)]}: type `[X] =>> [Y] =>> List[(X, Y)]` is not a generic sum because its subpart `[X] =>> [Y] =>> List[(X, Y)]` is not a supported kind (either `*` or `* -> *`) + |No given instance of type deriving.Mirror.Sum{type MirroredType[X] = [Y] =>> List[(X, Y)]} was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Sum{type MirroredType[X] = [Y] =>> List[(X, Y)]}: type `[X] =>> [Y] =>> List[(X, Y)]` is not a generic sum because its subpart `[X] =>> [Y] =>> List[(X, Y)]` is not a supported kind (either `*` or `* -> *`) diff --git a/tests/neg/i14127.check b/tests/neg/i14127.check index 969092401012..d00c44d39078 100644 --- a/tests/neg/i14127.check +++ b/tests/neg/i14127.check @@ -1,10 +1,8 @@ -- Error: tests/neg/i14127.scala:6:55 ---------------------------------------------------------------------------------- 6 | *: Int *: Int *: Int *: Int *: Int *: EmptyTuple)]] // error | ^ - |No given instance of type deriving.Mirror.Of[(Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, - | Int - |, Int, Int)] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[(Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, - | Int - |, Int, Int)]: + |No given instance of type deriving.Mirror.Of[(Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, + | Int, Int, Int)] was found for parameter x of method summon in object Predef. 
Failed to synthesize an instance of type deriving.Mirror.Of[(Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, + | Int, Int, Int)]: | * class *: is not a generic product because it reduces to a tuple with arity 23, expected arity <= 22 | * class *: is not a generic sum because it does not have subclasses diff --git a/tests/neg/i14432.check b/tests/neg/i14432.check index 793ade82212b..424d43bb119e 100644 --- a/tests/neg/i14432.check +++ b/tests/neg/i14432.check @@ -1,6 +1,6 @@ -- Error: tests/neg/i14432.scala:13:33 --------------------------------------------------------------------------------- 13 |val mFoo = summon[Mirror.Of[Foo]] // error: no mirror found | ^ - |No given instance of type deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[example.Foo]: + |No given instance of type deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[example.Foo]: | * class Foo is not a generic product because the constructor of class Foo is innaccessible from the calling scope. | * class Foo is not a generic sum because it is not a sealed class diff --git a/tests/neg/i14432a.check b/tests/neg/i14432a.check index 5f847ce30a38..4c975789507c 100644 --- a/tests/neg/i14432a.check +++ b/tests/neg/i14432a.check @@ -1,6 +1,6 @@ -- Error: tests/neg/i14432a.scala:14:43 -------------------------------------------------------------------------------- 14 | val mFoo = summon[Mirror.Of[example.Foo]] // error: no mirror found | ^ - |No given instance of type deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[example.Foo]: + |No given instance of type deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. 
Failed to synthesize an instance of type deriving.Mirror.Of[example.Foo]: | * class Foo is not a generic product because the constructor of class Foo is innaccessible from the calling scope. | * class Foo is not a generic sum because it is not a sealed class diff --git a/tests/neg/i14432b.check b/tests/neg/i14432b.check index 24cb04b731ca..1859beedd781 100644 --- a/tests/neg/i14432b.check +++ b/tests/neg/i14432b.check @@ -1,6 +1,6 @@ -- Error: tests/neg/i14432b.scala:15:43 -------------------------------------------------------------------------------- 15 | val mFoo = summon[Mirror.Of[example.Foo]] // error: no mirror found | ^ - |No given instance of type deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[example.Foo]: + |No given instance of type deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[example.Foo]: | * class Foo is not a generic product because the constructor of class Foo is innaccessible from the calling scope. | * class Foo is not a generic sum because it is not a sealed class diff --git a/tests/neg/i14432c.check b/tests/neg/i14432c.check index 5cbc89d97c50..e675656ad77f 100644 --- a/tests/neg/i14432c.check +++ b/tests/neg/i14432c.check @@ -1,14 +1,10 @@ -- Error: tests/neg/i14432c.scala:12:18 -------------------------------------------------------------------------------- -12 |class Bar extends example.Foo(23) { // error // error: cant access private[example] ctor +12 |class Bar extends example.Foo(23) { // error: cant access private[example] ctor | ^^^^^^^^^^^ | constructor Foo cannot be accessed as a member of example.Foo from class Bar. 
--- Error: tests/neg/i14432c.scala:12:6 --------------------------------------------------------------------------------- -12 |class Bar extends example.Foo(23) { // error // error: cant access private[example] ctor - | ^ - | constructor Foo cannot be accessed as a member of example.Foo from class Bar. -- Error: tests/neg/i14432c.scala:16:43 -------------------------------------------------------------------------------- 16 | val mFoo = summon[Mirror.Of[example.Foo]] // error: no mirror | ^ - |No given instance of type deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[example.Foo]: + |No given instance of type deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[example.Foo]: | * class Foo is not a generic product because the constructor of class Foo is innaccessible from the calling scope. | * class Foo is not a generic sum because it is not a sealed class diff --git a/tests/neg/i14432c.scala b/tests/neg/i14432c.scala index 1f023e53bd32..b1b9e1161f62 100644 --- a/tests/neg/i14432c.scala +++ b/tests/neg/i14432c.scala @@ -9,7 +9,7 @@ package example { } -class Bar extends example.Foo(23) { // error // error: cant access private[example] ctor +class Bar extends example.Foo(23) { // error: cant access private[example] ctor // however we can not provide an anonymous mirror // at this call site because the constructor is not accessible. 
diff --git a/tests/neg/i14432d.check b/tests/neg/i14432d.check index 0701fb02ea19..664c3f2073da 100644 --- a/tests/neg/i14432d.check +++ b/tests/neg/i14432d.check @@ -1,6 +1,6 @@ -- Error: tests/neg/i14432d.scala:17:45 -------------------------------------------------------------------------------- 17 | val mFoo = summon[Mirror.Of[example.Foo]] // error | ^ - |No given instance of type deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[example.Foo]: + |No given instance of type deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[example.Foo]: | * class Foo is not a generic product because the constructor of class Foo is innaccessible from the calling scope. | * class Foo is not a generic sum because it is not a sealed class diff --git a/tests/neg/i14823.check b/tests/neg/i14823.check index 4d5a64680882..b4662d60519c 100644 --- a/tests/neg/i14823.check +++ b/tests/neg/i14823.check @@ -1,6 +1,6 @@ -- Error: tests/neg/i14823.scala:8:50 ---------------------------------------------------------------------------------- 8 |val baz = summon[Mirror.Of[SubA[Int] | SubB[Int]]] // error | ^ - |No given instance of type deriving.Mirror.Of[SubA[Int] | SubB[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[SubA[Int] | SubB[Int]]: + |No given instance of type deriving.Mirror.Of[SubA[Int] | SubB[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[SubA[Int] | SubB[Int]]: | * type `SubA[Int] | SubB[Int]` is not a generic product because its subpart `SubA[Int] | SubB[Int]` is a top-level union type. | * type `SubA[Int] | SubB[Int]` is not a generic sum because its subpart `SubA[Int] | SubB[Int]` is a top-level union type. 
diff --git a/tests/neg/i14823a.check b/tests/neg/i14823a.check index 9c917548d9bf..644780067995 100644 --- a/tests/neg/i14823a.check +++ b/tests/neg/i14823a.check @@ -1,24 +1,24 @@ -- Error: tests/neg/i14823a.scala:16:48 -------------------------------------------------------------------------------- 16 |val foo = summon[Mirror.Of[Box[Int] | Box[Int]]] // error | ^ - |No given instance of type deriving.Mirror.Of[Box[Int] | Box[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Box[Int] | Box[Int]]: + |No given instance of type deriving.Mirror.Of[Box[Int] | Box[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Box[Int] | Box[Int]]: | * type `Box[Int] | Box[Int]` is not a generic product because its subpart `Box[Int] | Box[Int]` is a top-level union type. | * type `Box[Int] | Box[Int]` is not a generic sum because its subpart `Box[Int] | Box[Int]` is a top-level union type. -- Error: tests/neg/i14823a.scala:17:58 -------------------------------------------------------------------------------- 17 |val bar = summon[MirrorK1.Of[[X] =>> Box[Int] | Box[Int]]] // error | ^ - |No given instance of type MirrorK1.Of[[X] =>> Box[Int] | Box[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type MirrorK1.Of[[X] =>> Box[Int] | Box[Int]]: + |No given instance of type MirrorK1.Of[[X] =>> Box[Int] | Box[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type MirrorK1.Of[[X] =>> Box[Int] | Box[Int]]: | * type `[A] =>> Box[Int] | Box[Int]` is not a generic product because its subpart `Box[Int] | Box[Int]` is a top-level union type. | * type `[A] =>> Box[Int] | Box[Int]` is not a generic sum because its subpart `Box[Int] | Box[Int]` is a top-level union type. 
-- Error: tests/neg/i14823a.scala:18:63 -------------------------------------------------------------------------------- 18 |def baz = summon[deriving.Mirror.Of[Foo[String] | Foo[String]]] // error | ^ - |No given instance of type deriving.Mirror.Of[Foo[String] | Foo[String]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Foo[String] | Foo[String]]: + |No given instance of type deriving.Mirror.Of[Foo[String] | Foo[String]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Foo[String] | Foo[String]]: | * type `Foo[String] | Foo[String]` is not a generic product because its subpart `Foo[String] | Foo[String]` is a top-level union type. | * type `Foo[String] | Foo[String]` is not a generic sum because its subpart `Foo[String] | Foo[String]` is a top-level union type. -- Error: tests/neg/i14823a.scala:20:66 -------------------------------------------------------------------------------- 20 |def qux = summon[deriving.Mirror.Of[Option[Int] | Option[String]]] // error | ^ - |No given instance of type deriving.Mirror.Of[Option[Int] | Option[String]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Option[Int] | Option[String]]: + |No given instance of type deriving.Mirror.Of[Option[Int] | Option[String]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Option[Int] | Option[String]]: | * type `Option[Int] | Option[String]` is not a generic product because its subpart `Option[Int] | Option[String]` is a top-level union type. | * type `Option[Int] | Option[String]` is not a generic sum because its subpart `Option[Int] | Option[String]` is a top-level union type. 
diff --git a/tests/neg/i15000.check b/tests/neg/i15000.check index c63866993103..64c222b2a52e 100644 --- a/tests/neg/i15000.check +++ b/tests/neg/i15000.check @@ -16,9 +16,11 @@ |value apply is not a member of object ExtensionMethodReproduction.c. |An extension method was tried, but could not be fully constructed: | - | apply(ExtensionMethodReproduction.c) failed with + | apply(ExtensionMethodReproduction.c) + | + | failed with: | | Ambiguous overload. The overloaded alternatives of method apply in object ExtensionMethodReproduction with types | (c: ExtensionMethodReproduction.C)(x: Int, y: Int): String | (c: ExtensionMethodReproduction.C)(x: Int, y: String): String - | both match arguments (ExtensionMethodReproduction.c.type) + | both match arguments (ExtensionMethodReproduction.c.type)((ExtensionMethodReproduction.x : Int), ) diff --git a/tests/neg/i15287.check b/tests/neg/i15287.check new file mode 100644 index 000000000000..558916cea437 --- /dev/null +++ b/tests/neg/i15287.check @@ -0,0 +1,7 @@ +-- [E134] Type Error: tests/neg/i15287.scala:4:19 ---------------------------------------------------------------------- +4 |@main def Test() = f('c')(2) // error + | ^ + | None of the overloaded alternatives of method f with types + | (x: Char)(min: Boolean, max: Int): String + | (x: Char)(someParam: String): String + | match arguments (('c' : Char))((2 : Int)) diff --git a/tests/neg/i15287.scala b/tests/neg/i15287.scala new file mode 100644 index 000000000000..10dc587b8a17 --- /dev/null +++ b/tests/neg/i15287.scala @@ -0,0 +1,4 @@ +def f(x: Char)(someParam: String): String = "a" +def f(x: Char)(min: Boolean, max: Int = 4): String = "b" + +@main def Test() = f('c')(2) // error diff --git a/tests/neg/i15381.check b/tests/neg/i15381.check new file mode 100644 index 000000000000..ec273ea8c48a --- /dev/null +++ b/tests/neg/i15381.check @@ -0,0 +1,7 @@ +-- [E007] Type Mismatch Error: tests/neg/i15381.scala:5:13 ------------------------------------------------------------- 
+5 |def g: Int = $ // error + | ^ + | Found: $.type + | Required: Int + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i15381.scala b/tests/neg/i15381.scala new file mode 100644 index 000000000000..c3e1cc771521 --- /dev/null +++ b/tests/neg/i15381.scala @@ -0,0 +1,11 @@ +// scalac: -Vprint:parser + +case class $[A](value: A) + +def g: Int = $ // error + +/* +was: + | Found: .type + | Required: Int + */ diff --git a/tests/neg/i15893.scala b/tests/neg/i15893.scala new file mode 100644 index 000000000000..997c51179099 --- /dev/null +++ b/tests/neg/i15893.scala @@ -0,0 +1,61 @@ +sealed trait NatT +case class Zero() extends NatT +case class Succ[+N <: NatT](n: N) extends NatT + +type Mod2[N <: NatT] <: NatT = N match + case Zero => Zero + case Succ[Zero] => Succ[Zero] + case Succ[Succ[predPredN]] => Mod2[predPredN] + +def mod2(n: NatT): NatT = n match + case Zero() => Zero() + case Succ(Zero()) => Succ(Zero()) + case Succ(Succ(predPredN)) => mod2(predPredN) + +inline def inlineMod2(inline n: NatT): NatT = inline n match + case Zero() => Zero() + case Succ(Zero()) => Succ(Zero()) + case Succ(Succ(predPredN)) => inlineMod2(predPredN) + +transparent inline def transparentInlineMod2(inline n: NatT): NatT = inline n match + case Zero() => Zero() + case Succ(Zero()) => Succ(Zero()) + case Succ(Succ(predPredN)) => transparentInlineMod2(predPredN) + +def dependentlyTypedMod2[N <: NatT](n: N): Mod2[N] = n match // exhaustivity warning; unexpected + case Zero(): Zero => Zero() // error + case Succ(Zero()): Succ[Zero] => Succ(Zero()) // error + case Succ(Succ(predPredN)): Succ[Succ[_]] => dependentlyTypedMod2(predPredN) // error + +inline def inlineDependentlyTypedMod2[N <: NatT](inline n: N): Mod2[N] = inline n match + case Zero(): Zero => Zero() // error + case Succ(Zero()): Succ[Zero] => Succ(Zero()) // error + case Succ(Succ(predPredN)): Succ[Succ[_]] => inlineDependentlyTypedMod2(predPredN) // error + +transparent inline def 
transparentInlineDependentlyTypedMod2[N <: NatT](inline n: N): Mod2[N] = inline n match + case Zero(): Zero => Zero() // error + case Succ(Zero()): Succ[Zero] => Succ(Zero()) // error + case Succ(Succ(predPredN)): Succ[Succ[_]] => transparentInlineDependentlyTypedMod2(predPredN) // error + +def foo(n: NatT): NatT = mod2(n) match + case Succ(Zero()) => Zero() + case _ => n + +inline def inlineFoo(inline n: NatT): NatT = inline inlineMod2(n) match + case Succ(Zero()) => Zero() + case _ => n + +inline def transparentInlineFoo(inline n: NatT): NatT = inline transparentInlineMod2(n) match + case Succ(Zero()) => Zero() + case _ => n + +@main def main(): Unit = + println(mod2(Succ(Succ(Succ(Zero()))))) // prints Succ(Zero()), as expected + println(foo(Succ(Succ(Succ(Zero()))))) // prints Zero(), as expected + println(inlineMod2(Succ(Succ(Succ(Zero()))))) // prints Succ(Zero()), as expected + println(inlineFoo(Succ(Succ(Succ(Zero()))))) // prints Succ(Succ(Succ(Zero()))); unexpected + println(transparentInlineMod2(Succ(Succ(Succ(Zero()))))) // prints Succ(Zero()), as expected + println(transparentInlineFoo(Succ(Succ(Succ(Zero()))))) // prints Zero(), as expected + println(dependentlyTypedMod2(Succ(Succ(Succ(Zero()))))) // runtime error; unexpected +// println(inlineDependentlyTypedMod2(Succ(Succ(Succ(Zero()))))) // doesn't compile; unexpected +// println(transparentInlineDependentlyTypedMod2(Succ(Succ(Succ(Zero()))))) // doesn't compile; unexpected diff --git a/tests/neg/i15897.scala b/tests/neg/i15897.scala new file mode 100644 index 000000000000..d9dbe94c4eef --- /dev/null +++ b/tests/neg/i15897.scala @@ -0,0 +1,8 @@ +object O { + class AC(code: => Unit) + + val action = new AC({mode = ???}) {} // error + + def mode: AnyRef = ??? 
+ def mode=(em: AnyRef): Unit = {} // error // error // error +} \ No newline at end of file diff --git a/tests/neg/i15898.scala b/tests/neg/i15898.scala new file mode 100644 index 000000000000..374fc49600eb --- /dev/null +++ b/tests/neg/i15898.scala @@ -0,0 +1,22 @@ +object O { + class AC(code: => Unit) { + def apply() = code + + def this(code: => Unit, key: Int = 1, modifiers: Int = 0) = { + this(code) + } + } + + class Doc { + def method: Boolean = true + } + + val doc = new Doc + + val ac = new AC(doc.method) // error + + def foo[T](code: => Unit): Unit = () + def foo[T](code: => Unit, key: Int = 1, modifiers: Int = 0): Unit = foo(code) + foo(doc.method) // error + foo[Int](doc.method) // error +} \ No newline at end of file diff --git a/tests/neg/i15939.check b/tests/neg/i15939.check new file mode 100644 index 000000000000..bc1ce15b8c76 --- /dev/null +++ b/tests/neg/i15939.check @@ -0,0 +1,45 @@ +-- [E007] Type Mismatch Error: tests/neg/i15939.scala:19:16 ------------------------------------------------------------ +19 | mkFoo.andThen(mkBarString) // error + | ^^^^^^^^^^^ + | Found: String + | Required: ?1.Bar + | + | where: ?1 is an unknown value of type Test.Foo + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/i15939.scala:20:2 ------------------------------------------------------------- +20 | mkBarString andThen_: mkFoo // error + | ^^^^^^^^^^^ + | Found: String + | Required: ?2.Bar + | + | where: ?2 is an unknown value of type Test.Foo + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/i15939.scala:21:18 ------------------------------------------------------------ +21 | mkFoo.andThen_:(mkBarString) // error + | ^^^^^^^^^^^ + | Found: String + | Required: ?3.Bar + | + | where: ?3 is an unknown value of type Test.Foo + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: 
tests/neg/i15939.scala:22:2 ------------------------------------------------------------- +22 | mkBarString andThenByName_: mkFoo // error + | ^^^^^^^^^^^ + | Found: String + | Required: ?4.Bar + | + | where: ?4 is an unknown value of type Test.Foo + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/i15939.scala:23:24 ------------------------------------------------------------ +23 | mkFoo.andThenByName_:(mkBarString) // error + | ^^^^^^^^^^^ + | Found: String + | Required: ?5.Bar + | + | where: ?5 is an unknown value of type Test.Foo + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i15939.scala b/tests/neg/i15939.scala new file mode 100644 index 000000000000..4307d8107e16 --- /dev/null +++ b/tests/neg/i15939.scala @@ -0,0 +1,24 @@ +import scala.language.implicitConversions + +object Test { + class Foo { + class Bar { + override def toString() = "bar" + } + object Bar { + implicit def fromString(a: String): Bar = { println("convert bar") ; new Bar } + } + + def andThen(b: Bar): Unit = { println("pre") ; println(s"use $b") ; println("post") } + def andThen_:(b: Bar) = { println("pre") ; println(s"use $b") ; println("post") } + def andThenByName_:(b: => Bar) = { println("pre") ; println(s"use $b") ; println(s"use $b") ; println("post") } + } + + def mkFoo: Foo = ??? + def mkBarString: String = ??? 
+ mkFoo.andThen(mkBarString) // error + mkBarString andThen_: mkFoo // error + mkFoo.andThen_:(mkBarString) // error + mkBarString andThenByName_: mkFoo // error + mkFoo.andThenByName_:(mkBarString) // error +} \ No newline at end of file diff --git a/tests/neg/i15991.abstract.scala b/tests/neg/i15991.abstract.scala new file mode 100644 index 000000000000..aa974e487080 --- /dev/null +++ b/tests/neg/i15991.abstract.scala @@ -0,0 +1,20 @@ +object Foo: + def unapply[T <: Tuple](tup: T): String *: String *: T = + "a" *: "b" *: tup + +// like {pos,neg}/i15991, but with an abstract tuple tail +class Test: + val tup2: String *: String *: EmptyTuple = ("c", "d") + + def test3 = + val Foo(x, y, z) = tup2 // error: Wrong number of argument patterns for Foo; expected: (String, String, String, String) + x + y + z + + def test3a = + val x1x = tup2 match + case Foo(x, y, z) => // error: Wrong number of argument patterns for Foo; expected: (String, String, String, String) + (x, y, z) + val x = x1x._1 + val y = x1x._2 + val z = x1x._3 + x + y + z diff --git a/tests/neg/i15991.scala b/tests/neg/i15991.scala new file mode 100644 index 000000000000..9cd6b1c1d27e --- /dev/null +++ b/tests/neg/i15991.scala @@ -0,0 +1,7 @@ +object Foo: + def unapply(x: Any): String *: String *: EmptyTuple = ("a", "b") + +class Test: + def test = + val Foo(x, y, z) = 1 // error: Wrong number of argument patterns for Foo; expected: (String, String) + x + y + z diff --git a/tests/neg/i16035.scala b/tests/neg/i16035.scala new file mode 100644 index 000000000000..6cf53df7abca --- /dev/null +++ b/tests/neg/i16035.scala @@ -0,0 +1,12 @@ +object Scope: + opaque type Uses[A, B] = A ?=> B // error + opaque type UsesAlt = [A, B] =>> A ?=> B // error + + object Uses: + def apply[A, B](fn: A ?=> B): Uses[A, B] = fn + +import Scope.* +val uses = + given Int = 1 + Uses[Int, String](i ?=> s"*$i*") + diff --git a/tests/neg/i16035a.scala b/tests/neg/i16035a.scala new file mode 100644 index 000000000000..6d003dadcbe9 --- 
/dev/null +++ b/tests/neg/i16035a.scala @@ -0,0 +1,14 @@ +trait S: + type Uses[A, B] <: A ?=> B + object Uses: + def apply[A, B](fn: A ?=> B): Uses[A, B] = fn // error + val uses1 = + given Int = 1 + Uses[Int, String](i ?=> s"*$i*") + +object I extends S: + type Uses[A, B] = A ?=> B + val uses2 = + given Int = 1 + Uses[Int, String](i ?=> s"*$i*") + diff --git a/tests/neg/i16092-members-only.scala b/tests/neg/i16092-members-only.scala new file mode 100644 index 000000000000..d0161931628a --- /dev/null +++ b/tests/neg/i16092-members-only.scala @@ -0,0 +1,31 @@ +trait X: + type T + def process(t: T): Unit + +abstract class Z: + def x1: X + val x: X = x1 + def t: x.T + def process(): Unit = x.process(t) + +class Evil extends Z: + def x2: X + override val x: X = x2 + +// alarm bells should be ringing by now + +// taking it to its conclusion... +object X1 extends X: + override type T = Int + override def process(t: T): Unit = println("Int: " + t) + +object X2 extends X: + override type T = String + override def process(t: T): Unit = println("String: " + t) + +@main def Test = + new Evil{ + val x1 = X1 + val x2 = X2 + val t = 42 // error + }.process() // BOOM: basically did x2.process(42) diff --git a/tests/neg/i16095.scala b/tests/neg/i16095.scala new file mode 100644 index 000000000000..1d8085cec53f --- /dev/null +++ b/tests/neg/i16095.scala @@ -0,0 +1,16 @@ +package x + +import scala.concurrent.* + +def cpsAsync[F[_]] = + Test.InfernAsyncArg + +object Test { + class InfernAsyncArg[F[_]] { + def apply[A](): F[A] = ??? 
+ } + object InfernAsyncArg + + def testExample1Future(): Unit = + val fr = cpsAsync[Future]() // error +} \ No newline at end of file diff --git a/tests/neg/i16130.check b/tests/neg/i16130.check new file mode 100644 index 000000000000..7ee7a821a32b --- /dev/null +++ b/tests/neg/i16130.check @@ -0,0 +1,4 @@ +-- [E171] Type Error: tests/neg/i16130.scala:3:16 ---------------------------------------------------------------------- +3 | val foo = new Foo {} // error + | ^^^ + | missing argument for parameter x of constructor Foo in trait Foo: (x: Int): Foo diff --git a/tests/neg/i16130.scala b/tests/neg/i16130.scala new file mode 100644 index 000000000000..ce7ba8e18b3d --- /dev/null +++ b/tests/neg/i16130.scala @@ -0,0 +1,3 @@ +@main def ParameterizedTypeLacksArgumentsID = + trait Foo(x: Int) + val foo = new Foo {} // error diff --git a/tests/neg/i16146.scala b/tests/neg/i16146.scala new file mode 100644 index 000000000000..6aed6b263ce4 --- /dev/null +++ b/tests/neg/i16146.scala @@ -0,0 +1,3 @@ + +type N = [X] => (X => X) => X => X +val exp = (a: N) => (b: N) => b(a) // error diff --git a/tests/neg/i4373b.scala b/tests/neg/i4373b.scala index 45b60a46c721..93d967ef7778 100644 --- a/tests/neg/i4373b.scala +++ b/tests/neg/i4373b.scala @@ -1,5 +1,5 @@ // ==> 05bef7805687ba94da37177f7568e3ba7da1f91c.scala <== class x0 { - x1: - x0 | _ // error -// error \ No newline at end of file + x1: // error + x0 | _ + // error \ No newline at end of file diff --git a/tests/neg/i4812.check b/tests/neg/i4812.check index 6914a2f515de..275cda56defe 100644 --- a/tests/neg/i4812.check +++ b/tests/neg/i4812.check @@ -1,28 +1,28 @@ -- Error: tests/neg/i4812.scala:8:11 ----------------------------------------------------------------------------------- 8 | case prev: A => // error: the type test for A cannot be checked at runtime | ^ - | the type test for A cannot be checked at runtime + | the type test for A cannot be checked at runtime because it's a local class -- Error: 
tests/neg/i4812.scala:18:11 ---------------------------------------------------------------------------------- 18 | case prev: A => // error: the type test for A cannot be checked at runtime | ^ - | the type test for A cannot be checked at runtime + | the type test for A cannot be checked at runtime because it's a local class -- Error: tests/neg/i4812.scala:28:11 ---------------------------------------------------------------------------------- 28 | case prev: A => // error: the type test for A cannot be checked at runtime | ^ - | the type test for A cannot be checked at runtime + | the type test for A cannot be checked at runtime because it's a local class -- Error: tests/neg/i4812.scala:38:11 ---------------------------------------------------------------------------------- 38 | case prev: A => // error: the type test for A cannot be checked at runtime | ^ - | the type test for A cannot be checked at runtime + | the type test for A cannot be checked at runtime because it's a local class -- Error: tests/neg/i4812.scala:50:13 ---------------------------------------------------------------------------------- 50 | case prev: A => // error: the type test for A cannot be checked at runtime | ^ - | the type test for A cannot be checked at runtime + | the type test for A cannot be checked at runtime because it's a local class -- Error: tests/neg/i4812.scala:60:11 ---------------------------------------------------------------------------------- 60 | case prev: A => // error: the type test for A cannot be checked at runtime | ^ - | the type test for A cannot be checked at runtime + | the type test for A cannot be checked at runtime because it's a local class -- Error: tests/neg/i4812.scala:96:11 ---------------------------------------------------------------------------------- 96 | case x: B => // error: the type test for B cannot be checked at runtime | ^ - | the type test for B cannot be checked at runtime + | the type test for B cannot be checked at runtime because 
it's a local class diff --git a/tests/neg/i4986c.check b/tests/neg/i4986c.check index a5fe0cee26bf..30f746e5af7f 100644 --- a/tests/neg/i4986c.check +++ b/tests/neg/i4986c.check @@ -25,11 +25,11 @@ -- Error: tests/neg/i4986c.scala:45:87 --------------------------------------------------------------------------------- 45 | implicitly[Outer[Option[String] | List[Iterable[Char]]] { type MyType = BigDecimal }] // error | ^ - |Missing Outer[Option[String] | List[Iterable[Char]]] with OuterMember = pkg.Outer[Option[String] | List[Iterable[Char]]]{MyType = BigDecimal}#OuterMember + |Missing Outer[Option[String] | List[Iterable[Char]]] with OuterMember = pkg.Outer[Option[String] | List[Iterable[Char]]]{type MyType = BigDecimal}#OuterMember -- Error: tests/neg/i4986c.scala:46:106 -------------------------------------------------------------------------------- 46 | implicitly[(Outer[Option[String] | List[Iterable[Char]]] { type MyType = BigDecimal })#Inner[Byte, Seq]] // error | ^ - |Missing Inner[Byte, Seq] with InnerMember = pkg.Outer[Option[String] | List[Iterable[Char]]]{MyType = BigDecimal}#Inner[Byte, Seq]#InnerMember from Outer[Option[String] | List[Iterable[Char]]] with OuterMember = pkg.Outer[Option[String] | List[Iterable[Char]]]{MyType = BigDecimal}#OuterMember + |Missing Inner[Byte, Seq] with InnerMember = pkg.Outer[Option[String] | List[Iterable[Char]]]{type MyType = BigDecimal}#Inner[Byte, Seq]#InnerMember from Outer[Option[String] | List[Iterable[Char]]] with OuterMember = pkg.Outer[Option[String] | List[Iterable[Char]]]{type MyType = BigDecimal}#OuterMember -- Error: tests/neg/i4986c.scala:47:33 --------------------------------------------------------------------------------- 47 | implicitly[Outer[Int] @myAnnot] // error | ^ diff --git a/tests/neg/i5525.scala b/tests/neg/i5525.scala index ceec2c90173f..12ffb4704ba9 100644 --- a/tests/neg/i5525.scala +++ b/tests/neg/i5525.scala @@ -29,6 +29,6 @@ enum Foo11 { protected case C9 // ok } -enum Foo12 { +enum Foo12 
{ // error: Enumerations must contain at least one case inline case C10() // error: only access modifiers allowed } \ No newline at end of file diff --git a/tests/pending/neg/i5690.scala b/tests/neg/i5690.scala similarity index 100% rename from tests/pending/neg/i5690.scala rename to tests/neg/i5690.scala diff --git a/tests/neg/i6183.check b/tests/neg/i6183.check index 70c1afaae621..6c7e96f1088a 100644 --- a/tests/neg/i6183.check +++ b/tests/neg/i6183.check @@ -4,7 +4,9 @@ | value render is not a member of Int. | An extension method was tried, but could not be fully constructed: | - | render(42) failed with + | render(42) + | + | failed with: | | Ambiguous overload. The overloaded alternatives of method render in object Test with types | [B](b: B)(using x$2: DummyImplicit): Char diff --git a/tests/neg/i6778.check b/tests/neg/i6778.check index bec4f57b5ee7..2287811ee361 100644 --- a/tests/neg/i6778.check +++ b/tests/neg/i6778.check @@ -1,6 +1,10 @@ -- [E104] Syntax Error: tests/neg/i6778.scala:3:27 --------------------------------------------------------------------- -3 |class Bar extends Foo with A(10) // error +3 |class Bar extends Foo with A(10) // error // error | ^^^^^ | class A is not a trait | | longer explanation available when compiling with `-explain` +-- [E171] Type Error: tests/neg/i6778.scala:3:6 ------------------------------------------------------------------------ +3 |class Bar extends Foo with A(10) // error // error + | ^ + | missing argument for parameter x of constructor A in class A: (x: Int): A diff --git a/tests/neg/i6778.scala b/tests/neg/i6778.scala index e3cb8dd6507e..0fde37c91129 100644 --- a/tests/neg/i6778.scala +++ b/tests/neg/i6778.scala @@ -1,3 +1,3 @@ trait Foo class A(x: Int) -class Bar extends Foo with A(10) // error +class Bar extends Foo with A(10) // error // error diff --git a/tests/neg/i6779.check b/tests/neg/i6779.check index d895203221ec..8e05c22eb640 100644 --- a/tests/neg/i6779.check +++ b/tests/neg/i6779.check @@ -11,7 
+11,9 @@ | value f is not a member of T. | An extension method was tried, but could not be fully constructed: | - | Test.f[G[T]](x)(given_Stuff) failed with + | Test.f[G[T]](x)(given_Stuff) + | + | failed with: | | Found: (x : T) | Required: G[T] diff --git a/tests/neg/i7613.check b/tests/neg/i7613.check index d0d4de1aeda1..85d73b5c88f3 100644 --- a/tests/neg/i7613.check +++ b/tests/neg/i7613.check @@ -1,8 +1,4 @@ -- Error: tests/neg/i7613.scala:10:16 ---------------------------------------------------------------------------------- -10 | new BazLaws[A] {} // error // error +10 | new BazLaws[A] {} // error | ^ | No given instance of type Baz[A] was found for parameter x$1 of constructor BazLaws in trait BazLaws --- Error: tests/neg/i7613.scala:10:2 ----------------------------------------------------------------------------------- -10 | new BazLaws[A] {} // error // error - | ^ - | No given instance of type Bar[A] was found for parameter x$1 of constructor BarLaws in trait BarLaws diff --git a/tests/neg/i7613.scala b/tests/neg/i7613.scala index f50700d94219..3d5fb58db085 100644 --- a/tests/neg/i7613.scala +++ b/tests/neg/i7613.scala @@ -7,5 +7,4 @@ trait BarLaws[A](using Bar[A]) extends FooLaws[A] trait BazLaws[A](using Baz[A]) extends BarLaws[A] def instance[A](using Foo[A]): BazLaws[A] = - new BazLaws[A] {} // error // error - + new BazLaws[A] {} // error diff --git a/tests/neg/i7751.scala b/tests/neg/i7751.scala index 4c835a533704..978ed860574f 100644 --- a/tests/neg/i7751.scala +++ b/tests/neg/i7751.scala @@ -1,3 +1,3 @@ -import language.experimental.fewerBraces +import language.`3.3` val a = Some(a=a,)=> // error // error val a = Some(x=y,)=> diff --git a/tests/neg/i8984.check b/tests/neg/i8984.check new file mode 100644 index 000000000000..b66cee1459d6 --- /dev/null +++ b/tests/neg/i8984.check @@ -0,0 +1,9 @@ +-- [E007] Type Mismatch Error: tests/neg/i8984.scala:36:52 ------------------------------------------------------------- +36 | case Cat(_, "red", rest) 
=> dropRedCats(rest) // error + | ^^^^ + | Found: (rest : Any) + | Required: Fix.T[F] + | + | where: F is a type in method dropRedCats with bounds >: Cat and <: [a] =>> Any + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i8984.scala b/tests/neg/i8984.scala new file mode 100644 index 000000000000..ff798d803ba3 --- /dev/null +++ b/tests/neg/i8984.scala @@ -0,0 +1,61 @@ +import scala.annotation.tailrec +type |@[F[+_], G[+_]] = [a] =>> F[a] | G[a] + +object Fix: + opaque type T[+F[+_]] = ApplyFix.T[F] + + def apply[F[+_]](f: F[Fix[F]]): T[F] = ApplyFix(f) + + extension [F[+_]](fix: T[F]) + def value: F[Fix[F]] = ApplyFix.unwrap(fix) + + object ApplyFix: + opaque type T[+F[+_]] = F[Fix[F]] + + def apply[F[+_]](f: F[Fix[F]]): T[F] = f + + def unwrap[F[+_]](v: T[F]): F[Fix[F]] = v + +type Fix[+F[+_]] = Fix.T[F] + +final case class Cat[+R](name: String, fur: String, rest: R) +object Cat: + def of[R, F[+_]](name: String, fur: String, rest: Fix[F]): Fix[F |@ Cat] = Fix(new Cat(name, fur, rest)) + +final case class Dog[+R](name: String, size: Long, rest: R) +object Dog: + def of[R, F[+_]](name: String, size: Long, rest: Fix[F]): Fix[F |@ Dog] = Fix(new Dog(name, size, rest)) + +case object End: + type f[+a] = End.type + def apply() = Fix[f](End) + +object DropRed: + @tailrec def dropRedCats[F[+a] >: Cat[a]](cats: Fix[F]): Fix[F] = + cats.value match + case Cat(_, "red", rest) => dropRedCats(rest) // error + case _ => cats + + type CatDogVector = Vector[Either[Cat[Unit], Dog[Unit]]] + type CatOrDogs[+a] = Cat[a] | Dog[a] | End.type + + extension (catDogs: Fix[CatOrDogs]) def toVector : CatDogVector = + @tailrec def go(acc: CatDogVector, catDogs: Fix[CatOrDogs]) : CatDogVector = catDogs.value match + case Cat(name, fur, rest) => go(acc :+ Left(Cat(name, fur, ())), rest) + case Dog(name, size, rest) => go(acc :+ Right(Dog(name, size, ())), rest) + case End => acc + + go(Vector(), catDogs) + + val x = + Cat.of("lilly" , "red" , + 
Cat.of("anya" , "red" , + Cat.of("boris" , "black", + Dog.of("mashka", 3 , + Cat.of("manya" , "red" , + End()))))) + + + def main(args: Array[String]) = + println(x.toVector) + println(dropRedCats(x).toVector) diff --git a/tests/neg/i9185.check b/tests/neg/i9185.check index ffeed7e2fb2d..22751a3095ae 100644 --- a/tests/neg/i9185.check +++ b/tests/neg/i9185.check @@ -5,8 +5,9 @@ |An extension method was tried, but could not be fully constructed: | | M.pure[A, F]("ola")( - | /* ambiguous: both object listMonad in object M and object optionMonad in object M match type M[F] */summon[M[F]] - | ) failed with + | /* ambiguous: both object listMonad in object M and object optionMonad in object M match type M[F] */summon[M[F]]) + | + | failed with: | | Ambiguous given instances: both object listMonad in object M and object optionMonad in object M match type M[F] of parameter m of method pure in object M -- Error: tests/neg/i9185.scala:8:28 ----------------------------------------------------------------------------------- @@ -19,7 +20,9 @@ | value len is not a member of String. 
| An extension method was tried, but could not be fully constructed: | - | M.len("abc") failed with + | M.len("abc") + | + | failed with: | | Found: ("abc" : String) | Required: Int diff --git a/tests/neg/i9436.check b/tests/neg/i9436.check index b5f9b5c470b1..d8e87fb54930 100644 --- a/tests/neg/i9436.check +++ b/tests/neg/i9436.check @@ -4,7 +4,7 @@ | method f1 must be called with () argument | | longer explanation available when compiling with `-explain` --- Error: tests/neg/i9436.scala:9:14 ----------------------------------------------------------------------------------- +-- [E171] Type Error: tests/neg/i9436.scala:9:14 ----------------------------------------------------------------------- 9 | println(x.f2(1)) // error | ^^^^^^^ | missing argument for parameter y of method f2: (x: Int, y: Int): Int diff --git a/tests/neg/i9460.scala b/tests/neg/i9460.scala index 9cc08bf2ad4d..2290b07a9759 100644 --- a/tests/neg/i9460.scala +++ b/tests/neg/i9460.scala @@ -1,4 +1,4 @@ -trait A(val s: String) { println(s) } -trait B extends A { override val s = "B" } // requires override val s +trait A(s: String) { println(s) } +trait B extends A { val s = "B" } class C extends B // error @main def Test = C() diff --git a/tests/neg/indent-colons.check b/tests/neg/indent-colons.check index 06bd7a31b079..102d41592014 100644 --- a/tests/neg/indent-colons.check +++ b/tests/neg/indent-colons.check @@ -1,29 +1,29 @@ --- Error: tests/neg/indent-colons.scala:6:4 ---------------------------------------------------------------------------- -6 | : // error +-- Error: tests/neg/indent-colons.scala:7:4 ---------------------------------------------------------------------------- +7 | : // error | ^ | end of statement expected but ':' found --- Error: tests/neg/indent-colons.scala:12:2 --------------------------------------------------------------------------- -12 | : // error +-- Error: tests/neg/indent-colons.scala:13:2 
--------------------------------------------------------------------------- +13 | : // error | ^ | end of statement expected but ':' found --- Error: tests/neg/indent-colons.scala:19:2 --------------------------------------------------------------------------- -19 | : // error +-- Error: tests/neg/indent-colons.scala:20:2 --------------------------------------------------------------------------- +20 | : // error | ^ | end of statement expected but ':' found --- [E018] Syntax Error: tests/neg/indent-colons.scala:26:14 ------------------------------------------------------------ -26 | val y = 1 + : // error +-- [E018] Syntax Error: tests/neg/indent-colons.scala:27:14 ------------------------------------------------------------ +27 | val y = 1 + : // error | ^ | expression expected but : found | | longer explanation available when compiling with `-explain` --- [E018] Syntax Error: tests/neg/indent-colons.scala:30:27 ------------------------------------------------------------ -30 | val all = credentials ++ : // error +-- [E018] Syntax Error: tests/neg/indent-colons.scala:31:27 ------------------------------------------------------------ +31 | val all = credentials ++ : // error | ^ | expression expected but : found | | longer explanation available when compiling with `-explain` --- [E134] Type Error: tests/neg/indent-colons.scala:23:12 -------------------------------------------------------------- -23 | val x = 1.+ : // error +-- [E134] Type Error: tests/neg/indent-colons.scala:24:12 -------------------------------------------------------------- +24 | val x = 1.+ : // error | ^^^ | None of the overloaded alternatives of method + in class Int with types | (x: Double): Double @@ -35,27 +35,27 @@ | (x: Byte): Int | (x: String): String | match expected type (2 : Int) --- [E006] Not Found Error: tests/neg/indent-colons.scala:32:7 ---------------------------------------------------------- -32 | if file.isEmpty // error +-- [E006] Not Found Error: 
tests/neg/indent-colons.scala:33:7 ---------------------------------------------------------- +33 | if file.isEmpty // error | ^^^^ | Not found: file | | longer explanation available when compiling with `-explain` --- [E006] Not Found Error: tests/neg/indent-colons.scala:34:13 --------------------------------------------------------- -34 | else Seq(file) // error +-- [E006] Not Found Error: tests/neg/indent-colons.scala:35:13 --------------------------------------------------------- +35 | else Seq(file) // error | ^^^^ | Not found: file | | longer explanation available when compiling with `-explain` --- Error: tests/neg/indent-colons.scala:4:2 ---------------------------------------------------------------------------- -4 | tryEither: // error +-- Error: tests/neg/indent-colons.scala:5:2 ---------------------------------------------------------------------------- +5 | tryEither: // error | ^^^^^^^^^ | missing arguments for method tryEither --- Error: tests/neg/indent-colons.scala:10:2 --------------------------------------------------------------------------- -10 | tryEither: // error +-- Error: tests/neg/indent-colons.scala:11:2 --------------------------------------------------------------------------- +11 | tryEither: // error | ^^^^^^^^^ | missing arguments for method tryEither --- Error: tests/neg/indent-colons.scala:17:2 --------------------------------------------------------------------------- -17 | Some(3).fold: // error +-- Error: tests/neg/indent-colons.scala:18:2 --------------------------------------------------------------------------- +18 | Some(3).fold: // error | ^^^^^^^^^^^^ | missing arguments for method fold in class Option diff --git a/tests/neg/indent-colons.scala b/tests/neg/indent-colons.scala index 5364713dd4aa..240012f5489b 100644 --- a/tests/neg/indent-colons.scala +++ b/tests/neg/indent-colons.scala @@ -1,3 +1,4 @@ +import language.`3.2` def tryEither[T](x: T)(y: Int => T): T = ??? 
def test1 = diff --git a/tests/neg/indent-experimental.scala b/tests/neg/indent-experimental.scala index e945e172d1de..34ea5633010c 100644 --- a/tests/neg/indent-experimental.scala +++ b/tests/neg/indent-experimental.scala @@ -1,4 +1,4 @@ -import language.experimental.fewerBraces +import language.`3.3` val x = if true then: // error diff --git a/tests/neg/java-ann-extends-separate/Ann_1.java b/tests/neg/java-ann-extends-separate/Ann_1.java new file mode 100644 index 000000000000..97184df24c83 --- /dev/null +++ b/tests/neg/java-ann-extends-separate/Ann_1.java @@ -0,0 +1,3 @@ +public @interface Ann_1 { + int value(); +} diff --git a/tests/neg/java-ann-extends-separate/Test_2.scala b/tests/neg/java-ann-extends-separate/Test_2.scala new file mode 100644 index 000000000000..4e73b71679f6 --- /dev/null +++ b/tests/neg/java-ann-extends-separate/Test_2.scala @@ -0,0 +1,2 @@ +def test(x: Ann_1) = + val y: scala.annotation.Annotation = x // error diff --git a/tests/neg/java-ann-extends/Ann.java b/tests/neg/java-ann-extends/Ann.java new file mode 100644 index 000000000000..9ae845a8af63 --- /dev/null +++ b/tests/neg/java-ann-extends/Ann.java @@ -0,0 +1,3 @@ +public @interface Ann { + int value(); +} diff --git a/tests/neg/java-ann-extends/Test.scala b/tests/neg/java-ann-extends/Test.scala new file mode 100644 index 000000000000..629f1daa9acc --- /dev/null +++ b/tests/neg/java-ann-extends/Test.scala @@ -0,0 +1,2 @@ +def test(x: Ann) = + val y: scala.annotation.Annotation = x // error diff --git a/tests/neg/java-ann-super-class/Ann.java b/tests/neg/java-ann-super-class/Ann.java new file mode 100644 index 000000000000..9ae845a8af63 --- /dev/null +++ b/tests/neg/java-ann-super-class/Ann.java @@ -0,0 +1,3 @@ +public @interface Ann { + int value(); +} diff --git a/tests/neg/java-ann-super-class/Test.scala b/tests/neg/java-ann-super-class/Test.scala new file mode 100644 index 000000000000..cf2f72d2f633 --- /dev/null +++ b/tests/neg/java-ann-super-class/Test.scala @@ -0,0 +1,9 @@ 
+class Bar extends Ann(1) { // error + def value = 1 + def annotationType = classOf[Ann] +} + +def test = + // Typer errors + new Ann // error + new Ann(1) {} // error diff --git a/tests/neg/java-ann-super-class2/Ann.java b/tests/neg/java-ann-super-class2/Ann.java new file mode 100644 index 000000000000..9ae845a8af63 --- /dev/null +++ b/tests/neg/java-ann-super-class2/Ann.java @@ -0,0 +1,3 @@ +public @interface Ann { + int value(); +} diff --git a/tests/neg/java-ann-super-class2/Test.scala b/tests/neg/java-ann-super-class2/Test.scala new file mode 100644 index 000000000000..d5c22860899c --- /dev/null +++ b/tests/neg/java-ann-super-class2/Test.scala @@ -0,0 +1,3 @@ +def test = + // Posttyper errors + new Ann(1) // error diff --git a/tests/neg/java-ann-super-class3/Ann.java b/tests/neg/java-ann-super-class3/Ann.java new file mode 100644 index 000000000000..9ae845a8af63 --- /dev/null +++ b/tests/neg/java-ann-super-class3/Ann.java @@ -0,0 +1,3 @@ +public @interface Ann { + int value(); +} diff --git a/tests/neg/java-ann-super-class3/Test.scala b/tests/neg/java-ann-super-class3/Test.scala new file mode 100644 index 000000000000..8fd9791e6fe3 --- /dev/null +++ b/tests/neg/java-ann-super-class3/Test.scala @@ -0,0 +1,3 @@ +def test = + // Refchecks error + new Ann {} // error diff --git a/tests/neg/java-fake-ann-separate/FakeAnn_1.java b/tests/neg/java-fake-ann-separate/FakeAnn_1.java new file mode 100644 index 000000000000..597ea980585d --- /dev/null +++ b/tests/neg/java-fake-ann-separate/FakeAnn_1.java @@ -0,0 +1 @@ +interface FakeAnn_1 extends java.lang.annotation.Annotation { } diff --git a/tests/neg/java-fake-ann-separate/Test_2.scala b/tests/neg/java-fake-ann-separate/Test_2.scala new file mode 100644 index 000000000000..becc8babdaa0 --- /dev/null +++ b/tests/neg/java-fake-ann-separate/Test_2.scala @@ -0,0 +1,3 @@ +@FakeAnn_1 def test = // error + (1: @FakeAnn_1) // error + diff --git a/tests/neg/java-fake-ann/FakeAnn.java b/tests/neg/java-fake-ann/FakeAnn.java new 
file mode 100644 index 000000000000..2b055f782d42 --- /dev/null +++ b/tests/neg/java-fake-ann/FakeAnn.java @@ -0,0 +1 @@ +interface FakeAnn extends java.lang.annotation.Annotation { } diff --git a/tests/neg/java-fake-ann/Test.scala b/tests/neg/java-fake-ann/Test.scala new file mode 100644 index 000000000000..827527cb80bf --- /dev/null +++ b/tests/neg/java-fake-ann/Test.scala @@ -0,0 +1,2 @@ +@FakeAnn def test = // error + (1: @FakeAnn) // error diff --git a/tests/neg/language-imports.scala b/tests/neg/language-imports.scala new file mode 100644 index 000000000000..82c54cd4bf57 --- /dev/null +++ b/tests/neg/language-imports.scala @@ -0,0 +1,6 @@ +def test = + import language.experimental.captureChecking // error + import language.experimental.pureFunctions // error + 1 + + diff --git a/tests/neg/mirror-synthesis-errors-b.check b/tests/neg/mirror-synthesis-errors-b.check index ea41d14da296..bd846a59f295 100644 --- a/tests/neg/mirror-synthesis-errors-b.check +++ b/tests/neg/mirror-synthesis-errors-b.check @@ -9,13 +9,13 @@ -- Error: tests/neg/mirror-synthesis-errors-b.scala:23:49 -------------------------------------------------------------- 23 |val testC = summon[Mirror.Of[Cns[Int] & Sm[Int]]] // error: unreleated | ^ - |No given instance of type deriving.Mirror.Of[Cns[Int] & Sm[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Cns[Int] & Sm[Int]]: + |No given instance of type deriving.Mirror.Of[Cns[Int] & Sm[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Cns[Int] & Sm[Int]]: | * type `Cns[Int] & Sm[Int]` is not a generic product because its subpart `Cns[Int] & Sm[Int]` is an intersection of unrelated definitions class Cns and class Sm. | * type `Cns[Int] & Sm[Int]` is not a generic sum because its subpart `Cns[Int] & Sm[Int]` is an intersection of unrelated definitions class Cns and class Sm. 
-- Error: tests/neg/mirror-synthesis-errors-b.scala:24:49 -------------------------------------------------------------- 24 |val testD = summon[Mirror.Of[Sm[Int] & Cns[Int]]] // error: unreleated | ^ - |No given instance of type deriving.Mirror.Of[Sm[Int] & Cns[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Sm[Int] & Cns[Int]]: + |No given instance of type deriving.Mirror.Of[Sm[Int] & Cns[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Sm[Int] & Cns[Int]]: | * type `Sm[Int] & Cns[Int]` is not a generic product because its subpart `Sm[Int] & Cns[Int]` is an intersection of unrelated definitions class Sm and class Cns. | * type `Sm[Int] & Cns[Int]` is not a generic sum because its subpart `Sm[Int] & Cns[Int]` is an intersection of unrelated definitions class Sm and class Cns. -- Error: tests/neg/mirror-synthesis-errors-b.scala:25:55 -------------------------------------------------------------- @@ -29,12 +29,12 @@ -- Error: tests/neg/mirror-synthesis-errors-b.scala:27:54 -------------------------------------------------------------- 27 |val testG = summon[Mirror.Of[Foo.A.type & Foo.B.type]] // error: unreleated | ^ - |No given instance of type deriving.Mirror.Of[(Foo.A : Foo) & (Foo.B : Foo)] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[(Foo.A : Foo) & (Foo.B : Foo)]: + |No given instance of type deriving.Mirror.Of[(Foo.A : Foo) & (Foo.B : Foo)] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[(Foo.A : Foo) & (Foo.B : Foo)]: | * type `(Foo.A : Foo) & (Foo.B : Foo)` is not a generic product because its subpart `(Foo.A : Foo) & (Foo.B : Foo)` is an intersection of unrelated definitions value A and value B. 
| * type `(Foo.A : Foo) & (Foo.B : Foo)` is not a generic sum because its subpart `(Foo.A : Foo) & (Foo.B : Foo)` is an intersection of unrelated definitions value A and value B. -- Error: tests/neg/mirror-synthesis-errors-b.scala:28:54 -------------------------------------------------------------- 28 |val testH = summon[Mirror.Of[Foo.B.type & Foo.A.type]] // error: unreleated | ^ - |No given instance of type deriving.Mirror.Of[(Foo.B : Foo) & (Foo.A : Foo)] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[(Foo.B : Foo) & (Foo.A : Foo)]: + |No given instance of type deriving.Mirror.Of[(Foo.B : Foo) & (Foo.A : Foo)] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[(Foo.B : Foo) & (Foo.A : Foo)]: | * type `(Foo.B : Foo) & (Foo.A : Foo)` is not a generic product because its subpart `(Foo.B : Foo) & (Foo.A : Foo)` is an intersection of unrelated definitions value B and value A. | * type `(Foo.B : Foo) & (Foo.A : Foo)` is not a generic sum because its subpart `(Foo.B : Foo) & (Foo.A : Foo)` is an intersection of unrelated definitions value B and value A. diff --git a/tests/neg/mirror-synthesis-errors.check b/tests/neg/mirror-synthesis-errors.check index d108c99280ae..cde026e38910 100644 --- a/tests/neg/mirror-synthesis-errors.check +++ b/tests/neg/mirror-synthesis-errors.check @@ -1,42 +1,42 @@ -- Error: tests/neg/mirror-synthesis-errors.scala:21:32 ---------------------------------------------------------------- 21 |val testA = summon[Mirror.Of[A]] // error: Not a sealed trait | ^ - |No given instance of type deriving.Mirror.Of[A] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[A]: + |No given instance of type deriving.Mirror.Of[A] was found for parameter x of method summon in object Predef. 
Failed to synthesize an instance of type deriving.Mirror.Of[A]: | * trait A is not a generic product because it is not a case class | * trait A is not a generic sum because it is not a sealed trait -- Error: tests/neg/mirror-synthesis-errors.scala:22:32 ---------------------------------------------------------------- 22 |val testC = summon[Mirror.Of[C]] // error: Does not have subclasses | ^ - |No given instance of type deriving.Mirror.Of[C] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[C]: + |No given instance of type deriving.Mirror.Of[C] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[C]: | * trait C is not a generic product because it is not a case class | * trait C is not a generic sum because it does not have subclasses -- Error: tests/neg/mirror-synthesis-errors.scala:23:32 ---------------------------------------------------------------- 23 |val testD = summon[Mirror.Of[D]] // error: child SubD takes more than one parameter list | ^ - |No given instance of type deriving.Mirror.Of[D] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[D]: + |No given instance of type deriving.Mirror.Of[D] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[D]: | * class D is not a generic product because it is not a case class | * class D is not a generic sum because its child class SubD is not a generic product because it takes more than one parameter list -- Error: tests/neg/mirror-synthesis-errors.scala:24:38 ---------------------------------------------------------------- 24 |val testSubD = summon[Mirror.Of[SubD]] // error: takes more than one parameter list | ^ - |No given instance of type deriving.Mirror.Of[SubD] was found for parameter x of method summon in object Predef. 
Failed to synthesize an instance of type deriving.Mirror.Of[SubD]: + |No given instance of type deriving.Mirror.Of[SubD] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[SubD]: | * class SubD is not a generic product because it takes more than one parameter list | * class SubD is not a generic sum because it is not a sealed class -- Error: tests/neg/mirror-synthesis-errors.scala:25:32 ---------------------------------------------------------------- 25 |val testE = summon[Mirror.Of[E]] // error: Not an abstract class | ^ - |No given instance of type deriving.Mirror.Of[E] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[E]: + |No given instance of type deriving.Mirror.Of[E] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[E]: | * class E is not a generic product because it is not a case class | * class E is not a generic sum because it is not an abstract class -- Error: tests/neg/mirror-synthesis-errors.scala:26:32 ---------------------------------------------------------------- 26 |val testF = summon[Mirror.Of[F]] // error: No children | ^ - |No given instance of type deriving.Mirror.Of[F] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[F]: + |No given instance of type deriving.Mirror.Of[F] was found for parameter x of method summon in object Predef. 
Failed to synthesize an instance of type deriving.Mirror.Of[F]: | * trait F is not a generic product because it is not a case class | * trait F is not a generic sum because it does not have subclasses -- Error: tests/neg/mirror-synthesis-errors.scala:27:36 ---------------------------------------------------------------- 27 |val testG = summon[Mirror.Of[Foo.G]] // error: Has anonymous subclasses | ^ - |No given instance of type deriving.Mirror.Of[Foo.G] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Foo.G]: + |No given instance of type deriving.Mirror.Of[Foo.G] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Foo.G]: | * trait G is not a generic product because it is not a case class | * trait G is not a generic sum because it has anonymous or inaccessible subclasses diff --git a/tests/neg/repeatable/Test_1.scala b/tests/neg/repeatable/Test_1.scala index 3779b6ffa4a8..6466da95dfa8 100644 --- a/tests/neg/repeatable/Test_1.scala +++ b/tests/neg/repeatable/Test_1.scala @@ -6,11 +6,11 @@ import repeatable._ @FirstLevel_0(Array()) // error trait U -@FirstLevel_0(Array(Plain_0(4), Plain_0(5))) -@FirstLevel_0(Array(Plain_0(6), Plain_0(7))) +@FirstLevel_0(Array(new Plain_0(4), new Plain_0(5))) +@FirstLevel_0(Array(new Plain_0(6), new Plain_0(7))) @SecondLevel_0(Array()) // error trait T @SecondLevel_0(Array()) @SecondLevel_0(Array()) // error -trait S \ No newline at end of file +trait S diff --git a/tests/neg/supertraits.scala b/tests/neg/supertraits.scala index 2fc79ca30f1d..6952c7640529 100644 --- a/tests/neg/supertraits.scala +++ b/tests/neg/supertraits.scala @@ -6,19 +6,20 @@ class C extends A, S val x = if ??? then B() else C() val x1: S = x // error -case object a -case object b +class Top +case object a extends Top +case object b extends Top val y = if ??? 
then a else b val y1: Product = y // error val y2: Serializable = y // error -enum Color { +enum Color extends Top { case Red, Green, Blue } -enum Nucleobase { +enum Nucleobase extends Top { case A, C, G, T } val z = if ??? then Color.Red else Nucleobase.G -val z1: reflect.Enum = z // error: Found: (z : Object) Required: reflect.Enum +val z1: reflect.Enum = z // error: Found: (z : Top) Required: reflect.Enum diff --git a/tests/neg-with-compiler/t12290/SCALA_ONLY_Invalid1.java b/tests/neg/t12290/SCALA_ONLY_Invalid1.java similarity index 65% rename from tests/neg-with-compiler/t12290/SCALA_ONLY_Invalid1.java rename to tests/neg/t12290/SCALA_ONLY_Invalid1.java index fe206e0865ad..4b9ea5021bee 100644 --- a/tests/neg-with-compiler/t12290/SCALA_ONLY_Invalid1.java +++ b/tests/neg/t12290/SCALA_ONLY_Invalid1.java @@ -1,3 +1,4 @@ +// note that scala's java parser doesn't care about the platform version class SCALA_ONLY_Invalid1 { public static final String badOpeningDelimiter = """non-whitespace // error diff --git a/tests/neg-with-compiler/t12290/SCALA_ONLY_Invalid2.java b/tests/neg/t12290/SCALA_ONLY_Invalid2.java similarity index 100% rename from tests/neg-with-compiler/t12290/SCALA_ONLY_Invalid2.java rename to tests/neg/t12290/SCALA_ONLY_Invalid2.java diff --git a/tests/neg/t5702-neg-bad-and-wild.check b/tests/neg/t5702-neg-bad-and-wild.check index f6d761a6726f..731195411069 100644 --- a/tests/neg/t5702-neg-bad-and-wild.check +++ b/tests/neg/t5702-neg-bad-and-wild.check @@ -10,10 +10,10 @@ | pattern expected | | longer explanation available when compiling with `-explain` --- [E040] Syntax Error: tests/neg/t5702-neg-bad-and-wild.scala:13:23 --------------------------------------------------- +-- [E040] Syntax Error: tests/neg/t5702-neg-bad-and-wild.scala:13:22 --------------------------------------------------- 13 | case List(1, _*3:) => // error // error - | ^ - | an identifier expected, but ')' found + | ^ + | ')' expected, but ':' found -- [E032] Syntax Error: 
tests/neg/t5702-neg-bad-and-wild.scala:15:18 --------------------------------------------------- 15 | case List(x*, 1) => // error: pattern expected | ^ diff --git a/tests/neg/transparent.scala b/tests/neg/transparent.scala index b4d89478b0ac..95899bfa0b33 100644 --- a/tests/neg/transparent.scala +++ b/tests/neg/transparent.scala @@ -1,7 +1,8 @@ transparent def foo = 1 // error transparent inline def bar = 2 // ok transparent inline val x = 2 // error -transparent class c // error +transparent class c // ok +transparent final class d // error transparent object y // error transparent trait t // ok transparent type T = c // error diff --git a/tests/neg/union.scala b/tests/neg/union.scala index 0a702ab70058..c6fd42e6629e 100644 --- a/tests/neg/union.scala +++ b/tests/neg/union.scala @@ -11,8 +11,9 @@ object Test { } object O { - class A - class B + class Top + class A extends Top + class B extends Top def f[T](x: T, y: T): T = x val x: A = f(new A { }, new A) diff --git a/tests/new/test.scala b/tests/new/test.scala index ac8f67d1530f..8aa8f42ac787 100644 --- a/tests/new/test.scala +++ b/tests/new/test.scala @@ -1,3 +1,2 @@ object Test: - - def test = ??? 
+ def test = 0 diff --git a/tests/patmat/aliasing.check b/tests/patmat/aliasing.check index d7c21e8d0605..c367626d6f1e 100644 --- a/tests/patmat/aliasing.check +++ b/tests/patmat/aliasing.check @@ -1,3 +1,3 @@ 14: Pattern Match Exhaustivity: _: Trait & Test.Alias1, _: Clazz & Test.Alias1 19: Pattern Match Exhaustivity: _: Trait & Test.Alias2 -23: Pattern Match Exhaustivity: _: Trait & (Test.Alias2 & OpenTrait2){x: Int} +23: Pattern Match Exhaustivity: _: Trait & (Test.Alias2 & OpenTrait2){val x: Int} diff --git a/tests/patmat/andtype-refinedtype-interaction.check b/tests/patmat/andtype-refinedtype-interaction.check index 9f57c5ba4867..d9512b5cb3e4 100644 --- a/tests/patmat/andtype-refinedtype-interaction.check +++ b/tests/patmat/andtype-refinedtype-interaction.check @@ -1,9 +1,9 @@ -32: Pattern Match Exhaustivity: _: Trait & C1{x: Int} -48: Pattern Match Exhaustivity: _: Trait & (C1 | (C2 | T1)){x: Int} & (C3 | (C4 | T2)){x: Int}, _: Clazz & (C1 | (C2 | T1)){x: Int} & (C3 | (C4 | T2)){x: Int} -54: Pattern Match Exhaustivity: _: Trait & (C1 | (C2 | T1)){x: Int} & C3{x: Int} -59: Pattern Match Exhaustivity: _: Trait & (C1 & C2){x: Int} -65: Pattern Match Exhaustivity: _: Trait & (C1 | C2){x: Int} & (C3 | SubC1){x: Int} -72: Pattern Match Exhaustivity: _: Trait & (T1 & (C1 | SubC2)){x: Int} & (T2 & (C2 | C3 | SubC1)){x: Int} & - SubSubC1{x: Int} -79: Pattern Match Exhaustivity: _: Trait & (T1 & (C1 | SubC2)){x: Int} & (T2 & (C2 | C3 | SubC1)){x: Int} & - SubSubC2{x: Int} +32: Pattern Match Exhaustivity: _: Trait & C1{val x: Int} +48: Pattern Match Exhaustivity: _: Trait & (C1 | (C2 | T1)){val x: Int} & (C3 | (C4 | T2)){val x: Int}, _: Clazz & (C1 | (C2 | T1)){val x: Int} & (C3 | (C4 | T2)){val x: Int} +54: Pattern Match Exhaustivity: _: Trait & (C1 | (C2 | T1)){val x: Int} & C3{val x: Int} +59: Pattern Match Exhaustivity: _: Trait & (C1 & C2){val x: Int} +65: Pattern Match Exhaustivity: _: Trait & (C1 | C2){val x: Int} & (C3 | SubC1){val x: Int} +72: Pattern Match 
Exhaustivity: _: Trait & (T1 & (C1 | SubC2)){val x: Int} & (T2 & (C2 | C3 | SubC1)){val x: Int} & + SubSubC1{val x: Int} +79: Pattern Match Exhaustivity: _: Trait & (T1 & (C1 | SubC2)){val x: Int} & (T2 & (C2 | C3 | SubC1)){val x: Int} & + SubSubC2{val x: Int} diff --git a/tests/neg-custom-args/allow-deep-subtypes/i15365.scala b/tests/pending/pos/i15365.scala similarity index 100% rename from tests/neg-custom-args/allow-deep-subtypes/i15365.scala rename to tests/pending/pos/i15365.scala diff --git a/tests/pending/pos/i15915.scala b/tests/pending/pos/i15915.scala new file mode 100644 index 000000000000..7c484b242cc7 --- /dev/null +++ b/tests/pending/pos/i15915.scala @@ -0,0 +1,24 @@ +class _Monoid[A] +object _Monoid { + implicit val Monoid: _Monoid[Int] = new _Monoid[Int] +} + +class Lifecycle[A] +object Lifecycle { + + implicit def monoidForLifecycle[Monoid[_], A]( + implicit + monoidType: GetMonoidType[Monoid], + monoidA: Monoid[A] + ): Monoid[Lifecycle[A]] = new _Monoid().asInstanceOf[Monoid[Lifecycle[A]]] + +} + +sealed class GetMonoidType[C[_]] +object GetMonoidType { + implicit val getMonoid: GetMonoidType[_Monoid] = new GetMonoidType[_Monoid] +} + +object App extends App { + println(implicitly[_Monoid[Lifecycle[Int]]]) +} \ No newline at end of file diff --git a/tests/pending/pos/i15926.scala b/tests/pending/pos/i15926.scala new file mode 100644 index 000000000000..44faf17ffd54 --- /dev/null +++ b/tests/pending/pos/i15926.scala @@ -0,0 +1,31 @@ +//@main def main(): Unit = +// println(summon[Sum[Minus[Succ[Zero]], Minus[Succ[Zero]]] =:= Minus[Succ[Succ[Zero]]]]) + +sealed trait IntT +sealed trait NatT extends IntT +final case class Zero() extends NatT +final case class Succ[+N <: NatT](n: N) extends NatT +final case class Minus[+N <: Succ[NatT]](n: N) extends IntT + +type NatSum[X <: NatT, Y <: NatT] <: NatT = Y match + case Zero => X + case Succ[y] => NatSum[Succ[X], y] + +type NatDif[X <: NatT, Y <: NatT] <: IntT = Y match + case Zero => X + case Succ[y] => 
X match + case Zero => Minus[Y] + case Succ[x] => NatDif[x, y] + +type Sum[X <: IntT, Y <: IntT] <: IntT = Y match + case Zero => X + case Minus[y] => X match + case Minus[x] => Minus[NatSum[x, y]] + case _ => NatDif[X, y] + case _ => X match + case Minus[x] => NatDif[Y, x] + case _ => NatSum[X, Y] + +def test = + val x: Sum[Minus[Succ[Zero]], Minus[Succ[Zero]]] = ??? + val y = x diff --git a/tests/pending/pos/i16268.scala b/tests/pending/pos/i16268.scala new file mode 100644 index 000000000000..6b44e71a2247 --- /dev/null +++ b/tests/pending/pos/i16268.scala @@ -0,0 +1,25 @@ +import language.experimental.captureChecking +class Tree +case class Thicket(trees: List[Tree]) extends Tree + +def test1(segments: List[{*} Tree]) = + val elems = segments flatMap { (t: {*} Tree) => t match // error + case ts: Thicket => ts.trees.tail + case t => Nil + } + elems + +def test2(segments: List[{*} Tree]) = + val f = (t: {*} Tree) => t match + case ts: Thicket => ts.trees.tail + case t => Nil + val elems = segments.flatMap(f) // error + elems + +def test3(c: {*} Any)(segments: List[{c} Tree]) = + val elems = segments flatMap { (t: {c} Tree) => t match + case ts: Thicket => ts.trees.tail + case t => Nil + } + elems + diff --git a/tests/pending/pos/i16311.scala b/tests/pending/pos/i16311.scala new file mode 100644 index 000000000000..d3e110860545 --- /dev/null +++ b/tests/pending/pos/i16311.scala @@ -0,0 +1,17 @@ +trait Tagged[U] +type WithTag[+T, U] = T & Tagged[U] + +trait FromInput[Val] +implicit def coercedScalaInput[T]: FromInput[WithTag[T, Int]] = ??? +implicit def optionInput[T](implicit ev: FromInput[T]): FromInput[Option[T]] = ??? + +trait WithoutInputTypeTags[T] +implicit def coercedOptArgTpe[T]: WithoutInputTypeTags[Option[WithTag[T, Int]]] = ??? 
+ +trait InputType[+T] +class OptionInputType[T](ofType: InputType[T]) extends InputType[Option[T]] + +type Argument[T] +def argument[T](argumentType: InputType[T])(implicit fromInput: FromInput[T], res: WithoutInputTypeTags[T]): Argument[Option[T]] = ??? + +def test = argument(OptionInputType(??? : InputType[WithTag[Boolean, Int]])) diff --git a/tests/pending/run/i15893.scala b/tests/pending/run/i15893.scala new file mode 100644 index 000000000000..dedec2138f2a --- /dev/null +++ b/tests/pending/run/i15893.scala @@ -0,0 +1,65 @@ +sealed trait NatT +case class Zero() extends NatT +case class Succ[+N <: NatT](n: N) extends NatT + +type Mod2[N <: NatT] <: NatT = N match + case Zero => Zero + case Succ[Zero] => Succ[Zero] + case Succ[Succ[predPredN]] => Mod2[predPredN] + + +def mod2(n: NatT): NatT = n match + case Zero() => Zero() + case Succ(Zero()) => Succ(Zero()) + case Succ(Succ(predPredN)) => mod2(predPredN) + +/* +inline def inlineMod2(inline n: NatT): NatT = inline n match + case Zero() => Zero() + case Succ(Zero()) => Succ(Zero()) + case Succ(Succ(predPredN)) => inlineMod2(predPredN) + +transparent inline def transparentInlineMod2(inline n: NatT): NatT = inline n match + case Zero() => Zero() + case Succ(Zero()) => Succ(Zero()) + case Succ(Succ(predPredN)) => transparentInlineMod2(predPredN) +*/ +def dependentlyTypedMod2[N <: NatT](n: N): Mod2[N] = n match // exhaustivity warning; unexpected + case Zero(): Zero => Zero() + case Succ(Zero()): Succ[Zero] => Succ(Zero()) + case Succ(Succ(predPredN)): Succ[Succ[_]] => dependentlyTypedMod2(predPredN) +/* +inline def inlineDependentlyTypedMod2[N <: NatT](inline n: N): Mod2[N] = inline n match + case Zero(): Zero => Zero() + case Succ(Zero()): Succ[Zero] => Succ(Zero()) + case Succ(Succ(predPredN)): Succ[Succ[_]] => inlineDependentlyTypedMod2(predPredN) + +transparent inline def transparentInlineDependentlyTypedMod2[N <: NatT](inline n: N): Mod2[N] = inline n match + case Zero(): Zero => Zero() + case Succ(Zero()): 
Succ[Zero] => Succ(Zero()) + case Succ(Succ(predPredN)): Succ[Succ[_]] => transparentInlineDependentlyTypedMod2(predPredN) + +def foo(n: NatT): NatT = mod2(n) match + case Succ(Zero()) => Zero() + case _ => n + +inline def inlineFoo(inline n: NatT): NatT = inline inlineMod2(n) match + case Succ(Zero()) => Zero() + case _ => n + +inline def transparentInlineFoo(inline n: NatT): NatT = inline transparentInlineMod2(n) match + case Succ(Zero()) => Zero() + case _ => n +*/ +@main def main(): Unit = +/* + println(mod2(Succ(Succ(Succ(Zero()))))) // prints Succ(Zero()), as expected + println(foo(Succ(Succ(Succ(Zero()))))) // prints Zero(), as expected + println(inlineMod2(Succ(Succ(Succ(Zero()))))) // prints Succ(Zero()), as expected + println(inlineFoo(Succ(Succ(Succ(Zero()))))) // prints Succ(Succ(Succ(Zero()))); unexpected + println(transparentInlineMod2(Succ(Succ(Succ(Zero()))))) // prints Succ(Zero()), as expected + println(transparentInlineFoo(Succ(Succ(Succ(Zero()))))) // prints Zero(), as expected +*/ + println(dependentlyTypedMod2(Succ(Succ(Succ(Zero()))))) // runtime error; unexpected +// println(inlineDependentlyTypedMod2(Succ(Succ(Succ(Zero()))))) // doesn't compile; unexpected +// println(transparentInlineDependentlyTypedMod2(Succ(Succ(Succ(Zero()))))) // doesn't compile; unexpected diff --git a/tests/pos-custom-args/captures/boxmap.scala b/tests/pos-custom-args/captures/boxmap.scala index 5642763b5511..18baabd4e584 100644 --- a/tests/pos-custom-args/captures/boxmap.scala +++ b/tests/pos-custom-args/captures/boxmap.scala @@ -1,5 +1,5 @@ import annotation.retains -type Top = Any @retains(*) +type Top = Any @retains(caps.*) type Box[+T <: Top] = ([K <: Top] -> (T => K) -> K) @@ -9,7 +9,7 @@ def box[T <: Top](x: T): Box[T] = def map[A <: Top, B <: Top](b: Box[A])(f: A => B): Box[B] = b[Box[B]]((x: A) => box(f(x))) -def lazymap[A <: Top, B <: Top](b: Box[A])(f: A => B): (() -> Box[B]) = +def lazymap[A <: Top, B <: Top](b: Box[A])(f: A => B): {f} (() -> Box[B]) = 
() => b[Box[B]]((x: A) => box(f(x))) def test[A <: Top, B <: Top] = diff --git a/tests/pos-custom-args/captures/byname-varargs.scala b/tests/pos-custom-args/captures/byname-varargs.scala new file mode 100644 index 000000000000..219565a65d1c --- /dev/null +++ b/tests/pos-custom-args/captures/byname-varargs.scala @@ -0,0 +1,9 @@ +def typeMismatch(addenda: => String*) = ??? +class TypeMismatch(addenda: => String*) + +def test = + typeMismatch("foo") + typeMismatch("foo", "bar") + TypeMismatch("foo") + TypeMismatch("foo", "bar") + diff --git a/tests/pos-custom-args/captures/bynamefun.scala b/tests/pos-custom-args/captures/bynamefun.scala new file mode 100644 index 000000000000..86bad201ffc3 --- /dev/null +++ b/tests/pos-custom-args/captures/bynamefun.scala @@ -0,0 +1,11 @@ +object test: + class Plan(elem: Plan) + object SomePlan extends Plan(???) + def f1(expr: (-> Plan) -> Plan): Plan = expr(SomePlan) + f1 { onf => Plan(onf) } + def f2(expr: (=> Plan) -> Plan): Plan = ??? + f2 { onf => Plan(onf) } + def f3(expr: (-> Plan) => Plan): Plan = ??? + f1 { onf => Plan(onf) } + def f4(expr: (=> Plan) => Plan): Plan = ??? 
+ f2 { onf => Plan(onf) } diff --git a/tests/pos-custom-args/captures/caps-universal.scala b/tests/pos-custom-args/captures/caps-universal.scala new file mode 100644 index 000000000000..d84f2b7b2584 --- /dev/null +++ b/tests/pos-custom-args/captures/caps-universal.scala @@ -0,0 +1,7 @@ +import annotation.retains + +val foo: Int => Int = x => x +val bar: (Int -> Int) @retains(caps.*) = foo +val baz: {*} Int -> Int = bar + + diff --git a/tests/pos-custom-args/captures/capt-depfun.scala b/tests/pos-custom-args/captures/capt-depfun.scala index 861f4a0d1c14..0e9786b2ee34 100644 --- a/tests/pos-custom-args/captures/capt-depfun.scala +++ b/tests/pos-custom-args/captures/capt-depfun.scala @@ -1,6 +1,6 @@ import annotation.retains class C -type Cap = C @retains(*) +type Cap = C @retains(caps.*) type T = (x: Cap) -> String @retains(x) @@ -8,7 +8,7 @@ type ID[X] = X val aa: ((x: Cap) -> String @retains(x)) = (x: Cap) => "" -def f(y: Cap, z: Cap): String @retains(*) = +def f(y: Cap, z: Cap): String @retains(caps.*) = val a: ((x: Cap) -> String @retains(x)) = (x: Cap) => "" val b = a(y) val c: String @retains(y) = b diff --git a/tests/pos-custom-args/captures/capt-depfun2.scala b/tests/pos-custom-args/captures/capt-depfun2.scala index 837d143d5141..1c747d5885e6 100644 --- a/tests/pos-custom-args/captures/capt-depfun2.scala +++ b/tests/pos-custom-args/captures/capt-depfun2.scala @@ -1,6 +1,6 @@ import annotation.retains class C -type Cap = C @retains(*) +type Cap = C @retains(caps.*) def f(y: Cap, z: Cap) = def g(): C @retains(y, z) = ??? 
diff --git a/tests/pos-custom-args/captures/capt-test.scala b/tests/pos-custom-args/captures/capt-test.scala index 6ee0d2a4d9f4..c61577e96eb1 100644 --- a/tests/pos-custom-args/captures/capt-test.scala +++ b/tests/pos-custom-args/captures/capt-test.scala @@ -21,6 +21,9 @@ def map[A, B](f: A => B)(xs: LIST[A]): LIST[B] = class C type Cap = {*} C +class Foo(x: Cap): + this: {x} Foo => + def test(c: Cap, d: Cap) = def f(x: Cap): Unit = if c == x then () def g(x: Cap): Unit = if d == x then () diff --git a/tests/pos-custom-args/captures/capt0.scala b/tests/pos-custom-args/captures/capt0.scala index 2544e8abe5f1..52d6253af46b 100644 --- a/tests/pos-custom-args/captures/capt0.scala +++ b/tests/pos-custom-args/captures/capt0.scala @@ -3,5 +3,5 @@ object Test: def test() = val x: {*} Any = "abc" val y: Object @scala.annotation.retains(x) = ??? - val z: Object @scala.annotation.retains(x, *) = y: Object @annotation.retains(x) + val z: Object @scala.annotation.retains(x, caps.*) = y: Object @annotation.retains(x) diff --git a/tests/pos-custom-args/captures/capt2.scala b/tests/pos-custom-args/captures/capt2.scala index 204310d21ddf..77c0caaf0f1d 100644 --- a/tests/pos-custom-args/captures/capt2.scala +++ b/tests/pos-custom-args/captures/capt2.scala @@ -1,6 +1,6 @@ import annotation.retains class C -type Cap = C @retains(*) +type Cap = C @retains(caps.*) def test1() = val y: {*} String = "" diff --git a/tests/pos-custom-args/captures/cc-expand.scala b/tests/pos-custom-args/captures/cc-expand.scala index eba97f182385..87b2c34caf5f 100644 --- a/tests/pos-custom-args/captures/cc-expand.scala +++ b/tests/pos-custom-args/captures/cc-expand.scala @@ -5,7 +5,7 @@ object Test: class B class C class CTC - type CT = CTC @retains(*) + type CT = CTC @retains(caps.*) def test(ct: CT, dt: CT) = diff --git a/tests/pos-custom-args/captures/cmp-singleton-2.scala b/tests/pos-custom-args/captures/cmp-singleton-2.scala new file mode 100644 index 000000000000..daaa4add3858 --- /dev/null +++ 
b/tests/pos-custom-args/captures/cmp-singleton-2.scala @@ -0,0 +1,11 @@ +class T +class A extends T +class B extends T + +def test(tp: T) = + val mapping: Map[A, String] = ??? + + tp match + case a: A => mapping(a) match + case s: String => B() + case null => a diff --git a/tests/pos-custom-args/captures/cmp-singleton.scala b/tests/pos-custom-args/captures/cmp-singleton.scala new file mode 100644 index 000000000000..45b4009f5e89 --- /dev/null +++ b/tests/pos-custom-args/captures/cmp-singleton.scala @@ -0,0 +1,10 @@ +class Denotation +abstract class SingleDenotation extends Denotation +def goRefined: Denotation = + val foo: Denotation = ??? + val joint = foo + joint match + case joint: SingleDenotation => + joint + case _ => + joint \ No newline at end of file diff --git a/tests/pos-custom-args/captures/disambiguate-select.scala b/tests/pos-custom-args/captures/disambiguate-select.scala new file mode 100644 index 000000000000..b8da302acfd9 --- /dev/null +++ b/tests/pos-custom-args/captures/disambiguate-select.scala @@ -0,0 +1,8 @@ +import collection.mutable +class Suppression: + def matches(f: SourceFile): Boolean = ??? 
+class SourceFile +private val mySuppressions: mutable.LinkedHashMap[SourceFile, mutable.ListBuffer[Suppression]] = mutable.LinkedHashMap.empty + +def test(f: SourceFile) = + mySuppressions.getOrElse(f, Nil).find(_.matches(f)) \ No newline at end of file diff --git a/tests/pos-custom-args/captures/enum-extends.scala b/tests/pos-custom-args/captures/enum-extends.scala new file mode 100644 index 000000000000..0835956ae89a --- /dev/null +++ b/tests/pos-custom-args/captures/enum-extends.scala @@ -0,0 +1,4 @@ +enum E: + + case A extends E + diff --git a/tests/pos-custom-args/captures/foreach.scala b/tests/pos-custom-args/captures/foreach.scala new file mode 100644 index 000000000000..b7dfc49272a9 --- /dev/null +++ b/tests/pos-custom-args/captures/foreach.scala @@ -0,0 +1,4 @@ +import caps.unsafe.* +def test = + val tasks = new collection.mutable.ArrayBuffer[() => Unit] + val _: Unit = tasks.foreach(((task: () => Unit) => task()).unsafeBoxFunArg) diff --git a/tests/pos-custom-args/captures/gadt-ycheck.scala b/tests/pos-custom-args/captures/gadt-ycheck.scala new file mode 100644 index 000000000000..946763b53e7e --- /dev/null +++ b/tests/pos-custom-args/captures/gadt-ycheck.scala @@ -0,0 +1,14 @@ +package test + +import reflect.ClassTag +import language.experimental.pureFunctions + +object Settings: + val OptionTag: ClassTag[Option[?]] = ClassTag(classOf[Option[?]]) + + class Setting[T: ClassTag](propertyClass: Option[Class[?]]): + def tryToSet() = + def update(value: Any): String = ??? 
+ implicitly[ClassTag[T]] match + case OptionTag => + update(Some(propertyClass.get.getConstructor().newInstance())) diff --git a/tests/pos-custom-args/captures/i15922.scala b/tests/pos-custom-args/captures/i15922.scala new file mode 100644 index 000000000000..8547f7598eef --- /dev/null +++ b/tests/pos-custom-args/captures/i15922.scala @@ -0,0 +1,14 @@ +trait Cap { def use(): Int } +type Id[X] = [T] -> (op: X => T) -> T +def mkId[X](x: X): Id[X] = [T] => (op: X => T) => op(x) + +def withCap[X](op: ({*} Cap) => X): X = { + val cap: {*} Cap = new Cap { def use() = { println("cap is used"); 0 } } + val result = op(cap) + result +} + +def leaking(c: {*} Cap): Id[{c} Cap] = mkId(c) + +def test = + val bad = withCap(leaking) diff --git a/tests/pos-custom-args/captures/i16116.scala b/tests/pos-custom-args/captures/i16116.scala new file mode 100644 index 000000000000..2f5d5304dca5 --- /dev/null +++ b/tests/pos-custom-args/captures/i16116.scala @@ -0,0 +1,39 @@ +package x + +import scala.annotation.* +import scala.concurrent.* + +trait CpsMonad[F[_]] { + type Context +} + +object CpsMonad { + type Aux[F[_],C] = CpsMonad[F] { type Context = C } + given CpsMonad[Future] with {} +} + +@experimental +object Test { + + @capability + class CpsTransform[F[_]] { + def await[T](ft: F[T]): { this } T = ??? + } + + transparent inline def cpsAsync[F[_]](using m:CpsMonad[F]) = + new Test.InfernAsyncArg + + class InfernAsyncArg[F[_],C](using am:CpsMonad.Aux[F,C]) { + def apply[A](expr: (CpsTransform[F], C) ?=> A): F[A] = ??? 
+ } + + def asyncPlus[F[_]](a:Int, b:F[Int])(using cps: CpsTransform[F]): { cps } Int = + a + (cps.await(b).asInstanceOf[Int]) + + def testExample1Future(): Unit = + val fr = cpsAsync[Future] { + val y = asyncPlus(1,Future successful 2).asInstanceOf[Int] + y+1 + } + +} diff --git a/tests/pos-custom-args/captures/i16226.scala b/tests/pos-custom-args/captures/i16226.scala new file mode 100644 index 000000000000..8edf3f54d739 --- /dev/null +++ b/tests/pos-custom-args/captures/i16226.scala @@ -0,0 +1,14 @@ +@annotation.capability class Cap + +class LazyRef[T](val elem: () => T): + val get: {elem} () -> T = elem + def map[U](f: T => U): {f, this} LazyRef[U] = + new LazyRef(() => f(elem())) + +def map[A, B](ref: {*} LazyRef[A], f: A => B): {f, ref} LazyRef[B] = + new LazyRef(() => f(ref.elem())) + +def main(io: Cap) = { + def mapd[A, B]: ({io} LazyRef[A], A => B) => {*} LazyRef[B] = + (ref1, f1) => map[A, B](ref1, f1) +} diff --git a/tests/pos-custom-args/captures/i16226a.scala b/tests/pos-custom-args/captures/i16226a.scala new file mode 100644 index 000000000000..444d7f2ed0d7 --- /dev/null +++ b/tests/pos-custom-args/captures/i16226a.scala @@ -0,0 +1,13 @@ +class Name +class TermName extends Name +class TypeName extends Name + +trait ParamInfo: + type ThisName <: Name + def variance: Long +object ParamInfo: + type Of[N <: Name] = ParamInfo { type ThisName = N } + +def test(tparams1: List[ParamInfo{ type ThisName = TypeName }], tparams2: List[ParamInfo.Of[TypeName]]) = + tparams1.lazyZip(tparams2).map((p1, p2) => p1.variance + p2.variance) + diff --git a/tests/pos-custom-args/captures/matchtypes.scala b/tests/pos-custom-args/captures/matchtypes.scala new file mode 100644 index 000000000000..b2442277f1f7 --- /dev/null +++ b/tests/pos-custom-args/captures/matchtypes.scala @@ -0,0 +1,10 @@ +type HEAD[X <: NonEmptyTuple] = X match { + case x *: (_ <: NonEmptyTuple) => x +} + +inline def head[A <: NonEmptyTuple](x: A): HEAD[A] = null.asInstanceOf[HEAD[A]] + +def show[A, T <: 
Tuple](x: A *: T) = + show1(head(x)) + show1(x.head) +def show1[A](x: A): String = ??? \ No newline at end of file diff --git a/tests/pos-custom-args/captures/overrides.scala b/tests/pos-custom-args/captures/overrides.scala new file mode 100644 index 000000000000..7e70afe7a327 --- /dev/null +++ b/tests/pos-custom-args/captures/overrides.scala @@ -0,0 +1,14 @@ +import caps.* + +abstract class Foo: + def foo: () => Unit = () => () + def bar: String = "" + +class Bar extends Foo: + override def foo = () => println("bar") + override def bar = "bar" + override def toString = bar + +class Baz extends Bar: + override def foo = () => println("baz") + override def bar = "baz" diff --git a/tests/pos-custom-args/captures/overrides/A.scala b/tests/pos-custom-args/captures/overrides/A.scala new file mode 100644 index 000000000000..6a81f8562164 --- /dev/null +++ b/tests/pos-custom-args/captures/overrides/A.scala @@ -0,0 +1,4 @@ +abstract class Message: + lazy val message: String = ??? + def rawMessage = message + diff --git a/tests/pos-custom-args/captures/overrides/B.scala b/tests/pos-custom-args/captures/overrides/B.scala new file mode 100644 index 000000000000..ce4a3f20f1d2 --- /dev/null +++ b/tests/pos-custom-args/captures/overrides/B.scala @@ -0,0 +1,6 @@ + +abstract class SyntaxMsg extends Message + +class CyclicInheritance extends SyntaxMsg + + diff --git a/tests/pos-custom-args/captures/selftypes.scala b/tests/pos-custom-args/captures/selftypes.scala new file mode 100644 index 000000000000..c1b8eefce506 --- /dev/null +++ b/tests/pos-custom-args/captures/selftypes.scala @@ -0,0 +1,15 @@ + import annotation.constructorOnly + trait A: + self: A => + def foo: Int + + abstract class B extends A: + def foo: Int + + class C extends B: + def foo = 1 + def derived = this + + class D(@constructorOnly op: Int => Int) extends C: + val x = 1//op(1) + diff --git a/tests/pos-custom-args/captures/try.scala b/tests/pos-custom-args/captures/try.scala index dbc952cad3c0..b2dcf6f11dd0 
100644 --- a/tests/pos-custom-args/captures/try.scala +++ b/tests/pos-custom-args/captures/try.scala @@ -2,7 +2,7 @@ import annotation.retains import language.experimental.erasedDefinitions class CT[E <: Exception] -type CanThrow[E <: Exception] = CT[E] @retains(*) +type CanThrow[E <: Exception] = CT[E] @retains(caps.*) infix type throws[R, E <: Exception] = (erased CanThrow[E]) ?-> R diff --git a/tests/pos-custom-args/captures/unsafe-unbox.scala b/tests/pos-custom-args/captures/unsafe-unbox.scala new file mode 100644 index 000000000000..b228d8c07925 --- /dev/null +++ b/tests/pos-custom-args/captures/unsafe-unbox.scala @@ -0,0 +1,4 @@ +import caps.unsafe.* +def test = + var finalizeActions = collection.mutable.ListBuffer[() => Unit]() + val action = finalizeActions.remove(0).unsafeUnbox diff --git a/tests/pos-custom-args/captures/vars1.scala b/tests/pos-custom-args/captures/vars1.scala new file mode 100644 index 000000000000..c008bac2e72f --- /dev/null +++ b/tests/pos-custom-args/captures/vars1.scala @@ -0,0 +1,21 @@ +import caps.unsafe.* + +object Test: + type ErrorHandler = (Int, String) => Unit + + var defaultIncompleteHandler: ErrorHandler = ??? + var incompleteHandler: ErrorHandler = defaultIncompleteHandler + val x = incompleteHandler.unsafeUnbox + val _ : ErrorHandler = x + val _ = x(1, "a") + + def defaultIncompleteHandler1(): ErrorHandler = ??? + val defaultIncompleteHandler2: ErrorHandler = ??? 
+ var incompleteHandler1: ErrorHandler = defaultIncompleteHandler1().unsafeBox + var incompleteHandler2: ErrorHandler = defaultIncompleteHandler2.unsafeBox + private var incompleteHandler7 = defaultIncompleteHandler1().unsafeBox + private var incompleteHandler8 = defaultIncompleteHandler2.unsafeBox + + incompleteHandler1 = defaultIncompleteHandler2.unsafeBox + incompleteHandler1 = defaultIncompleteHandler2.unsafeBox + val saved = incompleteHandler1.unsafeUnbox diff --git a/tests/pos-custom-args/no-experimental/dotty-experimental.scala b/tests/pos-custom-args/no-experimental/dotty-experimental.scala new file mode 100644 index 000000000000..320c68dbea50 --- /dev/null +++ b/tests/pos-custom-args/no-experimental/dotty-experimental.scala @@ -0,0 +1,6 @@ +package dotty.tools +object test { + + val x = caps.unsafe.unsafeBox + +} diff --git a/tests/pos-custom-args/no-experimental/experimental-imports-empty.scala b/tests/pos-custom-args/no-experimental/experimental-imports-empty.scala index bb27629a6062..998086c5d9a4 100644 --- a/tests/pos-custom-args/no-experimental/experimental-imports-empty.scala +++ b/tests/pos-custom-args/no-experimental/experimental-imports-empty.scala @@ -1,5 +1,4 @@ import annotation.experimental -import language.experimental.fewerBraces import language.experimental.namedTypeArguments import language.experimental.genericNumberLiterals import language.experimental.erasedDefinitions diff --git a/tests/pos-custom-args/no-experimental/i16091.scala b/tests/pos-custom-args/no-experimental/i16091.scala new file mode 100644 index 000000000000..0324aeacccc9 --- /dev/null +++ b/tests/pos-custom-args/no-experimental/i16091.scala @@ -0,0 +1,9 @@ +import scala.annotation.experimental + +object Macro { + @experimental + inline def foo() = fooImpl + + @experimental + private def fooImpl = () +} \ No newline at end of file diff --git a/tests/pos-macros/i11211.scala b/tests/pos-macros/i11211.scala index 2650fa754193..154d8df174e7 100644 --- 
a/tests/pos-macros/i11211.scala +++ b/tests/pos-macros/i11211.scala @@ -12,7 +12,7 @@ def takeOptionImpl2[T](using Quotes, Type[T]): Unit = '{ def takeOptionImpl[T](o: Expr[Option[T]], default: Expr[T])(using Quotes, Type[T]): Expr[T] = '{ $o match { case Some(t1) => t1 - case None: Option[T] => $default + case None => $default } } diff --git a/tests/pos-special/fatal-warnings/i10994.scala b/tests/pos-special/fatal-warnings/i10994.scala deleted file mode 100644 index 99ae647466b1..000000000000 --- a/tests/pos-special/fatal-warnings/i10994.scala +++ /dev/null @@ -1,2 +0,0 @@ -def foo = true match - case (b: Boolean): Boolean => () diff --git a/tests/pos-with-compiler-cc/dotc/Bench.scala b/tests/pos-with-compiler-cc/dotc/Bench.scala new file mode 100644 index 000000000000..c9c032b0ae7d --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/Bench.scala @@ -0,0 +1,64 @@ +package dotty.tools +package dotc + +import core.Contexts._ +import reporting.Reporter +import io.AbstractFile + +import scala.annotation.internal.sharable + +/** A main class for running compiler benchmarks. Can instantiate a given + * number of compilers and run each (sequentially) a given number of times + * on the same sources. 
+ */ +object Bench extends Driver: + + @sharable private var numRuns = 1 + + private def ntimes(n: Int)(op: => Reporter): Reporter = + (0 until n).foldLeft(emptyReporter)((_, _) => op) + + @sharable private var times: Array[Int] = _ + + override def doCompile(compiler: Compiler, files: List[AbstractFile])(using Context): Reporter = + times = new Array[Int](numRuns) + var reporter: Reporter = emptyReporter + for i <- 0 until numRuns do + val start = System.nanoTime() + reporter = super.doCompile(compiler, files) + times(i) = ((System.nanoTime - start) / 1000000).toInt + println(s"time elapsed: ${times(i)}ms") + if ctx.settings.Xprompt.value then + print("hit to continue >") + System.in.nn.read() + println() + reporter + + def extractNumArg(args: Array[String], name: String, default: Int = 1): (Int, Array[String]) = { + val pos = args indexOf name + if (pos < 0) (default, args) + else (args(pos + 1).toInt, (args take pos) ++ (args drop (pos + 2))) + } + + def reportTimes() = + val best = times.sorted + val measured = numRuns / 3 + val avgBest = best.take(measured).sum / measured + val avgLast = times.reverse.take(measured).sum / measured + println(s"best out of $numRuns runs: ${best(0)}") + println(s"average out of best $measured: $avgBest") + println(s"average out of last $measured: $avgLast") + + override def process(args: Array[String], rootCtx: Context): Reporter = + val (numCompilers, args1) = extractNumArg(args, "#compilers") + val (numRuns, args2) = extractNumArg(args1, "#runs") + this.numRuns = numRuns + var reporter: Reporter = emptyReporter + for i <- 0 until numCompilers do + reporter = super.process(args2, rootCtx) + reportTimes() + reporter + +end Bench + + diff --git a/tests/pos-with-compiler-cc/dotc/CompilationUnit.scala b/tests/pos-with-compiler-cc/dotc/CompilationUnit.scala new file mode 100644 index 000000000000..44ca582c3c61 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/CompilationUnit.scala @@ -0,0 +1,166 @@ +package dotty.tools +package 
dotc + +import core._ +import Contexts._ +import SymDenotations.ClassDenotation +import Symbols._ +import util.{FreshNameCreator, SourceFile, NoSource} +import util.Spans.Span +import ast.{tpd, untpd} +import tpd.{Tree, TreeTraverser} +import ast.Trees.{Import, Ident} +import typer.Nullables +import transform.SymUtils._ +import core.Decorators._ +import config.{SourceVersion, Feature} +import StdNames.nme +import scala.annotation.internal.sharable + +class CompilationUnit protected (val source: SourceFile) { + + override def toString: String = source.toString + + var untpdTree: untpd.Tree = untpd.EmptyTree + + var tpdTree: tpd.Tree = tpd.EmptyTree + + /** Is this the compilation unit of a Java file */ + def isJava: Boolean = source.file.name.endsWith(".java") + + /** The source version for this unit, as determined by a language import */ + var sourceVersion: Option[SourceVersion] = None + + /** Pickled TASTY binaries, indexed by class. */ + var pickled: Map[ClassSymbol, () => Array[Byte]] = Map() + + /** The fresh name creator for the current unit. + * FIXME(#7661): This is not fine-grained enough to enable reproducible builds, + * see https://github.com/scala/scala/commit/f50ec3c866263448d803139e119b33afb04ec2bc + */ + val freshNames: FreshNameCreator = new FreshNameCreator.Default + + /** Will be set to `true` if there are inline call that must be inlined after typer. + * The information is used in phase `Inlining` in order to avoid traversing trees that need no transformations. + */ + var needsInlining: Boolean = false + + /** Set to `true` if inliner added anonymous mirrors that need to be completed */ + var needsMirrorSupport: Boolean = false + + /** Will be set to `true` if contains `Quote`. + * The information is used in phase `Staging`/`Splicing`/`PickleQuotes` in order to avoid traversing trees that need no transformations. 
+ */ + var needsStaging: Boolean = false + + /** Will be set to true if the unit contains a captureChecking language import */ + var needsCaptureChecking: Boolean = false + + /** Will be set to true if the unit contains a pureFunctions language import */ + var knowsPureFuns: Boolean = false + + var suspended: Boolean = false + var suspendedAtInliningPhase: Boolean = false + + /** Can this compilation unit be suspended */ + def isSuspendable: Boolean = true + + /** Suspends the compilation unit by thowing a SuspendException + * and recording the suspended compilation unit + */ + def suspend()(using Context): Nothing = + assert(isSuspendable) + if !suspended then + if (ctx.settings.XprintSuspension.value) + report.echo(i"suspended: $this") + suspended = true + ctx.run.nn.suspendedUnits += this + if ctx.phase == Phases.inliningPhase then + suspendedAtInliningPhase = true + throw CompilationUnit.SuspendException() + + private var myAssignmentSpans: Map[Int, List[Span]] | Null = null + + /** A map from (name-) offsets of all local variables in this compilation unit + * that can be tracked for being not null to the list of spans of assignments + * to these variables. 
+ */ + def assignmentSpans(using Context): Map[Int, List[Span]] = + if myAssignmentSpans == null then myAssignmentSpans = Nullables.assignmentSpans + myAssignmentSpans.nn +} + +@sharable object NoCompilationUnit extends CompilationUnit(NoSource) { + + override def isJava: Boolean = false + + override def suspend()(using Context): Nothing = + throw CompilationUnit.SuspendException() + + override def assignmentSpans(using Context): Map[Int, List[Span]] = Map.empty +} + +object CompilationUnit { + + class SuspendException extends Exception + + /** Make a compilation unit for top class `clsd` with the contents of the `unpickled` tree */ + def apply(clsd: ClassDenotation, unpickled: Tree, forceTrees: Boolean)(using Context): CompilationUnit = + val file = clsd.symbol.associatedFile.nn + apply(SourceFile(file, Array.empty[Char]), unpickled, forceTrees) + + /** Make a compilation unit, given picked bytes and unpickled tree */ + def apply(source: SourceFile, unpickled: Tree, forceTrees: Boolean)(using Context): CompilationUnit = { + assert(!unpickled.isEmpty, unpickled) + val unit1 = new CompilationUnit(source) + unit1.tpdTree = unpickled + if (forceTrees) { + val force = new Force + force.traverse(unit1.tpdTree) + unit1.needsStaging = force.containsQuote + unit1.needsInlining = force.containsInline + } + unit1 + } + + /** Create a compilation unit corresponding to `source`. + * If `mustExist` is true, this will fail if `source` does not exist. 
+ */ + def apply(source: SourceFile, mustExist: Boolean = true)(using Context): CompilationUnit = { + val src = + if (!mustExist) + source + else if (source.file.isDirectory) { + report.error(s"expected file, received directory '${source.file.path}'") + NoSource + } + else if (!source.file.exists) { + report.error(s"source file not found: ${source.file.path}") + NoSource + } + else source + new CompilationUnit(src) + } + + /** Force the tree to be loaded */ + private class Force extends TreeTraverser { + var containsQuote = false + var containsInline = false + var containsCaptureChecking = false + def traverse(tree: Tree)(using Context): Unit = { + if (tree.symbol.isQuote) + containsQuote = true + if tree.symbol.is(Flags.Inline) then + containsInline = true + tree match + case Import(qual, selectors) => + tpd.languageImport(qual) match + case Some(prefix) => + for case untpd.ImportSelector(untpd.Ident(imported), untpd.EmptyTree, _) <- selectors do + Feature.handleGlobalLanguageImport(prefix, imported) + case _ => + case _ => + traverseChildren(tree) + } + } +} diff --git a/tests/pos-with-compiler-cc/dotc/Compiler.scala b/tests/pos-with-compiler-cc/dotc/Compiler.scala new file mode 100644 index 000000000000..b121a47781e1 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/Compiler.scala @@ -0,0 +1,171 @@ +package dotty.tools +package dotc + +import core._ +import Contexts._ +import typer.{TyperPhase, RefChecks} +import cc.CheckCaptures +import parsing.Parser +import Phases.Phase +import transform._ +import dotty.tools.backend +import backend.jvm.{CollectSuperCalls, GenBCode} +import localopt.StringInterpolatorOpt + +/** The central class of the dotc compiler. The job of a compiler is to create + * runs, which process given `phases` in a given `rootContext`. + */ +class Compiler { + + /** Meta-ordering constraint: + * + * DenotTransformers that change the signature of their denotation's info must go + * after erasure. 
The reason is that denotations are permanently referred to by + * TermRefs which contain a signature. If the signature of a symbol would change, + * all refs to it would become outdated - they could not be dereferenced in the + * new phase. + * + * After erasure, signature changing denot-transformers are OK because signatures + * are never recomputed later than erasure. + */ + def phases: List[List[Phase]] = + frontendPhases ::: picklerPhases ::: transformPhases ::: backendPhases + + /** Phases dealing with the frontend up to trees ready for TASTY pickling */ + protected def frontendPhases: List[List[Phase]] = + List(new Parser) :: // Compiler frontend: scanner, parser + List(new TyperPhase) :: // Compiler frontend: namer, typer + List(new YCheckPositions) :: // YCheck positions + List(new sbt.ExtractDependencies) :: // Sends information on classes' dependencies to sbt via callbacks + List(new semanticdb.ExtractSemanticDB) :: // Extract info into .semanticdb files + List(new PostTyper) :: // Additional checks and cleanups after type checking + List(new sjs.PrepJSInterop) :: // Additional checks and transformations for Scala.js (Scala.js only) + List(new sbt.ExtractAPI) :: // Sends a representation of the API of classes to sbt via callbacks + List(new SetRootTree) :: // Set the `rootTreeOrProvider` on class symbols + Nil + + /** Phases dealing with TASTY tree pickling and unpickling */ + protected def picklerPhases: List[List[Phase]] = + List(new Pickler) :: // Generate TASTY info + List(new Inlining) :: // Inline and execute macros + List(new PostInlining) :: // Add mirror support for inlined code + List(new Staging) :: // Check staging levels and heal staged types + List(new Splicing) :: // Replace level 1 splices with holes + List(new PickleQuotes) :: // Turn quoted trees into explicit run-time data structures + Nil + + /** Phases dealing with the transformation from pickled trees to backend trees */ + protected def transformPhases: List[List[Phase]] = + List(new 
InstrumentCoverage) :: // Perform instrumentation for code coverage (if -coverage-out is set) + List(new FirstTransform, // Some transformations to put trees into a canonical form + new CheckReentrant, // Internal use only: Check that compiled program has no data races involving global vars + new ElimPackagePrefixes, // Eliminate references to package prefixes in Select nodes + new CookComments, // Cook the comments: expand variables, doc, etc. + new CheckStatic, // Check restrictions that apply to @static members + new CheckLoopingImplicits, // Check that implicit defs do not call themselves in an infinite loop + new BetaReduce, // Reduce closure applications + new InlineVals, // Check right hand-sides of an `inline val`s + new ExpandSAMs, // Expand single abstract method closures to anonymous classes + new ElimRepeated, // Rewrite vararg parameters and arguments + new RefChecks) :: // Various checks mostly related to abstract members and overriding + List(new init.Checker) :: // Check initialization of objects + List(new CrossVersionChecks, // Check issues related to deprecated and experimental + new ProtectedAccessors, // Add accessors for protected members + new ExtensionMethods, // Expand methods of value classes with extension methods + new UncacheGivenAliases, // Avoid caching RHS of simple parameterless given aliases + new ElimByName, // Map by-name parameters to functions + new HoistSuperArgs, // Hoist complex arguments of supercalls to enclosing scope + new ForwardDepChecks, // Check that there are no forward references to local vals + new SpecializeApplyMethods, // Adds specialized methods to FunctionN + new TryCatchPatterns, // Compile cases in try/catch + new PatternMatcher) :: // Compile pattern matches + List(new TestRecheck.Pre) :: // Test only: run rechecker, enabled under -Yrecheck-test + List(new TestRecheck) :: // Test only: run rechecker, enabled under -Yrecheck-test + List(new CheckCaptures.Pre) :: // Preparations for check captures phase, 
enabled under captureChecking + List(new CheckCaptures) :: // Check captures, enabled under captureChecking + List(new ElimOpaque, // Turn opaque into normal aliases + new sjs.ExplicitJSClasses, // Make all JS classes explicit (Scala.js only) + new ExplicitOuter, // Add accessors to outer classes from nested ones. + new ExplicitSelf, // Make references to non-trivial self types explicit as casts + new StringInterpolatorOpt) :: // Optimizes raw and s and f string interpolators by rewriting them to string concatenations or formats + List(new PruneErasedDefs, // Drop erased definitions from scopes and simplify erased expressions + new UninitializedDefs, // Replaces `compiletime.uninitialized` by `_` + new InlinePatterns, // Remove placeholders of inlined patterns + new VCInlineMethods, // Inlines calls to value class methods + new SeqLiterals, // Express vararg arguments as arrays + new InterceptedMethods, // Special handling of `==`, `|=`, `getClass` methods + new Getters, // Replace non-private vals and vars with getter defs (fields are added later) + new SpecializeFunctions, // Specialized Function{0,1,2} by replacing super with specialized super + new SpecializeTuples, // Specializes Tuples by replacing tuple construction and selection trees + new LiftTry, // Put try expressions that might execute on non-empty stacks into their own methods + new CollectNullableFields, // Collect fields that can be nulled out after use in lazy initialization + new ElimOuterSelect, // Expand outer selections + new ResolveSuper, // Implement super accessors + new FunctionXXLForwarders, // Add forwarders for FunctionXXL apply method + new ParamForwarding, // Add forwarders for aliases of superclass parameters + new TupleOptimizations, // Optimize generic operations on tuples + new LetOverApply, // Lift blocks from receivers of applications + new ArrayConstructors) :: // Intercept creation of (non-generic) arrays and intrinsify. 
+ List(new Erasure) :: // Rewrite types to JVM model, erasing all type parameters, abstract types and refinements. + List(new ElimErasedValueType, // Expand erased value types to their underlying implmementation types + new PureStats, // Remove pure stats from blocks + new VCElideAllocations, // Peep-hole optimization to eliminate unnecessary value class allocations + new EtaReduce, // Reduce eta expansions of pure paths to the underlying function reference + new ArrayApply, // Optimize `scala.Array.apply([....])` and `scala.Array.apply(..., [....])` into `[...]` + new sjs.AddLocalJSFakeNews, // Adds fake new invocations to local JS classes in calls to `createLocalJSClass` + new ElimPolyFunction, // Rewrite PolyFunction subclasses to FunctionN subclasses + new TailRec, // Rewrite tail recursion to loops + new CompleteJavaEnums, // Fill in constructors for Java enums + new Mixin, // Expand trait fields and trait initializers + new LazyVals, // Expand lazy vals + new Memoize, // Add private fields to getters and setters + new NonLocalReturns, // Expand non-local returns + new CapturedVars) :: // Represent vars captured by closures as heap objects + List(new Constructors, // Collect initialization code in primary constructors + // Note: constructors changes decls in transformTemplate, no InfoTransformers should be added after it + new Instrumentation) :: // Count calls and allocations under -Yinstrument + List(new LambdaLift, // Lifts out nested functions to class scope, storing free variables in environments + // Note: in this mini-phase block scopes are incorrect. 
No phases that rely on scopes should be here + new ElimStaticThis, // Replace `this` references to static objects by global identifiers + new CountOuterAccesses) :: // Identify outer accessors that can be dropped + List(new DropOuterAccessors, // Drop unused outer accessors + new CheckNoSuperThis, // Check that supercalls don't contain references to `this` + new Flatten, // Lift all inner classes to package scope + new TransformWildcards, // Replace wildcards with default values + new MoveStatics, // Move static methods from companion to the class itself + new ExpandPrivate, // Widen private definitions accessed from nested classes + new RestoreScopes, // Repair scopes rendered invalid by moving definitions in prior phases of the group + new SelectStatic, // get rid of selects that would be compiled into GetStatic + new sjs.JUnitBootstrappers, // Generate JUnit-specific bootstrapper classes for Scala.js (not enabled by default) + new CollectEntryPoints, // Collect all entry points and save them in the context + new CollectSuperCalls, // Find classes that are called with super + new RepeatableAnnotations) :: // Aggregate repeatable annotations + Nil + + /** Generate the output of the compilation */ + protected def backendPhases: List[List[Phase]] = + List(new backend.sjs.GenSJSIR) :: // Generate .sjsir files for Scala.js (not enabled by default) + List(new GenBCode) :: // Generate JVM bytecode + Nil + + var runId: Int = 1 + def nextRunId: Int = { + runId += 1; runId + } + + def reset()(using Context): Unit = { + ctx.base.reset() + val run = ctx.run + if (run != null) run.reset() + } + + def newRun(using Context): Run = { + reset() + val rctx = + if ctx.settings.Xsemanticdb.value then + ctx.addMode(Mode.ReadPositions) + else + ctx + new Run(this, rctx) + } +} diff --git a/tests/pos-with-compiler-cc/dotc/Driver.scala b/tests/pos-with-compiler-cc/dotc/Driver.scala new file mode 100644 index 000000000000..14a71463c66d --- /dev/null +++ 
b/tests/pos-with-compiler-cc/dotc/Driver.scala @@ -0,0 +1,207 @@ +package dotty.tools.dotc + +import dotty.tools.FatalError +import config.CompilerCommand +import core.Comments.{ContextDoc, ContextDocstrings} +import core.Contexts._ +import core.{MacroClassLoader, TypeError} +import dotty.tools.dotc.ast.Positioned +import dotty.tools.io.AbstractFile +import reporting._ +import core.Decorators._ +import config.Feature + +import scala.util.control.NonFatal +import fromtasty.{TASTYCompiler, TastyFileUtil} + +/** Run the Dotty compiler. + * + * Extending this class lets you customize many aspect of the compilation + * process, but in most cases you only need to call [[process]] on the + * existing object [[Main]]. + */ +class Driver { + + protected def newCompiler(using Context): Compiler = + if (ctx.settings.fromTasty.value) new TASTYCompiler + else new Compiler + + protected def emptyReporter: Reporter = new StoreReporter(null) + + protected def doCompile(compiler: Compiler, files: List[AbstractFile])(using Context): Reporter = + if files.nonEmpty then + try + val run = compiler.newRun + run.compile(files) + finish(compiler, run) + catch + case ex: FatalError => + report.error(ex.getMessage.nn) // signals that we should fail compilation. 
+ case ex: TypeError => + println(s"${ex.toMessage} while compiling ${files.map(_.path).mkString(", ")}") + throw ex + case ex: Throwable => + println(s"$ex while compiling ${files.map(_.path).mkString(", ")}") + throw ex + ctx.reporter + + protected def finish(compiler: Compiler, run: Run)(using Context): Unit = + run.printSummary() + if !ctx.reporter.errorsReported && run.suspendedUnits.nonEmpty then + val suspendedUnits = run.suspendedUnits.toList + if (ctx.settings.XprintSuspension.value) + report.echo(i"compiling suspended $suspendedUnits%, %") + val run1 = compiler.newRun + for unit <- suspendedUnits do unit.suspended = false + run1.compileUnits(suspendedUnits) + finish(compiler, run1)(using MacroClassLoader.init(ctx.fresh)) + + protected def initCtx: Context = (new ContextBase).initialCtx + + protected def sourcesRequired: Boolean = true + + protected def command: CompilerCommand = ScalacCommand + + /** Setup context with initialized settings from CLI arguments, then check if there are any settings that + * would change the default behaviour of the compiler. + * + * @return If there is no setting like `-help` preventing us from continuing compilation, + * this method returns a list of files to compile and an updated Context. + * If compilation should be interrupted, this method returns None. 
+ */ + def setup(args: Array[String], rootCtx: Context): Option[(List[AbstractFile], Context)] = { + val ictx = rootCtx.fresh + val summary = command.distill(args, ictx.settings)(ictx.settingsState)(using ictx) + ictx.setSettings(summary.sstate) + Feature.checkExperimentalSettings(using ictx) + MacroClassLoader.init(ictx) + Positioned.init(using ictx) + + inContext(ictx) { + if !ctx.settings.YdropComments.value || ctx.settings.YreadComments.value then + ictx.setProperty(ContextDoc, new ContextDocstrings) + val fileNamesOrNone = command.checkUsage(summary, sourcesRequired)(using ctx.settings)(using ctx.settingsState) + fileNamesOrNone.map { fileNames => + val files = fileNames.map(ctx.getFile) + (files, fromTastySetup(files)) + } + } + } + + /** Setup extra classpath of tasty and jar files */ + protected def fromTastySetup(files: List[AbstractFile])(using Context): Context = + if ctx.settings.fromTasty.value then + val newEntries: List[String] = files + .flatMap { file => + if !file.exists then + report.error(s"File does not exist: ${file.path}") + None + else file.extension match + case "jar" => Some(file.path) + case "tasty" => + TastyFileUtil.getClassPath(file) match + case Some(classpath) => Some(classpath) + case _ => + report.error(s"Could not load classname from: ${file.path}") + None + case _ => + report.error(s"File extension is not `tasty` or `jar`: ${file.path}") + None + } + .distinct + val ctx1 = ctx.fresh + val fullClassPath = + (newEntries :+ ctx.settings.classpath.value).mkString(java.io.File.pathSeparator.nn) + ctx1.setSetting(ctx1.settings.classpath, fullClassPath) + else ctx + + /** Entry point to the compiler that can be conveniently used with Java reflection. + * + * This entry point can easily be used without depending on the `dotty` package, + * you only need to depend on `dotty-interfaces` and call this method using + * reflection. This allows you to write code that will work against multiple + * versions of dotty without recompilation. 
+ * + * The trade-off is that you can only pass a SimpleReporter to this method + * and not a normal Reporter which is more powerful. + * + * Usage example: [[https://github.com/lampepfl/dotty/tree/master/compiler/test/dotty/tools/dotc/InterfaceEntryPointTest.scala]] + * + * @param args Arguments to pass to the compiler. + * @param simple Used to log errors, warnings, and info messages. + * The default reporter is used if this is `null`. + * @param callback Used to execute custom code during the compilation + * process. No callbacks will be executed if this is `null`. + * @return + */ + final def process(args: Array[String], simple: interfaces.SimpleReporter | Null, + callback: interfaces.CompilerCallback | Null): interfaces.ReporterResult = { + val reporter = if (simple == null) null else Reporter.fromSimpleReporter(simple) + process(args, reporter, callback) + } + + /** Principal entry point to the compiler. + * + * Usage example: [[https://github.com/lampepfl/dotty/tree/master/compiler/test/dotty/tools/dotc/EntryPointsTest.scala.disabled]] + * in method `runCompiler` + * + * @param args Arguments to pass to the compiler. + * @param reporter Used to log errors, warnings, and info messages. + * The default reporter is used if this is `null`. + * @param callback Used to execute custom code during the compilation + * process. No callbacks will be executed if this is `null`. + * @return The `Reporter` used. Use `Reporter#hasErrors` to check + * if compilation succeeded. + */ + final def process(args: Array[String], reporter: Reporter | Null = null, + callback: interfaces.CompilerCallback | Null = null): Reporter = { + val compileCtx = initCtx.fresh + if (reporter != null) + compileCtx.setReporter(reporter) + if (callback != null) + compileCtx.setCompilerCallback(callback) + process(args, compileCtx) + } + + /** Entry point to the compiler with no optional arguments. 
+ * + * This overload is provided for compatibility reasons: the + * `RawCompiler` of sbt expects this method to exist and calls + * it using reflection. Keeping it means that we can change + * the other overloads without worrying about breaking compatibility + * with sbt. + */ + final def process(args: Array[String]): Reporter = + process(args, null: Reporter | Null, null: interfaces.CompilerCallback | Null) + + /** Entry point to the compiler using a custom `Context`. + * + * In most cases, you do not need a custom `Context` and should + * instead use one of the other overloads of `process`. However, + * the other overloads cannot be overridden, instead you + * should override this one which they call internally. + * + * Usage example: [[https://github.com/lampepfl/dotty/tree/master/compiler/test/dotty/tools/dotc/EntryPointsTest.scala.disabled]] + * in method `runCompilerWithContext` + * + * @param args Arguments to pass to the compiler. + * @param rootCtx The root Context to use. + * @return The `Reporter` used. Use `Reporter#hasErrors` to check + * if compilation succeeded. + */ + def process(args: Array[String], rootCtx: Context): Reporter = { + setup(args, rootCtx) match + case Some((files, compileCtx)) => + doCompile(newCompiler(using compileCtx), files)(using compileCtx) + case None => + rootCtx.reporter + } + + def main(args: Array[String]): Unit = { + // Preload scala.util.control.NonFatal. Otherwise, when trying to catch a StackOverflowError, + // we may try to load it but fail with another StackOverflowError and lose the original exception, + // see . + val _ = NonFatal + sys.exit(if (process(args).hasErrors) 1 else 0) + } +} diff --git a/tests/pos-with-compiler-cc/dotc/Main.scala b/tests/pos-with-compiler-cc/dotc/Main.scala new file mode 100644 index 000000000000..3288fded52a2 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/Main.scala @@ -0,0 +1,5 @@ +package dotty.tools +package dotc + +/** Main class of the `dotc` batch compiler. 
*/ +object Main extends Driver diff --git a/tests/pos-with-compiler-cc/dotc/MissingCoreLibraryException.scala b/tests/pos-with-compiler-cc/dotc/MissingCoreLibraryException.scala new file mode 100644 index 000000000000..ae20d81226c9 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/MissingCoreLibraryException.scala @@ -0,0 +1,9 @@ +package dotty.tools.dotc + +import dotty.tools.FatalError + +class MissingCoreLibraryException(rootPackage: String) extends FatalError( + s"""Could not find package $rootPackage from compiler core libraries. + |Make sure the compiler core libraries are on the classpath. + """.stripMargin +) diff --git a/tests/pos-with-compiler-cc/dotc/Resident.scala b/tests/pos-with-compiler-cc/dotc/Resident.scala new file mode 100644 index 000000000000..0b9bca0dc75b --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/Resident.scala @@ -0,0 +1,61 @@ +package dotty.tools +package dotc + +import core.Contexts._ +import reporting.Reporter +import java.io.EOFException +import scala.annotation.tailrec + +/** A compiler which stays resident between runs. This is more of a PoC than + * something that's expected to be used often + * + * Usage: + * + * > scala dotty.tools.dotc.Resident + * + * dotc> "more options and files to compile" + * + * ... + * + * dotc> :reset // reset all options to the ones passed on the command line + * + * ... 
+ * + * dotc> :q // quit + */ +class Resident extends Driver { + + object residentCompiler extends Compiler + + override def sourcesRequired: Boolean = false + + private val quit = ":q" + private val reset = ":reset" + private val prompt = "dotc> " + + private def getLine() = { + Console.print(prompt) + try scala.io.StdIn.readLine() catch { case _: EOFException => quit } + } + + final override def process(args: Array[String], rootCtx: Context): Reporter = { + @tailrec def loop(args: Array[String], prevCtx: Context): Reporter = { + setup(args, prevCtx) match + case Some((files, ctx)) => + inContext(ctx) { + doCompile(residentCompiler, files) + } + var nextCtx = ctx + var line = getLine() + while (line == reset) { + nextCtx = rootCtx + line = getLine() + } + if line.startsWith(quit) then ctx.reporter + else loop((line split "\\s+").asInstanceOf[Array[String]], nextCtx) + case None => + prevCtx.reporter + } + loop(args, rootCtx) + } +} diff --git a/tests/pos-with-compiler-cc/dotc/Run.scala b/tests/pos-with-compiler-cc/dotc/Run.scala new file mode 100644 index 000000000000..705664177507 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/Run.scala @@ -0,0 +1,398 @@ +package dotty.tools +package dotc + +import core._ +import Contexts._ +import Periods._ +import Symbols._ +import Scopes._ +import Names.Name +import Denotations.Denotation +import typer.Typer +import typer.ImportInfo.withRootImports +import Decorators._ +import io.AbstractFile +import Phases.unfusedPhases + +import util._ +import reporting.{Suppression, Action, Profile, ActiveProfile, NoProfile} +import reporting.Diagnostic +import reporting.Diagnostic.Warning +import rewrites.Rewrites +import profile.Profiler +import printing.XprintMode +import typer.ImplicitRunInfo +import config.Feature +import StdNames.nme + +import java.io.{BufferedWriter, OutputStreamWriter} +import java.nio.charset.StandardCharsets + +import scala.collection.mutable +import scala.util.control.NonFatal +import scala.io.Codec +import 
caps.unsafe.unsafeUnbox + +/** A compiler run. Exports various methods to compile source files */ +class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with ConstraintRunInfo { + + /** Default timeout to stop looking for further implicit suggestions, in ms. + * This is usually for the first import suggestion; subsequent suggestions + * may get smaller timeouts. @see ImportSuggestions.reduceTimeBudget + */ + private var myImportSuggestionBudget: Int = + Int.MinValue // sentinel value; means whatever is set in command line option + + def importSuggestionBudget = + if myImportSuggestionBudget == Int.MinValue then ictx.settings.XimportSuggestionTimeout.value + else myImportSuggestionBudget + + def importSuggestionBudget_=(x: Int) = + myImportSuggestionBudget = x + + /** If this variable is set to `true`, some core typer operations will + * return immediately. Currently these early abort operations are + * `Typer.typed` and `Implicits.typedImplicit`. + */ + @volatile var isCancelled = false + + private var compiling = false + + private var myUnits: List[CompilationUnit] = Nil + private var myUnitsCached: List[CompilationUnit] = Nil + private var myFiles: Set[AbstractFile] = _ + + // `@nowarn` annotations by source file, populated during typer + private val mySuppressions: mutable.LinkedHashMap[SourceFile, mutable.ListBuffer[Suppression]] = mutable.LinkedHashMap.empty + // source files whose `@nowarn` annotations are processed + private val mySuppressionsComplete: mutable.Set[SourceFile] = mutable.Set.empty + // warnings issued before a source file's `@nowarn` annotations are processed, suspended so that `@nowarn` can filter them + private val mySuspendedMessages: mutable.LinkedHashMap[SourceFile, mutable.LinkedHashSet[Warning]] = mutable.LinkedHashMap.empty + + object suppressions: + // When the REPL creates a new run (ReplDriver.compile), parsing is already done in the old context, with the + // previous Run. 
Parser warnings were suspended in the old run and need to be copied over so they are not lost. + // Same as scala/scala/commit/79ca1408c7. + def initSuspendedMessages(oldRun: Run | Null) = if oldRun != null then + mySuspendedMessages.clear() + mySuspendedMessages ++= oldRun.mySuspendedMessages + + def suppressionsComplete(source: SourceFile) = source == NoSource || mySuppressionsComplete(source) + + def addSuspendedMessage(warning: Warning) = + mySuspendedMessages.getOrElseUpdate(warning.pos.source, mutable.LinkedHashSet.empty) += warning + + def nowarnAction(dia: Diagnostic): Action.Warning.type | Action.Verbose.type | Action.Silent.type = + mySuppressions.getOrElse(dia.pos.source, Nil).find(_.matches(dia)) match { + case Some(s) => + s.markUsed() + if (s.verbose) Action.Verbose + else Action.Silent + case _ => + Action.Warning + } + + def addSuppression(sup: Suppression): Unit = + val source = sup.annotPos.source + mySuppressions.getOrElseUpdate(source, mutable.ListBuffer.empty) += sup + + def reportSuspendedMessages(source: SourceFile)(using Context): Unit = { + // sort suppressions. 
they are not added in any particular order because of lazy type completion + for (sups <- mySuppressions.get(source)) + mySuppressions(source) = sups.sortBy(sup => 0 - sup.start) + mySuppressionsComplete += source + mySuspendedMessages.remove(source).foreach(_.foreach(ctx.reporter.issueIfNotSuppressed)) + } + + def runFinished(hasErrors: Boolean): Unit = + // report suspended messages (in case the run finished before typer) + mySuspendedMessages.keysIterator.toList.foreach(reportSuspendedMessages) + // report unused nowarns only if all all phases are done + if !hasErrors && ctx.settings.WunusedHas.nowarn then + for { + source <- mySuppressions.keysIterator.toList + sups <- mySuppressions.remove(source) + sup <- sups.reverse + } if (!sup.used) + report.warning("@nowarn annotation does not suppress any warnings", sup.annotPos) + + /** The compilation units currently being compiled, this may return different + * results over time. + */ + def units: List[CompilationUnit] = myUnits + + private def units_=(us: List[CompilationUnit]): Unit = + myUnits = us + + var suspendedUnits: mutable.ListBuffer[CompilationUnit] = mutable.ListBuffer() + + def checkSuspendedUnits(newUnits: List[CompilationUnit])(using Context): Unit = + if newUnits.isEmpty && suspendedUnits.nonEmpty && !ctx.reporter.errorsReported then + val where = + if suspendedUnits.size == 1 then i"in ${suspendedUnits.head}." + else i"""among + | + | ${suspendedUnits.toList}%, % + |""" + val enableXprintSuspensionHint = + if ctx.settings.XprintSuspension.value then "" + else "\n\nCompiling with -Xprint-suspension gives more information." + report.error(em"""Cyclic macro dependencies $where + |Compilation stopped since no further progress can be made. + | + |To fix this, place macros in one set of files and their callers in another.$enableXprintSuspensionHint""") + + /** The files currently being compiled (active or suspended). + * This may return different results over time. 
+ * These files do not have to be source files since it's possible to compile + * from TASTY. + */ + def files: Set[AbstractFile] = { + if (myUnits ne myUnitsCached) { + myUnitsCached = myUnits + myFiles = (myUnits ++ suspendedUnits).map(_.source.file).toSet + } + myFiles + } + + /** The source files of all late entered symbols, as a set */ + private var lateFiles = mutable.Set[AbstractFile]() + + /** A cache for static references to packages and classes */ + val staticRefs = util.EqHashMap[Name, Denotation](initialCapacity = 1024) + + /** Actions that need to be performed at the end of the current compilation run */ + private var finalizeActions = mutable.ListBuffer[() => Unit]() + + /** Will be set to true if any of the compiled compilation units contains + * a pureFunctions language import. + */ + var pureFunsImportEncountered = false + + /** Will be set to true if any of the compiled compilation units contains + * a captureChecking language import. + */ + var ccImportEncountered = false + + def compile(files: List[AbstractFile]): Unit = + try + val sources = files.map(runContext.getSource(_)) + compileSources(sources) + catch + case NonFatal(ex) => + if units.nonEmpty then report.echo(i"exception occurred while compiling $units%, %") + else report.echo(s"exception occurred while compiling ${files.map(_.name).mkString(", ")}") + throw ex + + /** TODO: There's a fundamental design problem here: We assemble phases using `fusePhases` + * when we first build the compiler. But we modify them with -Yskip, -Ystop + * on each run. That modification needs to either transform the tree structure, + * or we need to assemble phases on each run, and take -Yskip, -Ystop into + * account. I think the latter would be preferable. 
+ */ + def compileSources(sources: List[SourceFile]): Unit = + if (sources forall (_.exists)) { + units = sources.map(CompilationUnit(_)) + compileUnits() + } + + + def compileUnits(us: List[CompilationUnit]): Unit = { + units = us + compileUnits() + } + + def compileUnits(us: List[CompilationUnit], ctx: Context): Unit = { + units = us + compileUnits()(using ctx) + } + + var profile: Profile = NoProfile + + private def compileUnits()(using Context) = Stats.maybeMonitored { + if (!ctx.mode.is(Mode.Interactive)) // IDEs might have multi-threaded access, accesses are synchronized + ctx.base.checkSingleThreaded() + + compiling = true + + profile = + if ctx.settings.Vprofile.value + || !ctx.settings.VprofileSortedBy.value.isEmpty + || ctx.settings.VprofileDetails.value != 0 + then ActiveProfile(ctx.settings.VprofileDetails.value.max(0).min(1000)) + else NoProfile + + // If testing pickler, make sure to stop after pickling phase: + val stopAfter = + if (ctx.settings.YtestPickler.value) List("pickler") + else ctx.settings.YstopAfter.value + + val pluginPlan = ctx.base.addPluginPhases(ctx.base.phasePlan) + val phases = ctx.base.fusePhases(pluginPlan, + ctx.settings.Yskip.value, ctx.settings.YstopBefore.value, stopAfter, ctx.settings.Ycheck.value) + ctx.base.usePhases(phases) + + def runPhases(using Context) = { + var lastPrintedTree: PrintedTree = NoPrintedTree + val profiler = ctx.profiler + var phasesWereAdjusted = false + + for (phase <- ctx.base.allPhases) + if (phase.isRunnable) + Stats.trackTime(s"$phase ms ") { + val start = System.currentTimeMillis + val profileBefore = profiler.beforePhase(phase) + units = phase.runOn(units) + profiler.afterPhase(phase, profileBefore) + if (ctx.settings.Xprint.value.containsPhase(phase)) + for (unit <- units) + lastPrintedTree = + printTree(lastPrintedTree)(using ctx.fresh.setPhase(phase.next).setCompilationUnit(unit)) + report.informTime(s"$phase ", start) + Stats.record(s"total trees at end of $phase", ast.Trees.ntrees) + for 
(unit <- units) + Stats.record(s"retained typed trees at end of $phase", unit.tpdTree.treeSize) + ctx.typerState.gc() + } + if !phasesWereAdjusted then + phasesWereAdjusted = true + if !Feature.ccEnabledSomewhere then + ctx.base.unlinkPhaseAsDenotTransformer(Phases.checkCapturesPhase.prev) + ctx.base.unlinkPhaseAsDenotTransformer(Phases.checkCapturesPhase) + + profiler.finished() + } + + val runCtx = ctx.fresh + runCtx.setProfiler(Profiler()) + unfusedPhases.foreach(_.initContext(runCtx)) + runPhases(using runCtx) + if (!ctx.reporter.hasErrors) + Rewrites.writeBack() + suppressions.runFinished(hasErrors = ctx.reporter.hasErrors) + while (finalizeActions.nonEmpty) { + val action = finalizeActions.remove(0).unsafeUnbox + action() + } + compiling = false + } + + /** Enter top-level definitions of classes and objects contained in source file `file`. + * The newly added symbols replace any previously entered symbols. + * If `typeCheck = true`, also run typer on the compilation unit, and set + * `rootTreeOrProvider`. 
+ */ + def lateCompile(file: AbstractFile, typeCheck: Boolean)(using Context): Unit = + if (!files.contains(file) && !lateFiles.contains(file)) { + lateFiles += file + + val unit = CompilationUnit(ctx.getSource(file)) + val unitCtx = runContext.fresh + .setCompilationUnit(unit) + .withRootImports + + def process()(using Context) = + ctx.typer.lateEnterUnit(doTypeCheck => + if typeCheck then + if compiling then finalizeActions += doTypeCheck + else doTypeCheck() + ) + + process()(using unitCtx) + } + + private sealed trait PrintedTree + private /*final*/ case class SomePrintedTree(phase: String, tree: String) extends PrintedTree + private object NoPrintedTree extends PrintedTree + + private def printTree(last: PrintedTree)(using Context): PrintedTree = { + val unit = ctx.compilationUnit + val fusedPhase = ctx.phase.prevMega + val echoHeader = f"[[syntax trees at end of $fusedPhase%25s]] // ${unit.source}" + val tree = if ctx.isAfterTyper then unit.tpdTree else unit.untpdTree + val treeString = fusedPhase.show(tree) + + last match { + case SomePrintedTree(phase, lastTreeString) if lastTreeString == treeString => + report.echo(s"$echoHeader: unchanged since $phase") + last + + case SomePrintedTree(phase, lastTreeString) if ctx.settings.XprintDiff.value || ctx.settings.XprintDiffDel.value => + val diff = DiffUtil.mkColoredCodeDiff(treeString, lastTreeString, ctx.settings.XprintDiffDel.value) + report.echo(s"$echoHeader\n$diff\n") + SomePrintedTree(fusedPhase.phaseName, treeString) + + case _ => + report.echo(s"$echoHeader\n$treeString\n") + SomePrintedTree(fusedPhase.phaseName, treeString) + } + } + + def compileFromStrings(scalaSources: List[String], javaSources: List[String] = Nil): Unit = { + def sourceFile(source: String, isJava: Boolean): SourceFile = { + val uuid = java.util.UUID.randomUUID().toString + val ext = if (isJava) "java" else "scala" + val name = s"compileFromString-$uuid.$ext" + SourceFile.virtual(name, source) + } + val sources = + 
scalaSources.map(sourceFile(_, isJava = false)) ++ + javaSources.map(sourceFile(_, isJava = true)) + + compileSources(sources) + } + + /** Print summary of warnings and errors encountered */ + def printSummary(): Unit = { + printMaxConstraint() + val r = runContext.reporter + if !r.errorsReported then + profile.printSummary() + r.summarizeUnreportedWarnings() + r.printSummary() + } + + override def reset(): Unit = { + super[ImplicitRunInfo].reset() + super[ConstraintRunInfo].reset() + myCtx = null + myUnits = Nil + myUnitsCached = Nil + } + + /** Produces the following contexts, from outermost to innermost + * + * bootStrap: A context with next available runId and a scope consisting of + * the RootPackage _root_ + * start A context with RootClass as owner and the necessary initializations + * for type checking. + * imports For each element of RootImports, an import context + */ + protected def rootContext(using Context): Context = { + ctx.initialize() + ctx.base.setPhasePlan(comp.phases) + val rootScope = new MutableScope(0) + val bootstrap = ctx.fresh + .setPeriod(Period(comp.nextRunId, FirstPhaseId)) + .setScope(rootScope) + rootScope.enter(ctx.definitions.RootPackage)(using bootstrap) + var start = bootstrap.fresh + .setOwner(defn.RootClass) + .setTyper(new Typer) + .addMode(Mode.ImplicitsEnabled) + .setTyperState(ctx.typerState.fresh(ctx.reporter)) + if ctx.settings.YexplicitNulls.value && !Feature.enabledBySetting(nme.unsafeNulls) then + start = start.addMode(Mode.SafeNulls) + ctx.initialize()(using start) // re-initialize the base context with start + + // `this` must be unchecked for safe initialization because by being passed to setRun during + // initialization, it is not yet considered fully initialized by the initialization checker + start.setRun(this: @unchecked) + } + + private var myCtx: Context | Null = rootContext(using ictx) + + /** The context created for this run */ + given runContext[Dummy_so_its_a_def]: Context = myCtx.nn + 
assert(runContext.runId <= Periods.MaxPossibleRunId) +} diff --git a/tests/pos-with-compiler-cc/dotc/ScalacCommand.scala b/tests/pos-with-compiler-cc/dotc/ScalacCommand.scala new file mode 100644 index 000000000000..2e0d9a08f25d --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ScalacCommand.scala @@ -0,0 +1,9 @@ +package dotty.tools.dotc + +import config.Properties._ +import config.CompilerCommand + +object ScalacCommand extends CompilerCommand: + override def cmdName: String = "scalac" + override def versionMsg: String = s"Scala compiler $versionString -- $copyrightString" + override def ifErrorsMsg: String = " scalac -help gives more information" diff --git a/tests/pos-with-compiler-cc/dotc/ast/CheckTrees.scala.disabled b/tests/pos-with-compiler-cc/dotc/ast/CheckTrees.scala.disabled new file mode 100644 index 000000000000..6bf7530faf24 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/CheckTrees.scala.disabled @@ -0,0 +1,258 @@ +package dotty.tools +package dotc +package ast + +import core._ +import util.Spans._, Types._, Contexts._, Constants._, Names._, Flags._ +import SymDenotations._, Symbols._, StdNames._, Annotations._, Trees._ + +// TODO: revise, integrate in a checking phase. 
+object CheckTrees { + + import tpd._ + + def check(p: Boolean, msg: => String = "")(using Context): Unit = assert(p, msg) + + def checkTypeArg(arg: Tree, bounds: TypeBounds)(using Context): Unit = { + check(arg.isValueType) + check(bounds contains arg.tpe) + } + + def escapingRefs(block: Block)(using Context): collection.Set[NamedType] = { + var hoisted: Set[Symbol] = Set() + lazy val locals = ctx.typeAssigner.localSyms(block.stats).toSet + def isLocal(sym: Symbol): Boolean = + (locals contains sym) && !isHoistableClass(sym) + def isHoistableClass(sym: Symbol) = + sym.isClass && { + (hoisted contains sym) || { + hoisted += sym + !classLeaks(sym.asClass) + } + } + def leakingTypes(tp: Type): collection.Set[NamedType] = + tp namedPartsWith (tp => isLocal(tp.symbol)) + def typeLeaks(tp: Type): Boolean = leakingTypes(tp).nonEmpty + def classLeaks(sym: ClassSymbol): Boolean = + (ctx.owner is Method) || // can't hoist classes out of method bodies + (sym.info.parents exists typeLeaks) || + (sym.decls.toList exists (t => typeLeaks(t.info))) + leakingTypes(block.tpe) + } + + def checkType(tree: Tree)(using Context): Unit = tree match { + case Ident(name) => + case Select(qualifier, name) => + check(qualifier.isValue) + check(qualifier.tpe =:= tree.tpe.normalizedPrefix) + val denot = qualifier.tpe.member(name) + check(denot.exists) + check(denot.hasAltWith(_.symbol == tree.symbol)) + case This(cls) => + case Super(qual, mixin) => + check(qual.isValue) + val cls = qual.tpe.typeSymbol + check(cls.isClass) + case Apply(fn, args) => + def checkArg(arg: Tree, name: Name, formal: Type): Unit = { + arg match { + case NamedArg(argName, _) => + check(argName == name) + case _ => + check(arg.isValue) + } + check(arg.tpe <:< formal) + } + val MethodType(paramNames, paramTypes) = fn.tpe.widen // checked already at construction + args.lazyZip(paramNames).lazyZip(paramTypes) foreach checkArg + case TypeApply(fn, args) => + val pt @ PolyType(_) = fn.tpe.widen // checked already at 
construction + args.lazyZip(pt.instantiateBounds(args map (_.tpe))) foreach checkTypeArg + case Literal(const: Constant) => + case New(tpt) => + check(tpt.isValueType) + val cls = tpt.tpe.typeSymbol + check(cls.isClass) + check(!(cls is AbstractOrTrait)) + case Pair(left, right) => + check(left.isValue) + check(right.isValue) + case Typed(expr, tpt) => + check(tpt.isValueType) + expr.tpe.widen match { + case tp: MethodType => + val cls = tpt.tpe.typeSymbol + check(cls.isClass) + check((cls is Trait) || + cls.primaryConstructor.info.paramTypess.flatten.isEmpty) + val absMembers = tpt.tpe.abstractTermMembers + check(absMembers.size == 1) + check(tp <:< absMembers.head.info) + case _ => + check(expr.isValueOrPattern) + check(expr.tpe <:< tpt.tpe.translateParameterized(defn.RepeatedParamClass, defn.SeqClass)) + } + case NamedArg(name, arg) => + case Assign(lhs, rhs) => + check(lhs.isValue); check(rhs.isValue) + lhs.tpe match { + case ltpe: TermRef => + check(ltpe.symbol is Mutable) + case _ => + check(false) + } + check(rhs.tpe <:< lhs.tpe.widen) + case tree @ Block(stats, expr) => + check(expr.isValue) + check(escapingRefs(tree).isEmpty) + case If(cond, thenp, elsep) => + check(cond.isValue); check(thenp.isValue); check(elsep.isValue) + check(cond.tpe isRef defn.BooleanClass) + case Closure(env, meth, target) => + meth.tpe.widen match { + case mt @ MethodType(_, paramTypes) => + if (target.isEmpty) { + check(env.length < paramTypes.length) + for ((arg, formal) <- env zip paramTypes) + check(arg.tpe <:< formal) + } + else + // env is stored in class, not method + target.tpe match { + case SAMType(targetMeth) => + check(mt <:< targetMeth.info) + } + } + case Match(selector, cases) => + check(selector.isValue) + // are any checks that relate selector and patterns desirable? 
+ case CaseDef(pat, guard, body) => + check(pat.isValueOrPattern); check(guard.isValue); check(body.isValue) + check(guard.tpe.derivesFrom(defn.BooleanClass)) + case Return(expr, from) => + check(expr.isValue); check(from.isTerm) + check(from.tpe.termSymbol.isRealMethod) + case Try(block, handler, finalizer) => + check(block.isTerm) + check(finalizer.isTerm) + check(handler.isTerm) + check(handler.tpe derivesFrom defn.FunctionClass(1)) + check(handler.tpe.baseArgInfos(defn.FunctionClass(1)).head <:< defn.ThrowableType) + case Throw(expr) => + check(expr.isValue) + check(expr.tpe.derivesFrom(defn.ThrowableClass)) + case SeqLiteral(elems) => + val elemtp = tree.tpe.elemType + for (elem <- elems) { + check(elem.isValue) + check(elem.tpe <:< elemtp) + } + case TypeTree(original) => + if (!original.isEmpty) { + check(original.isValueType) + check(original.tpe == tree.tpe) + } + case SingletonTypeTree(ref) => + check(ref.isValue) + check(ref.symbol.isStable) + case SelectFromTypeTree(qualifier, name) => + check(qualifier.isValueType) + check(qualifier.tpe =:= tree.tpe.normalizedPrefix) + val denot = qualifier.tpe.member(name) + check(denot.exists) + check(denot.symbol == tree.symbol) + case AndTypeTree(left, right) => + check(left.isValueType); check(right.isValueType) + case OrTypeTree(left, right) => + check(left.isValueType); check(right.isValueType) + case RefinedTypeTree(tpt, refinements) => + check(tpt.isValueType) + def checkRefinements(forbidden: Set[Symbol], rs: List[Tree]): Unit = rs match { + case r :: rs1 => + val rsym = r.symbol + check(rsym.isTerm || rsym.isAbstractOrAliasType) + if (rsym.isAbstractType) check(tpt.tpe.member(rsym.name).exists) + check(rsym.info forallParts { + case nt: NamedType => !(forbidden contains nt.symbol) + case _ => true + }) + checkRefinements(forbidden - rsym, rs1) + case nil => + } + checkRefinements(ctx.typeAssigner.localSyms(refinements).toSet, refinements) + case AppliedTypeTree(tpt, args) => + check(tpt.isValueType) + val 
tparams = tpt.tpe.typeParams + check(sameLength(tparams, args)) + args.lazyZip(tparams map (_.info.bounds)) foreach checkTypeArg + case TypeBoundsTree(lo, hi) => + check(lo.isValueType); check(hi.isValueType) + check(lo.tpe <:< hi.tpe) + case Bind(sym, body) => + check(body.isValueOrPattern) + check(!(tree.symbol is Method)) + body match { + case Ident(nme.WILDCARD) => + case _ => check(body.tpe.widen =:= tree.symbol.info) + } + case Alternative(alts) => + for (alt <- alts) check(alt.isValueOrPattern) + case UnApply(fun, implicits, args) => // todo: review + check(fun.isTerm) + for (arg <- args) check(arg.isValueOrPattern) + val funtpe @ MethodType(_, _) = fun.tpe.widen + fun.symbol.name match { // check arg arity + case nme.unapplySeq => + // args need to be wrapped in (...: _*) + check(args.length == 1) + check(args.head.isInstanceOf[SeqLiteral]) + case nme.unapply => + val rtp = funtpe.resultType + if (rtp isRef defn.BooleanClass) + check(args.isEmpty) + else { + check(rtp isRef defn.OptionClass) + val normArgs = rtp.argTypesHi match { + case optionArg :: Nil => + optionArg.argTypesHi match { + case Nil => + optionArg :: Nil + case tupleArgs if defn.isTupleNType(optionArg) => + tupleArgs + } + case _ => + check(false) + Nil + } + check(sameLength(normArgs, args)) + } + } + case ValDef(mods, name, tpt, rhs) => + check(!(tree.symbol is Method)) + if (!rhs.isEmpty) { + check(rhs.isValue) + check(rhs.tpe <:< tpt.tpe) + } + case DefDef(mods, name, tparams, vparamss, tpt, rhs) => + check(tree.symbol is Method) + if (!rhs.isEmpty) { + check(rhs.isValue) + check(rhs.tpe <:< tpt.tpe) + } + case TypeDef(mods, name, tpt) => + check(tpt.isInstanceOf[Template] || tpt.tpe.isInstanceOf[TypeBounds]) + case Template(constr, parents, selfType, body) => + case Import(expr, selectors) => + check(expr.isValue) + check(expr.tpe.termSymbol.isStable) + case PackageDef(pid, stats) => + check(pid.isTerm) + check(pid.symbol is Package) + case Annotated(annot, arg) => + 
check(annot.isInstantiation) + check(annot.symbol.owner.isSubClass(defn.AnnotationClass)) + check(arg.isValueType || arg.isValue) + case EmptyTree => + } +} + diff --git a/tests/pos-with-compiler-cc/dotc/ast/Desugar.scala b/tests/pos-with-compiler-cc/dotc/ast/Desugar.scala new file mode 100644 index 000000000000..ba2c8f5f43e6 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/Desugar.scala @@ -0,0 +1,1968 @@ +package dotty.tools +package dotc +package ast + +import core._ +import util.Spans._, Types._, Contexts._, Constants._, Names._, NameOps._, Flags._ +import Symbols._, StdNames._, Trees._, ContextOps._ +import Decorators._, transform.SymUtils._ +import NameKinds.{UniqueName, EvidenceParamName, DefaultGetterName, WildcardParamName} +import typer.{Namer, Checking} +import util.{Property, SourceFile, SourcePosition, Chars} +import config.Feature.{sourceVersion, migrateTo3, enabled} +import config.SourceVersion._ +import collection.mutable.ListBuffer +import reporting._ +import annotation.constructorOnly +import printing.Formatting.hl +import config.Printers + +import scala.annotation.internal.sharable + +object desugar { + import untpd._ + import DesugarEnums._ + + /** An attachment for companion modules of classes that have a `derives` clause. + * The position value indicates the start position of the template of the + * deriving class. + */ + val DerivingCompanion: Property.Key[SourcePosition] = Property.Key() + + /** An attachment for match expressions generated from a PatDef or GenFrom. + * Value of key == one of IrrefutablePatDef, IrrefutableGenFrom + */ + val CheckIrrefutable: Property.Key[MatchCheck] = Property.StickyKey() + + /** A multi-line infix operation with the infix operator starting a new line. + * Used for explaining potential errors. + */ + val MultiLineInfix: Property.Key[Unit] = Property.StickyKey() + + /** An attachment key to indicate that a ValDef originated from parameter untupling. 
+ */ + val UntupledParam: Property.Key[Unit] = Property.StickyKey() + + /** What static check should be applied to a Match? */ + enum MatchCheck { + case None, Exhaustive, IrrefutablePatDef, IrrefutableGenFrom + } + + /** Is `name` the name of a method that can be invalidated as a compiler-generated + * case class method if it clashes with a user-defined method? + */ + def isRetractableCaseClassMethodName(name: Name)(using Context): Boolean = name match { + case nme.apply | nme.unapply | nme.unapplySeq | nme.copy => true + case DefaultGetterName(nme.copy, _) => true + case _ => false + } + + /** Is `name` the name of a method that is added unconditionally to case classes? */ + def isDesugaredCaseClassMethodName(name: Name)(using Context): Boolean = + isRetractableCaseClassMethodName(name) || name.isSelectorName + +// ----- DerivedTypeTrees ----------------------------------- + + class SetterParamTree(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree { + def derivedTree(sym: Symbol)(using Context): tpd.TypeTree = tpd.TypeTree(sym.info.resultType) + } + + class TypeRefTree(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree { + def derivedTree(sym: Symbol)(using Context): tpd.TypeTree = tpd.TypeTree(sym.typeRef) + } + + class TermRefTree(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree { + def derivedTree(sym: Symbol)(using Context): tpd.Tree = tpd.ref(sym) + } + + /** A type tree that computes its type from an existing parameter. */ + class DerivedFromParamTree()(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree { + + /** Complete the appropriate constructors so that OriginalSymbol attachments are + * pushed to DerivedTypeTrees. 
+ */ + override def ensureCompletions(using Context): Unit = { + def completeConstructor(sym: Symbol) = + sym.infoOrCompleter match { + case completer: Namer#ClassCompleter => + completer.completeConstructor(sym) + case _ => + } + + if (!ctx.owner.is(Package)) + if (ctx.owner.isClass) { + completeConstructor(ctx.owner) + if (ctx.owner.is(ModuleClass)) + completeConstructor(ctx.owner.linkedClass) + } + else ensureCompletions(using ctx.outer) + } + + /** Return info of original symbol, where all references to siblings of the + * original symbol (i.e. sibling and original symbol have the same owner) + * are rewired to same-named parameters or accessors in the scope enclosing + * the current scope. The current scope is the scope owned by the defined symbol + * itself, that's why we have to look one scope further out. If the resulting + * type is an alias type, dealias it. This is necessary because the + * accessor of a type parameter is a private type alias that cannot be accessed + * from subclasses. 
+ */ + def derivedTree(sym: Symbol)(using Context): tpd.TypeTree = { + val relocate = new TypeMap { + val originalOwner = sym.owner + def apply(tp: Type) = tp match { + case tp: NamedType if tp.symbol.exists && (tp.symbol.owner eq originalOwner) => + val defctx = mapCtx.outersIterator.dropWhile(_.scope eq mapCtx.scope).next() + var local = defctx.denotNamed(tp.name).suchThat(_.isParamOrAccessor).symbol + if (local.exists) (defctx.owner.thisType select local).dealiasKeepAnnots + else { + def msg = + s"no matching symbol for ${tp.symbol.showLocated} in ${defctx.owner} / ${defctx.effectiveScope.toList}" + ErrorType(msg).assertingErrorsReported(msg) + } + case _ => + mapOver(tp) + } + } + tpd.TypeTree(relocate(sym.info)) + } + } + + /** A type definition copied from `tdef` with a rhs typetree derived from it */ + def derivedTypeParam(tdef: TypeDef)(using Context): TypeDef = + cpy.TypeDef(tdef)( + rhs = DerivedFromParamTree().withSpan(tdef.rhs.span).watching(tdef) + ) + + /** A derived type definition watching `sym` */ + def derivedTypeParamWithVariance(sym: TypeSymbol)(using Context): TypeDef = + val variance = VarianceFlags & sym.flags + TypeDef(sym.name, DerivedFromParamTree().watching(sym)).withFlags(TypeParam | Synthetic | variance) + + /** A value definition copied from `vdef` with a tpt typetree derived from it */ + def derivedTermParam(vdef: ValDef)(using Context): ValDef = + cpy.ValDef(vdef)( + tpt = DerivedFromParamTree().withSpan(vdef.tpt.span).watching(vdef)) + +// ----- Desugar methods ------------------------------------------------- + + /** Setter generation is needed for: + * - non-private class members + * - all trait members + * - all package object members + */ + def isSetterNeeded(valDef: ValDef)(using Context): Boolean = { + val mods = valDef.mods + mods.is(Mutable) + && ctx.owner.isClass + && (!mods.is(Private) || ctx.owner.is(Trait) || ctx.owner.isPackageObject) + } + + /** var x: Int = expr + * ==> + * def x: Int = expr + * def x_=($1: ): Unit = 
() + * + * Generate setter where needed + */ + def valDef(vdef0: ValDef)(using Context): Tree = { + val vdef @ ValDef(_, tpt, rhs) = vdef0 + val mods = vdef.mods + + val valName = normalizeName(vdef, tpt).asTermName + val vdef1 = cpy.ValDef(vdef)(name = valName) + + if (isSetterNeeded(vdef)) { + // TODO: copy of vdef as getter needed? + // val getter = ValDef(mods, name, tpt, rhs) withPos vdef.pos? + // right now vdef maps via expandedTree to a thicket which concerns itself. + // I don't see a problem with that but if there is one we can avoid it by making a copy here. + val setterParam = makeSyntheticParameter(tpt = SetterParamTree().watching(vdef)) + // The rhs gets filled in later, when field is generated and getter has parameters (see Memoize miniphase) + val setterRhs = if (vdef.rhs.isEmpty) EmptyTree else unitLiteral + val setter = cpy.DefDef(vdef)( + name = valName.setterName, + paramss = (setterParam :: Nil) :: Nil, + tpt = TypeTree(defn.UnitType), + rhs = setterRhs + ).withMods((mods | Accessor) &~ (CaseAccessor | GivenOrImplicit | Lazy)) + .dropEndMarker() // the end marker should only appear on the getter definition + Thicket(vdef1, setter) + } + else vdef1 + } + + def makeImplicitParameters(tpts: List[Tree], implicitFlag: FlagSet, forPrimaryConstructor: Boolean = false)(using Context): List[ValDef] = + for (tpt <- tpts) yield { + val paramFlags: FlagSet = if (forPrimaryConstructor) LocalParamAccessor else Param + val epname = EvidenceParamName.fresh() + ValDef(epname, tpt, EmptyTree).withFlags(paramFlags | implicitFlag) + } + + def mapParamss(paramss: List[ParamClause]) + (mapTypeParam: TypeDef => TypeDef) + (mapTermParam: ValDef => ValDef)(using Context): List[ParamClause] = + paramss.mapConserve { + case TypeDefs(tparams) => tparams.mapConserve(mapTypeParam) + case ValDefs(vparams) => vparams.mapConserve(mapTermParam) + case _ => unreachable() + } + + /** 1. Expand context bounds to evidence params. 
E.g., + * + * def f[T >: L <: H : B](params) + * ==> + * def f[T >: L <: H](params)(implicit evidence$0: B[T]) + * + * 2. Expand default arguments to default getters. E.g, + * + * def f[T: B](x: Int = 1)(y: String = x + "m") = ... + * ==> + * def f[T](x: Int)(y: String)(implicit evidence$0: B[T]) = ... + * def f$default$1[T] = 1 + * def f$default$2[T](x: Int) = x + "m" + */ + private def defDef(meth: DefDef, isPrimaryConstructor: Boolean = false)(using Context): Tree = + addDefaultGetters(elimContextBounds(meth, isPrimaryConstructor)) + + private def elimContextBounds(meth: DefDef, isPrimaryConstructor: Boolean)(using Context): DefDef = + val DefDef(_, paramss, tpt, rhs) = meth + val evidenceParamBuf = ListBuffer[ValDef]() + + def desugarContextBounds(rhs: Tree): Tree = rhs match + case ContextBounds(tbounds, cxbounds) => + val iflag = if sourceVersion.isAtLeast(`future`) then Given else Implicit + evidenceParamBuf ++= makeImplicitParameters( + cxbounds, iflag, forPrimaryConstructor = isPrimaryConstructor) + tbounds + case LambdaTypeTree(tparams, body) => + cpy.LambdaTypeTree(rhs)(tparams, desugarContextBounds(body)) + case _ => + rhs + + val paramssNoContextBounds = + mapParamss(paramss) { + tparam => cpy.TypeDef(tparam)(rhs = desugarContextBounds(tparam.rhs)) + }(identity) + + rhs match + case MacroTree(call) => + cpy.DefDef(meth)(rhs = call).withMods(meth.mods | Macro | Erased) + case _ => + addEvidenceParams( + cpy.DefDef(meth)( + name = normalizeName(meth, tpt).asTermName, + paramss = paramssNoContextBounds), + evidenceParamBuf.toList) + end elimContextBounds + + def addDefaultGetters(meth: DefDef)(using Context): Tree = + + /** The longest prefix of parameter lists in paramss whose total number of + * ValDefs does not exceed `n` + */ + def takeUpTo(paramss: List[ParamClause], n: Int): List[ParamClause] = paramss match + case ValDefs(vparams) :: paramss1 => + val len = vparams.length + if len <= n then vparams :: takeUpTo(paramss1, n - len) else Nil + case 
TypeDefs(tparams) :: paramss1 => + tparams :: takeUpTo(paramss1, n) + case _ => + Nil + + def dropContextBounds(tparam: TypeDef): TypeDef = + def dropInRhs(rhs: Tree): Tree = rhs match + case ContextBounds(tbounds, _) => + tbounds + case rhs @ LambdaTypeTree(tparams, body) => + cpy.LambdaTypeTree(rhs)(tparams, dropInRhs(body)) + case _ => + rhs + cpy.TypeDef(tparam)(rhs = dropInRhs(tparam.rhs)) + + def paramssNoRHS = mapParamss(meth.paramss)(identity) { + vparam => + if vparam.rhs.isEmpty then vparam + else cpy.ValDef(vparam)(rhs = EmptyTree).withMods(vparam.mods | HasDefault) + } + + def getterParamss(n: Int): List[ParamClause] = + mapParamss(takeUpTo(paramssNoRHS, n)) { + tparam => dropContextBounds(toDefParam(tparam, keepAnnotations = true)) + } { + vparam => toDefParam(vparam, keepAnnotations = true, keepDefault = false) + } + + def defaultGetters(paramss: List[ParamClause], n: Int): List[DefDef] = paramss match + case ValDefs(vparam :: vparams) :: paramss1 => + def defaultGetter: DefDef = + DefDef( + name = DefaultGetterName(meth.name, n), + paramss = getterParamss(n), + tpt = TypeTree(), + rhs = vparam.rhs + ) + .withMods(Modifiers( + meth.mods.flags & (AccessFlags | Synthetic) | (vparam.mods.flags & Inline), + meth.mods.privateWithin)) + val rest = defaultGetters(vparams :: paramss1, n + 1) + if vparam.rhs.isEmpty then rest else defaultGetter :: rest + case _ :: paramss1 => // skip empty parameter lists and type parameters + defaultGetters(paramss1, n) + case Nil => + Nil + + val defGetters = defaultGetters(meth.paramss, 0) + if defGetters.isEmpty then meth + else Thicket(cpy.DefDef(meth)(paramss = paramssNoRHS) :: defGetters) + end addDefaultGetters + + /** Add an explicit ascription to the `expectedTpt` to every tail splice. + * + * - `'{ x }` -> `'{ x }` + * - `'{ $x }` -> `'{ $x: T }` + * - `'{ if (...) $x else $y }` -> `'{ if (...) 
($x: T) else ($y: T) }` + * + * Note that the splice `$t: T` will be typed as `${t: Expr[T]}` + */ + def quotedPattern(tree: untpd.Tree, expectedTpt: untpd.Tree)(using Context): untpd.Tree = { + def adaptToExpectedTpt(tree: untpd.Tree): untpd.Tree = tree match { + // Add the expected type as an ascription + case _: untpd.Splice => + untpd.Typed(tree, expectedTpt).withSpan(tree.span) + case Typed(expr: untpd.Splice, tpt) => + cpy.Typed(tree)(expr, untpd.makeAndType(tpt, expectedTpt).withSpan(tpt.span)) + + // Propagate down the expected type to the leafs of the expression + case Block(stats, expr) => + cpy.Block(tree)(stats, adaptToExpectedTpt(expr)) + case If(cond, thenp, elsep) => + cpy.If(tree)(cond, adaptToExpectedTpt(thenp), adaptToExpectedTpt(elsep)) + case untpd.Parens(expr) => + cpy.Parens(tree)(adaptToExpectedTpt(expr)) + case Match(selector, cases) => + val newCases = cases.map(cdef => cpy.CaseDef(cdef)(body = adaptToExpectedTpt(cdef.body))) + cpy.Match(tree)(selector, newCases) + case untpd.ParsedTry(expr, handler, finalizer) => + cpy.ParsedTry(tree)(adaptToExpectedTpt(expr), adaptToExpectedTpt(handler), finalizer) + + // Tree does not need to be ascribed + case _ => + tree + } + adaptToExpectedTpt(tree) + } + + /** Add all evidence parameters in `params` as implicit parameters to `meth`. + * If the parameters of `meth` end in an implicit parameter list or using clause, + * evidence parameters are added in front of that list. Otherwise they are added + * as a separate parameter clause. 
+ */ + private def addEvidenceParams(meth: DefDef, params: List[ValDef])(using Context): DefDef = + params match + case Nil => + meth + case evidenceParams => + val paramss1 = meth.paramss.reverse match + case ValDefs(vparams @ (vparam :: _)) :: rparamss if vparam.mods.isOneOf(GivenOrImplicit) => + ((evidenceParams ++ vparams) :: rparamss).reverse + case _ => + meth.paramss :+ evidenceParams + cpy.DefDef(meth)(paramss = paramss1) + + /** The implicit evidence parameters of `meth`, as generated by `desugar.defDef` */ + private def evidenceParams(meth: DefDef)(using Context): List[ValDef] = + meth.paramss.reverse match { + case ValDefs(vparams @ (vparam :: _)) :: _ if vparam.mods.isOneOf(GivenOrImplicit) => + vparams.takeWhile(_.name.is(EvidenceParamName)) + case _ => + Nil + } + + @sharable private val synthetic = Modifiers(Synthetic) + + private def toDefParam(tparam: TypeDef, keepAnnotations: Boolean): TypeDef = { + var mods = tparam.rawMods + if (!keepAnnotations) mods = mods.withAnnotations(Nil) + tparam.withMods(mods & EmptyFlags | Param) + } + private def toDefParam(vparam: ValDef, keepAnnotations: Boolean, keepDefault: Boolean): ValDef = { + var mods = vparam.rawMods + if (!keepAnnotations) mods = mods.withAnnotations(Nil) + val hasDefault = if keepDefault then HasDefault else EmptyFlags + vparam.withMods(mods & (GivenOrImplicit | Erased | hasDefault) | Param) + } + + def mkApply(fn: Tree, paramss: List[ParamClause])(using Context): Tree = + paramss.foldLeft(fn) { (fn, params) => params match + case TypeDefs(params) => + TypeApply(fn, params.map(refOfDef)) + case (vparam: ValDef) :: _ if vparam.mods.is(Given) => + Apply(fn, params.map(refOfDef)).setApplyKind(ApplyKind.Using) + case _ => + Apply(fn, params.map(refOfDef)) + } + + /** The expansion of a class definition. 
See inline comments for what is involved */ + def classDef(cdef: TypeDef)(using Context): Tree = { + val impl @ Template(constr0, _, self, _) = cdef.rhs: @unchecked + val className = normalizeName(cdef, impl).asTypeName + val parents = impl.parents + val mods = cdef.mods + val companionMods = mods + .withFlags((mods.flags & (AccessFlags | Final)).toCommonFlags) + .withMods(Nil) + .withAnnotations(Nil) + + var defaultGetters: List[Tree] = Nil + + def decompose(ddef: Tree): DefDef = ddef match { + case meth: DefDef => meth + case Thicket((meth: DefDef) :: defaults) => + defaultGetters = defaults + meth + } + + val constr1 = decompose(defDef(impl.constr, isPrimaryConstructor = true)) + + // The original type and value parameters in the constructor already have the flags + // needed to be type members (i.e. param, and possibly also private and local unless + // prefixed by type or val). `tparams` and `vparamss` are the type parameters that + // go in `constr`, the constructor after desugaring. + + /** Does `tree' look like a reference to AnyVal? Temporary test before we have inline classes */ + def isAnyVal(tree: Tree): Boolean = tree match { + case Ident(tpnme.AnyVal) => true + case Select(qual, tpnme.AnyVal) => isScala(qual) + case _ => false + } + def isScala(tree: Tree): Boolean = tree match { + case Ident(nme.scala) => true + case Select(Ident(nme.ROOTPKG), nme.scala) => true + case _ => false + } + + def namePos = cdef.sourcePos.withSpan(cdef.nameSpan) + + val isObject = mods.is(Module) + val isCaseClass = mods.is(Case) && !isObject + val isCaseObject = mods.is(Case) && isObject + val isEnum = mods.isEnumClass && !mods.is(Module) + def isEnumCase = mods.isEnumCase + def isNonEnumCase = !isEnumCase && (isCaseClass || isCaseObject) + val isValueClass = parents.nonEmpty && isAnyVal(parents.head) + // This is not watertight, but `extends AnyVal` will be replaced by `inline` later. 
+ + val originalTparams = constr1.leadingTypeParams + val originalVparamss = asTermOnly(constr1.trailingParamss) + lazy val derivedEnumParams = enumClass.typeParams.map(derivedTypeParamWithVariance) + val impliedTparams = + if (isEnumCase) { + val tparamReferenced = typeParamIsReferenced( + enumClass.typeParams, originalTparams, originalVparamss, parents) + if (originalTparams.isEmpty && (parents.isEmpty || tparamReferenced)) + derivedEnumParams.map(tdef => tdef.withFlags(tdef.mods.flags | PrivateLocal)) + else originalTparams + } + else originalTparams + + if mods.is(Trait) then + for vparams <- originalVparamss; vparam <- vparams do + if isByNameType(vparam.tpt) then + report.error(em"implementation restriction: traits cannot have by name parameters", vparam.srcPos) + + // Annotations on class _type_ parameters are set on the derived parameters + // but not on the constructor parameters. The reverse is true for + // annotations on class _value_ parameters. + val constrTparams = impliedTparams.map(toDefParam(_, keepAnnotations = false)) + val constrVparamss = + if (originalVparamss.isEmpty) { // ensure parameter list is non-empty + if (isCaseClass) + report.error(CaseClassMissingParamList(cdef), namePos) + ListOfNil + } + else if (isCaseClass && originalVparamss.head.exists(_.mods.isOneOf(GivenOrImplicit))) { + report.error(CaseClassMissingNonImplicitParamList(cdef), namePos) + ListOfNil + } + else originalVparamss.nestedMap(toDefParam(_, keepAnnotations = true, keepDefault = true)) + val derivedTparams = + constrTparams.zipWithConserve(impliedTparams)((tparam, impliedParam) => + derivedTypeParam(tparam).withAnnotations(impliedParam.mods.annotations)) + val derivedVparamss = + constrVparamss.nestedMap(vparam => + derivedTermParam(vparam).withAnnotations(Nil)) + + val constr = cpy.DefDef(constr1)(paramss = joinParams(constrTparams, constrVparamss)) + + val (normalizedBody, enumCases, enumCompanionRef) = { + // Add constructor type parameters and evidence implicit 
parameters + // to auxiliary constructors; set defaultGetters as a side effect. + def expandConstructor(tree: Tree) = tree match { + case ddef: DefDef if ddef.name.isConstructorName => + decompose( + defDef( + addEvidenceParams( + cpy.DefDef(ddef)(paramss = joinParams(constrTparams, ddef.paramss)), + evidenceParams(constr1).map(toDefParam(_, keepAnnotations = false, keepDefault = false))))) + case stat => + stat + } + // The Identifiers defined by a case + def caseIds(tree: Tree): List[Ident] = tree match { + case tree: MemberDef => Ident(tree.name.toTermName) :: Nil + case PatDef(_, ids: List[Ident] @ unchecked, _, _) => ids + } + + val stats0 = impl.body.map(expandConstructor) + val stats = + if (ctx.owner eq defn.ScalaPackageClass) && defn.hasProblematicGetClass(className) then + stats0.filterConserve { + case ddef: DefDef => + ddef.name ne nme.getClass_ + case _ => + true + } + else + stats0 + + if (isEnum) { + val (enumCases, enumStats) = stats.partition(DesugarEnums.isEnumCase) + if (enumCases.isEmpty) + report.error(EnumerationsShouldNotBeEmpty(cdef), namePos) + else + enumCases.last.pushAttachment(DesugarEnums.DefinesEnumLookupMethods, ()) + val enumCompanionRef = TermRefTree() + val enumImport = + Import(enumCompanionRef, enumCases.flatMap(caseIds).map( + enumCase => + ImportSelector(enumCase.withSpan(enumCase.span.startPos)) + ) + ) + (enumImport :: enumStats, enumCases, enumCompanionRef) + } + else (stats, Nil, EmptyTree) + } + + def anyRef = ref(defn.AnyRefAlias.typeRef) + + val arity = constrVparamss.head.length + + val classTycon: Tree = TypeRefTree() // watching is set at end of method + + def appliedTypeTree(tycon: Tree, args: List[Tree]) = + (if (args.isEmpty) tycon else AppliedTypeTree(tycon, args)) + .withSpan(cdef.span.startPos) + + def isHK(tparam: Tree): Boolean = tparam match { + case TypeDef(_, LambdaTypeTree(tparams, body)) => true + case TypeDef(_, rhs: DerivedTypeTree) => isHK(rhs.watched) + case _ => false + } + + def appliedRef(tycon: 
Tree, tparams: List[TypeDef] = constrTparams, widenHK: Boolean = false) = { + val targs = for (tparam <- tparams) yield { + val targ = refOfDef(tparam) + def fullyApplied(tparam: Tree): Tree = tparam match { + case TypeDef(_, LambdaTypeTree(tparams, body)) => + AppliedTypeTree(targ, tparams.map(_ => WildcardTypeBoundsTree())) + case TypeDef(_, rhs: DerivedTypeTree) => + fullyApplied(rhs.watched) + case _ => + targ + } + if (widenHK) fullyApplied(tparam) else targ + } + appliedTypeTree(tycon, targs) + } + + def isRepeated(tree: Tree): Boolean = stripByNameType(tree) match { + case PostfixOp(_, Ident(tpnme.raw.STAR)) => true + case _ => false + } + + // a reference to the class type bound by `cdef`, with type parameters coming from the constructor + val classTypeRef = appliedRef(classTycon) + + // a reference to `enumClass`, with type parameters coming from the case constructor + lazy val enumClassTypeRef = + if (enumClass.typeParams.isEmpty) + enumClassRef + else if (originalTparams.isEmpty) + appliedRef(enumClassRef) + else { + report.error(TypedCaseDoesNotExplicitlyExtendTypedEnum(enumClass, cdef) + , cdef.srcPos.startPos) + appliedTypeTree(enumClassRef, constrTparams map (_ => anyRef)) + } + + // new C[Ts](paramss) + lazy val creatorExpr = + val vparamss = constrVparamss match + case (vparam :: _) :: _ if vparam.mods.is(Implicit) => // add a leading () to match class parameters + Nil :: constrVparamss + case _ => + if constrVparamss.nonEmpty && constrVparamss.forall { + case vparam :: _ => vparam.mods.is(Given) + case _ => false + } + then constrVparamss :+ Nil // add a trailing () to match class parameters + else constrVparamss + val nu = vparamss.foldLeft(makeNew(classTypeRef)) { (nu, vparams) => + val app = Apply(nu, vparams.map(refOfDef)) + vparams match { + case vparam :: _ if vparam.mods.is(Given) => app.setApplyKind(ApplyKind.Using) + case _ => app + } + } + ensureApplied(nu) + + val copiedAccessFlags = if migrateTo3 then EmptyFlags else AccessFlags + + // 
Methods to add to a case class C[..](p1: T1, ..., pN: Tn)(moreParams) + // def _1: T1 = this.p1 + // ... + // def _N: TN = this.pN (unless already given as valdef or parameterless defdef) + // def copy(p1: T1 = p1..., pN: TN = pN)(moreParams) = + // new C[...](p1, ..., pN)(moreParams) + val (caseClassMeths, enumScaffolding) = { + def syntheticProperty(name: TermName, tpt: Tree, rhs: Tree) = + DefDef(name, Nil, tpt, rhs).withMods(synthetic) + + def productElemMeths = + val caseParams = derivedVparamss.head.toArray + val selectorNamesInBody = normalizedBody.collect { + case vdef: ValDef if vdef.name.isSelectorName => + vdef.name + case ddef: DefDef if ddef.name.isSelectorName && ddef.paramss.isEmpty => + ddef.name + } + for i <- List.range(0, arity) + selName = nme.selectorName(i) + if (selName ne caseParams(i).name) && !selectorNamesInBody.contains(selName) + yield syntheticProperty(selName, caseParams(i).tpt, + Select(This(EmptyTypeIdent), caseParams(i).name)) + + def enumCaseMeths = + if isEnumCase then + val (ordinal, scaffolding) = nextOrdinal(className, CaseKind.Class, definesEnumLookupMethods(cdef)) + (ordinalMethLit(ordinal) :: Nil, scaffolding) + else (Nil, Nil) + def copyMeths = { + val hasRepeatedParam = constrVparamss.nestedExists { + case ValDef(_, tpt, _) => isRepeated(tpt) + } + if (mods.is(Abstract) || hasRepeatedParam) Nil // cannot have default arguments for repeated parameters, hence copy method is not issued + else { + val copyFirstParams = derivedVparamss.head.map(vparam => + cpy.ValDef(vparam)(rhs = refOfDef(vparam))) + val copyRestParamss = derivedVparamss.tail.nestedMap(vparam => + cpy.ValDef(vparam)(rhs = EmptyTree)) + DefDef( + nme.copy, + joinParams(derivedTparams, copyFirstParams :: copyRestParamss), + TypeTree(), + creatorExpr + ).withMods(Modifiers(Synthetic | constr1.mods.flags & copiedAccessFlags, constr1.mods.privateWithin)) :: Nil + } + } + + if isCaseClass then + val (enumMeths, enumScaffolding) = enumCaseMeths + (copyMeths ::: 
enumMeths ::: productElemMeths, enumScaffolding) + else (Nil, Nil) + } + + var parents1: List[untpd.Tree] = parents // !cc! need explicit type to make capture checking pass + if (isEnumCase && parents.isEmpty) + parents1 = enumClassTypeRef :: Nil + if (isNonEnumCase) + parents1 = parents1 :+ scalaDot(str.Product.toTypeName) :+ scalaDot(nme.Serializable.toTypeName) + if (isEnum) + parents1 = parents1 :+ ref(defn.EnumClass) + + // derived type classes of non-module classes go to their companions + val (clsDerived, companionDerived) = + if (mods.is(Module)) (impl.derived, Nil) else (Nil, impl.derived) + + // The thicket which is the desugared version of the companion object + // synthetic object C extends parentTpt derives class-derived { defs } + def companionDefs(parentTpt: Tree, defs: List[Tree]) = { + val mdefs = moduleDef( + ModuleDef( + className.toTermName, Template(emptyConstructor, parentTpt :: Nil, companionDerived, EmptyValDef, defs)) + .withMods(companionMods | Synthetic)) + .withSpan(cdef.span).toList + if (companionDerived.nonEmpty) + for (case modClsDef @ TypeDef(_, _) <- mdefs) + modClsDef.putAttachment(DerivingCompanion, impl.srcPos.startPos) + mdefs + } + + val companionMembers = defaultGetters ::: enumCases + + // The companion object definitions, if a companion is needed, Nil otherwise. + // companion definitions include: + // 1. If class is a case class case class C[Ts](p1: T1, ..., pN: TN)(moreParams): + // def apply[Ts](p1: T1, ..., pN: TN)(moreParams) = new C[Ts](p1, ..., pN)(moreParams) (unless C is abstract) + // def unapply[Ts]($1: C[Ts]) = $1 // if not repeated + // def unapplySeq[Ts]($1: C[Ts]) = $1 // if repeated + // 2. The default getters of the constructor + // The parent of the companion object of a non-parameterized case class + // (T11, ..., T1N) => ... => (TM1, ..., TMN) => C + // For all other classes, the parent is AnyRef. 
+ val companions = + if (isCaseClass) { + val applyMeths = + if (mods.is(Abstract)) Nil + else { + val appMods = + Modifiers(Synthetic | constr1.mods.flags & copiedAccessFlags).withPrivateWithin(constr1.mods.privateWithin) + val appParamss = + derivedVparamss.nestedZipWithConserve(constrVparamss)((ap, cp) => + ap.withMods(ap.mods | (cp.mods.flags & HasDefault))) + DefDef(nme.apply, joinParams(derivedTparams, appParamss), TypeTree(), creatorExpr) + .withMods(appMods) :: Nil + } + val unapplyMeth = { + val hasRepeatedParam = constrVparamss.head.exists { + case ValDef(_, tpt, _) => isRepeated(tpt) + } + val methName = if (hasRepeatedParam) nme.unapplySeq else nme.unapply + val unapplyParam = makeSyntheticParameter(tpt = classTypeRef) + val unapplyRHS = if (arity == 0) Literal(Constant(true)) else Ident(unapplyParam.name) + val unapplyResTp = if (arity == 0) Literal(Constant(true)) else TypeTree() + DefDef( + methName, + joinParams(derivedTparams, (unapplyParam :: Nil) :: Nil), + unapplyResTp, + unapplyRHS + ).withMods(synthetic) + } + val toStringMeth = + DefDef(nme.toString_, Nil, TypeTree(), Literal(Constant(className.toString))).withMods(Modifiers(Override | Synthetic)) + + companionDefs(anyRef, applyMeths ::: unapplyMeth :: toStringMeth :: companionMembers) + } + else if (companionMembers.nonEmpty || companionDerived.nonEmpty || isEnum) + companionDefs(anyRef, companionMembers) + else if (isValueClass) + companionDefs(anyRef, Nil) + else Nil + + enumCompanionRef match { + case ref: TermRefTree => // have the enum import watch the companion object + val (modVal: ValDef) :: _ = companions: @unchecked + ref.watching(modVal) + case _ => + } + + // For an implicit class C[Ts](p11: T11, ..., p1N: T1N) ... (pM1: TM1, .., pMN: TMN), the method + // synthetic implicit C[Ts](p11: T11, ..., p1N: T1N) ... (pM1: TM1, ..., pMN: TMN): C[Ts] = + // new C[Ts](p11, ..., p1N) ... 
(pM1, ..., pMN) = + val implicitWrappers = + if (!mods.isOneOf(GivenOrImplicit)) + Nil + else if (ctx.owner.is(Package)) { + report.error(TopLevelImplicitClass(cdef), cdef.srcPos) + Nil + } + else if (mods.is(Trait)) { + report.error(TypesAndTraitsCantBeImplicit(), cdef.srcPos) + Nil + } + else if (isCaseClass) { + report.error(ImplicitCaseClass(cdef), cdef.srcPos) + Nil + } + else if (arity != 1 && !mods.is(Given)) { + report.error(ImplicitClassPrimaryConstructorArity(), cdef.srcPos) + Nil + } + else { + val defParamss = constrVparamss match { + case Nil :: paramss => + paramss // drop leading () that got inserted by class + // TODO: drop this once we do not silently insert empty class parameters anymore + case paramss => paramss + } + // implicit wrapper is typechecked in same scope as constructor, so + // we can reuse the constructor parameters; no derived params are needed. + DefDef( + className.toTermName, joinParams(constrTparams, defParamss), + classTypeRef, creatorExpr) + .withMods(companionMods | mods.flags.toTermFlags & (GivenOrImplicit | Inline) | Final) + .withSpan(cdef.span) :: Nil + } + + val self1 = { + val selfType = if (self.tpt.isEmpty) classTypeRef else self.tpt + if (self.isEmpty) self + else cpy.ValDef(self)(tpt = selfType).withMods(self.mods | SelfName) + } + + val cdef1 = addEnumFlags { + val tparamAccessors = { + val impliedTparamsIt = impliedTparams.iterator + derivedTparams.map(_.withMods(impliedTparamsIt.next().mods)) + } + val caseAccessor = if (isCaseClass) CaseAccessor else EmptyFlags + val vparamAccessors = { + val originalVparamsIt = originalVparamss.iterator.flatten + derivedVparamss match { + case first :: rest => + first.map(_.withMods(originalVparamsIt.next().mods | caseAccessor)) ++ + rest.flatten.map(_.withMods(originalVparamsIt.next().mods)) + case _ => + Nil + } + } + if mods.isAllOf(Given | Inline | Transparent) then + report.error("inline given instances cannot be trasparent", cdef) + val classMods = if mods.is(Given) then 
mods &~ (Inline | Transparent) | Synthetic else mods + cpy.TypeDef(cdef: TypeDef)( + name = className, + rhs = cpy.Template(impl)(constr, parents1, clsDerived, self1, + tparamAccessors ::: vparamAccessors ::: normalizedBody ::: caseClassMeths) + ).withMods(classMods) + } + + // install the watch on classTycon + classTycon match { + case tycon: DerivedTypeTree => tycon.watching(cdef1) + case _ => + } + + flatTree(cdef1 :: companions ::: implicitWrappers ::: enumScaffolding) + }.showing(i"desugared: $cdef --> $result", Printers.desugar) + + /** Expand + * + * package object name { body } + * + * to: + * + * package name { + * object `package` { body } + * } + */ + def packageModuleDef(mdef: ModuleDef)(using Context): Tree = + val impl = mdef.impl + val mods = mdef.mods + val moduleName = normalizeName(mdef, impl).asTermName + if mods.is(Package) then + checkPackageName(mdef) + PackageDef(Ident(moduleName), + cpy.ModuleDef(mdef)(nme.PACKAGE, impl).withMods(mods &~ Package) :: Nil) + else + mdef + + /** Expand + * + * object name extends parents { self => body } + * + * to: + * + * val name: name$ = New(name$) + * final class name$ extends parents { self: name.type => body } + */ + def moduleDef(mdef: ModuleDef)(using Context): Tree = { + val impl = mdef.impl + val mods = mdef.mods + val moduleName = normalizeName(mdef, impl).asTermName + def isEnumCase = mods.isEnumCase + Checking.checkWellFormedModule(mdef) + + if (mods.is(Package)) + packageModuleDef(mdef) + else if (isEnumCase) { + typeParamIsReferenced(enumClass.typeParams, Nil, Nil, impl.parents) + // used to check there are no illegal references to enum's type parameters in parents + expandEnumModule(moduleName, impl, mods, definesEnumLookupMethods(mdef), mdef.span) + } + else { + val clsName = moduleName.moduleClassName + val clsRef = Ident(clsName) + val modul = ValDef(moduleName, clsRef, New(clsRef, Nil)) + .withMods(mods.toTermFlags & RetainedModuleValFlags | ModuleValCreationFlags) + 
.withSpan(mdef.span.startPos) + val ValDef(selfName, selfTpt, _) = impl.self + val selfMods = impl.self.mods + if (!selfTpt.isEmpty) report.error(ObjectMayNotHaveSelfType(mdef), impl.self.srcPos) + val clsSelf = ValDef(selfName, SingletonTypeTree(Ident(moduleName)), impl.self.rhs) + .withMods(selfMods) + .withSpan(impl.self.span.orElse(impl.span.startPos)) + val clsTmpl = cpy.Template(impl)(self = clsSelf, body = impl.body) + val cls = TypeDef(clsName, clsTmpl) + .withMods(mods.toTypeFlags & RetainedModuleClassFlags | ModuleClassCreationFlags) + .withEndMarker(copyFrom = mdef) // copy over the end marker position to the module class def + Thicket(modul, classDef(cls).withSpan(mdef.span)) + } + } + + def extMethod(mdef: DefDef, extParamss: List[ParamClause])(using Context): DefDef = + cpy.DefDef(mdef)( + name = normalizeName(mdef, mdef.tpt).asTermName, + paramss = + if mdef.name.isRightAssocOperatorName then + val (typaramss, paramss) = mdef.paramss.span(isTypeParamClause) // first extract type parameters + + paramss match + case params :: paramss1 => // `params` must have a single parameter and without `given` flag + + def badRightAssoc(problem: String) = + report.error(i"right-associative extension method $problem", mdef.srcPos) + extParamss ++ mdef.paramss + + params match + case ValDefs(vparam :: Nil) => + if !vparam.mods.is(Given) then + // we merge the extension parameters with the method parameters, + // swapping the operator arguments: + // e.g. + // extension [A](using B)(c: C)(using D) + // def %:[E](f: F)(g: G)(using H): Res = ??? + // will be encoded as + // def %:[A](using B)[E](f: F)(c: C)(using D)(g: G)(using H): Res = ??? 
+ val (leadingUsing, otherExtParamss) = extParamss.span(isUsingOrTypeParamClause) + leadingUsing ::: typaramss ::: params :: otherExtParamss ::: paramss1 + else + badRightAssoc("cannot start with using clause") + case _ => + badRightAssoc("must start with a single parameter") + case _ => + // no value parameters, so not an infix operator. + extParamss ++ mdef.paramss + else + extParamss ++ mdef.paramss + ).withMods(mdef.mods | ExtensionMethod) + + /** Transform extension construct to list of extension methods */ + def extMethods(ext: ExtMethods)(using Context): Tree = flatTree { + ext.methods map { + case exp: Export => exp + case mdef: DefDef => defDef(extMethod(mdef, ext.paramss)) + } + } + /** Transforms + * + * type t >: Low <: Hi + * to + * + * @patternType type $T >: Low <: Hi + * + * if the type has a pattern variable name + */ + def quotedPatternTypeDef(tree: TypeDef)(using Context): TypeDef = { + assert(ctx.mode.is(Mode.QuotedPattern)) + if tree.name.isVarPattern && !tree.isBackquoted then + val patternTypeAnnot = New(ref(defn.QuotedRuntimePatterns_patternTypeAnnot.typeRef)).withSpan(tree.span) + val mods = tree.mods.withAddedAnnotation(patternTypeAnnot) + tree.withMods(mods) + else if tree.name.startsWith("$") && !tree.isBackquoted then + report.error( + """Quoted pattern variable names starting with $ are not supported anymore. + |Use lower cases type pattern name instead. 
+ |""".stripMargin, + tree.srcPos) + tree + else tree + } + + def checkPackageName(mdef: ModuleDef | PackageDef)(using Context): Unit = + + def check(name: Name, errSpan: Span): Unit = name match + case name: SimpleName if !errSpan.isSynthetic && name.exists(Chars.willBeEncoded) => + report.warning(em"The package name `$name` will be encoded on the classpath, and can lead to undefined behaviour.", mdef.source.atSpan(errSpan)) + case _ => + + def loop(part: RefTree): Unit = part match + case part @ Ident(name) => check(name, part.span) + case part @ Select(qual: RefTree, name) => + check(name, part.nameSpan) + loop(qual) + case _ => + + mdef match + case pdef: PackageDef => loop(pdef.pid) + case mdef: ModuleDef if mdef.mods.is(Package) => check(mdef.name, mdef.nameSpan) + case _ => + end checkPackageName + + /** The normalized name of `mdef`. This means + * 1. Check that the name does not redefine a Scala core class. + * If it does redefine, issue an error and return a mangled name instead + * of the original one. + * 2. If the name is missing (this can be the case for instance definitions), + * invent one instead. + */ + def normalizeName(mdef: MemberDef, impl: Tree)(using Context): Name = { + var name = mdef.name + if (name.isEmpty) name = name.likeSpaced(inventGivenOrExtensionName(impl)) + def errPos = mdef.source.atSpan(mdef.nameSpan) + if (ctx.owner == defn.ScalaPackageClass && defn.reservedScalaClassNames.contains(name.toTypeName)) { + val kind = if (name.isTypeName) "class" else "object" + report.error(IllegalRedefinitionOfStandardKind(kind, name), errPos) + name = name.errorName + } + name + } + + /** Invent a name for an anonympus given of type or template `impl`. 
*/ + def inventGivenOrExtensionName(impl: Tree)(using Context): SimpleName = + val str = impl match + case impl: Template => + if impl.parents.isEmpty then + report.error(AnonymousInstanceCannotBeEmpty(impl), impl.srcPos) + nme.ERROR.toString + else + impl.parents.map(inventTypeName(_)).mkString("given_", "_", "") + case impl: Tree => + "given_" ++ inventTypeName(impl) + str.toTermName.asSimpleName + + private class NameExtractor(followArgs: Boolean) extends UntypedTreeAccumulator[String] { + private def extractArgs(args: List[Tree])(using Context): String = + args.map(argNameExtractor.apply("", _)).mkString("_") + override def apply(x: String, tree: Tree)(using Context): String = + if (x.isEmpty) + tree match { + case Select(pre, nme.CONSTRUCTOR) => foldOver(x, pre) + case tree: RefTree => + if tree.name.isTypeName then tree.name.toString + else s"${tree.name}_type" + case tree: TypeDef => tree.name.toString + case tree: AppliedTypeTree if followArgs && tree.args.nonEmpty => + s"${apply(x, tree.tpt)}_${extractArgs(tree.args)}" + case InfixOp(left, op, right) => + if followArgs then s"${op.name}_${extractArgs(List(left, right))}" + else op.name.toString + case tree: LambdaTypeTree => + apply(x, tree.body) + case tree: Tuple => + extractArgs(tree.trees) + case tree: Function if tree.args.nonEmpty => + if followArgs then s"${extractArgs(tree.args)}_to_${apply("", tree.body)}" + else "Function" + case _ => foldOver(x, tree) + } + else x + } + private val typeNameExtractor = NameExtractor(followArgs = true) + private val argNameExtractor = NameExtractor(followArgs = false) + + private def inventTypeName(tree: Tree)(using Context): String = typeNameExtractor("", tree) + + /**This will check if this def tree is marked to define enum lookup methods, + * this is not recommended to call more than once per tree + */ + private def definesEnumLookupMethods(ddef: DefTree): Boolean = + ddef.removeAttachment(DefinesEnumLookupMethods).isDefined + + /** val p1, ..., pN: T = E + * 
==> + * makePatDef[[val p1: T1 = E]]; ...; makePatDef[[val pN: TN = E]] + * + * case e1, ..., eN + * ==> + * expandSimpleEnumCase([case e1]); ...; expandSimpleEnumCase([case eN]) + */ + def patDef(pdef: PatDef)(using Context): Tree = flatTree { + val PatDef(mods, pats, tpt, rhs) = pdef + if mods.isEnumCase then + def expand(id: Ident, definesLookups: Boolean) = + expandSimpleEnumCase(id.name.asTermName, mods, definesLookups, + Span(id.span.start, id.span.end, id.span.start)) + + val ids = pats.asInstanceOf[List[Ident]] + if definesEnumLookupMethods(pdef) then + ids.init.map(expand(_, false)) ::: expand(ids.last, true) :: Nil + else + ids.map(expand(_, false)) + else { + val pats1 = if (tpt.isEmpty) pats else pats map (Typed(_, tpt)) + pats1 map (makePatDef(pdef, mods, _, rhs)) + } + } + + /** The selector of a match, which depends of the given `checkMode`. + * @param sel the original selector + * @return if `checkMode` is + * - None : sel @unchecked + * - Exhaustive : sel + * - IrrefutablePatDef, + * IrrefutableGenFrom: sel with attachment `CheckIrrefutable -> checkMode` + */ + def makeSelector(sel: Tree, checkMode: MatchCheck)(using Context): Tree = + checkMode match + case MatchCheck.None => + Annotated(sel, New(ref(defn.UncheckedAnnot.typeRef))) + + case MatchCheck.Exhaustive => + sel + + case MatchCheck.IrrefutablePatDef | MatchCheck.IrrefutableGenFrom => + // TODO: use `pushAttachment` and investigate duplicate attachment + sel.withAttachment(CheckIrrefutable, checkMode) + sel + end match + + /** If `pat` is a variable pattern, + * + * val/var/lazy val p = e + * + * Otherwise, in case there is exactly one variable x_1 in pattern + * val/var/lazy val p = e ==> val/var/lazy val x_1 = (e: @unchecked) match (case p => (x_1)) + * + * in case there are zero or more than one variables in pattern + * val/var/lazy p = e ==> private[this] synthetic [lazy] val t$ = (e: @unchecked) match (case p => (x_1, ..., x_N)) + * val/var/def x_1 = t$._1 + * ... 
+ * val/var/def x_N = t$._N + * If the original pattern variable carries a type annotation, so does the corresponding + * ValDef or DefDef. + */ + def makePatDef(original: Tree, mods: Modifiers, pat: Tree, rhs: Tree)(using Context): Tree = pat match { + case IdPattern(id, tpt) => + val id1 = + if id.name == nme.WILDCARD + then cpy.Ident(id)(WildcardParamName.fresh()) + else id + derivedValDef(original, id1, tpt, rhs, mods) + case _ => + + def filterWildcardGivenBinding(givenPat: Bind): Boolean = + givenPat.name != nme.WILDCARD + + def errorOnGivenBinding(bind: Bind)(using Context): Boolean = + report.error( + em"""${hl("given")} patterns are not allowed in a ${hl("val")} definition, + |please bind to an identifier and use an alias given.""".stripMargin, bind) + false + + def isTuplePattern(arity: Int): Boolean = pat match { + case Tuple(pats) if pats.size == arity => + pats.forall(isVarPattern) + case _ => false + } + val isMatchingTuple: Tree => Boolean = { + case Tuple(es) => isTuplePattern(es.length) + case _ => false + } + + // We can only optimize `val pat = if (...) 
e1 else e2` if: + // - `e1` and `e2` are both tuples of arity N + // - `pat` is a tuple of N variables or wildcard patterns like `(x1, x2, ..., xN)` + val tupleOptimizable = forallResults(rhs, isMatchingTuple) + + val inAliasGenerator = original match + case _: GenAlias => true + case _ => false + + val vars = + if (tupleOptimizable) // include `_` + pat match + case Tuple(pats) => pats.map { case id: Ident => id -> TypeTree() } + else + getVariables( + tree = pat, + shouldAddGiven = + if inAliasGenerator then + filterWildcardGivenBinding + else + errorOnGivenBinding + ) // no `_` + + val ids = for ((named, _) <- vars) yield Ident(named.name) + val matchExpr = + if (tupleOptimizable) rhs + else + val caseDef = CaseDef(pat, EmptyTree, makeTuple(ids)) + Match(makeSelector(rhs, MatchCheck.IrrefutablePatDef), caseDef :: Nil) + vars match { + case Nil if !mods.is(Lazy) => + matchExpr + case (named, tpt) :: Nil => + derivedValDef(original, named, tpt, matchExpr, mods) + case _ => + val tmpName = UniqueName.fresh() + val patMods = + mods & Lazy | Synthetic | (if (ctx.owner.isClass) PrivateLocal else EmptyFlags) + val firstDef = + ValDef(tmpName, TypeTree(), matchExpr) + .withSpan(pat.span.union(rhs.span)).withMods(patMods) + val useSelectors = vars.length <= 22 + def selector(n: Int) = + if useSelectors then Select(Ident(tmpName), nme.selectorName(n)) + else Apply(Select(Ident(tmpName), nme.apply), Literal(Constant(n)) :: Nil) + val restDefs = + for (((named, tpt), n) <- vars.zipWithIndex if named.name != nme.WILDCARD) + yield + if mods.is(Lazy) then + DefDef(named.name.asTermName, Nil, tpt, selector(n)) + .withMods(mods &~ Lazy) + .withSpan(named.span) + else + valDef( + ValDef(named.name.asTermName, tpt, selector(n)) + .withMods(mods) + .withSpan(named.span) + ) + flatTree(firstDef :: restDefs) + } + } + + /** Expand variable identifier x to x @ _ */ + def patternVar(tree: Tree)(using Context): Bind = { + val Ident(name) = unsplice(tree): @unchecked + Bind(name, 
Ident(nme.WILDCARD)).withSpan(tree.span) + } + + /** The type of tests that check whether a MemberDef is OK for some flag. + * The test succeeds if the partial function is defined and returns true. + */ + type MemberDefTest = PartialFunction[MemberDef, Boolean] + + val legalOpaque: MemberDefTest = { + case TypeDef(_, rhs) => + def rhsOK(tree: Tree): Boolean = tree match { + case bounds: TypeBoundsTree => !bounds.alias.isEmpty + case _: Template | _: MatchTypeTree => false + case LambdaTypeTree(_, body) => rhsOK(body) + case _ => true + } + rhsOK(rhs) + } + + def checkOpaqueAlias(tree: MemberDef)(using Context): MemberDef = + def check(rhs: Tree): MemberDef = rhs match + case bounds: TypeBoundsTree if bounds.alias.isEmpty => + report.error(i"opaque type must have a right-hand side", tree.srcPos) + tree.withMods(tree.mods.withoutFlags(Opaque)) + case LambdaTypeTree(_, body) => check(body) + case _ => tree + if !tree.mods.is(Opaque) then tree + else tree match + case TypeDef(_, rhs) => check(rhs) + case _ => tree + + /** Check that modifiers are legal for the definition `tree`. + * Right now, we only check for `opaque`. TODO: Move other modifier checks here. 
+ */ + def checkModifiers(tree: Tree)(using Context): Tree = tree match { + case tree: MemberDef => + var tested: MemberDef = tree + def checkApplicable(flag: Flag, test: MemberDefTest): MemberDef = + if (tested.mods.is(flag) && !test.applyOrElse(tree, (md: MemberDef) => false)) { + report.error(ModifierNotAllowedForDefinition(flag), tree.srcPos) + tested.withMods(tested.mods.withoutFlags(flag)) + } else tested + tested = checkOpaqueAlias(tested) + tested = checkApplicable(Opaque, legalOpaque) + tested + case _ => + tree + } + + def defTree(tree: Tree)(using Context): Tree = + checkModifiers(tree) match { + case tree: ValDef => valDef(tree) + case tree: TypeDef => + if (tree.isClassDef) classDef(tree) + else if (ctx.mode.is(Mode.QuotedPattern)) quotedPatternTypeDef(tree) + else tree + case tree: DefDef => + if (tree.name.isConstructorName) tree // was already handled by enclosing classDef + else defDef(tree) + case tree: ModuleDef => moduleDef(tree) + case tree: PatDef => patDef(tree) + } + + /** { stats; } + * ==> + * { stats; () } + */ + def block(tree: Block)(using Context): Block = tree.expr match { + case EmptyTree => + cpy.Block(tree)(tree.stats, + unitLiteral.withSpan(if (tree.stats.isEmpty) tree.span else tree.span.endPos)) + case _ => + tree + } + + /** Translate infix operation expression + * + * l op r ==> l.op(r) if op is left-associative + * ==> r.op(l) if op is right-associative + */ + def binop(left: Tree, op: Ident, right: Tree)(using Context): Apply = { + def assignToNamedArg(arg: Tree) = arg match { + case Assign(Ident(name), rhs) => cpy.NamedArg(arg)(name, rhs) + case _ => arg + } + def makeOp(fn: Tree, arg: Tree, selectPos: Span) = + val sel = Select(fn, op.name).withSpan(selectPos) + if (left.sourcePos.endLine < op.sourcePos.startLine) + sel.pushAttachment(MultiLineInfix, ()) + arg match + case Parens(arg) => + Apply(sel, assignToNamedArg(arg) :: Nil) + case Tuple(args) if args.exists(_.isInstanceOf[Assign]) => + Apply(sel, 
args.mapConserve(assignToNamedArg)) + case Tuple(args) => + Apply(sel, arg :: Nil).setApplyKind(ApplyKind.InfixTuple) + case _ => + Apply(sel, arg :: Nil) + + if op.name.isRightAssocOperatorName then + makeOp(right, left, Span(op.span.start, right.span.end)) + else + makeOp(left, right, Span(left.span.start, op.span.end, op.span.start)) + } + + /** Translate throws type `A throws E1 | ... | En` to + * $throws[... $throws[A, E1] ... , En]. + */ + def throws(tpt: Tree, op: Ident, excepts: Tree)(using Context): AppliedTypeTree = excepts match + case Parens(excepts1) => + throws(tpt, op, excepts1) + case InfixOp(l, bar @ Ident(tpnme.raw.BAR), r) => + throws(throws(tpt, op, l), bar, r) + case e => + AppliedTypeTree( + TypeTree(defn.throwsAlias.typeRef).withSpan(op.span), tpt :: excepts :: Nil) + + /** Translate tuple expressions of arity <= 22 + * + * () ==> () + * (t) ==> t + * (t1, ..., tN) ==> TupleN(t1, ..., tN) + */ + def smallTuple(tree: Tuple)(using Context): Tree = { + val ts = tree.trees + val arity = ts.length + assert(arity <= Definitions.MaxTupleArity) + def tupleTypeRef = defn.TupleType(arity).nn + if (arity == 0) + if (ctx.mode is Mode.Type) TypeTree(defn.UnitType) else unitLiteral + else if (ctx.mode is Mode.Type) AppliedTypeTree(ref(tupleTypeRef), ts) + else Apply(ref(tupleTypeRef.classSymbol.companionModule.termRef), ts) + } + + private def isTopLevelDef(stat: Tree)(using Context): Boolean = stat match + case _: ValDef | _: PatDef | _: DefDef | _: Export | _: ExtMethods => true + case stat: ModuleDef => + stat.mods.isOneOf(GivenOrImplicit) + case stat: TypeDef => + !stat.isClassDef || stat.mods.isOneOf(GivenOrImplicit) + case _ => + false + + /** Assuming `src` contains top-level definition, returns the name that should + * be using for the package object that will wrap them. 
+ */ 
+ def packageObjectName(src: SourceFile): TermName = 
+ val fileName = src.file.name 
+ val sourceName = fileName.take(fileName.lastIndexOf('.')) 
+ (sourceName ++ str.TOPLEVEL_SUFFIX).toTermName 
+ 
+ /** Group all definitions that can't be at the toplevel in 
+ * an object named `<source>$package` where `<source>` is the name of the source file. 
+ * Definitions that can't be at the toplevel are: 
+ * 
+ * - all pattern, value and method definitions 
+ * - non-class type definitions 
+ * - implicit classes and objects 
+ * - "companion objects" of wrapped type definitions 
+ * (i.e. objects having the same name as a wrapped type) 
+ */ 
+ def packageDef(pdef: PackageDef)(using Context): PackageDef = { 
+ checkPackageName(pdef) 
+ val wrappedTypeNames = pdef.stats.collect { 
+ case stat: TypeDef if isTopLevelDef(stat) => stat.name 
+ } 
+ def inPackageObject(stat: Tree) = 
+ isTopLevelDef(stat) || { 
+ stat match 
+ case stat: ModuleDef => 
+ wrappedTypeNames.contains(stat.name.stripModuleClassSuffix.toTypeName) 
+ case _ => 
+ false 
+ } 
+ val (nestedStats, topStats) = pdef.stats.partition(inPackageObject) 
+ if (nestedStats.isEmpty) pdef 
+ else { 
+ val name = packageObjectName(ctx.source) 
+ val grouped = 
+ ModuleDef(name, Template(emptyConstructor, Nil, Nil, EmptyValDef, nestedStats)) 
+ .withMods(Modifiers(Synthetic)) 
+ cpy.PackageDef(pdef)(pdef.pid, topStats :+ grouped) 
+ } 
+ } 
+ 
+ /** Make closure corresponding to function. 
+ * params => body 
+ * ==> 
+ * def $anonfun(params) = body 
+ * Closure($anonfun) 
+ */ 
+ def makeClosure(params: List[ValDef], body: Tree, tpt: Tree | Null = null, isContextual: Boolean, span: Span)(using Context): Block = 
+ Block( 
+ DefDef(nme.ANON_FUN, params :: Nil, if (tpt == null) TypeTree() else tpt, body) 
+ .withSpan(span) 
+ .withMods(synthetic | Artifact), 
+ Closure(Nil, Ident(nme.ANON_FUN), if (isContextual) ContextualEmptyTree else EmptyTree)) 
+ 
+ /** If `nparams` == 1, expand partial function 
+ * 
+ * { cases } 
+ * ==> 
+ * x$1 => (x$1 @unchecked?) 
match { cases } + * + * If `nparams` != 1, expand instead to + * + * (x$1, ..., x$n) => (x$0, ..., x${n-1} @unchecked?) match { cases } + */ + def makeCaseLambda(cases: List[CaseDef], checkMode: MatchCheck, nparams: Int = 1)(using Context): Function = { + val params = (1 to nparams).toList.map(makeSyntheticParameter(_)) + val selector = makeTuple(params.map(p => Ident(p.name))) + Function(params, Match(makeSelector(selector, checkMode), cases)) + } + + /** Map n-ary function `(x1: T1, ..., xn: Tn) => body` where n != 1 to unary function as follows: + * + * (x$1: (T1, ..., Tn)) => { + * def x1: T1 = x$1._1 + * ... + * def xn: Tn = x$1._n + * body + * } + * + * or if `isGenericTuple` + * + * (x$1: (T1, ... Tn) => { + * def x1: T1 = x$1.apply(0) + * ... + * def xn: Tn = x$1.apply(n-1) + * body + * } + * + * If some of the Ti's are absent, omit the : (T1, ..., Tn) type ascription + * in the selector. + */ + def makeTupledFunction(params: List[ValDef], body: Tree, isGenericTuple: Boolean)(using Context): Tree = { + val param = makeSyntheticParameter( + tpt = + if params.exists(_.tpt.isEmpty) then TypeTree() + else Tuple(params.map(_.tpt))) + def selector(n: Int) = + if (isGenericTuple) Apply(Select(refOfDef(param), nme.apply), Literal(Constant(n))) + else Select(refOfDef(param), nme.selectorName(n)) + val vdefs = + params.zipWithIndex.map { + case (param, idx) => + ValDef(param.name, param.tpt, selector(idx)) + .withSpan(param.span) + .withAttachment(UntupledParam, ()) + .withFlags(Synthetic) + } + Function(param :: Nil, Block(vdefs, body)) + } + + /** Convert a tuple pattern with given `elems` to a sequence of `ValDefs`, + * skipping elements that are not convertible. 
+ */ + def patternsToParams(elems: List[Tree])(using Context): List[ValDef] = + def toParam(elem: Tree, tpt: Tree): Tree = + elem match + case Annotated(elem1, _) => toParam(elem1, tpt) + case Typed(elem1, tpt1) => toParam(elem1, tpt1) + case Ident(id: TermName) => ValDef(id, tpt, EmptyTree).withFlags(Param) + case _ => EmptyTree + elems.map(param => toParam(param, TypeTree()).withSpan(param.span)).collect { + case vd: ValDef => vd + } + + def makeContextualFunction(formals: List[Tree], body: Tree, isErased: Boolean)(using Context): Function = { + val mods = if (isErased) Given | Erased else Given + val params = makeImplicitParameters(formals, mods) + FunctionWithMods(params, body, Modifiers(mods)) + } + + private def derivedValDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers)(using Context) = { + val vdef = ValDef(named.name.asTermName, tpt, rhs) + .withMods(mods) + .withSpan(original.span.withPoint(named.span.start)) + val mayNeedSetter = valDef(vdef) + mayNeedSetter + } + + private def derivedDefDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers)(implicit src: SourceFile) = + DefDef(named.name.asTermName, Nil, tpt, rhs) + .withMods(mods) + .withSpan(original.span.withPoint(named.span.start)) + + /** Main desugaring method */ + def apply(tree: Tree, pt: Type = NoType)(using Context): Tree = { + + /** Create tree for for-comprehension `` or + * `` where mapName and flatMapName are chosen + * corresponding to whether this is a for-do or a for-yield. + * The creation performs the following rewrite rules: + * + * 1. + * + * for (P <- G) E ==> G.foreach (P => E) + * + * Here and in the following (P => E) is interpreted as the function (P => E) + * if P is a variable pattern and as the partial function { case P => E } otherwise. + * + * 2. + * + * for (P <- G) yield E ==> G.map (P => E) + * + * 3. + * + * for (P_1 <- G_1; P_2 <- G_2; ...) ... + * ==> + * G_1.flatMap (P_1 => for (P_2 <- G_2; ...) ...) + * + * 4. 
+ * + * for (P <- G; E; ...) ... + * => + * for (P <- G.filter (P => E); ...) ... + * + * 5. For any N: + * + * for (P_1 <- G; P_2 = E_2; val P_N = E_N; ...) + * ==> + * for (TupleN(P_1, P_2, ... P_N) <- + * for (x_1 @ P_1 <- G) yield { + * val x_2 @ P_2 = E_2 + * ... + * val x_N & P_N = E_N + * TupleN(x_1, ..., x_N) + * } ...) + * + * If any of the P_i are variable patterns, the corresponding `x_i @ P_i` is not generated + * and the variable constituting P_i is used instead of x_i + * + * @param mapName The name to be used for maps (either map or foreach) + * @param flatMapName The name to be used for flatMaps (either flatMap or foreach) + * @param enums The enumerators in the for expression + * @param body The body of the for expression + */ + def makeFor(mapName: TermName, flatMapName: TermName, enums: List[Tree], body: Tree): Tree = trace(i"make for ${ForYield(enums, body)}", show = true) { + + /** Let `pat` be `gen`'s pattern. Make a function value `pat => body`. + * If `pat` is a var pattern `id: T` then this gives `(id: T) => body`. + * Otherwise this gives `{ case pat => body }`, where `pat` is checked to be + * irrefutable if `gen`'s checkMode is GenCheckMode.Check. + */ + def makeLambda(gen: GenFrom, body: Tree): Tree = gen.pat match { + case IdPattern(named, tpt) if gen.checkMode != GenCheckMode.FilterAlways => + Function(derivedValDef(gen.pat, named, tpt, EmptyTree, Modifiers(Param)) :: Nil, body) + case _ => + val matchCheckMode = + if (gen.checkMode == GenCheckMode.Check || gen.checkMode == GenCheckMode.CheckAndFilter) MatchCheck.IrrefutableGenFrom + else MatchCheck.None + makeCaseLambda(CaseDef(gen.pat, EmptyTree, body) :: Nil, matchCheckMode) + } + + /** If `pat` is not an Identifier, a Typed(Ident, _), or a Bind, wrap + * it in a Bind with a fresh name. Return the transformed pattern, and the identifier + * that refers to the bound variable for the pattern. Wildcard Binds are + * also replaced by Binds with fresh names. 
+ */ + def makeIdPat(pat: Tree): (Tree, Ident) = pat match { + case bind @ Bind(name, pat1) => + if name == nme.WILDCARD then + val name = UniqueName.fresh() + (cpy.Bind(pat)(name, pat1).withMods(bind.mods), Ident(name)) + else (pat, Ident(name)) + case id: Ident if isVarPattern(id) && id.name != nme.WILDCARD => (id, id) + case Typed(id: Ident, _) if isVarPattern(id) && id.name != nme.WILDCARD => (pat, id) + case _ => + val name = UniqueName.fresh() + (Bind(name, pat), Ident(name)) + } + + /** Make a pattern filter: + * rhs.withFilter { case pat => true case _ => false } + * + * On handling irrefutable patterns: + * The idea is to wait until the pattern matcher sees a call + * + * xs withFilter { cases } + * + * where cases can be proven to be refutable i.e. cases would be + * equivalent to { case _ => true } + * + * In that case, compile to + * + * xs withFilter alwaysTrue + * + * where `alwaysTrue` is a predefined function value: + * + * val alwaysTrue: Any => Boolean = true + * + * In the libraries operations can take advantage of alwaysTrue to shortcircuit the + * withFilter call. + * + * def withFilter(f: Elem => Boolean) = + * if (f eq alwaysTrue) this // or rather identity filter monadic applied to this + * else real withFilter + */ + def makePatFilter(rhs: Tree, pat: Tree): Tree = { + val cases = List( + CaseDef(pat, EmptyTree, Literal(Constant(true))), + CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))) + Apply(Select(rhs, nme.withFilter), makeCaseLambda(cases, MatchCheck.None)) + } + + /** Is pattern `pat` irrefutable when matched against `rhs`? + * We only can do a simple syntactic check here; a more refined check + * is done later in the pattern matcher (see discussion in @makePatFilter). 
+ */ + def isIrrefutable(pat: Tree, rhs: Tree): Boolean = { + def matchesTuple(pats: List[Tree], rhs: Tree): Boolean = rhs match { + case Tuple(trees) => (pats corresponds trees)(isIrrefutable) + case Parens(rhs1) => matchesTuple(pats, rhs1) + case Block(_, rhs1) => matchesTuple(pats, rhs1) + case If(_, thenp, elsep) => matchesTuple(pats, thenp) && matchesTuple(pats, elsep) + case Match(_, cases) => cases forall (matchesTuple(pats, _)) + case CaseDef(_, _, rhs1) => matchesTuple(pats, rhs1) + case Throw(_) => true + case _ => false + } + pat match { + case Bind(_, pat1) => isIrrefutable(pat1, rhs) + case Parens(pat1) => isIrrefutable(pat1, rhs) + case Tuple(pats) => matchesTuple(pats, rhs) + case _ => isVarPattern(pat) + } + } + + /** Is `pat` of the form `x`, `x T`, or `given T`? when used as the lhs of a generator, + * these are all considered irrefutable. + */ + def isVarBinding(pat: Tree): Boolean = pat match + case pat @ Bind(_, pat1) if pat.mods.is(Given) => isVarBinding(pat1) + case IdPattern(_) => true + case _ => false + + def needsNoFilter(gen: GenFrom): Boolean = gen.checkMode match + case GenCheckMode.FilterAlways => false // pattern was prefixed by `case` + case GenCheckMode.FilterNow | GenCheckMode.CheckAndFilter => isVarBinding(gen.pat) || isIrrefutable(gen.pat, gen.expr) + case GenCheckMode.Check => true + case GenCheckMode.Ignore => true + + /** rhs.name with a pattern filter on rhs unless `pat` is irrefutable when + * matched against `rhs`. 
+ */ + def rhsSelect(gen: GenFrom, name: TermName) = { + val rhs = if (needsNoFilter(gen)) gen.expr else makePatFilter(gen.expr, gen.pat) + Select(rhs, name) + } + + enums match { + case (gen: GenFrom) :: Nil => + Apply(rhsSelect(gen, mapName), makeLambda(gen, body)) + case (gen: GenFrom) :: (rest @ (GenFrom(_, _, _) :: _)) => + val cont = makeFor(mapName, flatMapName, rest, body) + Apply(rhsSelect(gen, flatMapName), makeLambda(gen, cont)) + case (gen: GenFrom) :: (rest @ GenAlias(_, _) :: _) => + val (valeqs, rest1) = rest.span(_.isInstanceOf[GenAlias]) + val pats = valeqs map { case GenAlias(pat, _) => pat } + val rhss = valeqs map { case GenAlias(_, rhs) => rhs } + val (defpat0, id0) = makeIdPat(gen.pat) + val (defpats, ids) = (pats map makeIdPat).unzip + val pdefs = valeqs.lazyZip(defpats).lazyZip(rhss).map { (valeq, defpat, rhs) => + val mods = defpat match + case defTree: DefTree => defTree.mods + case _ => Modifiers() + makePatDef(valeq, mods, defpat, rhs) + } + val rhs1 = makeFor(nme.map, nme.flatMap, GenFrom(defpat0, gen.expr, gen.checkMode) :: Nil, Block(pdefs, makeTuple(id0 :: ids))) + val allpats = gen.pat :: pats + val vfrom1 = GenFrom(makeTuple(allpats), rhs1, GenCheckMode.Ignore) + makeFor(mapName, flatMapName, vfrom1 :: rest1, body) + case (gen: GenFrom) :: test :: rest => + val filtered = Apply(rhsSelect(gen, nme.withFilter), makeLambda(gen, test)) + val genFrom = GenFrom(gen.pat, filtered, GenCheckMode.Ignore) + makeFor(mapName, flatMapName, genFrom :: rest, body) + case _ => + EmptyTree //may happen for erroneous input + } + } + + def makePolyFunction(targs: List[Tree], body: Tree, pt: Type): Tree = body match { + case Parens(body1) => + makePolyFunction(targs, body1, pt) + case Block(Nil, body1) => + makePolyFunction(targs, body1, pt) + case Function(vargs, res) => + assert(targs.nonEmpty) + // TODO: Figure out if we need a `PolyFunctionWithMods` instead. 
+ val mods = body match { + case body: FunctionWithMods => body.mods + case _ => untpd.EmptyModifiers + } + val polyFunctionTpt = ref(defn.PolyFunctionType) + val applyTParams = targs.asInstanceOf[List[TypeDef]] + if (ctx.mode.is(Mode.Type)) { + // Desugar [T_1, ..., T_M] -> (P_1, ..., P_N) => R + // Into scala.PolyFunction { def apply[T_1, ..., T_M](x$1: P_1, ..., x$N: P_N): R } + + val applyVParams = vargs.zipWithIndex.map { + case (p: ValDef, _) => p.withAddedFlags(mods.flags) + case (p, n) => makeSyntheticParameter(n + 1, p).withAddedFlags(mods.flags) + } + RefinedTypeTree(polyFunctionTpt, List( + DefDef(nme.apply, applyTParams :: applyVParams :: Nil, res, EmptyTree).withFlags(Synthetic) + )) + } + else { + // Desugar [T_1, ..., T_M] -> (x_1: P_1, ..., x_N: P_N) => body + // with pt [S_1, ..., S_M] -> (O_1, ..., O_N) => R + // Into new scala.PolyFunction { def apply[T_1, ..., T_M](x_1: P_1, ..., x_N: P_N): R2 = body } + // where R2 is R, with all references to S_1..S_M replaced with T1..T_M. + + def typeTree(tp: Type) = tp match + case RefinedType(parent, nme.apply, PolyType(_, mt)) if parent.typeSymbol eq defn.PolyFunctionClass => + var bail = false + def mapper(tp: Type, topLevel: Boolean = false): Tree = tp match + case tp: TypeRef => ref(tp) + case tp: TypeParamRef => Ident(applyTParams(tp.paramNum).name) + case AppliedType(tycon, args) => AppliedTypeTree(mapper(tycon), args.map(mapper(_))) + case _ => if topLevel then TypeTree() else { bail = true; genericEmptyTree } + val mapped = mapper(mt.resultType, topLevel = true) + if bail then TypeTree() else mapped + case _ => TypeTree() + + val applyVParams = vargs.asInstanceOf[List[ValDef]] + .map(varg => varg.withAddedFlags(mods.flags | Param)) + New(Template(emptyConstructor, List(polyFunctionTpt), Nil, EmptyValDef, + List(DefDef(nme.apply, applyTParams :: applyVParams :: Nil, typeTree(pt), res)) + )) + } + case _ => + // may happen for erroneous input. An error will already have been reported. 
+ assert(ctx.reporter.errorsReported) + EmptyTree + } + + // begin desugar + + // Special case for `Parens` desugaring: unlike all the desugarings below, + // its output is not a new tree but an existing one whose position should + // be preserved, so we shouldn't call `withPos` on it. + tree match { + case Parens(t) => + return t + case _ => + } + + val desugared = tree match { + case PolyFunction(targs, body) => + makePolyFunction(targs, body, pt) orElse tree + case SymbolLit(str) => + Apply( + ref(defn.ScalaSymbolClass.companionModule.termRef), + Literal(Constant(str)) :: Nil) + case InterpolatedString(id, segments) => + val strs = segments map { + case ts: Thicket => ts.trees.head + case t => t + } + val elems = segments flatMap { + case ts: Thicket => ts.trees.tail + case t => Nil + } map { (t: Tree) => t match + // !cc! explicitly typed parameter (t: Tree) is needed since otherwise + // we get an error similar to #16268. (The explicit type constrains the type of `segments` + // which is otherwise List[{*} tree]) + case Block(Nil, EmptyTree) => Literal(Constant(())) // for s"... ${} ..." 
+ case Block(Nil, expr) => expr // important for interpolated string as patterns, see i1773.scala
+ case t => t
+ }
+ // This is a deliberate departure from scalac, where StringContext is not rooted (See #4732)
+ Apply(Select(Apply(scalaDot(nme.StringContext), strs), id).withSpan(tree.span), elems)
+ case PostfixOp(t, op) =>
+ if (ctx.mode is Mode.Type) && !isBackquoted(op) && op.name == tpnme.raw.STAR then
+ if ctx.isJava then
+ AppliedTypeTree(ref(defn.RepeatedParamType), t)
+ else
+ Annotated(
+ AppliedTypeTree(ref(defn.SeqType), t),
+ New(ref(defn.RepeatedAnnot.typeRef), Nil :: Nil))
+ else
+ assert(ctx.mode.isExpr || ctx.reporter.errorsReported || ctx.mode.is(Mode.Interactive), ctx.mode)
+ Select(t, op.name)
+ case PrefixOp(op, t) =>
+ val nspace = if (ctx.mode.is(Mode.Type)) tpnme else nme
+ Select(t, nspace.UNARY_PREFIX ++ op.name)
+ case ForDo(enums, body) =>
+ makeFor(nme.foreach, nme.foreach, enums, body) orElse tree
+ case ForYield(enums, body) =>
+ makeFor(nme.map, nme.flatMap, enums, body) orElse tree
+ case PatDef(mods, pats, tpt, rhs) =>
+ val pats1 = if (tpt.isEmpty) pats else pats map (Typed(_, tpt))
+ flatTree(pats1 map (makePatDef(tree, mods, _, rhs)))
+ case ext: ExtMethods =>
+ Block(List(ext), Literal(Constant(())).withSpan(ext.span))
+ case CapturingTypeTree(refs, parent) =>
+ // convert `{refs} T` to `T @retains refs`
+ // `{refs}-> T` to `-> (T @retainsByName refs)`
+ def annotate(annotName: TypeName, tp: Tree) =
+ Annotated(tp, New(scalaAnnotationDot(annotName), List(refs)))
+ parent match
+ case ByNameTypeTree(restpt) =>
+ cpy.ByNameTypeTree(parent)(annotate(tpnme.retainsByName, restpt))
+ case _ =>
+ annotate(tpnme.retains, parent)
+ }
+ desugared.withSpan(tree.span)
+ }
+
+ /** Turn a function value `handlerFun` into a catch case for a try.
+ * If `handlerFun` is a partial function, translate to + * + * case ex => + * val ev$1 = handlerFun + * if ev$1.isDefinedAt(ex) then ev$1.apply(ex) else throw ex + * + * Otherwise translate to + * + * case ex => handlerFun.apply(ex) + */ + def makeTryCase(handlerFun: tpd.Tree)(using Context): CaseDef = + val handler = TypedSplice(handlerFun) + val excId = Ident(nme.DEFAULT_EXCEPTION_NAME) + val rhs = + if handlerFun.tpe.widen.isRef(defn.PartialFunctionClass) then + val tmpName = UniqueName.fresh() + val tmpId = Ident(tmpName) + val init = ValDef(tmpName, TypeTree(), handler) + val test = If( + Apply(Select(tmpId, nme.isDefinedAt), excId), + Apply(Select(tmpId, nme.apply), excId), + Throw(excId)) + Block(init :: Nil, test) + else + Apply(Select(handler, nme.apply), excId) + CaseDef(excId, EmptyTree, rhs) + + /** Create a class definition with the same info as the refined type given by `parent` + * and `refinements`. + * + * parent { refinements } + * ==> + * trait extends core { this: self => refinements } + * + * Here, `core` is the (possibly parameterized) class part of `parent`. + * If `parent` is the same as `core`, self is empty. Otherwise `self` is `parent`. + * + * Example: Given + * + * class C + * type T1 = C { type T <: A } + * + * the refined type + * + * T1 { type T <: B } + * + * is expanded to + * + * trait extends C { this: T1 => type T <: A } + * + * The result of this method is used for validity checking, is thrown away afterwards. 
+ * @param parent The type of `parent` + */ + def refinedTypeToClass(parent: tpd.Tree, refinements: List[Tree])(using Context): TypeDef = { + def stripToCore(tp: Type): List[Type] = tp match { + case tp: AppliedType => tp :: Nil + case tp: TypeRef if tp.symbol.isClass => tp :: Nil // monomorphic class type + case tp: TypeProxy => stripToCore(tp.underlying) + case AndType(tp1, tp2) => stripToCore(tp1) ::: stripToCore(tp2) + case _ => defn.AnyType :: Nil + } + val parentCores = stripToCore(parent.tpe) + val untpdParent = TypedSplice(parent) + val (classParents, self) = + if (parentCores.length == 1 && (parent.tpe eq parentCores.head)) (untpdParent :: Nil, EmptyValDef) + else (parentCores map TypeTree, ValDef(nme.WILDCARD, untpdParent, EmptyTree)) + val impl = Template(emptyConstructor, classParents, Nil, self, refinements) + TypeDef(tpnme.REFINE_CLASS, impl).withFlags(Trait) + } + + /** Returns list of all pattern variables, possibly with their types, + * without duplicates + */ + private def getVariables(tree: Tree, shouldAddGiven: Context ?=> Bind => Boolean)(using Context): List[VarInfo] = { + val buf = ListBuffer[VarInfo]() + def seenName(name: Name) = buf exists (_._1.name == name) + def add(named: NameTree, t: Tree): Unit = + if (!seenName(named.name) && named.name.isTermName) buf += ((named, t)) + def collect(tree: Tree): Unit = tree match { + case tree @ Bind(nme.WILDCARD, tree1) => + if tree.mods.is(Given) then + val Typed(_, tpt) = tree1: @unchecked + if shouldAddGiven(tree) then + add(tree, tpt) + collect(tree1) + case tree @ Bind(_, Typed(tree1, tpt)) => + if !(tree.mods.is(Given) && !shouldAddGiven(tree)) then + add(tree, tpt) + collect(tree1) + case tree @ Bind(_, tree1) => + add(tree, TypeTree()) + collect(tree1) + case Typed(id: Ident, t) if isVarPattern(id) && id.name != nme.WILDCARD && !isWildcardStarArg(tree) => + add(id, t) + case id: Ident if isVarPattern(id) && id.name != nme.WILDCARD => + add(id, TypeTree()) + case Apply(_, args) => + args 
foreach collect + case Typed(expr, _) => + collect(expr) + case NamedArg(_, arg) => + collect(arg) + case SeqLiteral(elems, _) => + elems foreach collect + case Alternative(trees) => + for (tree <- trees; (vble, _) <- getVariables(tree, shouldAddGiven)) + report.error(IllegalVariableInPatternAlternative(vble.symbol.name), vble.srcPos) + case Annotated(arg, _) => + collect(arg) + case InterpolatedString(_, segments) => + segments foreach collect + case InfixOp(left, _, right) => + collect(left) + collect(right) + case PrefixOp(_, od) => + collect(od) + case Parens(tree) => + collect(tree) + case Tuple(trees) => + trees foreach collect + case Thicket(trees) => + trees foreach collect + case Block(Nil, expr) => + collect(expr) + case Quote(expr) => + new UntypedTreeTraverser { + def traverse(tree: untpd.Tree)(using Context): Unit = tree match { + case Splice(expr) => collect(expr) + case _ => traverseChildren(tree) + } + }.traverse(expr) + case CapturingTypeTree(refs, parent) => + collect(parent) + case _ => + } + collect(tree) + buf.toList + } +} diff --git a/tests/pos-with-compiler-cc/dotc/ast/DesugarEnums.scala b/tests/pos-with-compiler-cc/dotc/ast/DesugarEnums.scala new file mode 100644 index 000000000000..096a885dcf32 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/DesugarEnums.scala @@ -0,0 +1,310 @@ +package dotty.tools +package dotc +package ast + +import core._ +import util.Spans._, Types._, Contexts._, Constants._, Names._, Flags._ +import Symbols._, StdNames._, Trees._ +import Decorators._ +import util.{Property, SourceFile} +import typer.ErrorReporting._ +import transform.SyntheticMembers.ExtendsSingletonMirror + +import scala.annotation.internal.sharable + +/** Helper methods to desugar enums */ +object DesugarEnums { + import untpd._ + + enum CaseKind: + case Simple, Object, Class + + final case class EnumConstraints(minKind: CaseKind, maxKind: CaseKind, enumCases: List[(Int, RefTree)]): + require(minKind.ordinal <= maxKind.ordinal && !(cached && 
enumCases.isEmpty))
+ def requiresCreator = minKind == CaseKind.Simple
+ def isEnumeration = maxKind.ordinal < CaseKind.Class.ordinal
+ def cached = minKind.ordinal < CaseKind.Class.ordinal
+ end EnumConstraints
+
+ /** Attachment containing the number of enum cases, the smallest kind that was seen so far,
+ * and a list of all the value cases with their ordinals.
+ */
+ val EnumCaseCount: Property.Key[(Int, CaseKind, CaseKind, List[(Int, TermName)])] = Property.Key()
+
+ /** Attachment signalling that when this definition is desugared, it should add any additional
+ * lookup methods for enums.
+ */
+ val DefinesEnumLookupMethods: Property.Key[Unit] = Property.Key()
+
+ /** The enumeration class that belongs to an enum case. This works no matter
+ * whether the case is still in the enum class or it has been transferred to the
+ * companion object.
+ */
+ def enumClass(using Context): Symbol = {
+ val cls = ctx.owner
+ if (cls.is(Module)) cls.linkedClass else cls
+ }
+
+ def enumCompanion(using Context): Symbol = {
+ val cls = ctx.owner
+ if (cls.is(Module)) cls.sourceModule else cls.linkedClass.sourceModule
+ }
+
+ /** Is `tree` an (untyped) enum case? */
+ def isEnumCase(tree: Tree)(using Context): Boolean = tree match {
+ case tree: MemberDef => tree.mods.isEnumCase
+ case PatDef(mods, _, _, _) => mods.isEnumCase
+ case _ => false
+ }
+
+ /** A reference to the enum class `E`, possibly followed by type arguments.
+ * Each covariant type parameter is approximated by its lower bound.
+ * Each contravariant type parameter is approximated by its upper bound.
+ * It is an error if a type parameter is non-variant, or if its approximation
+ * refers to other type parameters.
+ */ + def interpolatedEnumParent(span: Span)(using Context): Tree = { + val tparams = enumClass.typeParams + def isGround(tp: Type) = tp.subst(tparams, tparams.map(_ => NoType)) eq tp + val targs = tparams map { tparam => + if (tparam.is(Covariant) && isGround(tparam.info.bounds.lo)) + tparam.info.bounds.lo + else if (tparam.is(Contravariant) && isGround(tparam.info.bounds.hi)) + tparam.info.bounds.hi + else { + def problem = + if (!tparam.isOneOf(VarianceFlags)) "is invariant" + else "has bounds that depend on a type parameter in the same parameter list" + errorType(i"""cannot determine type argument for enum parent $enumClass, + |type parameter $tparam $problem""", ctx.source.atSpan(span)) + } + } + TypeTree(enumClass.typeRef.appliedTo(targs)).withSpan(span) + } + + /** A type tree referring to `enumClass` */ + def enumClassRef(using Context): Tree = + if (enumClass.exists) TypeTree(enumClass.typeRef) else TypeTree() + + /** Add implied flags to an enum class or an enum case */ + def addEnumFlags(cdef: TypeDef)(using Context): TypeDef = + if (cdef.mods.isEnumClass) cdef.withMods(cdef.mods.withAddedFlags(Abstract | Sealed, cdef.span)) + else if (isEnumCase(cdef)) cdef.withMods(cdef.mods.withAddedFlags(Final, cdef.span)) + else cdef + + private def valuesDot(name: PreName)(implicit src: SourceFile) = + Select(Ident(nme.DOLLAR_VALUES), name.toTermName) + + private def ArrayLiteral(values: List[Tree], tpt: Tree)(using Context): Tree = + val clazzOf = TypeApply(ref(defn.Predef_classOf.termRef), tpt :: Nil) + val ctag = Apply(TypeApply(ref(defn.ClassTagModule_apply.termRef), tpt :: Nil), clazzOf :: Nil) + val apply = Select(ref(defn.ArrayModule.termRef), nme.apply) + Apply(Apply(TypeApply(apply, tpt :: Nil), values), ctag :: Nil) + + /** The following lists of definitions for an enum type E and known value cases e_0, ..., e_n: + * + * private val $values = Array[E](this.e_0,...,this.e_n)(ClassTag[E](classOf[E])) + * def values = $values.clone + * def valueOf($name: 
String) = $name match { + * case "e_0" => this.e_0 + * ... + * case "e_n" => this.e_n + * case _ => throw new IllegalArgumentException("case not found: " + $name) + * } + */ + private def enumScaffolding(enumValues: List[RefTree])(using Context): List[Tree] = { + val rawEnumClassRef = rawRef(enumClass.typeRef) + extension (tpe: NamedType) def ofRawEnum = AppliedTypeTree(ref(tpe), rawEnumClassRef) + + val privateValuesDef = + ValDef(nme.DOLLAR_VALUES, TypeTree(), ArrayLiteral(enumValues, rawEnumClassRef)) + .withFlags(Private | Synthetic) + + val valuesDef = + DefDef(nme.values, Nil, defn.ArrayType.ofRawEnum, valuesDot(nme.clone_)) + .withFlags(Synthetic) + + val valuesOfBody: Tree = + val defaultCase = + val msg = Apply(Select(Literal(Constant("enum case not found: ")), nme.PLUS), Ident(nme.nameDollar)) + CaseDef(Ident(nme.WILDCARD), EmptyTree, + Throw(New(TypeTree(defn.IllegalArgumentExceptionType), List(msg :: Nil)))) + val stringCases = enumValues.map(enumValue => + CaseDef(Literal(Constant(enumValue.name.toString)), EmptyTree, enumValue) + ) ::: defaultCase :: Nil + Match(Ident(nme.nameDollar), stringCases) + val valueOfDef = DefDef(nme.valueOf, List(param(nme.nameDollar, defn.StringType) :: Nil), + TypeTree(), valuesOfBody) + .withFlags(Synthetic) + + privateValuesDef :: + valuesDef :: + valueOfDef :: Nil + } + + private def enumLookupMethods(constraints: EnumConstraints)(using Context): List[Tree] = + def scaffolding: List[Tree] = + if constraints.isEnumeration then enumScaffolding(constraints.enumCases.map(_._2)) else Nil + def valueCtor: List[Tree] = if constraints.requiresCreator then enumValueCreator :: Nil else Nil + def fromOrdinal: Tree = + def throwArg(ordinal: Tree) = + Throw(New(TypeTree(defn.NoSuchElementExceptionType), List(Select(ordinal, nme.toString_) :: Nil))) + if !constraints.cached then + fromOrdinalMeth(throwArg) + else + def default(ordinal: Tree) = + CaseDef(Ident(nme.WILDCARD), EmptyTree, throwArg(ordinal)) + if 
constraints.isEnumeration then + fromOrdinalMeth(ordinal => + Try(Apply(valuesDot(nme.apply), ordinal), default(ordinal) :: Nil, EmptyTree)) + else + fromOrdinalMeth(ordinal => + Match(ordinal, + constraints.enumCases.map((i, enumValue) => CaseDef(Literal(Constant(i)), EmptyTree, enumValue)) + :+ default(ordinal))) + + if !enumClass.exists then + // in the case of a double definition of an enum that only defines class cases (see tests/neg/i4470c.scala) + // it seems `enumClass` might be `NoSymbol`; in this case we provide no scaffolding. + Nil + else + scaffolding ::: valueCtor ::: fromOrdinal :: Nil + end enumLookupMethods + + /** A creation method for a value of enum type `E`, which is defined as follows: + * + * private def $new(_$ordinal: Int, $name: String) = new E with scala.runtime.EnumValue { + * def ordinal = _$ordinal // if `E` does not derive from `java.lang.Enum` + * } + */ + private def enumValueCreator(using Context) = { + val creator = New(Template( + constr = emptyConstructor, + parents = enumClassRef :: scalaRuntimeDot(tpnme.EnumValue) :: Nil, + derived = Nil, + self = EmptyValDef, + body = Nil + ).withAttachment(ExtendsSingletonMirror, ())) + DefDef(nme.DOLLAR_NEW, + List(List(param(nme.ordinalDollar_, defn.IntType), param(nme.nameDollar, defn.StringType))), + TypeTree(), creator).withFlags(Private | Synthetic) + } + + /** Is a type parameter in `enumTypeParams` referenced from an enum class case that has + * given type parameters `caseTypeParams`, value parameters `vparamss` and parents `parents`? + * Issues an error if that is the case but the reference is illegal. + * The reference could be illegal for two reasons: + * - explicit type parameters are given + * - it's a value case, i.e. 
no value parameters are given + */ + def typeParamIsReferenced( + enumTypeParams: List[TypeSymbol], + caseTypeParams: List[TypeDef], + vparamss: List[List[ValDef]], + parents: List[Tree])(using Context): Boolean = { + + object searchRef extends UntypedTreeAccumulator[Boolean] { + var tparamNames = enumTypeParams.map(_.name).toSet[Name] + def underBinders(binders: List[MemberDef], op: => Boolean): Boolean = { + val saved = tparamNames + tparamNames = tparamNames -- binders.map(_.name) + try op + finally tparamNames = saved + } + def apply(x: Boolean, tree: Tree)(using Context): Boolean = x || { + tree match { + case Ident(name) => + val matches = tparamNames.contains(name) + if (matches && (caseTypeParams.nonEmpty || vparamss.isEmpty)) + report.error(i"illegal reference to type parameter $name from enum case", tree.srcPos) + matches + case LambdaTypeTree(lambdaParams, body) => + underBinders(lambdaParams, foldOver(x, tree)) + case RefinedTypeTree(parent, refinements) => + val refinementDefs = refinements collect { case r: MemberDef => r } + underBinders(refinementDefs, foldOver(x, tree)) + case _ => foldOver(x, tree) + } + } + def apply(tree: Tree)(using Context): Boolean = + underBinders(caseTypeParams, apply(false, tree)) + } + + def typeHasRef(tpt: Tree) = searchRef(tpt) + def valDefHasRef(vd: ValDef) = typeHasRef(vd.tpt) + def parentHasRef(parent: Tree): Boolean = parent match { + case Apply(fn, _) => parentHasRef(fn) + case TypeApply(_, targs) => targs.exists(typeHasRef) + case Select(nu, nme.CONSTRUCTOR) => parentHasRef(nu) + case New(tpt) => typeHasRef(tpt) + case parent => parent.isType && typeHasRef(parent) + } + + vparamss.nestedExists(valDefHasRef) || parents.exists(parentHasRef) + } + + /** A pair consisting of + * - the next enum tag + * - scaffolding containing the necessary definitions for singleton enum cases + * unless that scaffolding was already generated by a previous call to `nextEnumKind`. 
+ */ + def nextOrdinal(name: Name, kind: CaseKind, definesLookups: Boolean)(using Context): (Int, List[Tree]) = { + val (ordinal, seenMinKind, seenMaxKind, seenCases) = + ctx.tree.removeAttachment(EnumCaseCount).getOrElse((0, CaseKind.Class, CaseKind.Simple, Nil)) + val minKind = if kind.ordinal < seenMinKind.ordinal then kind else seenMinKind + val maxKind = if kind.ordinal > seenMaxKind.ordinal then kind else seenMaxKind + val cases = name match + case name: TermName => (ordinal, name) :: seenCases + case _ => seenCases + if definesLookups then + val thisRef = This(EmptyTypeIdent) + val cachedValues = cases.reverse.map((i, name) => (i, Select(thisRef, name))) + (ordinal, enumLookupMethods(EnumConstraints(minKind, maxKind, cachedValues))) + else + ctx.tree.pushAttachment(EnumCaseCount, (ordinal + 1, minKind, maxKind, cases)) + (ordinal, Nil) + } + + def param(name: TermName, typ: Type)(using Context): ValDef = param(name, TypeTree(typ)) + def param(name: TermName, tpt: Tree)(using Context): ValDef = ValDef(name, tpt, EmptyTree).withFlags(Param) + + def ordinalMeth(body: Tree)(using Context): DefDef = + DefDef(nme.ordinal, Nil, TypeTree(defn.IntType), body).withAddedFlags(Synthetic) + + def ordinalMethLit(ord: Int)(using Context): DefDef = + ordinalMeth(Literal(Constant(ord))) + + def fromOrdinalMeth(body: Tree => Tree)(using Context): DefDef = + DefDef(nme.fromOrdinal, (param(nme.ordinal, defn.IntType) :: Nil) :: Nil, + rawRef(enumClass.typeRef), body(Ident(nme.ordinal))).withFlags(Synthetic) + + /** Expand a module definition representing a parameterless enum case */ + def expandEnumModule(name: TermName, impl: Template, mods: Modifiers, definesLookups: Boolean, span: Span)(using Context): Tree = { + assert(impl.body.isEmpty) + if (!enumClass.exists) EmptyTree + else if (impl.parents.isEmpty) + expandSimpleEnumCase(name, mods, definesLookups, span) + else { + val (tag, scaffolding) = nextOrdinal(name, CaseKind.Object, definesLookups) + val impl1 = 
cpy.Template(impl)(parents = impl.parents :+ scalaRuntimeDot(tpnme.EnumValue), body = Nil) + .withAttachment(ExtendsSingletonMirror, ()) + val vdef = ValDef(name, TypeTree(), New(impl1)).withMods(mods.withAddedFlags(EnumValue, span)) + flatTree(vdef :: scaffolding).withSpan(span) + } + } + + /** Expand a simple enum case */ + def expandSimpleEnumCase(name: TermName, mods: Modifiers, definesLookups: Boolean, span: Span)(using Context): Tree = + if (!enumClass.exists) EmptyTree + else if (enumClass.typeParams.nonEmpty) { + val parent = interpolatedEnumParent(span) + val impl = Template(emptyConstructor, parent :: Nil, Nil, EmptyValDef, Nil) + expandEnumModule(name, impl, mods, definesLookups, span) + } + else { + val (tag, scaffolding) = nextOrdinal(name, CaseKind.Simple, definesLookups) + val creator = Apply(Ident(nme.DOLLAR_NEW), List(Literal(Constant(tag)), Literal(Constant(name.toString)))) + val vdef = ValDef(name, enumClassRef, creator).withMods(mods.withAddedFlags(EnumValue, span)) + flatTree(vdef :: scaffolding).withSpan(span) + } +} diff --git a/tests/pos-with-compiler-cc/dotc/ast/MainProxies.scala b/tests/pos-with-compiler-cc/dotc/ast/MainProxies.scala new file mode 100644 index 000000000000..040582476e96 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/MainProxies.scala @@ -0,0 +1,449 @@ +package dotty.tools.dotc +package ast + +import core._ +import Symbols._, Types._, Contexts._, Decorators._, util.Spans._, Flags._, Constants._ +import StdNames.{nme, tpnme} +import ast.Trees._ +import Names.Name +import Comments.Comment +import NameKinds.DefaultGetterName +import Annotations.Annotation + +object MainProxies { + + /** Generate proxy classes for @main functions and @myMain functions where myMain <:< MainAnnotation */ + def proxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { + mainAnnotationProxies(stats) ++ mainProxies(stats) + } + + /** Generate proxy classes for @main functions. 
+ * A function like + * + * @main def f(x: S, ys: T*) = ... + * + * would be translated to something like + * + * import CommandLineParser._ + * class f { + * @static def main(args: Array[String]): Unit = + * try + * f( + * parseArgument[S](args, 0), + * parseRemainingArguments[T](args, 1): _* + * ) + * catch case err: ParseError => showError(err) + * } + */ + private def mainProxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { + import tpd._ + def mainMethods(stats: List[Tree]): List[Symbol] = stats.flatMap { + case stat: DefDef if stat.symbol.hasAnnotation(defn.MainAnnot) => + stat.symbol :: Nil + case stat @ TypeDef(name, impl: Template) if stat.symbol.is(Module) => + mainMethods(impl.body) + case _ => + Nil + } + mainMethods(stats).flatMap(mainProxy) + } + + import untpd._ + private def mainProxy(mainFun: Symbol)(using Context): List[TypeDef] = { + val mainAnnotSpan = mainFun.getAnnotation(defn.MainAnnot).get.tree.span + def pos = mainFun.sourcePos + val argsRef = Ident(nme.args) + + def addArgs(call: untpd.Tree, mt: MethodType, idx: Int): untpd.Tree = + if (mt.isImplicitMethod) { + report.error(s"@main method cannot have implicit parameters", pos) + call + } + else { + val args = mt.paramInfos.zipWithIndex map { + (formal, n) => + val (parserSym, formalElem) = + if (formal.isRepeatedParam) (defn.CLP_parseRemainingArguments, formal.argTypes.head) + else (defn.CLP_parseArgument, formal) + val arg = Apply( + TypeApply(ref(parserSym.termRef), TypeTree(formalElem) :: Nil), + argsRef :: Literal(Constant(idx + n)) :: Nil) + if (formal.isRepeatedParam) repeated(arg) else arg + } + val call1 = Apply(call, args) + mt.resType match { + case restpe: MethodType => + if (mt.paramInfos.lastOption.getOrElse(NoType).isRepeatedParam) + report.error(s"varargs parameter of @main method must come last", pos) + addArgs(call1, restpe, idx + args.length) + case _ => + call1 + } + } + + var result: List[TypeDef] = Nil + if (!mainFun.owner.isStaticOwner) + 
report.error(s"@main method is not statically accessible", pos) + else { + var call = ref(mainFun.termRef) + mainFun.info match { + case _: ExprType => + case mt: MethodType => + call = addArgs(call, mt, 0) + case _: PolyType => + report.error(s"@main method cannot have type parameters", pos) + case _ => + report.error(s"@main can only annotate a method", pos) + } + val errVar = Ident(nme.error) + val handler = CaseDef( + Typed(errVar, TypeTree(defn.CLP_ParseError.typeRef)), + EmptyTree, + Apply(ref(defn.CLP_showError.termRef), errVar :: Nil)) + val body = Try(call, handler :: Nil, EmptyTree) + val mainArg = ValDef(nme.args, TypeTree(defn.ArrayType.appliedTo(defn.StringType)), EmptyTree) + .withFlags(Param) + /** Replace typed `Ident`s that have been typed with a TypeSplice with the reference to the symbol. + * The annotations will be retype-checked in another scope that may not have the same imports. + */ + def insertTypeSplices = new TreeMap { + override def transform(tree: Tree)(using Context): Tree = tree match + case tree: tpd.Ident @unchecked => TypedSplice(tree) + case tree => super.transform(tree) + } + val annots = mainFun.annotations + .filterNot(_.matches(defn.MainAnnot)) + .map(annot => insertTypeSplices.transform(annot.tree)) + val mainMeth = DefDef(nme.main, (mainArg :: Nil) :: Nil, TypeTree(defn.UnitType), body) + .withFlags(JavaStatic | Synthetic) + .withAnnotations(annots) + val mainTempl = Template(emptyConstructor, Nil, Nil, EmptyValDef, mainMeth :: Nil) + val mainCls = TypeDef(mainFun.name.toTypeName, mainTempl) + .withFlags(Final | Invisible) + + if (!ctx.reporter.hasErrors) + result = mainCls.withSpan(mainAnnotSpan.toSynthetic) :: Nil + } + result + } + + private type DefaultValueSymbols = Map[Int, Symbol] + private type ParameterAnnotationss = Seq[Seq[Annotation]] + + /** + * Generate proxy classes for main functions. + * A function like + * + * /** + * * Lorem ipsum dolor sit amet + * * consectetur adipiscing elit. 
+ * * + * * @param x my param x + * * @param ys all my params y + * */ + * @myMain(80) def f( + * @myMain.Alias("myX") x: S, + * y: S, + * ys: T* + * ) = ... + * + * would be translated to something like + * + * final class f { + * static def main(args: Array[String]): Unit = { + * val annotation = new myMain(80) + * val info = new Info( + * name = "f", + * documentation = "Lorem ipsum dolor sit amet consectetur adipiscing elit.", + * parameters = Seq( + * new scala.annotation.MainAnnotation.Parameter("x", "S", false, false, "my param x", Seq(new scala.main.Alias("myX"))), + * new scala.annotation.MainAnnotation.Parameter("y", "S", true, false, "", Seq()), + * new scala.annotation.MainAnnotation.Parameter("ys", "T", false, true, "all my params y", Seq()) + * ) + * ), + * val command = annotation.command(info, args) + * if command.isDefined then + * val cmd = command.get + * val args0: () => S = annotation.argGetter[S](info.parameters(0), cmd(0), None) + * val args1: () => S = annotation.argGetter[S](info.parameters(1), mainArgs(1), Some(() => sum$default$1())) + * val args2: () => Seq[T] = annotation.varargGetter[T](info.parameters(2), cmd.drop(2)) + * annotation.run(() => f(args0(), args1(), args2()*)) + * } + * } + */ + private def mainAnnotationProxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { + import tpd._ + + /** + * Computes the symbols of the default values of the function. Since they cannot be inferred anymore at this + * point of the compilation, they must be explicitly passed by [[mainProxy]]. 
+ */ + def defaultValueSymbols(scope: Tree, funSymbol: Symbol): DefaultValueSymbols = + scope match { + case TypeDef(_, template: Template) => + template.body.flatMap((_: Tree) match { + case dd: DefDef if dd.name.is(DefaultGetterName) && dd.name.firstPart == funSymbol.name => + val DefaultGetterName.NumberedInfo(index) = dd.name.info: @unchecked + List(index -> dd.symbol) + case _ => Nil + }).toMap + case _ => Map.empty + } + + /** Computes the list of main methods present in the code. */ + def mainMethods(scope: Tree, stats: List[Tree]): List[(Symbol, ParameterAnnotationss, DefaultValueSymbols, Option[Comment])] = stats.flatMap { + case stat: DefDef => + val sym = stat.symbol + sym.annotations.filter(_.matches(defn.MainAnnotationClass)) match { + case Nil => + Nil + case _ :: Nil => + val paramAnnotations = stat.paramss.flatMap(_.map( + valdef => valdef.symbol.annotations.filter(_.matches(defn.MainAnnotationParameterAnnotation)) + )) + (sym, paramAnnotations.toVector, defaultValueSymbols(scope, sym), stat.rawComment) :: Nil + case mainAnnot :: others => + report.error(s"method cannot have multiple main annotations", mainAnnot.tree) + Nil + } + case stat @ TypeDef(_, impl: Template) if stat.symbol.is(Module) => + mainMethods(stat, impl.body) + case _ => + Nil + } + + // Assuming that the top-level object was already generated, all main methods will have a scope + mainMethods(EmptyTree, stats).flatMap(mainAnnotationProxy) + } + + private def mainAnnotationProxy(mainFun: Symbol, paramAnnotations: ParameterAnnotationss, defaultValueSymbols: DefaultValueSymbols, docComment: Option[Comment])(using Context): Option[TypeDef] = { + val mainAnnot = mainFun.getAnnotation(defn.MainAnnotationClass).get + def pos = mainFun.sourcePos + + val documentation = new Documentation(docComment) + + /** () => value */ + def unitToValue(value: Tree): Tree = + val defDef = DefDef(nme.ANON_FUN, List(Nil), TypeTree(), value) + Block(defDef, Closure(Nil, Ident(nme.ANON_FUN), EmptyTree)) + + 
/** Generate a list of trees containing the ParamInfo instantiations. + * + * A ParamInfo has the following shape + * ``` + * new scala.annotation.MainAnnotation.Parameter("x", "S", false, false, "my param x", Seq(new scala.main.Alias("myX"))) + * ``` + */ + def parameterInfos(mt: MethodType): List[Tree] = + extension (tree: Tree) def withProperty(sym: Symbol, args: List[Tree]) = + Apply(Select(tree, sym.name), args) + + for ((formal, paramName), idx) <- mt.paramInfos.zip(mt.paramNames).zipWithIndex yield + val param = paramName.toString + val paramType0 = if formal.isRepeatedParam then formal.argTypes.head.dealias else formal.dealias + val paramType = paramType0.dealias + val paramTypeOwner = paramType.typeSymbol.owner + val paramTypeStr = + if paramTypeOwner == defn.EmptyPackageClass then paramType.show + else paramTypeOwner.showFullName + "." + paramType.show + val hasDefault = defaultValueSymbols.contains(idx) + val isRepeated = formal.isRepeatedParam + val paramDoc = documentation.argDocs.getOrElse(param, "") + val paramAnnots = + val annotationTrees = paramAnnotations(idx).map(instantiateAnnotation).toList + Apply(ref(defn.SeqModule.termRef), annotationTrees) + + val constructorArgs = List(param, paramTypeStr, hasDefault, isRepeated, paramDoc) + .map(value => Literal(Constant(value))) + + New(TypeTree(defn.MainAnnotationParameter.typeRef), List(constructorArgs :+ paramAnnots)) + + end parameterInfos + + /** + * Creates a list of references and definitions of arguments. + * The goal is to create the + * `val args0: () => S = annotation.argGetter[S](0, cmd(0), None)` + * part of the code. 
+ */ + def argValDefs(mt: MethodType): List[ValDef] = + for ((formal, paramName), idx) <- mt.paramInfos.zip(mt.paramNames).zipWithIndex yield + val argName = nme.args ++ idx.toString + val isRepeated = formal.isRepeatedParam + val formalType = if isRepeated then formal.argTypes.head else formal + val getterName = if isRepeated then nme.varargGetter else nme.argGetter + val defaultValueGetterOpt = defaultValueSymbols.get(idx) match + case None => ref(defn.NoneModule.termRef) + case Some(dvSym) => + val value = unitToValue(ref(dvSym.termRef)) + Apply(ref(defn.SomeClass.companionModule.termRef), value) + val argGetter0 = TypeApply(Select(Ident(nme.annotation), getterName), TypeTree(formalType) :: Nil) + val index = Literal(Constant(idx)) + val paramInfo = Apply(Select(Ident(nme.info), nme.parameters), index) + val argGetter = + if isRepeated then Apply(argGetter0, List(paramInfo, Apply(Select(Ident(nme.cmd), nme.drop), List(index)))) + else Apply(argGetter0, List(paramInfo, Apply(Ident(nme.cmd), List(index)), defaultValueGetterOpt)) + ValDef(argName, TypeTree(), argGetter) + end argValDefs + + + /** Create a list of argument references that will be passed as argument to the main method. + * `args0`, ...`argn*` + */ + def argRefs(mt: MethodType): List[Tree] = + for ((formal, paramName), idx) <- mt.paramInfos.zip(mt.paramNames).zipWithIndex yield + val argRef = Apply(Ident(nme.args ++ idx.toString), Nil) + if formal.isRepeatedParam then repeated(argRef) else argRef + end argRefs + + + /** Turns an annotation (e.g. `@main(40)`) into an instance of the class (e.g. `new scala.main(40)`). 
*/ + def instantiateAnnotation(annot: Annotation): Tree = + val argss = { + def recurse(t: tpd.Tree, acc: List[List[Tree]]): List[List[Tree]] = t match { + case Apply(t, args: List[tpd.Tree]) => recurse(t, extractArgs(args) :: acc) + case _ => acc + } + + def extractArgs(args: List[tpd.Tree]): List[Tree] = + args.flatMap { + case Typed(SeqLiteral(varargs, _), _) => varargs.map(arg => TypedSplice(arg)) + case arg: Select if arg.name.is(DefaultGetterName) => Nil // Ignore default values, they will be added later by the compiler + case arg => List(TypedSplice(arg)) + } + + recurse(annot.tree, Nil) + } + + New(TypeTree(annot.symbol.typeRef), argss) + end instantiateAnnotation + + def generateMainClass(mainCall: Tree, args: List[Tree], parameterInfos: List[Tree]): TypeDef = + val cmdInfo = + val nameTree = Literal(Constant(mainFun.showName)) + val docTree = Literal(Constant(documentation.mainDoc)) + val paramInfos = Apply(ref(defn.SeqModule.termRef), parameterInfos) + New(TypeTree(defn.MainAnnotationInfo.typeRef), List(List(nameTree, docTree, paramInfos))) + + val annotVal = ValDef( + nme.annotation, + TypeTree(), + instantiateAnnotation(mainAnnot) + ) + val infoVal = ValDef( + nme.info, + TypeTree(), + cmdInfo + ) + val command = ValDef( + nme.command, + TypeTree(), + Apply( + Select(Ident(nme.annotation), nme.command), + List(Ident(nme.info), Ident(nme.args)) + ) + ) + val argsVal = ValDef( + nme.cmd, + TypeTree(), + Select(Ident(nme.command), nme.get) + ) + val run = Apply(Select(Ident(nme.annotation), nme.run), mainCall) + val body0 = If( + Select(Ident(nme.command), nme.isDefined), + Block(argsVal :: args, run), + EmptyTree + ) + val body = Block(List(annotVal, infoVal, command), body0) // TODO add `if (cmd.nonEmpty)` + + val mainArg = ValDef(nme.args, TypeTree(defn.ArrayType.appliedTo(defn.StringType)), EmptyTree) + .withFlags(Param) + /** Replace typed `Ident`s that have been typed with a TypeSplice with the reference to the symbol. 
+ * The annotations will be retype-checked in another scope that may not have the same imports. + */ + def insertTypeSplices = new TreeMap { + override def transform(tree: Tree)(using Context): Tree = tree match + case tree: tpd.Ident @unchecked => TypedSplice(tree) + case tree => super.transform(tree) + } + val annots = mainFun.annotations + .filterNot(_.matches(defn.MainAnnotationClass)) + .map(annot => insertTypeSplices.transform(annot.tree)) + val mainMeth = DefDef(nme.main, (mainArg :: Nil) :: Nil, TypeTree(defn.UnitType), body) + .withFlags(JavaStatic) + .withAnnotations(annots) + val mainTempl = Template(emptyConstructor, Nil, Nil, EmptyValDef, mainMeth :: Nil) + val mainCls = TypeDef(mainFun.name.toTypeName, mainTempl) + .withFlags(Final | Invisible) + mainCls.withSpan(mainAnnot.tree.span.toSynthetic) + end generateMainClass + + if (!mainFun.owner.isStaticOwner) + report.error(s"main method is not statically accessible", pos) + None + else mainFun.info match { + case _: ExprType => + Some(generateMainClass(unitToValue(ref(mainFun.termRef)), Nil, Nil)) + case mt: MethodType => + if (mt.isImplicitMethod) + report.error(s"main method cannot have implicit parameters", pos) + None + else mt.resType match + case restpe: MethodType => + report.error(s"main method cannot be curried", pos) + None + case _ => + Some(generateMainClass(unitToValue(Apply(ref(mainFun.termRef), argRefs(mt))), argValDefs(mt), parameterInfos(mt))) + case _: PolyType => + report.error(s"main method cannot have type parameters", pos) + None + case _ => + report.error(s"main can only annotate a method", pos) + None + } + } + + /** A class responsible for extracting the docstrings of a method. */ + private class Documentation(docComment: Option[Comment]): + import util.CommentParsing._ + + /** The main part of the documentation. */ + lazy val mainDoc: String = _mainDoc + /** The parameters identified by @param. Maps from parameter name to its documentation. 
*/ + lazy val argDocs: Map[String, String] = _argDocs + + private var _mainDoc: String = "" + private var _argDocs: Map[String, String] = Map() + + docComment match { + case Some(comment) => if comment.isDocComment then parseDocComment(comment.raw) else _mainDoc = comment.raw + case None => + } + + private def cleanComment(raw: String): String = + var lines: Seq[String] = raw.trim.nn.split('\n').nn.toSeq + lines = lines.map(l => l.substring(skipLineLead(l, -1), l.length).nn.trim.nn) + var s = lines.foldLeft("") { + case ("", s2) => s2 + case (s1, "") if s1.last == '\n' => s1 // Multiple newlines are kept as single newlines + case (s1, "") => s1 + '\n' + case (s1, s2) if s1.last == '\n' => s1 + s2 + case (s1, s2) => s1 + ' ' + s2 + } + s.replaceAll(raw"\[\[", "").nn.replaceAll(raw"\]\]", "").nn.trim.nn + + private def parseDocComment(raw: String): Unit = + // Positions of the sections (@) in the docstring + val tidx: List[(Int, Int)] = tagIndex(raw) + + // Parse main comment + var mainComment: String = raw.substring(skipLineLead(raw, 0), startTag(raw, tidx)).nn + _mainDoc = cleanComment(mainComment) + + // Parse arguments comments + val argsCommentsSpans: Map[String, (Int, Int)] = paramDocs(raw, "@param", tidx) + val argsCommentsTextSpans = argsCommentsSpans.view.mapValues(extractSectionText(raw, _)) + val argsCommentsTexts = argsCommentsTextSpans.mapValues({ case (beg, end) => raw.substring(beg, end).nn }) + _argDocs = argsCommentsTexts.mapValues(cleanComment(_)).toMap + end Documentation +} diff --git a/tests/pos-with-compiler-cc/dotc/ast/NavigateAST.scala b/tests/pos-with-compiler-cc/dotc/ast/NavigateAST.scala new file mode 100644 index 000000000000..054ffe66f323 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/NavigateAST.scala @@ -0,0 +1,129 @@ +package dotty.tools.dotc +package ast + +import core.Contexts._ +import core.Decorators._ +import util.Spans._ +import Trees.{MemberDef, DefTree, WithLazyField} +import dotty.tools.dotc.core.Types.AnnotatedType 
import dotty.tools.dotc.core.Types.ImportType
import dotty.tools.dotc.core.Types.Type

/** Utility functions to go from typed to untyped ASTs */
// TODO: Handle trees with mixed source files
object NavigateAST {

  /** The untyped tree corresponding to typed tree `tree` in the compilation
   *  unit specified by `ctx`.
   *  Throws an `Error` carrying diagnostic path information if no exact
   *  untyped counterpart can be found.
   */
  def toUntyped(tree: tpd.Tree)(using Context): untpd.Tree =
    untypedPath(tree, exactMatch = true) match {
      case (utree: untpd.Tree) :: _ =>
        utree
      case _ =>
        // No exact match: recompute a loose path purely for the error message.
        val loosePath = untypedPath(tree, exactMatch = false)
        throw new
          Error(i"""no untyped tree for $tree, pos = ${tree.sourcePos}
                   |best matching path =\n$loosePath%\n====\n%
                   |path positions = ${loosePath.map(_.sourcePos)}""")
    }

  /** The reverse path of untyped trees starting with a tree that closest matches
   *  `tree` and ending in the untyped tree at the root of the compilation unit
   *  specified by `ctx`.
   *  @param exactMatch If `true`, the path must start with a node that exactly
   *                    matches `tree`, or `Nil` is returned.
   *                    If `false` the path might start with a node enclosing
   *                    the logical position of `tree`.
   *  Note: A complication concerns member definitions. ValDefs and DefDefs
   *  have after desugaring a position that spans just the name of the symbol being
   *  defined and nothing else. So we look instead for an untyped tree approximating the
   *  envelope of the definition, and declare success if we find another DefTree.
   */
  def untypedPath(tree: tpd.Tree, exactMatch: Boolean = false)(using Context): List[Positioned] =
    tree match {
      case tree: MemberDef[?] =>
        untypedPath(tree.span) match {
          // Any DefTree at the head counts as a match for a member def (see note above).
          case path @ (last: DefTree[?]) :: _ => path
          case path if !exactMatch => path
          case _ => Nil
        }
      case _ =>
        untypedPath(tree.span) match {
          case (path @ last :: _) if last.span == tree.span || !exactMatch => path
          case _ => Nil
        }
    }

  /** The reverse part of the untyped root of the compilation unit of `ctx` to
   *  the given `span`.
 */
  def untypedPath(span: Span)(using Context): List[Positioned] =
    pathTo(span, List(ctx.compilationUnit.untpdTree))


  /** The reverse path from any node in `from` to the node that closest encloses `span`,
   *  or `Nil` if no such path exists. If a non-empty path is returned it starts with
   *  the node closest enclosing `span` and ends with one of the nodes in `from`.
   *
   *  @param skipZeroExtent If true, skip over zero-extent nodes in the search. These nodes
   *                        do not correspond to code the user wrote since their start and
   *                        end point are the same, so this is useful when trying to reconcile
   *                        nodes with source code.
   */
  def pathTo(span: Span, from: List[Positioned], skipZeroExtent: Boolean = false)(using Context): List[Positioned] = {
    // Fold over all children reachable through `it`, keeping the "best fitting" path:
    // a candidate wins if it is the first real path found, or if its head span is a
    // strict sub-span (different and contained) of the current best head span.
    def childPath(it: Iterator[Any], path: List[Positioned]): List[Positioned] = {
      var bestFit: List[Positioned] = path
      while (it.hasNext) {
        val path1 = it.next() match {
          case p: Positioned => singlePath(p, path)
          case m: untpd.Modifiers => childPath(m.productIterator, path)
          case xs: List[?] => childPath(xs.iterator, path)
          case _ => path
        }
        if ((path1 ne path) &&
            ((bestFit eq path) ||
             bestFit.head.span != path1.head.span &&
             bestFit.head.span.contains(path1.head.span)))
          bestFit = path1
      }
      bestFit
    }
    /*
     * Annotations trees are located in the Type
     */
    def unpackAnnotations(t: Type, path: List[Positioned]): List[Positioned] =
      t match {
        case ann: AnnotatedType =>
          unpackAnnotations(ann.parent, childPath(ann.annot.tree.productIterator, path))
        case imp: ImportType =>
          childPath(imp.expr.productIterator, path)
        case other =>
          path
      }
    // Descend into `p` only if its span encloses `span`; otherwise the only remaining
    // candidates are annotation trees hidden inside the node's type.
    def singlePath(p: Positioned, path: List[Positioned]): List[Positioned] =
      if (p.span.exists && !(skipZeroExtent && p.span.isZeroExtent) && p.span.contains(span)) {
        // FIXME: We shouldn't be manually forcing trees here, we should replace
        // our usage of `productIterator` by something in `Positioned` that takes
        // care of low-level details like this for us.
        p match {
          case p: WithLazyField[?] =>
            p.forceIfLazy
          case _ =>
        }
        // Definitions additionally expose their modifiers (e.g. annotations) as children.
        val iterator = p match
          case defdef: DefTree[?] =>
            p.productIterator ++ defdef.mods.productIterator
          case _ =>
            p.productIterator
        childPath(iterator, p :: path)
      }
      else {
        p match {
          case t: untpd.TypeTree => unpackAnnotations(t.typeOpt, path)
          case _ => path
        }
      }
    childPath(from.iterator, Nil)
  }
}
diff --git a/tests/pos-with-compiler-cc/dotc/ast/Positioned.scala b/tests/pos-with-compiler-cc/dotc/ast/Positioned.scala
new file mode 100644
index 000000000000..fd30d441a6ee
--- /dev/null
+++ b/tests/pos-with-compiler-cc/dotc/ast/Positioned.scala
@@ -0,0 +1,246 @@
package dotty.tools
package dotc
package ast

import util.Spans._
import util.{SourceFile, SourcePosition, SrcPos}
import core.Contexts._
import core.Decorators._
import core.NameOps._
import core.Flags.{JavaDefined, ExtensionMethod}
import core.StdNames.nme
import ast.Trees.mods
import annotation.constructorOnly
import annotation.internal.sharable

/** A base class for things that have positions (currently: modifiers and trees)
 */
abstract class Positioned(implicit @constructorOnly src: SourceFile) extends SrcPos, Product, Cloneable, caps.Pure {
  import Positioned.{ids, nextId, debugId}

  // Backing field of `span`; assigned in the constructor via `span = envelope(src)`.
  private var mySpan: Span = _

  // Source file of this node; may be replaced when the node is cloned into another file.
  private var mySource: SourceFile = src

  /** A unique identifier in case -Yshow-tree-ids, or -Ydebug-tree-with-id
   * is set, -1 otherwise.
   */
  def uniqueId: Int =
    if ids != null && ids.nn.containsKey(this) then ids.nn.get(this).nn else -1

  // Register this node in the id map (only when id tracking is enabled), and dump a
  // stack trace when the node carrying the requested debug id is created.
  private def allocateId() =
    if ids != null then
      val ownId = nextId
      nextId += 1
      ids.nn.put(this: @unchecked, ownId)
      if ownId == debugId then
        println(s"Debug tree (id=$debugId) creation \n${this: @unchecked}\n")
        Thread.dumpStack()

  allocateId()

  /** The span part of the item's position */
  def span: Span = mySpan

  def span_=(span: Span): Unit =
    mySpan = span

  // Constructor side effect: compute this node's span from its children's spans.
  span = envelope(src)

  def source: SourceFile = mySource

  def sourcePos(using Context): SourcePosition = source.atSpan(span)

  /** This positioned item, widened to `SrcPos`. Used to make clear we only need the
   *  position, typically for error reporting.
   */
  final def srcPos: SrcPos = this

  /** A positioned item like this one with given `span`.
   *  If the positioned item is source-derived, a clone is returned.
   *  If the positioned item is synthetic, the position is updated
   *  destructively and the item itself is returned.
   */
  def withSpan(span: Span): this.type =
    if (span == mySpan) this
    else {
      val newpd: this.type =
        if !mySpan.exists then
          if span.exists then envelope(source, span.startPos) // fill in children spans
          this
        else
          cloneIn(source)
      newpd.span = span
      newpd
    }

  /** The union of startSpan and the spans of all positioned children that
   * have the same source as this node, except that Inlined nodes only
   * consider their `call` child.
   *
   * Side effect: Any descendants without spans but with the same source as this
   * node have their span set to the end position of the envelope of all children to
   * the left, or, if that one does not exist, to the start position of the envelope
   * of all children to the right.
+ */ + def envelope(src: SourceFile, startSpan: Span = NoSpan): Span = (this: @unchecked) match { + case Trees.Inlined(call, _, _) => + call.span + case _ => + def include(span: Span, x: Any): Span = x match { + case p: Positioned => + if (p.source != src) span + else if (p.span.exists) span.union(p.span) + else if (span.exists) { + if (span.end != MaxOffset) + p.span = p.envelope(src, span.endPos) + span + } + else // No span available to assign yet, signal this by returning a span with MaxOffset end + Span(MaxOffset, MaxOffset) + case m: untpd.Modifiers => + include(include(span, m.mods), m.annotations) + case y :: ys => + include(include(span, y), ys) + case _ => span + } + val limit = productArity + def includeChildren(span: Span, n: Int): Span = + if (n < limit) includeChildren(include(span, productElement(n): @unchecked), n + 1) + else span + val span1 = includeChildren(startSpan, 0) + val span2 = + if (!span1.exists || span1.end != MaxOffset) + span1 + else if (span1.start == MaxOffset) + // No positioned child was found + NoSpan + else + ///println(s"revisit $uniqueId with $span1") + // We have some children left whose span could not be assigned. + // Go through it again with the known start position. + includeChildren(span1.startPos, 0) + span2.toSynthetic + } + + /** Clone this node but assign it a fresh id which marks it as a node in `file`. */ + def cloneIn(src: SourceFile): this.type = { + val newpd: this.type = clone.asInstanceOf[this.type] + newpd.allocateId() + newpd.mySource = src + newpd + } + + def contains(that: Positioned): Boolean = { + def isParent(x: Any): Boolean = x match { + case x: Positioned => + x.contains(that) + case m: untpd.Modifiers => + m.mods.exists(isParent) || m.annotations.exists(isParent) + case xs: List[?] 
=> + xs.exists(isParent) + case _ => + false + } + (this eq that) || + (this.span contains that.span) && { + var n = productArity + var found = false + while (!found && n > 0) { + n -= 1 + found = isParent(productElement(n)) + } + found + } + } + + /** Check that all positioned items in this tree satisfy the following conditions: + * - Parent spans contain child spans + * - If item is a non-empty tree, it has a position + */ + def checkPos(nonOverlapping: Boolean)(using Context): Unit = try { + import untpd._ + var lastPositioned: Positioned | Null = null + var lastSpan = NoSpan + def check(p: Any): Unit = p match { + case p: Positioned => + assert(span contains p.span, + i"""position error, parent span does not contain child span + |parent = $this # $uniqueId, + |parent span = $span, + |child = $p # ${p.uniqueId}, + |child span = ${p.span}""".stripMargin) + p match { + case tree: Tree if !tree.isEmpty => + assert(tree.span.exists, + s"position error: position not set for $tree # ${tree.uniqueId}") + case _ => + } + if nonOverlapping then + this match { + case _: XMLBlock => + // FIXME: Trees generated by the XML parser do not satisfy `checkPos` + case _: WildcardFunction + if lastPositioned.isInstanceOf[ValDef] && !p.isInstanceOf[ValDef] => + // ignore transition from last wildcard parameter to body + case _ => + assert(!lastSpan.exists || !p.span.exists || lastSpan.end <= p.span.start, + i"""position error, child positions overlap or in wrong order + |parent = $this + |1st child = $lastPositioned + |1st child span = $lastSpan + |2nd child = $p + |2nd child span = ${p.span}""".stripMargin) + } + lastPositioned = p + lastSpan = p.span + p.checkPos(nonOverlapping) + case m: untpd.Modifiers => + m.annotations.foreach(check) + m.mods.foreach(check) + case xs: List[?] 
=> + xs.foreach(check) + case _ => + } + this match { + case tree: DefDef if tree.name == nme.CONSTRUCTOR && tree.mods.is(JavaDefined) => + // Special treatment for constructors coming from Java: + // Leave out leading type params, they are copied with wrong positions from parent class + check(tree.mods) + check(tree.trailingParamss) + case tree: DefDef if tree.mods.is(ExtensionMethod) => + tree.paramss match + case vparams1 :: vparams2 :: rest if tree.name.isRightAssocOperatorName => + // omit check for right-associatiove extension methods; their parameters were swapped + case _ => + check(tree.paramss) + check(tree.tpt) + check(tree.rhs) + case _ => + val end = productArity + var n = 0 + while (n < end) { + check(productElement(n)) + n += 1 + } + } + } + catch { + case ex: AssertionError => + println(i"error while checking $this") + throw ex + } +} + +object Positioned { + @sharable private var debugId = Int.MinValue + @sharable private var ids: java.util.WeakHashMap[Positioned, Int] | Null = null + @sharable private var nextId: Int = 0 + + def init(using Context): Unit = + debugId = ctx.settings.YdebugTreeWithId.value + if ids == null && ctx.settings.YshowTreeIds.value + || debugId != ctx.settings.YdebugTreeWithId.default + then + ids = java.util.WeakHashMap() +} diff --git a/tests/pos-with-compiler-cc/dotc/ast/TreeInfo.scala b/tests/pos-with-compiler-cc/dotc/ast/TreeInfo.scala new file mode 100644 index 000000000000..ff59a795d818 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/TreeInfo.scala @@ -0,0 +1,1075 @@ +package dotty.tools +package dotc +package ast + +import core._ +import Flags._, Trees._, Types._, Contexts._ +import Names._, StdNames._, NameOps._, Symbols._ +import typer.ConstFold +import reporting.trace +import dotty.tools.dotc.transform.SymUtils._ +import Decorators._ +import Constants.Constant +import scala.collection.mutable + +import scala.annotation.tailrec + +trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] => + + // 
Note: the <: Type constraint looks necessary (and is needed to make the file compile in dotc).
  // But Scalac accepts the program happily without it. Need to find out why.

  // Identity at this level; overridden in UntypedTreeInfo to strip TypedSplice/Parens wrappers.
  def unsplice(tree: Trees.Tree[T]): Trees.Tree[T] = tree

  /** Is this tree a declaration (no right-hand side) or a type definition? */
  def isDeclarationOrTypeDef(tree: Tree): Boolean = unsplice(tree) match {
    case DefDef(_, _, _, EmptyTree)
      | ValDef(_, _, EmptyTree)
      | TypeDef(_, _) => true
    case _ => false
  }

  /** Is this tree an application of an operator-assignment method (e.g. `x += y`)? */
  def isOpAssign(tree: Tree): Boolean = unsplice(tree) match {
    case Apply(fn, _ :: _) =>
      unsplice(fn) match {
        case Select(_, name) if name.isOpAssignmentName => true
        case _ => false
      }
    case _ => false
  }

  /** Pairs up formal parameters with actual arguments; a repeated parameter
   *  absorbs all remaining arguments.
   */
  class MatchingArgs(params: List[Symbol], args: List[Tree])(using Context) {
    /** Apply `f` to each (param, arg) pair; returns `true` iff params and args
     *  line up exactly (modulo a repeated parameter).
     */
    def foreach(f: (Symbol, Tree) => Unit): Boolean = {
      def recur(params: List[Symbol], args: List[Tree]): Boolean = params match {
        case Nil => args.isEmpty
        case param :: params1 =>
          if (param.info.isRepeatedParam) {
            // A repeated parameter takes all remaining arguments.
            for (arg <- args) f(param, arg)
            true
          }
          else args match {
            case Nil => false
            case arg :: args1 =>
              f(param, args.head) // NOTE(review): `args.head` is the same value as the binder `arg`
              recur(params1, args1)
          }
      }
      recur(params, args)
    }
    def zipped: List[(Symbol, Tree)] = map((_, _))
    def map[R](f: (Symbol, Tree) => R): List[R] = {
      val b = List.newBuilder[R]
      foreach(b += f(_, _))
      b.result()
    }
  }

  /** The method part of an application node, possibly enclosed in a block
   *  with only valdefs as statements. the reason for also considering blocks
   *  is that named arguments can transform a call into a block, e.g.
   *    (b = foo, a = bar)
   *  is transformed to
   *    { val x$1 = foo
   *      val x$2 = bar
   *      (x$2, x$1)
   *    }
   */
  def methPart(tree: Tree): Tree = stripApply(tree) match {
    case TypeApply(fn, _) => methPart(fn)
    case AppliedTypeTree(fn, _) => methPart(fn) // !!! should not be needed
    case Block(stats, expr) => methPart(expr)
    case mp => mp
  }

  /** If this is an application, its function part, stripping all
   * Apply nodes (but leaving TypeApply nodes in).
Otherwise the tree itself.
   */
  def stripApply(tree: Tree): Tree = unsplice(tree) match
    case Apply(fun, _) => stripApply(fun)
    case other => other

  /** If this is a block (or an inlined expression), its expression part. */
  def stripBlock(tree: Tree): Tree = unsplice(tree) match
    case Block(_, body) => stripBlock(body)
    case Inlined(_, _, body) => stripBlock(body)
    case other => other

  /** Strip enclosing Inlined nodes (but not blocks). */
  def stripInlined(tree: Tree): Tree = unsplice(tree) match
    case Inlined(_, _, body) => stripInlined(body)
    case other => other

  /** Drop a single enclosing Annotated node, if present (no unsplicing). */
  def stripAnnotated(tree: Tree): Tree = tree match
    case Annotated(body, _) => body
    case other => other

  /** The number of term arguments in an application, summed over all Apply nodes. */
  def numArgs(tree: Tree): Int = unsplice(tree) match
    case Apply(fun, argList) => numArgs(fun) + argList.length
    case TypeApply(fun, _) => numArgs(fun)
    case Block(_, body) => numArgs(body)
    case _ => 0

  /** All term arguments of an application in a single flattened list */
  def allArguments(tree: Tree): List[Tree] = unsplice(tree) match
    case Apply(fun, argList) => allArguments(fun) ::: argList
    case TypeApply(fun, _) => allArguments(fun)
    case Block(_, body) => allArguments(body)
    case _ => Nil

  /** Is tree explicitly parameterized with type arguments? */
  def hasExplicitTypeArgs(tree: Tree): Boolean = tree match
    case TypeApply(tycon, targs) =>
      // Synthesized type args have zero-extent spans or lie inside the tycon's span.
      targs.exists(targ => !targ.span.isZeroExtent && !tycon.span.contains(targ.span))
    case _ => false

  /** Is tree a path, i.e. a chain of selections rooted in an Ident/This/Super? */
  def isPath(tree: Tree): Boolean = unsplice(tree) match
    case Ident(_) | This(_) | Super(_, _) => true
    case Select(qual, _) => isPath(qual)
    case _ => false

  /** Is tree a self constructor call this(...)? I.e. a call to a constructor of the
   *  same object?
   */
  def isSelfConstrCall(tree: Tree): Boolean = methPart(tree) match
    case Ident(nme.CONSTRUCTOR) | Select(This(_), nme.CONSTRUCTOR) => true
    case _ => false

  /** Is tree a super constructor call?
+ */ + def isSuperConstrCall(tree: Tree): Boolean = methPart(tree) match { + case Select(Super(_, _), nme.CONSTRUCTOR) => true + case _ => false + } + + def isSuperSelection(tree: Tree): Boolean = unsplice(tree) match { + case Select(Super(_, _), _) => true + case _ => false + } + + def isSelfOrSuperConstrCall(tree: Tree): Boolean = methPart(tree) match { + case Ident(nme.CONSTRUCTOR) + | Select(This(_), nme.CONSTRUCTOR) + | Select(Super(_, _), nme.CONSTRUCTOR) => true + case _ => false + } + + /** Is tree a backquoted identifier or definition */ + def isBackquoted(tree: Tree): Boolean = tree.hasAttachment(Backquoted) + + /** Is tree a variable pattern? */ + def isVarPattern(pat: Tree): Boolean = unsplice(pat) match { + case x: Ident => x.name.isVarPattern && !isBackquoted(x) + case _ => false + } + + /** The first constructor definition in `stats` */ + def firstConstructor(stats: List[Tree]): Tree = stats match { + case (meth: DefDef) :: _ if meth.name.isConstructorName => meth + case stat :: stats => firstConstructor(stats) + case nil => EmptyTree + } + + /** Is tpt a vararg type of the form T* or => T*? */ + def isRepeatedParamType(tpt: Tree)(using Context): Boolean = stripByNameType(tpt) match { + case tpt: TypeTree => tpt.typeOpt.isRepeatedParam + case AppliedTypeTree(Select(_, tpnme.REPEATED_PARAM_CLASS), _) => true + case _ => false + } + + /** Is this argument node of the form *, or is it a reference to + * such an argument ? The latter case can happen when an argument is lifted. + */ + def isWildcardStarArg(tree: Tree)(using Context): Boolean = unbind(tree) match { + case Typed(Ident(nme.WILDCARD_STAR), _) => true + case Typed(_, Ident(tpnme.WILDCARD_STAR)) => true + case Typed(_, tpt: TypeTree) => tpt.typeOpt.isRepeatedParam + case NamedArg(_, arg) => isWildcardStarArg(arg) + case arg => arg.typeOpt.widen.isRepeatedParam + } + + /** Is tree a type tree of the form `=> T` or (under pureFunctions) `{refs}-> T`? 
*/ + def isByNameType(tree: Tree)(using Context): Boolean = + stripByNameType(tree) ne tree + + /** Strip `=> T` to `T` and (under pureFunctions) `{refs}-> T` to `T` */ + def stripByNameType(tree: Tree)(using Context): Tree = unsplice(tree) match + case ByNameTypeTree(t1) => t1 + case untpd.CapturingTypeTree(_, parent) => + val parent1 = stripByNameType(parent) + if parent1 eq parent then tree else parent1 + case _ => tree + + /** All type and value parameter symbols of this DefDef */ + def allParamSyms(ddef: DefDef)(using Context): List[Symbol] = + ddef.paramss.flatten.map(_.symbol) + + /** Does this argument list end with an argument of the form : _* ? */ + def isWildcardStarArgList(trees: List[Tree])(using Context): Boolean = + trees.nonEmpty && isWildcardStarArg(trees.last) + + /** Is the argument a wildcard argument of the form `_` or `x @ _`? + */ + def isWildcardArg(tree: Tree): Boolean = unbind(tree) match { + case Ident(nme.WILDCARD) => true + case _ => false + } + + /** Does this list contain a named argument tree? */ + def hasNamedArg(args: List[Any]): Boolean = args exists isNamedArg + val isNamedArg: Any => Boolean = (arg: Any) => arg.isInstanceOf[Trees.NamedArg[_]] + + /** Is this pattern node a catch-all (wildcard or variable) pattern? */ + def isDefaultCase(cdef: CaseDef): Boolean = cdef match { + case CaseDef(pat, EmptyTree, _) => isWildcardArg(pat) + case _ => false + } + + /** Does this CaseDef catch Throwable? */ + def catchesThrowable(cdef: CaseDef)(using Context): Boolean = + catchesAllOf(cdef, defn.ThrowableType) + + /** Does this CaseDef catch everything of a certain Type? */ + def catchesAllOf(cdef: CaseDef, threshold: Type)(using Context): Boolean = + isDefaultCase(cdef) || + cdef.guard.isEmpty && { + unbind(cdef.pat) match { + case Typed(Ident(nme.WILDCARD), tpt) => threshold <:< tpt.typeOpt + case _ => false + } + } + + /** Is this case guarded? 
 */
  def isGuardedCase(cdef: CaseDef): Boolean = cdef.guard ne EmptyTree

  /** Is this parameter list a using clause? */
  def isUsingClause(params: ParamClause)(using Context): Boolean = params match
    case ValDefs(vparam :: _) =>
      // Prefer the entered symbol's flags when it exists; fall back to the
      // syntactic modifiers otherwise.
      val sym = vparam.symbol
      if sym.exists then sym.is(Given) else vparam.mods.is(Given)
    case _ =>
      false

  /** Is this parameter list a type-parameter clause or a using clause? */
  def isUsingOrTypeParamClause(params: ParamClause)(using Context): Boolean = params match
    case TypeDefs(_) => true
    case _ => isUsingClause(params)

  /** Is this parameter list a type-parameter clause? */
  def isTypeParamClause(params: ParamClause)(using Context): Boolean = params match
    case TypeDefs(_) => true
    case _ => false

  // The `scala.language` sub-objects that count as language-import sub-categories.
  private val languageSubCategories = Set(nme.experimental, nme.deprecated)

  /** If `path` looks like a language import, `Some(name)` where name
   *  is the imported sub-category (`experimental` or `deprecated`) if such a
   *  sub-module is imported, and the empty term name otherwise.
   */
  def languageImport(path: Tree): Option[TermName] = path match
    case Select(p1, name: TermName) if languageSubCategories.contains(name) =>
      languageImport(p1) match
        case Some(EmptyTermName) => Some(name)
        case _ => None
    case p1: RefTree if p1.name == nme.language =>
      // Accept the prefixes `language`, `scala.language` and `_root_.scala.language`.
      p1.qualifier match
        case EmptyTree => Some(EmptyTermName)
        case p2: RefTree if p2.name == nme.scala =>
          p2.qualifier match
            case EmptyTree => Some(EmptyTermName)
            case Ident(nme.ROOTPKG) => Some(EmptyTermName)
            case _ => None
        case _ => None
    case _ => None

  /** The underlying pattern ignoring any bindings */
  def unbind(x: Tree): Tree = unsplice(x) match {
    case Bind(_, y) => unbind(y)
    case y => y
  }

  /** The largest subset of {NoInits, PureInterface} that a
   * trait or class with these parents can have as flags.
+ */ + def parentsKind(parents: List[Tree])(using Context): FlagSet = parents match { + case Nil => NoInitsInterface + case Apply(_, _ :: _) :: _ => EmptyFlags + case _ :: parents1 => parentsKind(parents1) + } + + /** Checks whether predicate `p` is true for all result parts of this expression, + * where we zoom into Ifs, Matches, and Blocks. + */ + def forallResults(tree: Tree, p: Tree => Boolean): Boolean = tree match { + case If(_, thenp, elsep) => forallResults(thenp, p) && forallResults(elsep, p) + case Match(_, cases) => cases forall (c => forallResults(c.body, p)) + case Block(_, expr) => forallResults(expr, p) + case _ => p(tree) + } +} + +trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] => + import untpd._ + + /** The underlying tree when stripping any TypedSplice or Parens nodes */ + override def unsplice(tree: Tree): Tree = tree match { + case TypedSplice(tree1) => tree1 + case Parens(tree1) => unsplice(tree1) + case _ => tree + } + + def functionWithUnknownParamType(tree: Tree): Option[Tree] = tree match { + case Function(args, _) => + if (args.exists { + case ValDef(_, tpt, _) => tpt.isEmpty + case _ => false + }) Some(tree) + else None + case Match(EmptyTree, _) => + Some(tree) + case Block(Nil, expr) => + functionWithUnknownParamType(expr) + case _ => + None + } + + def isFunctionWithUnknownParamType(tree: Tree): Boolean = + functionWithUnknownParamType(tree).isDefined + + def isFunction(tree: Tree): Boolean = tree match + case Function(_, _) | Match(EmptyTree, _) => true + case Block(Nil, expr) => isFunction(expr) + case _ => false + + /** Is `tree` an context function or closure, possibly nested in a block? 
*/ + def isContextualClosure(tree: Tree)(using Context): Boolean = unsplice(tree) match { + case tree: FunctionWithMods => tree.mods.is(Given) + case Function((param: untpd.ValDef) :: _, _) => param.mods.is(Given) + case Closure(_, meth, _) => true + case Block(Nil, expr) => isContextualClosure(expr) + case Block(DefDef(nme.ANON_FUN, params :: _, _, _) :: Nil, cl: Closure) => + if params.isEmpty then + cl.tpt.eq(untpd.ContextualEmptyTree) || defn.isContextFunctionType(cl.tpt.typeOpt) + else + isUsingClause(params) + case _ => false + } + + /** The largest subset of {NoInits, PureInterface} that a + * trait or class enclosing this statement can have as flags. + */ + private def defKind(tree: Tree)(using Context): FlagSet = unsplice(tree) match { + case EmptyTree | _: Import => NoInitsInterface + case tree: TypeDef => if (tree.isClassDef) NoInits else NoInitsInterface + case tree: DefDef => + if tree.unforcedRhs == EmptyTree + && tree.paramss.forall { + case ValDefs(vparams) => vparams.forall(_.rhs.isEmpty) + case _ => true + } + then + NoInitsInterface + else if tree.mods.is(Given) && tree.paramss.isEmpty then + EmptyFlags // might become a lazy val: TODO: check whether we need to suppress NoInits once we have new lazy val impl + else + NoInits + case tree: ValDef => if (tree.unforcedRhs == EmptyTree) NoInitsInterface else EmptyFlags + case _ => EmptyFlags + } + + /** The largest subset of {NoInits, PureInterface} that a + * trait or class with this body can have as flags. 
+ */ + def bodyKind(body: List[Tree])(using Context): FlagSet = + body.foldLeft(NoInitsInterface)((fs, stat) => fs & defKind(stat)) + + /** Info of a variable in a pattern: The named tree and its type */ + type VarInfo = (NameTree, Tree) + + /** An extractor for trees of the form `id` or `id: T` */ + object IdPattern { + def unapply(tree: Tree)(using Context): Option[VarInfo] = tree match { + case id: Ident if id.name != nme.WILDCARD => Some(id, TypeTree()) + case Typed(id: Ident, tpt) => Some((id, tpt)) + case _ => None + } + } + + /** Under pureFunctions: A builder and extractor for `=> T`, which is an alias for `{*}-> T`. + * Only trees of the form `=> T` are matched; trees written directly as `{*}-> T` + * are ignored by the extractor. + */ + object ImpureByNameTypeTree: + + def apply(tp: ByNameTypeTree)(using Context): untpd.CapturingTypeTree = + untpd.CapturingTypeTree( + untpd.captureRoot.withSpan(tp.span.startPos) :: Nil, tp) + + def unapply(tp: Tree)(using Context): Option[ByNameTypeTree] = tp match + case untpd.CapturingTypeTree(id @ Select(_, nme.CAPTURE_ROOT) :: Nil, bntp: ByNameTypeTree) + if id.span == bntp.span.startPos => Some(bntp) + case _ => None + end ImpureByNameTypeTree +} + +trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => + import TreeInfo._ + import tpd._ + + /** The purity level of this statement. + * @return Pure if statement has no side effects + * Idempotent if running the statement a second time has no side effects + * Impure otherwise + */ + def statPurity(tree: Tree)(using Context): PurityLevel = unsplice(tree) match { + case EmptyTree + | TypeDef(_, _) + | Import(_, _) + | DefDef(_, _, _, _) => + Pure + case vdef @ ValDef(_, _, _) => + if (vdef.symbol.flags is Mutable) Impure else exprPurity(vdef.rhs) `min` Pure + case _ => + Impure + // TODO: It seem like this should be exprPurity(tree) + // But if we do that the repl/vars test break. Need to figure out why that's the case. 
+ } + + /** The purity level of this expression. See docs for PurityLevel for what that means + * + * Note that purity and idempotency are treated differently. + * References to modules and lazy vals are impure (side-effecting) both because + * side-effecting code may be executed and because the first reference + * takes a different code path than all to follow; but they are idempotent + * because running the expression a second time gives the cached result. + */ + def exprPurity(tree: Tree)(using Context): PurityLevel = unsplice(tree) match { + case EmptyTree + | This(_) + | Super(_, _) + | Literal(_) => + PurePath + case Ident(_) => + refPurity(tree) + case Select(qual, _) => + if (tree.symbol.is(Erased)) Pure + else refPurity(tree) `min` exprPurity(qual) + case New(_) | Closure(_, _, _) => + Pure + case TypeApply(fn, _) => + if (fn.symbol.is(Erased) || fn.symbol == defn.QuotedTypeModule_of || fn.symbol == defn.Predef_classOf) Pure else exprPurity(fn) + case Apply(fn, args) => + if isPureApply(tree, fn) then + minOf(exprPurity(fn), args.map(exprPurity)) `min` Pure + else if fn.symbol.is(Erased) then + Pure + else if fn.symbol.isStableMember /* && fn.symbol.is(Lazy) */ then + minOf(exprPurity(fn), args.map(exprPurity)) `min` Idempotent + else + Impure + case Typed(expr, _) => + exprPurity(expr) + case Block(stats, expr) => + minOf(exprPurity(expr), stats.map(statPurity)) + case Inlined(_, bindings, expr) => + minOf(exprPurity(expr), bindings.map(statPurity)) + case NamedArg(_, expr) => + exprPurity(expr) + case _ => + Impure + } + + private def minOf(l0: PurityLevel, ls: List[PurityLevel]) = ls.foldLeft(l0)(_ `min` _) + + def isPurePath(tree: Tree)(using Context): Boolean = tree.tpe match { + case tpe: ConstantType => exprPurity(tree) >= Pure + case _ => exprPurity(tree) == PurePath + } + + def isPureExpr(tree: Tree)(using Context): Boolean = + exprPurity(tree) >= Pure + + def isIdempotentPath(tree: Tree)(using Context): Boolean = tree.tpe match { + case tpe: 
ConstantType => exprPurity(tree) >= Idempotent + case _ => exprPurity(tree) >= IdempotentPath + } + + def isIdempotentExpr(tree: Tree)(using Context): Boolean = + exprPurity(tree) >= Idempotent + + def isPureBinding(tree: Tree)(using Context): Boolean = statPurity(tree) >= Pure + + /** Is the application `tree` with function part `fn` known to be pure? + * Function value and arguments can still be impure. + */ + def isPureApply(tree: Tree, fn: Tree)(using Context): Boolean = + def isKnownPureOp(sym: Symbol) = + sym.owner.isPrimitiveValueClass + || sym.owner == defn.StringClass + || defn.pureMethods.contains(sym) + tree.tpe.isInstanceOf[ConstantType] && tree.symbol != NoSymbol && isKnownPureOp(tree.symbol) // A constant expression with pure arguments is pure. + || fn.symbol.isStableMember && !fn.symbol.is(Lazy) // constructors of no-inits classes are stable + + /** The purity level of this reference. + * @return + * PurePath if reference is (nonlazy and stable) + * or to a parameterized function + * or its type is a constant type + * IdempotentPath if reference is lazy and stable + * Impure otherwise + * @DarkDimius: need to make sure that lazy accessor methods have Lazy and Stable + * flags set. 
+ */ + def refPurity(tree: Tree)(using Context): PurityLevel = { + val sym = tree.symbol + if (!tree.hasType) Impure + else if !tree.tpe.widen.isParameterless then PurePath + else if sym.is(Erased) then PurePath + else if tree.tpe.isInstanceOf[ConstantType] then PurePath + else if (!sym.isStableMember) Impure + else if (sym.is(Module)) + if (sym.moduleClass.isNoInitsRealClass) PurePath else IdempotentPath + else if (sym.is(Lazy)) IdempotentPath + else if sym.isAllOf(InlineParam) then Impure + else PurePath + } + + def isPureRef(tree: Tree)(using Context): Boolean = + refPurity(tree) == PurePath + def isIdempotentRef(tree: Tree)(using Context): Boolean = + refPurity(tree) >= IdempotentPath + + /** (1) If `tree` is a constant expression, its value as a Literal, + * or `tree` itself otherwise. + * + * Note: Demanding idempotency instead of purity in literalize is strictly speaking too loose. + * Example + * + * object O { final val x = 42; println("43") } + * O.x + * + * Strictly speaking we can't replace `O.x` with `42`. But this would make + * most expressions non-constant. Maybe we can change the spec to accept this + * kind of eliding behavior. Or else enforce true purity in the compiler. + * The choice will be affected by what we will do with `inline` and with + * Singleton type bounds (see SIP 23). Presumably + * + * object O1 { val x: Singleton = 42; println("43") } + * object O2 { inline val x = 42; println("43") } + * + * should behave differently. + * + * O1.x should have the same effect as { println("43"); 42 } + * + * whereas + * + * O2.x = 42 + * + * Revisit this issue once we have standardized on `inline`. Then we can demand + * purity of the prefix unless the selection goes to a inline val. + * + * Note: This method should be applied to all term tree nodes that are not literals, + * that can be idempotent, and that can have constant types. 
So far, only nodes + * of the following classes qualify: + * + * Ident + * Select + * TypeApply + * + * (2) A primitive unary operator expression `pre.op` where `op` is one of `+`, `-`, `~`, `!` + * that has a constant type `ConstantType(v)` but that is not a constant expression + * (i.e. `pre` has side-effects) is translated to + * + * { pre; v } + * + * (3) An expression `pre.getClass[..]()` that has a constant type `ConstantType(v)` but where + * `pre` has side-effects is translated to: + * + * { pre; v } + * + * This avoids the situation where we have a Select node that does not have a symbol. + */ + def constToLiteral(tree: Tree)(using Context): Tree = { + assert(!tree.isType) + val tree1 = ConstFold(tree) + tree1.tpe.widenTermRefExpr.dealias.normalized match { + case ConstantType(Constant(_: Type)) if tree.isInstanceOf[Block] => + // We can't rewrite `{ class A; classOf[A] }` to `classOf[A]`, so we leave + // blocks returning a class literal alone, even if they're idempotent. + tree1 + case ConstantType(value) => + def dropOp(t: Tree): Tree = t match + case Select(pre, _) if t.tpe.isInstanceOf[ConstantType] => + // it's a primitive unary operator + pre + case Apply(TypeApply(Select(pre, nme.getClass_), _), Nil) => + pre + case _ => + tree1 + + val countsAsPure = + if dropOp(tree1).symbol.isInlineVal + then isIdempotentExpr(tree1) + else isPureExpr(tree1) + + if countsAsPure then Literal(value).withSpan(tree.span) + else + val pre = dropOp(tree1) + if pre eq tree1 then tree1 + else + // it's a primitive unary operator or getClass call; + // Simplify `pre.op` to `{ pre; v }` where `v` is the value of `pre.op` + Block(pre :: Nil, Literal(value)).withSpan(tree.span) + case _ => tree1 + } + } + + def isExtMethodApply(tree: Tree)(using Context): Boolean = methPart(tree) match + case Inlined(call, _, _) => isExtMethodApply(call) + case tree @ Select(qual, nme.apply) => tree.symbol.is(ExtensionMethod) || isExtMethodApply(qual) + case tree => 
tree.symbol.is(ExtensionMethod) + + /** Is symbol potentially a getter of a mutable variable? + */ + def mayBeVarGetter(sym: Symbol)(using Context): Boolean = { + def maybeGetterType(tpe: Type): Boolean = tpe match { + case _: ExprType => true + case tpe: MethodType => tpe.isImplicitMethod + case tpe: PolyType => maybeGetterType(tpe.resultType) + case _ => false + } + sym.owner.isClass && !sym.isStableMember && maybeGetterType(sym.info) + } + + /** Is tree a reference to a mutable variable, or to a potential getter + * that has a setter in the same class? + */ + def isVariableOrGetter(tree: Tree)(using Context): Boolean = { + def sym = tree.symbol + def isVar = sym.is(Mutable) + def isGetter = + mayBeVarGetter(sym) && sym.owner.info.member(sym.name.asTermName.setterName).exists + + unsplice(tree) match { + case Ident(_) => isVar + case Select(_, _) => isVar || isGetter + case Apply(_, _) => + methPart(tree) match { + case Select(qual, nme.apply) => qual.tpe.member(nme.update).exists + case _ => false + } + case _ => false + } + } + + /** Is tree a `this` node which belongs to `enclClass`? 
*/ + def isSelf(tree: Tree, enclClass: Symbol)(using Context): Boolean = unsplice(tree) match { + case This(_) => tree.symbol == enclClass + case _ => false + } + + /** Strips layers of `.asInstanceOf[T]` / `_.$asInstanceOf[T]()` from an expression */ + def stripCast(tree: Tree)(using Context): Tree = { + def isCast(sel: Tree) = sel.symbol.isTypeCast + unsplice(tree) match { + case TypeApply(sel @ Select(inner, _), _) if isCast(sel) => + stripCast(inner) + case Apply(TypeApply(sel @ Select(inner, _), _), Nil) if isCast(sel) => + stripCast(inner) + case t => + t + } + } + + /** The type arguments of a possibly curried call */ + def typeArgss(tree: Tree): List[List[Tree]] = + @tailrec + def loop(tree: Tree, argss: List[List[Tree]]): List[List[Tree]] = tree match + case TypeApply(fn, args) => loop(fn, args :: argss) + case Apply(fn, args) => loop(fn, argss) + case _ => argss + loop(tree, Nil) + + /** The term arguments of a possibly curried call */ + def termArgss(tree: Tree): List[List[Tree]] = + @tailrec + def loop(tree: Tree, argss: List[List[Tree]]): List[List[Tree]] = tree match + case Apply(fn, args) => loop(fn, args :: argss) + case TypeApply(fn, args) => loop(fn, argss) + case _ => argss + loop(tree, Nil) + + /** The type and term arguments of a possibly curried call, in the order they are given */ + def allArgss(tree: Tree): List[List[Tree]] = + @tailrec + def loop(tree: Tree, argss: List[List[Tree]]): List[List[Tree]] = tree match + case tree: GenericApply => loop(tree.fun, tree.args :: argss) + case _ => argss + loop(tree, Nil) + + /** The function part of a possibly curried call. 
Unlike `methPart` this one does + * not decompose blocks + */ + def funPart(tree: Tree): Tree = tree match + case tree: GenericApply => funPart(tree.fun) + case tree => tree + + /** Decompose a template body into parameters and other statements */ + def decomposeTemplateBody(body: List[Tree])(using Context): (List[Tree], List[Tree]) = + body.partition { + case stat: TypeDef => stat.symbol is Flags.Param + case stat: ValOrDefDef => + stat.symbol.is(Flags.ParamAccessor) && !stat.symbol.isSetter + case _ => false + } + + /** An extractor for closures, either contained in a block or standalone. + */ + object closure { + def unapply(tree: Tree): Option[(List[Tree], Tree, Tree)] = tree match { + case Block(_, expr) => unapply(expr) + case Closure(env, meth, tpt) => Some(env, meth, tpt) + case Typed(expr, _) => unapply(expr) + case _ => None + } + } + + /** An extractor for def of a closure contained the block of the closure. */ + object closureDef { + def unapply(tree: Tree)(using Context): Option[DefDef] = tree match { + case Block((meth : DefDef) :: Nil, closure: Closure) if meth.symbol == closure.meth.symbol => + Some(meth) + case Block(Nil, expr) => + unapply(expr) + case Inlined(_, bindings, expr) if bindings.forall(isPureBinding) => + unapply(expr) + case _ => + None + } + } + + /** If tree is a closure, its body, otherwise tree itself */ + def closureBody(tree: Tree)(using Context): Tree = tree match { + case closureDef(meth) => meth.rhs + case _ => tree + } + + /** The variables defined by a pattern, in reverse order of their appearance. 
*/ + def patVars(tree: Tree)(using Context): List[Symbol] = { + val acc = new TreeAccumulator[List[Symbol]] { + def apply(syms: List[Symbol], tree: Tree)(using Context) = tree match { + case Bind(_, body) => apply(tree.symbol :: syms, body) + case Annotated(tree, id @ Ident(tpnme.BOUNDTYPE_ANNOT)) => apply(id.symbol :: syms, tree) + case _ => foldOver(syms, tree) + } + } + acc(Nil, tree) + } + + /** Is this pattern node a catch-all or type-test pattern? */ + def isCatchCase(cdef: CaseDef)(using Context): Boolean = cdef match { + case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) => + isSimpleThrowable(tpt.tpe) + case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) => + isSimpleThrowable(tpt.tpe) + case _ => + isDefaultCase(cdef) + } + + private def isSimpleThrowable(tp: Type)(using Context): Boolean = tp match { + case tp @ TypeRef(pre, _) => + (pre == NoPrefix || pre.typeSymbol.isStatic) && + (tp.symbol derivesFrom defn.ThrowableClass) && !tp.symbol.is(Trait) + case _ => + false + } + + /** The symbols defined locally in a statement list */ + def localSyms(stats: List[Tree])(using Context): List[Symbol] = + val locals = new mutable.ListBuffer[Symbol] + for stat <- stats do + if stat.isDef && stat.symbol.exists then locals += stat.symbol + locals.toList + + /** If `tree` is a DefTree, the symbol defined by it, otherwise NoSymbol */ + def definedSym(tree: Tree)(using Context): Symbol = + if (tree.isDef) tree.symbol else NoSymbol + + /** Going from child to parent, the path of tree nodes that starts + * with a definition of symbol `sym` and ends with `root`, or Nil + * if no such path exists. + * Pre: `sym` must have a position. 
+ */ + def defPath(sym: Symbol, root: Tree)(using Context): List[Tree] = trace.onDebug(s"defpath($sym with position ${sym.span}, ${root.show})") { + require(sym.span.exists, sym) + object accum extends TreeAccumulator[List[Tree]] { + def apply(x: List[Tree], tree: Tree)(using Context): List[Tree] = + if (tree.span.contains(sym.span)) + if (definedSym(tree) == sym) tree :: x + else { + val x1 = foldOver(x, tree) + if (x1 ne x) tree :: x1 else x1 + } + else x + } + accum(Nil, root) + } + + /** The top level classes in this tree, including only those module classes that + * are not a linked class of some other class in the result. + */ + def topLevelClasses(tree: Tree)(using Context): List[ClassSymbol] = tree match { + case PackageDef(_, stats) => stats.flatMap(topLevelClasses) + case tdef: TypeDef if tdef.symbol.isClass => tdef.symbol.asClass :: Nil + case _ => Nil + } + + /** The tree containing only the top-level classes and objects matching either `cls` or its companion object */ + def sliceTopLevel(tree: Tree, cls: ClassSymbol)(using Context): List[Tree] = tree match { + case PackageDef(pid, stats) => + val slicedStats = stats.flatMap(sliceTopLevel(_, cls)) + val isEffectivelyEmpty = slicedStats.forall(_.isInstanceOf[Import]) + if isEffectivelyEmpty then Nil + else cpy.PackageDef(tree)(pid, slicedStats) :: Nil + case tdef: TypeDef => + val sym = tdef.symbol + assert(sym.isClass) + if (cls == sym || cls == sym.linkedClass) tdef :: Nil + else Nil + case vdef: ValDef => + val sym = vdef.symbol + assert(sym.is(Module)) + if (cls == sym.companionClass || cls == sym.moduleClass) vdef :: Nil + else Nil + case tree => + tree :: Nil + } + + /** The statement sequence that contains a definition of `sym`, or Nil + * if none was found. + * For a tree to be found, The symbol must have a position and its definition + * tree must be reachable from come tree stored in an enclosing context. 
+ */ + def definingStats(sym: Symbol)(using Context): List[Tree] = + if (!sym.span.exists || (ctx eq NoContext) || (ctx.compilationUnit eq NoCompilationUnit)) Nil + else defPath(sym, ctx.compilationUnit.tpdTree) match { + case defn :: encl :: _ => + def verify(stats: List[Tree]) = + if (stats exists (definedSym(_) == sym)) stats else Nil + encl match { + case Block(stats, _) => verify(stats) + case encl: Template => verify(encl.body) + case PackageDef(_, stats) => verify(stats) + case _ => Nil + } + case nil => + Nil + } + + /** If `tree` is an instance of `TupleN[...](e1, ..., eN)`, the arguments `e1, ..., eN` + * otherwise the empty list. + */ + def tupleArgs(tree: Tree)(using Context): List[Tree] = tree match { + case Block(Nil, expr) => tupleArgs(expr) + case Inlined(_, Nil, expr) => tupleArgs(expr) + case Apply(fn: NameTree, args) + if fn.name == nme.apply && + fn.symbol.owner.is(Module) && + defn.isTupleClass(fn.symbol.owner.companionClass) => args + case _ => Nil + } + + /** The qualifier part of a Select or Ident. + * For an Ident, this is the `This` of the current class. + */ + def qualifier(tree: Tree)(using Context): Tree = tree match { + case Select(qual, _) => qual + case tree: Ident => desugarIdentPrefix(tree) + case _ => This(ctx.owner.enclosingClass.asClass) + } + + /** Is this a (potentially applied) selection of a member of a structural type + * that is not a member of an underlying class or trait? 
+ */ + def isStructuralTermSelectOrApply(tree: Tree)(using Context): Boolean = { + def isStructuralTermSelect(tree: Select) = + def hasRefinement(qualtpe: Type): Boolean = qualtpe.dealias match + case RefinedType(parent, rname, rinfo) => + rname == tree.name || hasRefinement(parent) + case tp: TypeProxy => + hasRefinement(tp.superType) + case tp: AndType => + hasRefinement(tp.tp1) || hasRefinement(tp.tp2) + case tp: OrType => + hasRefinement(tp.tp1) || hasRefinement(tp.tp2) + case _ => + false + !tree.symbol.exists + && tree.isTerm + && { + val qualType = tree.qualifier.tpe + hasRefinement(qualType) && !qualType.derivesFrom(defn.PolyFunctionClass) + } + def loop(tree: Tree): Boolean = tree match + case TypeApply(fun, _) => + loop(fun) + case Apply(fun, _) => + loop(fun) + case tree: Select => + isStructuralTermSelect(tree) + case _ => + false + loop(tree) + } + + /** Return a pair consisting of (supercall, rest) + * + * - supercall: the superclass call, excluding trait constr calls + * + * The supercall is always the first statement (if it exists) + */ + final def splitAtSuper(constrStats: List[Tree])(implicit ctx: Context): (List[Tree], List[Tree]) = + constrStats.toList match { + case (sc: Apply) :: rest if sc.symbol.isConstructor => (sc :: Nil, rest) + case (block @ Block(_, sc: Apply)) :: rest if sc.symbol.isConstructor => (block :: Nil, rest) + case stats => (Nil, stats) + } + + /** Structural tree comparison (since == on trees is reference equality). 
+ * For the moment, only Ident, Select, Literal, Apply and TypeApply are supported + */ + extension (t1: Tree) { + def === (t2: Tree)(using Context): Boolean = (t1, t2) match { + case (t1: Ident, t2: Ident) => + t1.symbol == t2.symbol + case (t1 @ Select(q1, _), t2 @ Select(q2, _)) => + t1.symbol == t2.symbol && q1 === q2 + case (Literal(c1), Literal(c2)) => + c1 == c2 + case (Apply(f1, as1), Apply(f2, as2)) => + f1 === f2 && as1.corresponds(as2)(_ === _) + case (TypeApply(f1, ts1), TypeApply(f2, ts2)) => + f1 === f2 && ts1.tpes.corresponds(ts2.tpes)(_ =:= _) + case _ => + false + } + def hash(using Context): Int = + t1.getClass.hashCode * 37 + { + t1 match { + case t1: Ident => t1.symbol.hashCode + case t1 @ Select(q1, _) => t1.symbol.hashCode * 41 + q1.hash + case Literal(c1) => c1.hashCode + case Apply(f1, as1) => as1.foldLeft(f1.hash)((h, arg) => h * 41 + arg.hash) + case TypeApply(f1, ts1) => ts1.foldLeft(f1.hash)((h, arg) => h * 41 + arg.tpe.hash) + case _ => t1.hashCode + } + } + } + + def assertAllPositioned(tree: Tree)(using Context): Unit = + tree.foreachSubTree { + case t: WithoutTypeOrPos[_] => + case t => assert(t.span.exists, i"$t") + } + + /** Extractors for quotes */ + object Quoted { + /** Extracts the content of a quoted tree. + * The result can be the contents of a term or type quote, which + * will return a term or type tree respectively. + */ + def unapply(tree: tpd.Apply)(using Context): Option[tpd.Tree] = + if tree.symbol == defn.QuotedRuntime_exprQuote then + // quoted.runtime.Expr.quote[T]() + Some(tree.args.head) + else if tree.symbol == defn.QuotedTypeModule_of then + // quoted.Type.of[](quotes) + val TypeApply(_, body :: _) = tree.fun: @unchecked + Some(body) + else None + } + + /** Extractors for splices */ + object Spliced { + /** Extracts the content of a spliced expression tree. + * The result can be the contents of a term splice, which + * will return a term tree. 
+ */ + def unapply(tree: tpd.Apply)(using Context): Option[tpd.Tree] = + if tree.symbol.isExprSplice then Some(tree.args.head) else None + } + + /** Extractors for type splices */ + object SplicedType { + /** Extracts the content of a spliced type tree. + * The result can be the contents of a type splice, which + * will return a type tree. + */ + def unapply(tree: tpd.Select)(using Context): Option[tpd.Tree] = + if tree.symbol.isTypeSplice then Some(tree.qualifier) else None + } + + /** Extractor for not-null assertions. + * A not-null assertion for reference `x` has the form `x.$asInstanceOf$[x.type & T]`. + */ + object AssertNotNull : + def apply(tree: tpd.Tree, tpnn: Type)(using Context): tpd.Tree = + tree.select(defn.Any_typeCast).appliedToType(AndType(tree.tpe, tpnn)) + + def unapply(tree: tpd.TypeApply)(using Context): Option[tpd.Tree] = tree match + case TypeApply(Select(qual: RefTree, nme.asInstanceOfPM), arg :: Nil) => + arg.tpe match + case AndType(ref, nn1) if qual.tpe eq ref => + qual.tpe.widen match + case OrNull(nn2) if nn1 eq nn2 => + Some(qual) + case _ => None + case _ => None + case _ => None + end AssertNotNull + + object ConstantValue { + def unapply(tree: Tree)(using Context): Option[Any] = + tree match + case Typed(expr, _) => unapply(expr) + case Inlined(_, Nil, expr) => unapply(expr) + case Block(Nil, expr) => unapply(expr) + case _ => + tree.tpe.widenTermRefExpr.normalized match + case ConstantType(Constant(x)) => Some(x) + case _ => None + } +} + +object TreeInfo { + /** A purity level is represented as a bitset (expressed as an Int) */ + class PurityLevel(val x: Int) extends AnyVal { + /** `this` contains the bits of `that` */ + def >= (that: PurityLevel): Boolean = (x & that.x) == that.x + + /** The intersection of the bits of `this` and `that` */ + def min(that: PurityLevel): PurityLevel = new PurityLevel(x & that.x) + } + + /** An expression is a stable path. 
Requires that expression is at least idempotent */ + val Path: PurityLevel = new PurityLevel(4) + + /** The expression has no side effects */ + val Pure: PurityLevel = new PurityLevel(3) + + /** Running the expression a second time has no side effects. Implied by `Pure`. */ + val Idempotent: PurityLevel = new PurityLevel(1) + + val Impure: PurityLevel = new PurityLevel(0) + + /** A stable path that is evaluated without side effects */ + val PurePath: PurityLevel = new PurityLevel(Pure.x | Path.x) + + /** A stable path that is also idempotent */ + val IdempotentPath: PurityLevel = new PurityLevel(Idempotent.x | Path.x) +} diff --git a/tests/pos-with-compiler-cc/dotc/ast/TreeMapWithImplicits.scala b/tests/pos-with-compiler-cc/dotc/ast/TreeMapWithImplicits.scala new file mode 100644 index 000000000000..caf8d68442f6 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/TreeMapWithImplicits.scala @@ -0,0 +1,82 @@ +package dotty.tools.dotc +package ast + +import Trees._ +import core.Contexts._ +import core.ContextOps.enter +import core.Flags._ +import core.Symbols._ +import core.TypeError + +/** A TreeMap that maintains the necessary infrastructure to support + * contextual implicit searches (type-scope implicits are supported anyway). + * + * This incudes implicits defined in scope as well as imported implicits. 
+ */ +class TreeMapWithImplicits extends tpd.TreeMapWithPreciseStatContexts { + import tpd._ + + def transformSelf(vd: ValDef)(using Context): ValDef = + cpy.ValDef(vd)(tpt = transform(vd.tpt)) + + private def nestedScopeCtx(defs: List[Tree])(using Context): Context = { + val nestedCtx = ctx.fresh.setNewScope + defs foreach { + case d: DefTree if d.symbol.isOneOf(GivenOrImplicitVal) => nestedCtx.enter(d.symbol) + case _ => + } + nestedCtx + } + + private def patternScopeCtx(pattern: Tree)(using Context): Context = { + val nestedCtx = ctx.fresh.setNewScope + new TreeTraverser { + def traverse(tree: Tree)(using Context): Unit = { + tree match { + case d: DefTree if d.symbol.isOneOf(GivenOrImplicitVal) => + nestedCtx.enter(d.symbol) + case _ => + } + traverseChildren(tree) + } + }.traverse(pattern) + nestedCtx + } + + override def transform(tree: Tree)(using Context): Tree = { + try tree match { + case Block(stats, expr) => + super.transform(tree)(using nestedScopeCtx(stats)) + case tree: DefDef => + inContext(localCtx(tree)) { + cpy.DefDef(tree)( + tree.name, + transformParamss(tree.paramss), + transform(tree.tpt), + transform(tree.rhs)(using nestedScopeCtx(tree.paramss.flatten))) + } + case impl @ Template(constr, parents, self, _) => + cpy.Template(tree)( + transformSub(constr), + transform(parents)(using ctx.superCallContext), + Nil, + transformSelf(self), + transformStats(impl.body, tree.symbol)) + case tree: CaseDef => + val patCtx = patternScopeCtx(tree.pat)(using ctx) + cpy.CaseDef(tree)( + transform(tree.pat), + transform(tree.guard)(using patCtx), + transform(tree.body)(using patCtx) + ) + case _ => + super.transform(tree) + } + catch { + case ex: TypeError => + report.error(ex, tree.srcPos) + tree + } + } +} + diff --git a/tests/pos-with-compiler-cc/dotc/ast/TreeTypeMap.scala b/tests/pos-with-compiler-cc/dotc/ast/TreeTypeMap.scala new file mode 100644 index 000000000000..5139a46d6352 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/TreeTypeMap.scala 
@@ -0,0 +1,232 @@ +package dotty.tools +package dotc +package ast + +import core._ +import Types._, Contexts._, Flags._ +import Symbols._, Annotations._, Trees._, Symbols._, Constants.Constant +import Decorators._ +import dotty.tools.dotc.transform.SymUtils._ +import language.experimental.pureFunctions + +/** A map that applies three functions and a substitution together to a tree and + * makes sure they are coordinated so that the result is well-typed. The functions are + * @param typeMap A function from Type to Type that gets applied to the + * type of every tree node and to all locally defined symbols, + * followed by the substitution [substFrom := substTo]. + * @param treeMap A transformer that translates all encountered subtrees in + * prefix traversal orders + * @param oldOwners Previous owners. If a top-level local symbol in the mapped tree + * has one of these as an owner, the owner is replaced by the corresponding + * symbol in `newOwners`. + * @param newOwners New owners, replacing previous owners. + * @param substFrom The symbols that need to be substituted. + * @param substTo The substitution targets. + * + * The reason the substitution is broken out from the rest of the type map is + * that all symbols have to be substituted at the same time. If we do not do this, + * we risk data races on named types. Example: Say we have `outer#1.inner#2` and we + * have two substitutions S1 = [outer#1 := outer#3], S2 = [inner#2 := inner#4] where + * hashtags precede symbol ids. If we do S1 first, we get outer#2.inner#3. If we then + * do S2 we get outer#2.inner#4. But that means that the named type outer#2.inner + * gets two different denotations in the same period. Hence, if -Yno-double-bindings is + * set, we would get a data race assertion error. + */ +class TreeTypeMap( + val typeMap: Type -> Type = IdentityTypeMap, + val treeMap: tpd.Tree -> tpd.Tree = identity[tpd.Tree](_), // !cc! 
need explicit instantiation of default argument + val oldOwners: List[Symbol] = Nil, + val newOwners: List[Symbol] = Nil, + val substFrom: List[Symbol] = Nil, + val substTo: List[Symbol] = Nil, + cpy: tpd.TreeCopier = tpd.cpy)(using Context) extends tpd.TreeMap(cpy) { + import tpd._ + + def copy( + typeMap: Type -> Type, + treeMap: tpd.Tree -> tpd.Tree, + oldOwners: List[Symbol], + newOwners: List[Symbol], + substFrom: List[Symbol], + substTo: List[Symbol])(using Context): TreeTypeMap = + new TreeTypeMap(typeMap, treeMap, oldOwners, newOwners, substFrom, substTo) + + /** If `sym` is one of `oldOwners`, replace by corresponding symbol in `newOwners` */ + def mapOwner(sym: Symbol): Symbol = sym.subst(oldOwners, newOwners) + + /** Replace occurrences of `This(oldOwner)` in some prefix of a type + * by the corresponding `This(newOwner)`. + */ + private val mapOwnerThis = new TypeMap with cc.CaptureSet.IdempotentCaptRefMap { + private def mapPrefix(from: List[Symbol], to: List[Symbol], tp: Type): Type = from match { + case Nil => tp + case (cls: ClassSymbol) :: from1 => mapPrefix(from1, to.tail, tp.substThis(cls, to.head.thisType)) + case _ :: from1 => mapPrefix(from1, to.tail, tp) + } + def apply(tp: Type): Type = tp match { + case tp: NamedType => tp.derivedSelect(mapPrefix(oldOwners, newOwners, tp.prefix)) + case _ => mapOver(tp) + } + } + + def mapType(tp: Type): Type = + mapOwnerThis(typeMap(tp).substSym(substFrom, substTo)) + + private def updateDecls(prevStats: List[Tree], newStats: List[Tree]): Unit = + if (prevStats.isEmpty) assert(newStats.isEmpty) + else { + prevStats.head match { + case pdef: MemberDef => + val prevSym = pdef.symbol + val newSym = newStats.head.symbol + val newCls = newSym.owner.asClass + if (prevSym != newSym) newCls.replace(prevSym, newSym) + case _ => + } + updateDecls(prevStats.tail, newStats.tail) + } + + def transformInlined(tree: tpd.Inlined)(using Context): tpd.Tree = + val Inlined(call, bindings, expanded) = tree + val (tmap1, 
bindings1) = transformDefs(bindings) + val expanded1 = tmap1.transform(expanded) + cpy.Inlined(tree)(call, bindings1, expanded1) + + override def transform(tree: tpd.Tree)(using Context): tpd.Tree = treeMap(tree) match { + case impl @ Template(constr, parents, self, _) => + val tmap = withMappedSyms(localSyms(impl :: self :: Nil)) + cpy.Template(impl)( + constr = tmap.transformSub(constr), + parents = parents.mapconserve(transform), + self = tmap.transformSub(self), + body = impl.body mapconserve + (tmap.transform(_)(using ctx.withOwner(mapOwner(impl.symbol.owner)))) + ).withType(tmap.mapType(impl.tpe)) + case tree1 => + tree1.withType(mapType(tree1.tpe)) match { + case id: Ident if tpd.needsSelect(id.tpe) => + ref(id.tpe.asInstanceOf[TermRef]).withSpan(id.span) + case ddef @ DefDef(name, paramss, tpt, _) => + val (tmap1, paramss1) = transformAllParamss(paramss) + val res = cpy.DefDef(ddef)(name, paramss1, tmap1.transform(tpt), tmap1.transform(ddef.rhs)) + res.symbol.setParamssFromDefs(paramss1) + res.symbol.transformAnnotations { + case ann: BodyAnnotation => ann.derivedAnnotation(transform(ann.tree)) + case ann => ann + } + res + case tdef @ LambdaTypeTree(tparams, body) => + val (tmap1, tparams1) = transformDefs(tparams) + cpy.LambdaTypeTree(tdef)(tparams1, tmap1.transform(body)) + case blk @ Block(stats, expr) => + val (tmap1, stats1) = transformDefs(stats) + val expr1 = tmap1.transform(expr) + cpy.Block(blk)(stats1, expr1) + case inlined: Inlined => + transformInlined(inlined) + case cdef @ CaseDef(pat, guard, rhs) => + val tmap = withMappedSyms(patVars(pat)) + val pat1 = tmap.transform(pat) + val guard1 = tmap.transform(guard) + val rhs1 = tmap.transform(rhs) + cpy.CaseDef(cdef)(pat1, guard1, rhs1) + case labeled @ Labeled(bind, expr) => + val tmap = withMappedSyms(bind.symbol :: Nil) + val bind1 = tmap.transformSub(bind) + val expr1 = tmap.transform(expr) + cpy.Labeled(labeled)(bind1, expr1) + case tree @ Hole(_, _, args, content, tpt) => + val args1 = 
args.mapConserve(transform) + val content1 = transform(content) + val tpt1 = transform(tpt) + cpy.Hole(tree)(args = args1, content = content1, tpt = tpt1) + case lit @ Literal(Constant(tpe: Type)) => + cpy.Literal(lit)(Constant(mapType(tpe))) + case tree1 => + super.transform(tree1) + } + } + + override def transformStats(trees: List[tpd.Tree], exprOwner: Symbol)(using Context): List[Tree] = + transformDefs(trees)._2 + + def transformDefs[TT <: tpd.Tree](trees: List[TT])(using Context): (TreeTypeMap, List[TT]) = { + val tmap = withMappedSyms(tpd.localSyms(trees)) + (tmap, tmap.transformSub(trees)) + } + + private def transformAllParamss(paramss: List[ParamClause]): (TreeTypeMap, List[ParamClause]) = paramss match + case params :: paramss1 => + val (tmap1, params1: ParamClause) = ((params: @unchecked) match + case ValDefs(vparams) => transformDefs(vparams) + case TypeDefs(tparams) => transformDefs(tparams) + ): @unchecked + val (tmap2, paramss2) = tmap1.transformAllParamss(paramss1) + (tmap2, params1 :: paramss2) + case nil => + (this, paramss) + + def apply[ThisTree <: tpd.Tree](tree: ThisTree): ThisTree = transform(tree).asInstanceOf[ThisTree] + + def apply(annot: Annotation): Annotation = annot.derivedAnnotation(apply(annot.tree)) + + /** The current tree map composed with a substitution [from -> to] */ + def withSubstitution(from: List[Symbol], to: List[Symbol]): TreeTypeMap = + if (from eq to) this + else { + // assert that substitution stays idempotent, assuming its parts are + // TODO: It might be better to cater for the asserted-away conditions, by + // setting up a proper substitution abstraction with a compose operator that + // guarantees idempotence. But this might be too inefficient in some cases. + // We'll cross that bridge when we need to. 
+ assert(!from.exists(substTo contains _)) + assert(!to.exists(substFrom contains _)) + assert(!from.exists(newOwners contains _)) + assert(!to.exists(oldOwners contains _)) + copy( + typeMap, + treeMap, + from ++ oldOwners, + to ++ newOwners, + from ++ substFrom, + to ++ substTo) + } + + /** Apply `typeMap` and `ownerMap` to given symbols `syms` + * and return a treemap that contains the substitution + * between original and mapped symbols. + */ + def withMappedSyms(syms: List[Symbol]): TreeTypeMap = + withMappedSyms(syms, mapSymbols(syms, this)) + + /** The tree map with the substitution between originals `syms` + * and mapped symbols `mapped`. Also goes into mapped classes + * and substitutes their declarations. + */ + def withMappedSyms(syms: List[Symbol], mapped: List[Symbol]): TreeTypeMap = + if syms eq mapped then this + else + val substMap = withSubstitution(syms, mapped) + lazy val origCls = mapped.zip(syms).filter(_._1.isClass).toMap + mapped.filter(_.isClass).foldLeft(substMap) { (tmap, cls) => + val origDcls = cls.info.decls.toList.filterNot(_.is(TypeParam)) + val tmap0 = tmap.withSubstitution(origCls(cls).typeParams, cls.typeParams) + val mappedDcls = mapSymbols(origDcls, tmap0, mapAlways = true) + val tmap1 = tmap.withMappedSyms( + origCls(cls).typeParams ::: origDcls, + cls.typeParams ::: mappedDcls) + origDcls.lazyZip(mappedDcls).foreach(cls.asClass.replace) + tmap1 + } + + override def toString = + def showSyms(syms: List[Symbol]) = + syms.map(sym => s"$sym#${sym.id}").mkString(", ") + s"""TreeTypeMap( + |typeMap = $typeMap + |treeMap = $treeMap + |oldOwners = ${showSyms(oldOwners)} + |newOwners = ${showSyms(newOwners)} + |substFrom = ${showSyms(substFrom)} + |substTo = ${showSyms(substTo)}""".stripMargin +} diff --git a/tests/pos-with-compiler-cc/dotc/ast/Trees.scala b/tests/pos-with-compiler-cc/dotc/ast/Trees.scala new file mode 100644 index 000000000000..82b027b0231a --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/Trees.scala @@ -0,0 
+1,1789 @@ +package dotty.tools +package dotc +package ast + +import core._ +import Types._, Names._, NameOps._, Flags._, util.Spans._, Contexts._, Constants._ +import typer.{ ConstFold, ProtoTypes } +import SymDenotations._, Symbols._, Denotations._, StdNames._, Comments._ +import collection.mutable.ListBuffer +import printing.Printer +import printing.Texts.Text +import util.{Stats, Attachment, Property, SourceFile, NoSource, SrcPos, SourcePosition} +import config.Config +import config.Printers.overload +import annotation.internal.sharable +import annotation.unchecked.uncheckedVariance +import annotation.constructorOnly +import Decorators._ +import annotation.retains +import language.experimental.pureFunctions + +object Trees { + + type Untyped = Nothing + + /** The total number of created tree nodes, maintained if Stats.enabled */ + @sharable var ntrees: Int = 0 + + /** Property key for trees with documentation strings attached */ + val DocComment: Property.StickyKey[Comments.Comment] = Property.StickyKey() + + /** Property key for backquoted identifiers and definitions */ + val Backquoted: Property.StickyKey[Unit] = Property.StickyKey() + + /** Trees take a parameter indicating what the type of their `tpe` field + * is. Two choices: `Type` or `Untyped`. + * Untyped trees have type `Tree[Untyped]`. + * + * Tree typing uses a copy-on-write implementation: + * + * - You can never observe a `tpe` which is `null` (throws an exception) + * - So when creating a typed tree with `withType` we can re-use + * the existing tree transparently, assigning its `tpe` field. + * - It is impossible to embed untyped trees in typed ones. + * - Typed trees can be embedded in untyped ones provided they are rooted + * in a TypedSplice node. + * - Type checking an untyped tree should remove all embedded `TypedSplice` + * nodes. 
+ */ + abstract class Tree[-T >: Untyped](implicit @constructorOnly src: SourceFile) + extends Positioned, SrcPos, Product, Attachment.Container, printing.Showable, caps.Pure { + + if (Stats.enabled) ntrees += 1 + + /** The type constructor at the root of the tree */ + type ThisTree[T >: Untyped] <: Tree[T] + + protected var myTpe: T @uncheckedVariance = _ + + /** Destructively set the type of the tree. This should be called only when it is known that + * it is safe under sharing to do so. One use-case is in the withType method below + * which implements copy-on-write. Another use-case is in method interpolateAndAdapt in Typer, + * where we overwrite with a simplified version of the type itself. + */ + private[dotc] def overwriteType(tpe: T): Unit = + myTpe = tpe + + /** The type of the tree. In case of an untyped tree, + * an UnAssignedTypeException is thrown. (Overridden by empty trees) + */ + final def tpe: T @uncheckedVariance = { + if (myTpe == null) + throw UnAssignedTypeException(this) + myTpe + } + + /** Copy `tpe` attribute from tree `from` into this tree, independently + * whether it is null or not. + final def copyAttr[U >: Untyped](from: Tree[U]): ThisTree[T] = { + val t1 = this.withSpan(from.span) + val t2 = + if (from.myTpe != null) t1.withType(from.myTpe.asInstanceOf[Type]) + else t1 + t2.asInstanceOf[ThisTree[T]] + } + */ + + /** Return a typed tree that's isomorphic to this tree, but has given + * type. 
(Overridden by empty trees) + */ + def withType(tpe: Type)(using Context): ThisTree[Type] = { + if (tpe.isInstanceOf[ErrorType]) + assert(!Config.checkUnreportedErrors || + ctx.reporter.errorsReported || + ctx.settings.YshowPrintErrors.value + // under -Yshow-print-errors, errors might arise during printing, but they do not count as reported + ) + else if (Config.checkTreesConsistent) + checkChildrenTyped(productIterator) + withTypeUnchecked(tpe) + } + + /** Check that typed trees don't refer to untyped ones, except if + * - the parent tree is an import, or + * - the child tree is an identifier, or + * - errors were reported + */ + private def checkChildrenTyped(it: Iterator[Any])(using Context): Unit = + if (!this.isInstanceOf[Import[?]]) + while (it.hasNext) + it.next() match { + case x: Ident[?] => // untyped idents are used in a number of places in typed trees + case x: Tree[?] => + assert(x.hasType || ctx.reporter.errorsReported, + s"$this has untyped child $x") + case xs: List[?] => checkChildrenTyped(xs.iterator) + case _ => + } + + def withTypeUnchecked(tpe: Type): ThisTree[Type] = { + val tree = + (if (myTpe == null || + (myTpe.asInstanceOf[AnyRef] eq tpe.asInstanceOf[AnyRef])) this + else cloneIn(source)).asInstanceOf[Tree[Type]] + tree overwriteType tpe + tree.asInstanceOf[ThisTree[Type]] + } + + /** Does the tree have its type field set? Note: this operation is not + * referentially transparent, because it can observe the withType + * modifications. Should be used only in special circumstances (we + * need it for printing trees with optional type info). + */ + final def hasType: Boolean = myTpe != null + + final def typeOpt: Type = myTpe match { + case tp: Type => tp + case _ => NoType + } + + /** The denotation referred to by this tree. + * Defined for `DenotingTree`s and `ProxyTree`s, NoDenotation for other + * kinds of trees + */ + def denot(using Context): Denotation = NoDenotation + + /** Shorthand for `denot.symbol`. 
*/ + final def symbol(using Context): Symbol = denot.symbol + + /** Does this tree represent a type? */ + def isType: Boolean = false + + /** Does this tree represent a term? */ + def isTerm: Boolean = false + + /** Is this a legal part of a pattern which is not at the same time a term? */ + def isPattern: Boolean = false + + /** Does this tree define a new symbol that is not defined elsewhere? */ + def isDef: Boolean = false + + /** Is this tree either the empty tree or the empty ValDef or an empty type ident? */ + def isEmpty: Boolean = false + + /** Convert tree to a list. Gives a singleton list, except + * for thickets which return their element trees. + */ + def toList: List[Tree[T]] = this :: Nil + + /** if this tree is the empty tree, the alternative, else this tree */ + inline def orElse[U >: Untyped <: T](inline that: Tree[U]): Tree[U] = + if (this eq genericEmptyTree) that else this + + /** The number of nodes in this tree */ + def treeSize: Int = { + var s = 1 + def addSize(elem: Any): Unit = elem match { + case t: Tree[?] => s += t.treeSize + case ts: List[?] => ts foreach addSize + case _ => + } + productIterator foreach addSize + s + } + + /** If this is a thicket, perform `op` on each of its trees + * otherwise, perform `op` ion tree itself. + */ + def foreachInThicket(op: Tree[T] => Unit): Unit = op(this) + + override def toText(printer: Printer): Text = printer.toText(this) + + def sameTree(that: Tree[?]): Boolean = { + def isSame(x: Any, y: Any): Boolean = + x.asInstanceOf[AnyRef].eq(y.asInstanceOf[AnyRef]) || { + x match { + case x: Tree[?] => + y match { + case y: Tree[?] => x.sameTree(y) + case _ => false + } + case x: List[?] => + y match { + case y: List[?] 
=> x.corresponds(y)(isSame) + case _ => false + } + case _ => + false + } + } + this.getClass == that.getClass && { + val it1 = this.productIterator + val it2 = that.productIterator + it1.corresponds(it2)(isSame) + } + } + + override def hashCode(): Int = System.identityHashCode(this) + override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef] + } + + class UnAssignedTypeException[T >: Untyped](tree: Tree[T]) extends RuntimeException { + override def getMessage: String = s"type of $tree is not assigned" + } + + type LazyTree[-T >: Untyped] = Tree[T] | Lazy[Tree[T]] + type LazyTreeList[-T >: Untyped] = List[Tree[T]] | Lazy[List[Tree[T]]] + + // ------ Categories of trees ----------------------------------- + + /** Instances of this class are trees for which isType is definitely true. + * Note that some trees have isType = true without being TypTrees (e.g. Ident, Annotated) + */ + trait TypTree[-T >: Untyped] extends Tree[T] { + type ThisTree[-T >: Untyped] <: TypTree[T] + override def isType: Boolean = true + } + + /** Instances of this class are trees for which isTerm is definitely true. + * Note that some trees have isTerm = true without being TermTrees (e.g. Ident, Annotated) + */ + trait TermTree[-T >: Untyped] extends Tree[T] { + type ThisTree[-T >: Untyped] <: TermTree[T] + override def isTerm: Boolean = true + } + + /** Instances of this class are trees which are not terms but are legal + * parts of patterns. 
+ */ + trait PatternTree[-T >: Untyped] extends Tree[T] { + type ThisTree[-T >: Untyped] <: PatternTree[T] + override def isPattern: Boolean = true + } + + /** Tree's denotation can be derived from its type */ + abstract class DenotingTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { + type ThisTree[-T >: Untyped] <: DenotingTree[T] + override def denot(using Context): Denotation = typeOpt.stripped match + case tpe: NamedType => tpe.denot + case tpe: ThisType => tpe.cls.denot + case _ => NoDenotation + } + + /** Tree's denot/isType/isTerm properties come from a subtree + * identified by `forwardTo`. + */ + abstract class ProxyTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { + type ThisTree[-T >: Untyped] <: ProxyTree[T] + def forwardTo: Tree[T] + override def denot(using Context): Denotation = forwardTo.denot + override def isTerm: Boolean = forwardTo.isTerm + override def isType: Boolean = forwardTo.isType + } + + /** Tree has a name */ + abstract class NameTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends DenotingTree[T] { + type ThisTree[-T >: Untyped] <: NameTree[T] + def name: Name + } + + /** Tree refers by name to a denotation */ + abstract class RefTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends NameTree[T] { + type ThisTree[-T >: Untyped] <: RefTree[T] + def qualifier: Tree[T] + override def isType: Boolean = name.isTypeName + override def isTerm: Boolean = name.isTermName + } + + /** Tree defines a new symbol */ + trait DefTree[-T >: Untyped] extends DenotingTree[T] { + type ThisTree[-T >: Untyped] <: DefTree[T] + + private var myMods: untpd.Modifiers | Null = _ + + private[dotc] def rawMods: untpd.Modifiers = + if (myMods == null) untpd.EmptyModifiers else myMods.uncheckedNN + + def withAnnotations(annots: List[untpd.Tree]): ThisTree[Untyped] = withMods(rawMods.withAnnotations(annots)) + + def withMods(mods: untpd.Modifiers): ThisTree[Untyped] = { + 
val tree = if (myMods == null || (myMods == mods)) this else cloneIn(source) + tree.setMods(mods) + tree.asInstanceOf[ThisTree[Untyped]] + } + + def withFlags(flags: FlagSet): ThisTree[Untyped] = withMods(untpd.Modifiers(flags)) + def withAddedFlags(flags: FlagSet): ThisTree[Untyped] = withMods(rawMods | flags) + + /** Destructively update modifiers. To be used with care. */ + def setMods(mods: untpd.Modifiers): Unit = myMods = mods + + override def isDef: Boolean = true + def namedType: NamedType = tpe.asInstanceOf[NamedType] + } + + extension (mdef: untpd.DefTree) def mods: untpd.Modifiers = mdef.rawMods + + sealed trait WithEndMarker[-T >: Untyped]: + self: PackageDef[T] | NamedDefTree[T] => + + import WithEndMarker.* + + final def endSpan(using Context): Span = + if hasEndMarker then + val realName = srcName.stripModuleClassSuffix.lastPart + span.withStart(span.end - realName.length) + else + NoSpan + + /** The name in source code that represents this construct, + * and is the name that the user must write to create a valid + * end marker. + * e.g. a constructor definition is terminated in the source + * code by `end this`, so it's `srcName` should return `this`. 
+ */ + protected def srcName(using Context): Name + + final def withEndMarker(): self.type = + self.withAttachment(HasEndMarker, ()) + + final def withEndMarker(copyFrom: WithEndMarker[?]): self.type = + if copyFrom.hasEndMarker then + this.withEndMarker() + else + this + + final def dropEndMarker(): self.type = + self.removeAttachment(HasEndMarker) + this + + protected def hasEndMarker: Boolean = self.hasAttachment(HasEndMarker) + + object WithEndMarker: + /** Property key that signals the tree was terminated + * with an `end` marker in the source code + */ + private val HasEndMarker: Property.StickyKey[Unit] = Property.StickyKey() + + end WithEndMarker + + abstract class NamedDefTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) + extends NameTree[T] with DefTree[T] with WithEndMarker[T] { + type ThisTree[-T >: Untyped] <: NamedDefTree[T] + + protected def srcName(using Context): Name = + if name == nme.CONSTRUCTOR then nme.this_ + else if symbol.isPackageObject then symbol.owner.name + else name + + /** The position of the name defined by this definition. + * This is a point position if the definition is synthetic, or a range position + * if the definition comes from source. + * It might also be that the definition does not have a position (for instance when synthesized by + * a calling chain from `viewExists`), in that case the return position is NoSpan. + * Overridden in Bind + */ + def nameSpan(using Context): Span = + if (span.exists) { + val point = span.point + if (rawMods.is(Synthetic) || span.isSynthetic || name.toTermName == nme.ERROR) Span(point) + else { + val realName = srcName.stripModuleClassSuffix.lastPart + Span(point, point + realName.length, point) + } + } + else span + + /** The source position of the name defined by this definition. + * This is a point position if the definition is synthetic, or a range position + * if the definition comes from source. 
+ */ + def namePos(using Context): SourcePosition = source.atSpan(nameSpan) + } + + /** Tree defines a new symbol and carries modifiers. + * The position of a MemberDef contains only the defined identifier or pattern. + * The envelope of a MemberDef contains the whole definition and has its point + * on the opening keyword (or the next token after that if keyword is missing). + */ + abstract class MemberDef[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends NamedDefTree[T] { + type ThisTree[-T >: Untyped] <: MemberDef[T] + + def rawComment: Option[Comment] = getAttachment(DocComment) + + def setComment(comment: Option[Comment]): this.type = { + comment.map(putAttachment(DocComment, _)) + this + } + + def name: Name + } + + /** A ValDef or DefDef tree */ + abstract class ValOrDefDef[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends MemberDef[T] with WithLazyField[Tree[T]] { + type ThisTree[-T >: Untyped] <: ValOrDefDef[T] + def name: TermName + def tpt: Tree[T] + def unforcedRhs: LazyTree[T] = unforced + def rhs(using Context): Tree[T] = forceIfLazy + } + + trait ValOrTypeDef[-T >: Untyped] extends MemberDef[T]: + type ThisTree[-T >: Untyped] <: ValOrTypeDef[T] + + type ParamClause[T >: Untyped] = List[ValDef[T]] | List[TypeDef[T]] + + // ----------- Tree case classes ------------------------------------ + + /** name */ + case class Ident[-T >: Untyped] private[ast] (name: Name)(implicit @constructorOnly src: SourceFile) + extends RefTree[T] { + type ThisTree[-T >: Untyped] = Ident[T] + def qualifier: Tree[T] = genericEmptyTree + + def isBackquoted: Boolean = hasAttachment(Backquoted) + } + + class SearchFailureIdent[-T >: Untyped] private[ast] (name: Name, expl: -> String)(implicit @constructorOnly src: SourceFile) + extends Ident[T](name) { + def explanation = expl + override def toString: String = s"SearchFailureIdent($explanation)" + } + + /** qualifier.name, or qualifier#name, if qualifier is a type */ + case class Select[-T >: 
Untyped] private[ast] (qualifier: Tree[T], name: Name)(implicit @constructorOnly src: SourceFile) + extends RefTree[T] { + type ThisTree[-T >: Untyped] = Select[T] + + override def denot(using Context): Denotation = typeOpt match + case ConstantType(_) if ConstFold.foldedUnops.contains(name) => + // Recover the denotation of a constant-folded selection + qualifier.typeOpt.member(name).atSignature(Signature.NotAMethod, name) + case _ => + super.denot + + def nameSpan(using Context): Span = + if span.exists then + val point = span.point + if name.toTermName == nme.ERROR then + Span(point) + else if qualifier.span.start > span.start then // right associative + val realName = name.stripModuleClassSuffix.lastPart + Span(span.start, span.start + realName.length, point) + else + Span(point, span.end, point) + else span + } + + class SelectWithSig[-T >: Untyped] private[ast] (qualifier: Tree[T], name: Name, val sig: Signature)(implicit @constructorOnly src: SourceFile) + extends Select[T](qualifier, name) { + override def toString: String = s"SelectWithSig($qualifier, $name, $sig)" + } + + /** qual.this */ + case class This[-T >: Untyped] private[ast] (qual: untpd.Ident)(implicit @constructorOnly src: SourceFile) + extends DenotingTree[T] with TermTree[T] { + type ThisTree[-T >: Untyped] = This[T] + // Denotation of a This tree is always the underlying class; needs correction for modules. 
+ override def denot(using Context): Denotation = + typeOpt match { + case tpe @ TermRef(pre, _) if tpe.symbol.is(Module) => + tpe.symbol.moduleClass.denot.asSeenFrom(pre) + case _ => + super.denot + } + } + + /** C.super[mix], where qual = C.this */ + case class Super[-T >: Untyped] private[ast] (qual: Tree[T], mix: untpd.Ident)(implicit @constructorOnly src: SourceFile) + extends ProxyTree[T] with TermTree[T] { + type ThisTree[-T >: Untyped] = Super[T] + def forwardTo: Tree[T] = qual + } + + abstract class GenericApply[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TermTree[T] { + type ThisTree[-T >: Untyped] <: GenericApply[T] + val fun: Tree[T] + val args: List[Tree[T]] + def forwardTo: Tree[T] = fun + } + + object GenericApply: + def unapply[T >: Untyped](tree: Tree[T]): Option[(Tree[T], List[Tree[T]])] = tree match + case tree: GenericApply[T] => Some((tree.fun, tree.args)) + case _ => None + + /** The kind of application */ + enum ApplyKind: + case Regular // r.f(x) + case Using // r.f(using x) + case InfixTuple // r f (x1, ..., xN) where N != 1; needs to be treated specially for an error message in typedApply + + /** fun(args) */ + case class Apply[-T >: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + extends GenericApply[T] { + type ThisTree[-T >: Untyped] = Apply[T] + + def setApplyKind(kind: ApplyKind) = + putAttachment(untpd.KindOfApply, kind) + this + + /** The kind of this application. Works reliably only for untyped trees; typed trees + * are under no obligation to update it correctly. 
+ */ + def applyKind: ApplyKind = + attachmentOrElse(untpd.KindOfApply, ApplyKind.Regular) + } + + /** fun[args] */ + case class TypeApply[-T >: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + extends GenericApply[T] { + type ThisTree[-T >: Untyped] = TypeApply[T] + } + + /** const */ + case class Literal[-T >: Untyped] private[ast] (const: Constant)(implicit @constructorOnly src: SourceFile) + extends Tree[T] with TermTree[T] { + type ThisTree[-T >: Untyped] = Literal[T] + } + + /** new tpt, but no constructor call */ + case class New[-T >: Untyped] private[ast] (tpt: Tree[T])(implicit @constructorOnly src: SourceFile) + extends Tree[T] with TermTree[T] { + type ThisTree[-T >: Untyped] = New[T] + } + + /** expr : tpt */ + case class Typed[-T >: Untyped] private[ast] (expr: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) + extends ProxyTree[T] with TermTree[T] { + type ThisTree[-T >: Untyped] = Typed[T] + def forwardTo: Tree[T] = expr + } + + /** name = arg, in a parameter list */ + case class NamedArg[-T >: Untyped] private[ast] (name: Name, arg: Tree[T])(implicit @constructorOnly src: SourceFile) + extends Tree[T] { + type ThisTree[-T >: Untyped] = NamedArg[T] + } + + /** name = arg, outside a parameter list */ + case class Assign[-T >: Untyped] private[ast] (lhs: Tree[T], rhs: Tree[T])(implicit @constructorOnly src: SourceFile) + extends TermTree[T] { + type ThisTree[-T >: Untyped] = Assign[T] + } + + /** { stats; expr } */ + case class Block[-T >: Untyped] private[ast] (stats: List[Tree[T]], expr: Tree[T])(implicit @constructorOnly src: SourceFile) + extends Tree[T] { + type ThisTree[-T >: Untyped] = Block[T] + override def isType: Boolean = expr.isType + override def isTerm: Boolean = !isType // this will classify empty trees as terms, which is necessary + } + + /** if cond then thenp else elsep */ + case class If[-T >: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: 
Tree[T])(implicit @constructorOnly src: SourceFile) + extends TermTree[T] { + type ThisTree[-T >: Untyped] = If[T] + def isInline = false + } + class InlineIf[-T >: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])(implicit @constructorOnly src: SourceFile) + extends If(cond, thenp, elsep) { + override def isInline = true + override def toString = s"InlineIf($cond, $thenp, $elsep)" + } + + /** A closure with an environment and a reference to a method. + * @param env The captured parameters of the closure + * @param meth A ref tree that refers to the method of the closure. + * The first (env.length) parameters of that method are filled + * with env values. + * @param tpt Either EmptyTree or a TypeTree. If tpt is EmptyTree the type + * of the closure is a function type, otherwise it is the type + * given in `tpt`, which must be a SAM type. + */ + case class Closure[-T >: Untyped] private[ast] (env: List[Tree[T]], meth: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) + extends TermTree[T] { + type ThisTree[-T >: Untyped] = Closure[T] + } + + /** selector match { cases } */ + case class Match[-T >: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) + extends TermTree[T] { + type ThisTree[-T >: Untyped] = Match[T] + def isInline = false + } + class InlineMatch[-T >: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) + extends Match(selector, cases) { + override def isInline = true + override def toString = s"InlineMatch($selector, $cases)" + } + + /** case pat if guard => body */ + case class CaseDef[-T >: Untyped] private[ast] (pat: Tree[T], guard: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) + extends Tree[T] { + type ThisTree[-T >: Untyped] = CaseDef[T] + } + + /** label[tpt]: { expr } */ + case class Labeled[-T >: Untyped] private[ast] (bind: Bind[T], expr: Tree[T])(implicit @constructorOnly 
src: SourceFile) + extends NameTree[T] { + type ThisTree[-T >: Untyped] = Labeled[T] + def name: Name = bind.name + } + + /** return expr + * where `from` refers to the method or label from which the return takes place + * After program transformations this is not necessarily the enclosing method, because + * closures can intervene. + */ + case class Return[-T >: Untyped] private[ast] (expr: Tree[T], from: Tree[T] = genericEmptyTree)(implicit @constructorOnly src: SourceFile) + extends TermTree[T] { + type ThisTree[-T >: Untyped] = Return[T] + } + + /** while (cond) { body } */ + case class WhileDo[-T >: Untyped] private[ast] (cond: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) + extends TermTree[T] { + type ThisTree[-T >: Untyped] = WhileDo[T] + } + + /** try block catch cases finally finalizer */ + case class Try[-T >: Untyped] private[ast] (expr: Tree[T], cases: List[CaseDef[T]], finalizer: Tree[T])(implicit @constructorOnly src: SourceFile) + extends TermTree[T] { + type ThisTree[-T >: Untyped] = Try[T] + } + + /** Seq(elems) + * @param tpt The element type of the sequence. + */ + case class SeqLiteral[-T >: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) + extends Tree[T] { + type ThisTree[-T >: Untyped] = SeqLiteral[T] + } + + /** Array(elems) */ + class JavaSeqLiteral[-T >: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) + extends SeqLiteral(elems, elemtpt) { + override def toString: String = s"JavaSeqLiteral($elems, $elemtpt)" + } + + /** A tree representing inlined code. + * + * @param call Info about the original call that was inlined + * Until PostTyper, this is the full call, afterwards only + * a reference to the toplevel class from which the call was inlined. + * @param bindings Bindings for proxies to be used in the inlined code + * @param expansion The inlined tree, minus bindings. 
+ * + * The full inlined code is equivalent to + * + * { bindings; expansion } + * + * The reason to keep `bindings` separate is because they are typed in a + * different context: `bindings` represent the arguments to the inlined + * call, whereas `expansion` represents the body of the inlined function. + */ + case class Inlined[-T >: Untyped] private[ast] (call: tpd.Tree, bindings: List[MemberDef[T]], expansion: Tree[T])(implicit @constructorOnly src: SourceFile) + extends Tree[T] { + type ThisTree[-T >: Untyped] = Inlined[T] + override def isTerm = expansion.isTerm + override def isType = expansion.isType + } + + /** A type tree that represents an existing or inferred type */ + case class TypeTree[-T >: Untyped]()(implicit @constructorOnly src: SourceFile) + extends DenotingTree[T] with TypTree[T] { + type ThisTree[-T >: Untyped] = TypeTree[T] + override def isEmpty: Boolean = !hasType + override def toString: String = + s"TypeTree${if (hasType) s"[$typeOpt]" else ""}" + } + + /** A type tree whose type is inferred. These trees appear in two contexts + * - as an argument of a TypeApply. In that case its type is always a TypeVar + * - as a (result-)type of an inferred ValDef or DefDef. + * Every TypeVar is created as the type of one InferredTypeTree. 
+ */ + class InferredTypeTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends TypeTree[T] + + /** ref.type */ + case class SingletonTypeTree[-T >: Untyped] private[ast] (ref: Tree[T])(implicit @constructorOnly src: SourceFile) + extends DenotingTree[T] with TypTree[T] { + type ThisTree[-T >: Untyped] = SingletonTypeTree[T] + } + + /** tpt { refinements } */ + case class RefinedTypeTree[-T >: Untyped] private[ast] (tpt: Tree[T], refinements: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + extends ProxyTree[T] with TypTree[T] { + type ThisTree[-T >: Untyped] = RefinedTypeTree[T] + def forwardTo: Tree[T] = tpt + } + + /** tpt[args] */ + case class AppliedTypeTree[-T >: Untyped] private[ast] (tpt: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + extends ProxyTree[T] with TypTree[T] { + type ThisTree[-T >: Untyped] = AppliedTypeTree[T] + def forwardTo: Tree[T] = tpt + } + + /** [typeparams] -> tpt + * + * Note: the type of such a tree is not necessarily a `HKTypeLambda`, it can + * also be a `TypeBounds` where the upper bound is an `HKTypeLambda`, and the + * lower bound is either a reference to `Nothing` or an `HKTypeLambda`, + * this happens because these trees are typed by `HKTypeLambda#fromParams` which + * makes sure to move bounds outside of the type lambda itself to simplify their + * handling in the compiler. + * + * You may ask: why not normalize the trees too? That way, + * + * LambdaTypeTree(X, TypeBoundsTree(A, B)) + * + * would become, + * + * TypeBoundsTree(LambdaTypeTree(X, A), LambdaTypeTree(X, B)) + * + * which would maintain consistency between a tree and its type. The problem + * with this definition is that the same tree `X` appears twice, therefore + * we'd have to create two symbols for it which makes it harder to relate the + * source code written by the user with the trees used by the compiler (for + * example, to make "find all references" work in the IDE). 
+ */ + case class LambdaTypeTree[-T >: Untyped] private[ast] (tparams: List[TypeDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) + extends TypTree[T] { + type ThisTree[-T >: Untyped] = LambdaTypeTree[T] + } + + case class TermLambdaTypeTree[-T >: Untyped] private[ast] (params: List[ValDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) + extends TypTree[T] { + type ThisTree[-T >: Untyped] = TermLambdaTypeTree[T] + } + + /** [bound] selector match { cases } */ + case class MatchTypeTree[-T >: Untyped] private[ast] (bound: Tree[T], selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) + extends TypTree[T] { + type ThisTree[-T >: Untyped] = MatchTypeTree[T] + } + + /** => T */ + case class ByNameTypeTree[-T >: Untyped] private[ast] (result: Tree[T])(implicit @constructorOnly src: SourceFile) + extends TypTree[T] { + type ThisTree[-T >: Untyped] = ByNameTypeTree[T] + } + + /** >: lo <: hi + * >: lo <: hi = alias for RHS of bounded opaque type + */ + case class TypeBoundsTree[-T >: Untyped] private[ast] (lo: Tree[T], hi: Tree[T], alias: Tree[T])(implicit @constructorOnly src: SourceFile) + extends TypTree[T] { + type ThisTree[-T >: Untyped] = TypeBoundsTree[T] + } + + /** name @ body */ + case class Bind[-T >: Untyped] private[ast] (name: Name, body: Tree[T])(implicit @constructorOnly src: SourceFile) + extends NamedDefTree[T] with PatternTree[T] { + type ThisTree[-T >: Untyped] = Bind[T] + override def isType: Boolean = name.isTypeName + override def isTerm: Boolean = name.isTermName + + override def nameSpan(using Context): Span = + if span.exists then Span(span.start, span.start + name.toString.length) else span + } + + /** tree_1 | ... 
| tree_n */ + case class Alternative[-T >: Untyped] private[ast] (trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + extends PatternTree[T] { + type ThisTree[-T >: Untyped] = Alternative[T] + } + + /** The typed translation of `extractor(patterns)` in a pattern. The translation has the following + * components: + * + * @param fun is `extractor.unapply` (or, for backwards compatibility, `extractor.unapplySeq`) + * possibly with type parameters + * @param implicits Any implicit parameters passed to the unapply after the selector + * @param patterns The argument patterns in the pattern match. + * + * It is typed with same type as first `fun` argument + * Given a match selector `sel` a pattern UnApply(fun, implicits, patterns) is roughly translated as follows + * + * val result = fun(sel)(implicits) + * if (result.isDefined) "match patterns against result" + */ + case class UnApply[-T >: Untyped] private[ast] (fun: Tree[T], implicits: List[Tree[T]], patterns: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + extends ProxyTree[T] with PatternTree[T] { + type ThisTree[-T >: Untyped] = UnApply[T] + def forwardTo = fun + } + + /** mods val name: tpt = rhs */ + case class ValDef[-T >: Untyped] private[ast] (name: TermName, tpt: Tree[T], private var preRhs: LazyTree[T @uncheckedVariance])(implicit @constructorOnly src: SourceFile) + extends ValOrDefDef[T], ValOrTypeDef[T] { + type ThisTree[-T >: Untyped] = ValDef[T] + assert(isEmpty || (tpt ne genericEmptyTree)) + def unforced: LazyTree[T] = preRhs + protected def force(x: Tree[T @uncheckedVariance]): Unit = preRhs = x + } + + /** mods def name[tparams](vparams_1)...(vparams_n): tpt = rhs */ + case class DefDef[-T >: Untyped] private[ast] (name: TermName, + paramss: List[ParamClause[T]], tpt: Tree[T], private var preRhs: LazyTree[T @uncheckedVariance])(implicit @constructorOnly src: SourceFile) + extends ValOrDefDef[T] { + type ThisTree[-T >: Untyped] = DefDef[T] + assert(tpt ne genericEmptyTree) + 
def unforced: LazyTree[T] = preRhs + protected def force(x: Tree[T @uncheckedVariance]): Unit = preRhs = x + + def leadingTypeParams(using Context): List[TypeDef[T]] = paramss match + case (tparams @ (tparam: TypeDef[_]) :: _) :: _ => tparams.asInstanceOf[List[TypeDef[T]]] + case _ => Nil + + def trailingParamss(using Context): List[ParamClause[T]] = paramss match + case ((tparam: TypeDef[_]) :: _) :: paramss1 => paramss1 + case _ => paramss + + def termParamss(using Context): List[List[ValDef[T]]] = + (if ctx.erasedTypes then paramss else untpd.termParamssIn(paramss)) + .asInstanceOf[List[List[ValDef[T]]]] + } + + /** mods class name template or + * mods trait name template or + * mods type name = rhs or + * mods type name >: lo <: hi, if rhs = TypeBoundsTree(lo, hi) or + * mods type name >: lo <: hi = rhs if rhs = TypeBoundsTree(lo, hi, alias) and opaque in mods + */ + case class TypeDef[-T >: Untyped] private[ast] (name: TypeName, rhs: Tree[T])(implicit @constructorOnly src: SourceFile) + extends MemberDef[T], ValOrTypeDef[T] { + type ThisTree[-T >: Untyped] = TypeDef[T] + + /** Is this a definition of a class? */ + def isClassDef: Boolean = rhs.isInstanceOf[Template[?]] + + def isBackquoted: Boolean = hasAttachment(Backquoted) + } + + /** extends parents { self => body } + * @param parentsOrDerived A list of parents followed by a list of derived classes, + * if this is of class untpd.DerivingTemplate. + * Typed templates only have parents. 
+ */ + case class Template[-T >: Untyped] private[ast] (constr: DefDef[T], parentsOrDerived: List[Tree[T]], self: ValDef[T], private var preBody: LazyTreeList[T @uncheckedVariance])(implicit @constructorOnly src: SourceFile) + extends DefTree[T] with WithLazyField[List[Tree[T]]] { + type ThisTree[-T >: Untyped] = Template[T] + def unforcedBody: LazyTreeList[T] = unforced + def unforced: LazyTreeList[T] = preBody + protected def force(x: List[Tree[T @uncheckedVariance]]): Unit = preBody = x + def body(using Context): List[Tree[T]] = forceIfLazy + + def parents: List[Tree[T]] = parentsOrDerived // overridden by DerivingTemplate + def derived: List[untpd.Tree] = Nil // overridden by DerivingTemplate + } + + + abstract class ImportOrExport[-T >: Untyped](implicit @constructorOnly src: SourceFile) + extends DenotingTree[T] { + type ThisTree[-T >: Untyped] <: ImportOrExport[T] + val expr: Tree[T] + val selectors: List[untpd.ImportSelector] + } + + /** import expr.selectors + * where a selector is either an untyped `Ident`, `name` or + * an untyped thicket consisting of `name` and `rename`. + */ + case class Import[-T >: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) + extends ImportOrExport[T] { + type ThisTree[-T >: Untyped] = Import[T] + } + + /** export expr.selectors + * where a selector is either an untyped `Ident`, `name` or + * an untyped thicket consisting of `name` and `rename`. 
+ */ + case class Export[-T >: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) + extends ImportOrExport[T] { + type ThisTree[-T >: Untyped] = Export[T] + } + + /** package pid { stats } */ + case class PackageDef[-T >: Untyped] private[ast] (pid: RefTree[T], stats: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + extends ProxyTree[T] with WithEndMarker[T] { + type ThisTree[-T >: Untyped] = PackageDef[T] + def forwardTo: RefTree[T] = pid + protected def srcName(using Context): Name = pid.name + } + + /** arg @annot */ + case class Annotated[-T >: Untyped] private[ast] (arg: Tree[T], annot: Tree[T])(implicit @constructorOnly src: SourceFile) + extends ProxyTree[T] { + type ThisTree[-T >: Untyped] = Annotated[T] + def forwardTo: Tree[T] = arg + } + + trait WithoutTypeOrPos[-T >: Untyped] extends Tree[T] { + override def withTypeUnchecked(tpe: Type): ThisTree[Type] = this.asInstanceOf[ThisTree[Type]] + override def span: Span = NoSpan + override def span_=(span: Span): Unit = {} + } + + /** Temporary class that results from translation of ModuleDefs + * (and possibly other statements). + * The contained trees will be integrated when transformed with + * a `transform(List[Tree])` call. 
+ */ + case class Thicket[-T >: Untyped](trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + extends Tree[T] with WithoutTypeOrPos[T] { + myTpe = NoType.asInstanceOf[T] + type ThisTree[-T >: Untyped] = Thicket[T] + + def mapElems(op: Tree[T] => Tree[T] @uncheckedVariance): Thicket[T] = { + val newTrees = trees.mapConserve(op) + if (trees eq newTrees) + this + else + Thicket[T](newTrees)(source).asInstanceOf[this.type] + } + + override def foreachInThicket(op: Tree[T] => Unit): Unit = + trees foreach (_.foreachInThicket(op)) + + override def isEmpty: Boolean = trees.isEmpty + override def toList: List[Tree[T]] = flatten(trees) + override def toString: String = if (isEmpty) "EmptyTree" else "Thicket(" + trees.mkString(", ") + ")" + override def span: Span = + def combine(s: Span, ts: List[Tree[T]]): Span = ts match + case t :: ts1 => combine(s.union(t.span), ts1) + case nil => s + combine(NoSpan, trees) + + override def withSpan(span: Span): this.type = + mapElems(_.withSpan(span)).asInstanceOf[this.type] + } + + class EmptyTree[T >: Untyped] extends Thicket(Nil)(NoSource) { + // assert(uniqueId != 1492) + override def withSpan(span: Span) = throw AssertionError("Cannot change span of EmptyTree") + } + + class EmptyValDef[T >: Untyped] extends ValDef[T]( + nme.WILDCARD, genericEmptyTree[T], genericEmptyTree[T])(NoSource) with WithoutTypeOrPos[T] { + myTpe = NoType.asInstanceOf[T] + setMods(untpd.Modifiers(PrivateLocal)) + override def isEmpty: Boolean = true + override def withSpan(span: Span) = throw AssertionError("Cannot change span of EmptyValDef") + } + + @sharable val theEmptyTree = new EmptyTree[Type]() + @sharable val theEmptyValDef = new EmptyValDef[Type]() + + def genericEmptyValDef[T >: Untyped]: ValDef[T] = theEmptyValDef.asInstanceOf[ValDef[T]] + def genericEmptyTree[T >: Untyped]: Thicket[T] = theEmptyTree.asInstanceOf[Thicket[T]] + + /** Tree that replaces a level 1 splices in pickled (level 0) quotes. 
   * It is only used when pickling quotes (will never be in a TASTy file).
   *
   * @param isTermHole If this hole is a term, otherwise it is a type hole.
   * @param idx The index of the hole in its enclosing level 0 quote.
   * @param args The arguments of the splice to compute its content
   * @param content Lambda that computes the content of the hole. This tree is empty when in a quote pickle.
   * @param tpt Type of the hole
   */
  case class Hole[-T >: Untyped](isTermHole: Boolean, idx: Int, args: List[Tree[T]], content: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] {
    type ThisTree[-T >: Untyped] <: Hole[T]
    // term-ness / type-ness of a Hole is determined entirely by the flag
    override def isTerm: Boolean = isTermHole
    override def isType: Boolean = !isTermHole
  }

  /** Flatten all `Thicket` nodes in `trees` into their elements.
   *  Returns `trees` itself (no allocation) when no Thicket is present.
   */
  def flatten[T >: Untyped](trees: List[Tree[T]]): List[Tree[T]] = {
    // `buf` stays null until the first Thicket is encountered; only then is the
    // already-scanned prefix of `trees` copied into a freshly allocated buffer.
    def recur(buf: ListBuffer[Tree[T]] | Null, remaining: List[Tree[T]]): ListBuffer[Tree[T]] | Null =
      remaining match {
        case Thicket(elems) :: remaining1 =>
          var buf1 = buf
          if (buf1 == null) {
            buf1 = new ListBuffer[Tree[T]]
            var scanned = trees
            while (scanned `ne` remaining) {
              buf1 += scanned.head
              scanned = scanned.tail
            }
          }
          // recurse into the thicket's elements first (they may themselves
          // contain nested Thickets), then continue with the rest
          recur(recur(buf1, elems), remaining1)
        case tree :: remaining1 =>
          if (buf != null) buf += tree
          recur(buf, remaining1)
        case nil =>
          buf
      }
    val buf = recur(null, trees)
    if (buf != null) buf.toList else trees
  }

  // ----- Lazy trees and tree sequences

  /** A tree that can have a lazy field
   *  The field is represented by some private `var` which is
   *  accessed by `unforced` and `force`. Forcing the field will
   *  set the `var` to the underlying value.
   */
  trait WithLazyField[+T <: AnyRef] {
    def unforced: T | Lazy[T]
    protected def force(x: T @uncheckedVariance): Unit
    // Completes the lazy value at most once: after the first forcing,
    // `unforced` yields the computed value directly.
    def forceIfLazy(using Context): T = unforced match {
      case lzy: Lazy[T @unchecked] =>
        val x = lzy.complete
        force(x)
        x
      case x: T @ unchecked => x
    }
  }

  /** A base trait for lazy tree fields.
+ * These can be instantiated with Lazy instances which + * can delay tree construction until the field is first demanded. + */ + trait Lazy[+T <: AnyRef] { + def complete(using Context): T + } + + // ----- Generic Tree Instances, inherited from `tpt` and `untpd`. + + abstract class Instance[T >: Untyped <: Type] { inst => + + type Tree = Trees.Tree[T] + type TypTree = Trees.TypTree[T] + type TermTree = Trees.TermTree[T] + type PatternTree = Trees.PatternTree[T] + type DenotingTree = Trees.DenotingTree[T] + type ProxyTree = Trees.ProxyTree[T] + type NameTree = Trees.NameTree[T] + type RefTree = Trees.RefTree[T] + type DefTree = Trees.DefTree[T] + type NamedDefTree = Trees.NamedDefTree[T] + type MemberDef = Trees.MemberDef[T] + type ValOrDefDef = Trees.ValOrDefDef[T] + type ValOrTypeDef = Trees.ValOrTypeDef[T] + type LazyTree = Trees.LazyTree[T] + type LazyTreeList = Trees.LazyTreeList[T] + type ParamClause = Trees.ParamClause[T] + + type Ident = Trees.Ident[T] + type SearchFailureIdent = Trees.SearchFailureIdent[T] + type Select = Trees.Select[T] + type SelectWithSig = Trees.SelectWithSig[T] + type This = Trees.This[T] + type Super = Trees.Super[T] + type Apply = Trees.Apply[T] + type TypeApply = Trees.TypeApply[T] + type GenericApply = Trees.GenericApply[T] + type Literal = Trees.Literal[T] + type New = Trees.New[T] + type Typed = Trees.Typed[T] + type NamedArg = Trees.NamedArg[T] + type Assign = Trees.Assign[T] + type Block = Trees.Block[T] + type If = Trees.If[T] + type InlineIf = Trees.InlineIf[T] + type Closure = Trees.Closure[T] + type Match = Trees.Match[T] + type InlineMatch = Trees.InlineMatch[T] + type CaseDef = Trees.CaseDef[T] + type Labeled = Trees.Labeled[T] + type Return = Trees.Return[T] + type WhileDo = Trees.WhileDo[T] + type Try = Trees.Try[T] + type SeqLiteral = Trees.SeqLiteral[T] + type JavaSeqLiteral = Trees.JavaSeqLiteral[T] + type Inlined = Trees.Inlined[T] + type TypeTree = Trees.TypeTree[T] + type InferredTypeTree = 
Trees.InferredTypeTree[T] + type SingletonTypeTree = Trees.SingletonTypeTree[T] + type RefinedTypeTree = Trees.RefinedTypeTree[T] + type AppliedTypeTree = Trees.AppliedTypeTree[T] + type LambdaTypeTree = Trees.LambdaTypeTree[T] + type TermLambdaTypeTree = Trees.TermLambdaTypeTree[T] + type MatchTypeTree = Trees.MatchTypeTree[T] + type ByNameTypeTree = Trees.ByNameTypeTree[T] + type TypeBoundsTree = Trees.TypeBoundsTree[T] + type Bind = Trees.Bind[T] + type Alternative = Trees.Alternative[T] + type UnApply = Trees.UnApply[T] + type ValDef = Trees.ValDef[T] + type DefDef = Trees.DefDef[T] + type TypeDef = Trees.TypeDef[T] + type Template = Trees.Template[T] + type Import = Trees.Import[T] + type Export = Trees.Export[T] + type ImportOrExport = Trees.ImportOrExport[T] + type PackageDef = Trees.PackageDef[T] + type Annotated = Trees.Annotated[T] + type Thicket = Trees.Thicket[T] + + type Hole = Trees.Hole[T] + + @sharable val EmptyTree: Thicket = genericEmptyTree + @sharable val EmptyValDef: ValDef = genericEmptyValDef + @sharable val ContextualEmptyTree: Thicket = new EmptyTree() // an empty tree marking a contextual closure + + // ----- Auxiliary creation methods ------------------ + + def Thicket(): Thicket = EmptyTree + def Thicket(x1: Tree, x2: Tree)(implicit src: SourceFile): Thicket = new Thicket(x1 :: x2 :: Nil) + def Thicket(x1: Tree, x2: Tree, x3: Tree)(implicit src: SourceFile): Thicket = new Thicket(x1 :: x2 :: x3 :: Nil) + def Thicket(xs: List[Tree])(implicit src: SourceFile) = new Thicket(xs) + + def flatTree(xs: List[Tree])(implicit src: SourceFile): Tree = flatten(xs) match { + case x :: Nil => x + case ys => Thicket(ys) + } + + // ----- Helper classes for copying, transforming, accumulating ----------------- + + val cpy: TreeCopier + + /** A class for copying trees. The copy methods avoid creating a new tree + * If all arguments stay the same. + * + * Note: Some of the copy methods take a context. 
   * These are exactly those methods that are overridden in TypedTreeCopier
   * so that they selectively retype themselves. Retyping needs a context.
   */
  abstract class TreeCopier {
    // Two overloads so that copies of MemberDefs keep their more specific
    // static result type through postProcess.
    protected def postProcess(tree: Tree, copied: untpd.Tree): copied.ThisTree[T]
    protected def postProcess(tree: Tree, copied: untpd.MemberDef): copied.ThisTree[T]

    /** Source of the copied tree */
    protected def sourceFile(tree: Tree): SourceFile = tree.source

    protected def finalize(tree: Tree, copied: untpd.Tree): copied.ThisTree[T] =
      Stats.record(s"TreeCopier.finalize/${tree.getClass == copied.getClass}")
      postProcess(tree, copied.withSpan(tree.span).withAttachmentsFrom(tree))

    protected def finalize(tree: Tree, copied: untpd.MemberDef): copied.ThisTree[T] =
      Stats.record(s"TreeCopier.finalize/${tree.getClass == copied.getClass}")
      postProcess(tree, copied.withSpan(tree.span).withAttachmentsFrom(tree))

    // Convention for the copy methods below: return the original tree unchanged
    // when every child is identical (`eq` / `==`) to the old one, so unchanged
    // subtrees are shared instead of reallocated.
    def Ident(tree: Tree)(name: Name)(using Context): Ident = tree match {
      case tree: Ident if name == tree.name => tree
      case _ => finalize(tree, untpd.Ident(name)(sourceFile(tree)))
    }
    def Select(tree: Tree)(qualifier: Tree, name: Name)(using Context): Select = tree match {
      // SelectWithSig must be tested first so the signature is preserved on copy
      case tree: SelectWithSig =>
        if ((qualifier eq tree.qualifier) && (name == tree.name)) tree
        else finalize(tree, SelectWithSig(qualifier, name, tree.sig)(sourceFile(tree)))
      case tree: Select if (qualifier eq tree.qualifier) && (name == tree.name) => tree
      case _ => finalize(tree, untpd.Select(qualifier, name)(sourceFile(tree)))
    }
    /** Copy Ident or Select trees */
    def Ref(tree: RefTree)(name: Name)(using Context): RefTree = tree match {
      case Ident(_) => Ident(tree)(name)
      case Select(qual, _) => Select(tree)(qual, name)
    }
    def This(tree: Tree)(qual: untpd.Ident)(using Context): This = tree match {
      case tree: This if (qual eq tree.qual) => tree
      case _ => finalize(tree, untpd.This(qual)(sourceFile(tree)))
    }
    def Super(tree: Tree)(qual: Tree, mix: untpd.Ident)(using
Context): Super = tree match { + case tree: Super if (qual eq tree.qual) && (mix eq tree.mix) => tree + case _ => finalize(tree, untpd.Super(qual, mix)(sourceFile(tree))) + } + def Apply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): Apply = tree match { + case tree: Apply if (fun eq tree.fun) && (args eq tree.args) => tree + case _ => finalize(tree, untpd.Apply(fun, args)(sourceFile(tree))) + //.ensuring(res => res.uniqueId != 2213, s"source = $tree, ${tree.uniqueId}, ${tree.span}") + } + def TypeApply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): TypeApply = tree match { + case tree: TypeApply if (fun eq tree.fun) && (args eq tree.args) => tree + case _ => finalize(tree, untpd.TypeApply(fun, args)(sourceFile(tree))) + } + def Literal(tree: Tree)(const: Constant)(using Context): Literal = tree match { + case tree: Literal if const == tree.const => tree + case _ => finalize(tree, untpd.Literal(const)(sourceFile(tree))) + } + def New(tree: Tree)(tpt: Tree)(using Context): New = tree match { + case tree: New if (tpt eq tree.tpt) => tree + case _ => finalize(tree, untpd.New(tpt)(sourceFile(tree))) + } + def Typed(tree: Tree)(expr: Tree, tpt: Tree)(using Context): Typed = tree match { + case tree: Typed if (expr eq tree.expr) && (tpt eq tree.tpt) => tree + case tree => finalize(tree, untpd.Typed(expr, tpt)(sourceFile(tree))) + } + def NamedArg(tree: Tree)(name: Name, arg: Tree)(using Context): NamedArg = tree match { + case tree: NamedArg if (name == tree.name) && (arg eq tree.arg) => tree + case _ => finalize(tree, untpd.NamedArg(name, arg)(sourceFile(tree))) + } + def Assign(tree: Tree)(lhs: Tree, rhs: Tree)(using Context): Assign = tree match { + case tree: Assign if (lhs eq tree.lhs) && (rhs eq tree.rhs) => tree + case _ => finalize(tree, untpd.Assign(lhs, rhs)(sourceFile(tree))) + } + def Block(tree: Tree)(stats: List[Tree], expr: Tree)(using Context): Block = tree match { + case tree: Block if (stats eq tree.stats) && (expr eq tree.expr) => 
tree + case _ => finalize(tree, untpd.Block(stats, expr)(sourceFile(tree))) + } + def If(tree: Tree)(cond: Tree, thenp: Tree, elsep: Tree)(using Context): If = tree match { + case tree: If if (cond eq tree.cond) && (thenp eq tree.thenp) && (elsep eq tree.elsep) => tree + case tree: InlineIf => finalize(tree, untpd.InlineIf(cond, thenp, elsep)(sourceFile(tree))) + case _ => finalize(tree, untpd.If(cond, thenp, elsep)(sourceFile(tree))) + } + def Closure(tree: Tree)(env: List[Tree], meth: Tree, tpt: Tree)(using Context): Closure = tree match { + case tree: Closure if (env eq tree.env) && (meth eq tree.meth) && (tpt eq tree.tpt) => tree + case _ => finalize(tree, untpd.Closure(env, meth, tpt)(sourceFile(tree))) + } + def Match(tree: Tree)(selector: Tree, cases: List[CaseDef])(using Context): Match = tree match { + case tree: Match if (selector eq tree.selector) && (cases eq tree.cases) => tree + case tree: InlineMatch => finalize(tree, untpd.InlineMatch(selector, cases)(sourceFile(tree))) + case _ => finalize(tree, untpd.Match(selector, cases)(sourceFile(tree))) + } + def CaseDef(tree: Tree)(pat: Tree, guard: Tree, body: Tree)(using Context): CaseDef = tree match { + case tree: CaseDef if (pat eq tree.pat) && (guard eq tree.guard) && (body eq tree.body) => tree + case _ => finalize(tree, untpd.CaseDef(pat, guard, body)(sourceFile(tree))) + } + def Labeled(tree: Tree)(bind: Bind, expr: Tree)(using Context): Labeled = tree match { + case tree: Labeled if (bind eq tree.bind) && (expr eq tree.expr) => tree + case _ => finalize(tree, untpd.Labeled(bind, expr)(sourceFile(tree))) + } + def Return(tree: Tree)(expr: Tree, from: Tree)(using Context): Return = tree match { + case tree: Return if (expr eq tree.expr) && (from eq tree.from) => tree + case _ => finalize(tree, untpd.Return(expr, from)(sourceFile(tree))) + } + def WhileDo(tree: Tree)(cond: Tree, body: Tree)(using Context): WhileDo = tree match { + case tree: WhileDo if (cond eq tree.cond) && (body eq tree.body) => 
tree + case _ => finalize(tree, untpd.WhileDo(cond, body)(sourceFile(tree))) + } + def Try(tree: Tree)(expr: Tree, cases: List[CaseDef], finalizer: Tree)(using Context): Try = tree match { + case tree: Try if (expr eq tree.expr) && (cases eq tree.cases) && (finalizer eq tree.finalizer) => tree + case _ => finalize(tree, untpd.Try(expr, cases, finalizer)(sourceFile(tree))) + } + def SeqLiteral(tree: Tree)(elems: List[Tree], elemtpt: Tree)(using Context): SeqLiteral = tree match { + case tree: JavaSeqLiteral => + if ((elems eq tree.elems) && (elemtpt eq tree.elemtpt)) tree + else finalize(tree, untpd.JavaSeqLiteral(elems, elemtpt)) + case tree: SeqLiteral if (elems eq tree.elems) && (elemtpt eq tree.elemtpt) => tree + case _ => finalize(tree, untpd.SeqLiteral(elems, elemtpt)(sourceFile(tree))) + } + def Inlined(tree: Tree)(call: tpd.Tree, bindings: List[MemberDef], expansion: Tree)(using Context): Inlined = tree match { + case tree: Inlined if (call eq tree.call) && (bindings eq tree.bindings) && (expansion eq tree.expansion) => tree + case _ => finalize(tree, untpd.Inlined(call, bindings, expansion)(sourceFile(tree))) + } + def SingletonTypeTree(tree: Tree)(ref: Tree)(using Context): SingletonTypeTree = tree match { + case tree: SingletonTypeTree if (ref eq tree.ref) => tree + case _ => finalize(tree, untpd.SingletonTypeTree(ref)(sourceFile(tree))) + } + def RefinedTypeTree(tree: Tree)(tpt: Tree, refinements: List[Tree])(using Context): RefinedTypeTree = tree match { + case tree: RefinedTypeTree if (tpt eq tree.tpt) && (refinements eq tree.refinements) => tree + case _ => finalize(tree, untpd.RefinedTypeTree(tpt, refinements)(sourceFile(tree))) + } + def AppliedTypeTree(tree: Tree)(tpt: Tree, args: List[Tree])(using Context): AppliedTypeTree = tree match { + case tree: AppliedTypeTree if (tpt eq tree.tpt) && (args eq tree.args) => tree + case _ => finalize(tree, untpd.AppliedTypeTree(tpt, args)(sourceFile(tree))) + } + def LambdaTypeTree(tree: Tree)(tparams: 
List[TypeDef], body: Tree)(using Context): LambdaTypeTree = tree match { + case tree: LambdaTypeTree if (tparams eq tree.tparams) && (body eq tree.body) => tree + case _ => finalize(tree, untpd.LambdaTypeTree(tparams, body)(sourceFile(tree))) + } + def TermLambdaTypeTree(tree: Tree)(params: List[ValDef], body: Tree)(using Context): TermLambdaTypeTree = tree match { + case tree: TermLambdaTypeTree if (params eq tree.params) && (body eq tree.body) => tree + case _ => finalize(tree, untpd.TermLambdaTypeTree(params, body)(sourceFile(tree))) + } + def MatchTypeTree(tree: Tree)(bound: Tree, selector: Tree, cases: List[CaseDef])(using Context): MatchTypeTree = tree match { + case tree: MatchTypeTree if (bound eq tree.bound) && (selector eq tree.selector) && (cases eq tree.cases) => tree + case _ => finalize(tree, untpd.MatchTypeTree(bound, selector, cases)(sourceFile(tree))) + } + def ByNameTypeTree(tree: Tree)(result: Tree)(using Context): ByNameTypeTree = tree match { + case tree: ByNameTypeTree if (result eq tree.result) => tree + case _ => finalize(tree, untpd.ByNameTypeTree(result)(sourceFile(tree))) + } + def TypeBoundsTree(tree: Tree)(lo: Tree, hi: Tree, alias: Tree)(using Context): TypeBoundsTree = tree match { + case tree: TypeBoundsTree if (lo eq tree.lo) && (hi eq tree.hi) && (alias eq tree.alias) => tree + case _ => finalize(tree, untpd.TypeBoundsTree(lo, hi, alias)(sourceFile(tree))) + } + def Bind(tree: Tree)(name: Name, body: Tree)(using Context): Bind = tree match { + case tree: Bind if (name eq tree.name) && (body eq tree.body) => tree + case _ => finalize(tree, untpd.Bind(name, body)(sourceFile(tree))) + } + def Alternative(tree: Tree)(trees: List[Tree])(using Context): Alternative = tree match { + case tree: Alternative if (trees eq tree.trees) => tree + case _ => finalize(tree, untpd.Alternative(trees)(sourceFile(tree))) + } + def UnApply(tree: Tree)(fun: Tree, implicits: List[Tree], patterns: List[Tree])(using Context): UnApply = tree match { + case 
tree: UnApply if (fun eq tree.fun) && (implicits eq tree.implicits) && (patterns eq tree.patterns) => tree + case _ => finalize(tree, untpd.UnApply(fun, implicits, patterns)(sourceFile(tree))) + } + def ValDef(tree: Tree)(name: TermName, tpt: Tree, rhs: LazyTree)(using Context): ValDef = tree match { + case tree: ValDef if (name == tree.name) && (tpt eq tree.tpt) && (rhs eq tree.unforcedRhs) => tree + case _ => finalize(tree, untpd.ValDef(name, tpt, rhs)(sourceFile(tree))) + } + def DefDef(tree: Tree)(name: TermName, paramss: List[ParamClause], tpt: Tree, rhs: LazyTree)(using Context): DefDef = tree match { + case tree: DefDef if (name == tree.name) && (paramss eq tree.paramss) && (tpt eq tree.tpt) && (rhs eq tree.unforcedRhs) => tree + case _ => finalize(tree, untpd.DefDef(name, paramss, tpt, rhs)(sourceFile(tree))) + } + def TypeDef(tree: Tree)(name: TypeName, rhs: Tree)(using Context): TypeDef = tree match { + case tree: TypeDef if (name == tree.name) && (rhs eq tree.rhs) => tree + case _ => finalize(tree, untpd.TypeDef(name, rhs)(sourceFile(tree))) + } + def Template(tree: Tree)(constr: DefDef, parents: List[Tree], derived: List[untpd.Tree], self: ValDef, body: LazyTreeList)(using Context): Template = tree match { + case tree: Template if (constr eq tree.constr) && (parents eq tree.parents) && (derived eq tree.derived) && (self eq tree.self) && (body eq tree.unforcedBody) => tree + case tree => finalize(tree, untpd.Template(constr, parents, derived, self, body)(sourceFile(tree))) + } + def Import(tree: Tree)(expr: Tree, selectors: List[untpd.ImportSelector])(using Context): Import = tree match { + case tree: Import if (expr eq tree.expr) && (selectors eq tree.selectors) => tree + case _ => finalize(tree, untpd.Import(expr, selectors)(sourceFile(tree))) + } + def Export(tree: Tree)(expr: Tree, selectors: List[untpd.ImportSelector])(using Context): Export = tree match { + case tree: Export if (expr eq tree.expr) && (selectors eq tree.selectors) => tree + case _ 
=> finalize(tree, untpd.Export(expr, selectors)(sourceFile(tree))) + } + def PackageDef(tree: Tree)(pid: RefTree, stats: List[Tree])(using Context): PackageDef = tree match { + case tree: PackageDef if (pid eq tree.pid) && (stats eq tree.stats) => tree + case _ => finalize(tree, untpd.PackageDef(pid, stats)(sourceFile(tree))) + } + def Annotated(tree: Tree)(arg: Tree, annot: Tree)(using Context): Annotated = tree match { + case tree: Annotated if (arg eq tree.arg) && (annot eq tree.annot) => tree + case _ => finalize(tree, untpd.Annotated(arg, annot)(sourceFile(tree))) + } + def Thicket(tree: Tree)(trees: List[Tree])(using Context): Thicket = tree match { + case tree: Thicket if (trees eq tree.trees) => tree + case _ => finalize(tree, untpd.Thicket(trees)(sourceFile(tree))) + } + def Hole(tree: Tree)(isTerm: Boolean, idx: Int, args: List[Tree], content: Tree, tpt: Tree)(using Context): Hole = tree match { + case tree: Hole if isTerm == tree.isTerm && idx == tree.idx && args.eq(tree.args) && content.eq(tree.content) && content.eq(tree.content) => tree + case _ => finalize(tree, untpd.Hole(isTerm, idx, args, content, tpt)(sourceFile(tree))) + } + + // Copier methods with default arguments; these demand that the original tree + // is of the same class as the copy. We only include trees with more than 2 elements here. 
    // Convenience overloads: each forwards to the general copier above, with
    // every argument defaulting to the corresponding child of `tree`, so callers
    // can update a single child of a tree whose class is statically known.
    def If(tree: If)(cond: Tree = tree.cond, thenp: Tree = tree.thenp, elsep: Tree = tree.elsep)(using Context): If =
      If(tree: Tree)(cond, thenp, elsep)
    def Closure(tree: Closure)(env: List[Tree] = tree.env, meth: Tree = tree.meth, tpt: Tree = tree.tpt)(using Context): Closure =
      Closure(tree: Tree)(env, meth, tpt)
    def CaseDef(tree: CaseDef)(pat: Tree = tree.pat, guard: Tree = tree.guard, body: Tree = tree.body)(using Context): CaseDef =
      CaseDef(tree: Tree)(pat, guard, body)
    def Try(tree: Try)(expr: Tree = tree.expr, cases: List[CaseDef] = tree.cases, finalizer: Tree = tree.finalizer)(using Context): Try =
      Try(tree: Tree)(expr, cases, finalizer)
    def UnApply(tree: UnApply)(fun: Tree = tree.fun, implicits: List[Tree] = tree.implicits, patterns: List[Tree] = tree.patterns)(using Context): UnApply =
      UnApply(tree: Tree)(fun, implicits, patterns)
    def ValDef(tree: ValDef)(name: TermName = tree.name, tpt: Tree = tree.tpt, rhs: LazyTree = tree.unforcedRhs)(using Context): ValDef =
      ValDef(tree: Tree)(name, tpt, rhs)
    def DefDef(tree: DefDef)(name: TermName = tree.name, paramss: List[ParamClause] = tree.paramss, tpt: Tree = tree.tpt, rhs: LazyTree = tree.unforcedRhs)(using Context): DefDef =
      DefDef(tree: Tree)(name, paramss, tpt, rhs)
    def TypeDef(tree: TypeDef)(name: TypeName = tree.name, rhs: Tree = tree.rhs)(using Context): TypeDef =
      TypeDef(tree: Tree)(name, rhs)
    def Template(tree: Template)(constr: DefDef = tree.constr, parents: List[Tree] = tree.parents, derived: List[untpd.Tree] = tree.derived, self: ValDef = tree.self, body: LazyTreeList = tree.unforcedBody)(using Context): Template =
      Template(tree: Tree)(constr, parents, derived, self, body)
    def Hole(tree: Hole)(isTerm: Boolean = tree.isTerm, idx: Int = tree.idx, args: List[Tree] = tree.args, content: Tree = tree.content, tpt: Tree = tree.tpt)(using Context): Hole =
      Hole(tree: Tree)(isTerm, idx, args, content, tpt)

  }

  /** Hook to indicate that a transform of some subtree should
be skipped */ + protected def skipTransform(tree: Tree)(using Context): Boolean = false + + /** For untyped trees, this is just the identity. + * For typed trees, a context derived form `ctx` that records `call` as the + * innermost enclosing call for which the inlined version is currently + * processed. + */ + protected def inlineContext(call: Tree)(using Context): Context = ctx + + /** The context to use when mapping or accumulating over a tree */ + def localCtx(tree: Tree)(using Context): Context + + /** The context to use when transforming a tree. + * It ensures that the source is correct, and that the local context is used if + * that's necessary for transforming the whole tree. + * TODO: ensure transform is always called with the correct context as argument + * @see https://github.com/lampepfl/dotty/pull/13880#discussion_r836395977 + */ + def transformCtx(tree: Tree)(using Context): Context = + val sourced = + if tree.source.exists && tree.source != ctx.source + then ctx.withSource(tree.source) + else ctx + tree match + case t: (MemberDef | PackageDef | LambdaTypeTree | TermLambdaTypeTree) => + localCtx(t)(using sourced) + case _ => + sourced + + abstract class TreeMap(val cpy: TreeCopier = inst.cpy) { self => + def transform(tree: Tree)(using Context): Tree = { + inContext(transformCtx(tree)) { + Stats.record(s"TreeMap.transform/$getClass") + if (skipTransform(tree)) tree + else tree match { + case Ident(name) => + tree + case Select(qualifier, name) => + cpy.Select(tree)(transform(qualifier), name) + case This(qual) => + tree + case Super(qual, mix) => + cpy.Super(tree)(transform(qual), mix) + case Apply(fun, args) => + cpy.Apply(tree)(transform(fun), transform(args)) + case TypeApply(fun, args) => + cpy.TypeApply(tree)(transform(fun), transform(args)) + case Literal(const) => + tree + case New(tpt) => + cpy.New(tree)(transform(tpt)) + case Typed(expr, tpt) => + cpy.Typed(tree)(transform(expr), transform(tpt)) + case NamedArg(name, arg) => + 
cpy.NamedArg(tree)(name, transform(arg)) + case Assign(lhs, rhs) => + cpy.Assign(tree)(transform(lhs), transform(rhs)) + case blk: Block => + transformBlock(blk) + case If(cond, thenp, elsep) => + cpy.If(tree)(transform(cond), transform(thenp), transform(elsep)) + case Closure(env, meth, tpt) => + cpy.Closure(tree)(transform(env), transform(meth), transform(tpt)) + case Match(selector, cases) => + cpy.Match(tree)(transform(selector), transformSub(cases)) + case CaseDef(pat, guard, body) => + cpy.CaseDef(tree)(transform(pat), transform(guard), transform(body)) + case Labeled(bind, expr) => + cpy.Labeled(tree)(transformSub(bind), transform(expr)) + case Return(expr, from) => + cpy.Return(tree)(transform(expr), transformSub(from)) + case WhileDo(cond, body) => + cpy.WhileDo(tree)(transform(cond), transform(body)) + case Try(block, cases, finalizer) => + cpy.Try(tree)(transform(block), transformSub(cases), transform(finalizer)) + case SeqLiteral(elems, elemtpt) => + cpy.SeqLiteral(tree)(transform(elems), transform(elemtpt)) + case Inlined(call, bindings, expansion) => + cpy.Inlined(tree)(call, transformSub(bindings), transform(expansion)(using inlineContext(call))) + case TypeTree() => + tree + case SingletonTypeTree(ref) => + cpy.SingletonTypeTree(tree)(transform(ref)) + case RefinedTypeTree(tpt, refinements) => + cpy.RefinedTypeTree(tree)(transform(tpt), transformSub(refinements)) + case AppliedTypeTree(tpt, args) => + cpy.AppliedTypeTree(tree)(transform(tpt), transform(args)) + case LambdaTypeTree(tparams, body) => + cpy.LambdaTypeTree(tree)(transformSub(tparams), transform(body)) + case TermLambdaTypeTree(params, body) => + cpy.TermLambdaTypeTree(tree)(transformSub(params), transform(body)) + case MatchTypeTree(bound, selector, cases) => + cpy.MatchTypeTree(tree)(transform(bound), transform(selector), transformSub(cases)) + case ByNameTypeTree(result) => + cpy.ByNameTypeTree(tree)(transform(result)) + case TypeBoundsTree(lo, hi, alias) => + 
cpy.TypeBoundsTree(tree)(transform(lo), transform(hi), transform(alias)) + case Bind(name, body) => + cpy.Bind(tree)(name, transform(body)) + case Alternative(trees) => + cpy.Alternative(tree)(transform(trees)) + case UnApply(fun, implicits, patterns) => + cpy.UnApply(tree)(transform(fun), transform(implicits), transform(patterns)) + case EmptyValDef => + tree + case tree @ ValDef(name, tpt, _) => + val tpt1 = transform(tpt) + val rhs1 = transform(tree.rhs) + cpy.ValDef(tree)(name, tpt1, rhs1) + case tree @ DefDef(name, paramss, tpt, _) => + cpy.DefDef(tree)(name, transformParamss(paramss), transform(tpt), transform(tree.rhs)) + case tree @ TypeDef(name, rhs) => + cpy.TypeDef(tree)(name, transform(rhs)) + case tree @ Template(constr, parents, self, _) if tree.derived.isEmpty => + cpy.Template(tree)(transformSub(constr), transform(tree.parents), Nil, transformSub(self), transformStats(tree.body, tree.symbol)) + case Import(expr, selectors) => + cpy.Import(tree)(transform(expr), selectors) + case Export(expr, selectors) => + cpy.Export(tree)(transform(expr), selectors) + case PackageDef(pid, stats) => + cpy.PackageDef(tree)(transformSub(pid), transformStats(stats, ctx.owner)) + case Annotated(arg, annot) => + cpy.Annotated(tree)(transform(arg), transform(annot)) + case Thicket(trees) => + val trees1 = transform(trees) + if (trees1 eq trees) tree else Thicket(trees1) + case tree @ Hole(_, _, args, content, tpt) => + cpy.Hole(tree)(args = transform(args), content = transform(content), tpt = transform(tpt)) + case _ => + transformMoreCases(tree) + } + } + } + + def transformStats(trees: List[Tree], exprOwner: Symbol)(using Context): List[Tree] = + transform(trees) + def transformBlock(blk: Block)(using Context): Block = + cpy.Block(blk)(transformStats(blk.stats, ctx.owner), transform(blk.expr)) + def transform(trees: List[Tree])(using Context): List[Tree] = + flatten(trees mapConserve (transform(_))) + def transformSub[Tr <: Tree](tree: Tr)(using Context): Tr = + 
transform(tree).asInstanceOf[Tr] + def transformSub[Tr <: Tree](trees: List[Tr])(using Context): List[Tr] = + transform(trees).asInstanceOf[List[Tr]] + def transformParams(params: ParamClause)(using Context): ParamClause = + transform(params).asInstanceOf[ParamClause] + def transformParamss(paramss: List[ParamClause])(using Context): List[ParamClause] = + paramss.mapConserve(transformParams) + + protected def transformMoreCases(tree: Tree)(using Context): Tree = { + assert(ctx.reporter.errorsReported) + tree + } + } + + abstract class TreeAccumulator[X] { self: TreeAccumulator[X] @retains(caps.*) => + // Ties the knot of the traversal: call `foldOver(x, tree))` to dive in the `tree` node. + def apply(x: X, tree: Tree)(using Context): X + + def apply(x: X, trees: List[Tree])(using Context): X = + def fold(x: X, trees: List[Tree]): X = trees match + case tree :: rest => fold(apply(x, tree), rest) + case Nil => x + fold(x, trees) + + def foldOver(x: X, tree: Tree)(using Context): X = + if (tree.source != ctx.source && tree.source.exists) + foldOver(x, tree)(using ctx.withSource(tree.source)) + else { + Stats.record(s"TreeAccumulator.foldOver/$getClass") + tree match { + case Ident(name) => + x + case Select(qualifier, name) => + this(x, qualifier) + case This(qual) => + x + case Super(qual, mix) => + this(x, qual) + case Apply(fun, args) => + this(this(x, fun), args) + case TypeApply(fun, args) => + this(this(x, fun), args) + case Literal(const) => + x + case New(tpt) => + this(x, tpt) + case Typed(expr, tpt) => + this(this(x, expr), tpt) + case NamedArg(name, arg) => + this(x, arg) + case Assign(lhs, rhs) => + this(this(x, lhs), rhs) + case Block(stats, expr) => + this(this(x, stats), expr) + case If(cond, thenp, elsep) => + this(this(this(x, cond), thenp), elsep) + case Closure(env, meth, tpt) => + this(this(this(x, env), meth), tpt) + case Match(selector, cases) => + this(this(x, selector), cases) + case CaseDef(pat, guard, body) => + this(this(this(x, pat), 
guard), body) + case Labeled(bind, expr) => + this(this(x, bind), expr) + case Return(expr, from) => + this(this(x, expr), from) + case WhileDo(cond, body) => + this(this(x, cond), body) + case Try(block, handler, finalizer) => + this(this(this(x, block), handler), finalizer) + case SeqLiteral(elems, elemtpt) => + this(this(x, elems), elemtpt) + case Inlined(call, bindings, expansion) => + this(this(x, bindings), expansion)(using inlineContext(call)) + case TypeTree() => + x + case SingletonTypeTree(ref) => + this(x, ref) + case RefinedTypeTree(tpt, refinements) => + this(this(x, tpt), refinements) + case AppliedTypeTree(tpt, args) => + this(this(x, tpt), args) + case LambdaTypeTree(tparams, body) => + inContext(localCtx(tree)) { + this(this(x, tparams), body) + } + case TermLambdaTypeTree(params, body) => + inContext(localCtx(tree)) { + this(this(x, params), body) + } + case MatchTypeTree(bound, selector, cases) => + this(this(this(x, bound), selector), cases) + case ByNameTypeTree(result) => + this(x, result) + case TypeBoundsTree(lo, hi, alias) => + this(this(this(x, lo), hi), alias) + case Bind(name, body) => + this(x, body) + case Alternative(trees) => + this(x, trees) + case UnApply(fun, implicits, patterns) => + this(this(this(x, fun), implicits), patterns) + case tree @ ValDef(_, tpt, _) => + inContext(localCtx(tree)) { + this(this(x, tpt), tree.rhs) + } + case tree @ DefDef(_, paramss, tpt, _) => + inContext(localCtx(tree)) { + this(this(paramss.foldLeft(x)(apply), tpt), tree.rhs) + } + case TypeDef(_, rhs) => + inContext(localCtx(tree)) { + this(x, rhs) + } + case tree @ Template(constr, parents, self, _) if tree.derived.isEmpty => + this(this(this(this(x, constr), parents), self), tree.body) + case Import(expr, _) => + this(x, expr) + case Export(expr, _) => + this(x, expr) + case PackageDef(pid, stats) => + this(this(x, pid), stats)(using localCtx(tree)) + case Annotated(arg, annot) => + this(this(x, arg), annot) + case Thicket(ts) => + this(x, ts) + 
case Hole(_, _, args, content, tpt) => + this(this(this(x, args), content), tpt) + case _ => + foldMoreCases(x, tree) + } + } + + def foldMoreCases(x: X, tree: Tree)(using Context): X = { + assert(ctx.reporter.hasUnreportedErrors + || ctx.reporter.errorsReported + || ctx.mode.is(Mode.Interactive), tree) + // In interactive mode, errors might come from previous runs. + // In case of errors it may be that typed trees point to untyped ones. + // The IDE can still traverse inside such trees, either in the run where errors + // are reported, or in subsequent ones. + x + } + } + + abstract class TreeTraverser extends TreeAccumulator[Unit] { + def traverse(tree: Tree)(using Context): Unit + def traverse(trees: List[Tree])(using Context) = apply((), trees) + def apply(x: Unit, tree: Tree)(using Context): Unit = traverse(tree) + protected def traverseChildren(tree: Tree)(using Context): Unit = foldOver((), tree) + } + + /** Fold `f` over all tree nodes, in depth-first, prefix order */ + class DeepFolder[X](f: (X, Tree) => X) extends TreeAccumulator[X] { + def apply(x: X, tree: Tree)(using Context): X = foldOver(f(x, tree), tree) + } + + /** Fold `f` over all tree nodes, in depth-first, prefix order, but don't visit + * subtrees where `f` returns a different result for the root, i.e. `f(x, root) ne x`. 
+ */ + class ShallowFolder[X](f: (X, Tree) => X) extends TreeAccumulator[X] { + def apply(x: X, tree: Tree)(using Context): X = { + val x1 = f(x, tree) + if (x1.asInstanceOf[AnyRef] ne x.asInstanceOf[AnyRef]) x1 + else foldOver(x1, tree) + } + } + + def rename(tree: NameTree, newName: Name)(using Context): tree.ThisTree[T] = { + tree match { + case tree: Ident => cpy.Ident(tree)(newName) + case tree: Select => cpy.Select(tree)(tree.qualifier, newName) + case tree: Bind => cpy.Bind(tree)(newName, tree.body) + case tree: ValDef => cpy.ValDef(tree)(name = newName.asTermName) + case tree: DefDef => cpy.DefDef(tree)(name = newName.asTermName) + case tree: TypeDef => cpy.TypeDef(tree)(name = newName.asTypeName) + } + }.asInstanceOf[tree.ThisTree[T]] + + object TypeDefs: + def unapply(xs: List[Tree]): Option[List[TypeDef]] = xs match + case (x: TypeDef) :: _ => Some(xs.asInstanceOf[List[TypeDef]]) + case _ => None + + object ValDefs: + def unapply(xs: List[Tree]): Option[List[ValDef]] = xs match + case Nil => Some(Nil) + case (x: ValDef) :: _ => Some(xs.asInstanceOf[List[ValDef]]) + case _ => None + + def termParamssIn(paramss: List[ParamClause]): List[List[ValDef]] = paramss match + case ValDefs(vparams) :: paramss1 => + val paramss2 = termParamssIn(paramss1) + if paramss2 eq paramss1 then paramss.asInstanceOf[List[List[ValDef]]] + else vparams :: paramss2 + case _ :: paramss1 => + termParamssIn(paramss1) + case nil => + Nil + + /** If `tparams` is non-empty, add it to the left `paramss`, merging + * it with a leading type parameter list of `paramss`, if one exists. 
+ */ + def joinParams(tparams: List[TypeDef], paramss: List[ParamClause]): List[ParamClause] = + if tparams.isEmpty then paramss + else paramss match + case TypeDefs(tparams1) :: paramss1 => (tparams ++ tparams1) :: paramss1 + case _ => tparams :: paramss + + def isTermOnly(paramss: List[ParamClause]): Boolean = paramss match + case Nil => true + case params :: paramss1 => + params match + case (param: untpd.TypeDef) :: _ => false + case _ => isTermOnly(paramss1) + + def asTermOnly(paramss: List[ParamClause]): List[List[ValDef]] = + assert(isTermOnly(paramss)) + paramss.asInstanceOf[List[List[ValDef]]] + + /** Delegate to FunProto or FunProtoTyped depending on whether the prefix is `untpd` or `tpd`. */ + protected def FunProto(args: List[Tree], resType: Type)(using Context): ProtoTypes.FunProto + + /** Construct the application `$receiver.$method[$targs]($args)` using overloading resolution + * to find a matching overload of `$method` if necessary. + * This is useful when overloading resolution needs to be performed in a phase after typer. + * Note that this will not perform any kind of implicit search. + * + * @param expectedType An expected type of the application used to guide overloading resolution + */ + def applyOverloaded( + receiver: tpd.Tree, method: TermName, args: List[Tree], targs: List[Type], + expectedType: Type)(using parentCtx: Context): tpd.Tree = { + given ctx: Context = parentCtx.retractMode(Mode.ImplicitsEnabled) + import dotty.tools.dotc.ast.tpd.TreeOps + + val typer = ctx.typer + val proto = FunProto(args, expectedType) + val denot = receiver.tpe.member(method) + if !denot.exists then + overload.println(i"members = ${receiver.tpe.decls}") + report.error(i"no member $receiver . 
$method", receiver.srcPos) + val selected = + if (denot.isOverloaded) { + def typeParamCount(tp: Type) = tp.widen match { + case tp: PolyType => tp.paramInfos.length + case _ => 0 + } + val allAlts = denot.alternatives + .map(denot => TermRef(receiver.tpe, denot.symbol)) + .filter(tr => typeParamCount(tr) == targs.length) + .filter { _.widen match { + case MethodTpe(_, _, x: MethodType) => !x.isImplicitMethod + case _ => true + }} + val alternatives = ctx.typer.resolveOverloaded(allAlts, proto) + assert(alternatives.size == 1, + i"${if (alternatives.isEmpty) "no" else "multiple"} overloads available for " + + i"$method on ${receiver.tpe.widenDealiasKeepAnnots} with targs: $targs%, %; args: $args%, %; expectedType: $expectedType." + + i"all alternatives: ${allAlts.map(_.symbol.showDcl).mkString(", ")}\n" + + i"matching alternatives: ${alternatives.map(_.symbol.showDcl).mkString(", ")}.") // this is parsed from bytecode tree. there's nothing user can do about it + alternatives.head + } + else TermRef(receiver.tpe, denot.symbol) + val fun = receiver.select(selected).appliedToTypes(targs) + + val apply = untpd.Apply(fun, args) + typer.ApplyTo(apply, fun, selected, proto, expectedType) + } + + + def resolveConstructor(atp: Type, args: List[Tree])(using Context): tpd.Tree = { + val targs = atp.argTypes + withoutMode(Mode.PatternOrTypeBits) { + applyOverloaded(tpd.New(atp.typeConstructor), nme.CONSTRUCTOR, args, targs, atp) + } + } + } +} diff --git a/tests/pos-with-compiler-cc/dotc/ast/tpd.scala b/tests/pos-with-compiler-cc/dotc/ast/tpd.scala new file mode 100644 index 000000000000..1f43daec4d37 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/tpd.scala @@ -0,0 +1,1546 @@ +package dotty.tools +package dotc +package ast + +import dotty.tools.dotc.transform.{ExplicitOuter, Erasure} +import typer.ProtoTypes +import transform.SymUtils._ +import transform.TypeUtils._ +import core._ +import Scopes.newScope +import util.Spans._, Types._, Contexts._, Constants._, Names._, 
Flags._, NameOps._ +import Symbols._, StdNames._, Annotations._, Trees._, Symbols._ +import Decorators._, DenotTransformers._ +import collection.{immutable, mutable} +import util.{Property, SourceFile} +import NameKinds.{TempResultName, OuterSelectName} +import typer.ConstFold + +import scala.annotation.tailrec +import scala.collection.mutable.ListBuffer +import language.experimental.pureFunctions + +/** Some creators for typed trees */ +object tpd extends Trees.Instance[Type] with TypedTreeInfo { + + private def ta(using Context) = ctx.typeAssigner + + def Ident(tp: NamedType)(using Context): Ident = + ta.assignType(untpd.Ident(tp.name), tp) + + def Select(qualifier: Tree, name: Name)(using Context): Select = + ta.assignType(untpd.Select(qualifier, name), qualifier) + + def Select(qualifier: Tree, tp: NamedType)(using Context): Select = + untpd.Select(qualifier, tp.name).withType(tp) + + def This(cls: ClassSymbol)(using Context): This = + untpd.This(untpd.Ident(cls.name)).withType(cls.thisType) + + def Super(qual: Tree, mix: untpd.Ident, mixinClass: Symbol)(using Context): Super = + ta.assignType(untpd.Super(qual, mix), qual, mixinClass) + + def Super(qual: Tree, mixName: TypeName, mixinClass: Symbol = NoSymbol)(using Context): Super = + Super(qual, if (mixName.isEmpty) untpd.EmptyTypeIdent else untpd.Ident(mixName), mixinClass) + + def Apply(fn: Tree, args: List[Tree])(using Context): Apply = fn match + case Block(Nil, expr) => + Apply(expr, args) + case _: RefTree | _: GenericApply | _: Inlined | _: Hole => + ta.assignType(untpd.Apply(fn, args), fn, args) + + def TypeApply(fn: Tree, args: List[Tree])(using Context): TypeApply = fn match + case Block(Nil, expr) => + TypeApply(expr, args) + case _: RefTree | _: GenericApply => + ta.assignType(untpd.TypeApply(fn, args), fn, args) + + def Literal(const: Constant)(using Context): Literal = + ta.assignType(untpd.Literal(const)) + + def unitLiteral(using Context): Literal = + Literal(Constant(())) + + def 
nullLiteral(using Context): Literal = + Literal(Constant(null)) + + def New(tpt: Tree)(using Context): New = + ta.assignType(untpd.New(tpt), tpt) + + def New(tp: Type)(using Context): New = New(TypeTree(tp)) + + def Typed(expr: Tree, tpt: Tree)(using Context): Typed = + ta.assignType(untpd.Typed(expr, tpt), tpt) + + def NamedArg(name: Name, arg: Tree)(using Context): NamedArg = + ta.assignType(untpd.NamedArg(name, arg), arg) + + def Assign(lhs: Tree, rhs: Tree)(using Context): Assign = + ta.assignType(untpd.Assign(lhs, rhs)) + + def Block(stats: List[Tree], expr: Tree)(using Context): Block = + ta.assignType(untpd.Block(stats, expr), stats, expr) + + /** Join `stats` in front of `expr` creating a new block if necessary */ + def seq(stats: List[Tree], expr: Tree)(using Context): Tree = + if (stats.isEmpty) expr + else expr match { + case Block(_, _: Closure) => + Block(stats, expr) // leave closures in their own block + case Block(estats, eexpr) => + cpy.Block(expr)(stats ::: estats, eexpr).withType(ta.avoidingType(eexpr, stats)) + case _ => + Block(stats, expr) + } + + def If(cond: Tree, thenp: Tree, elsep: Tree)(using Context): If = + ta.assignType(untpd.If(cond, thenp, elsep), thenp, elsep) + + def InlineIf(cond: Tree, thenp: Tree, elsep: Tree)(using Context): If = + ta.assignType(untpd.InlineIf(cond, thenp, elsep), thenp, elsep) + + def Closure(env: List[Tree], meth: Tree, tpt: Tree)(using Context): Closure = + ta.assignType(untpd.Closure(env, meth, tpt), meth, tpt) + + /** A function def + * + * vparams => expr + * + * gets expanded to + * + * { def $anonfun(vparams) = expr; Closure($anonfun) } + * + * where the closure's type is the target type of the expression (FunctionN, unless + * otherwise specified). 
+ */ + def Closure(meth: TermSymbol, rhsFn: List[List[Tree]] => Tree, targs: List[Tree] = Nil, targetType: Type = NoType)(using Context): Block = { + val targetTpt = if (targetType.exists) TypeTree(targetType) else EmptyTree + val call = + if (targs.isEmpty) Ident(TermRef(NoPrefix, meth)) + else TypeApply(Ident(TermRef(NoPrefix, meth)), targs) + Block( + DefDef(meth, rhsFn) :: Nil, + Closure(Nil, call, targetTpt)) + } + + /** A closure whose anonymous function has the given method type */ + def Lambda(tpe: MethodType, rhsFn: List[Tree] => Tree)(using Context): Block = { + val meth = newAnonFun(ctx.owner, tpe) + Closure(meth, tss => rhsFn(tss.head).changeOwner(ctx.owner, meth)) + } + + def CaseDef(pat: Tree, guard: Tree, body: Tree)(using Context): CaseDef = + ta.assignType(untpd.CaseDef(pat, guard, body), pat, body) + + def Match(selector: Tree, cases: List[CaseDef])(using Context): Match = + ta.assignType(untpd.Match(selector, cases), selector, cases) + + def InlineMatch(selector: Tree, cases: List[CaseDef])(using Context): Match = + ta.assignType(untpd.InlineMatch(selector, cases), selector, cases) + + def Labeled(bind: Bind, expr: Tree)(using Context): Labeled = + ta.assignType(untpd.Labeled(bind, expr)) + + def Labeled(sym: TermSymbol, expr: Tree)(using Context): Labeled = + Labeled(Bind(sym, EmptyTree), expr) + + def Return(expr: Tree, from: Tree)(using Context): Return = + ta.assignType(untpd.Return(expr, from)) + + def Return(expr: Tree, from: Symbol)(using Context): Return = + Return(expr, Ident(from.termRef)) + + def WhileDo(cond: Tree, body: Tree)(using Context): WhileDo = + ta.assignType(untpd.WhileDo(cond, body)) + + def Try(block: Tree, cases: List[CaseDef], finalizer: Tree)(using Context): Try = + ta.assignType(untpd.Try(block, cases, finalizer), block, cases) + + def SeqLiteral(elems: List[Tree], elemtpt: Tree)(using Context): SeqLiteral = + ta.assignType(untpd.SeqLiteral(elems, elemtpt), elems, elemtpt) + + def JavaSeqLiteral(elems: List[Tree], 
elemtpt: Tree)(using Context): JavaSeqLiteral = + ta.assignType(untpd.JavaSeqLiteral(elems, elemtpt), elems, elemtpt).asInstanceOf[JavaSeqLiteral] + + def Inlined(call: Tree, bindings: List[MemberDef], expansion: Tree)(using Context): Inlined = + ta.assignType(untpd.Inlined(call, bindings, expansion), bindings, expansion) + + def TypeTree(tp: Type, inferred: Boolean = false)(using Context): TypeTree = + (if inferred then untpd.InferredTypeTree() else untpd.TypeTree()).withType(tp) + + def SingletonTypeTree(ref: Tree)(using Context): SingletonTypeTree = + ta.assignType(untpd.SingletonTypeTree(ref), ref) + + def RefinedTypeTree(parent: Tree, refinements: List[Tree], refineCls: ClassSymbol)(using Context): Tree = + ta.assignType(untpd.RefinedTypeTree(parent, refinements), parent, refinements, refineCls) + + def AppliedTypeTree(tycon: Tree, args: List[Tree])(using Context): AppliedTypeTree = + ta.assignType(untpd.AppliedTypeTree(tycon, args), tycon, args) + + def ByNameTypeTree(result: Tree)(using Context): ByNameTypeTree = + ta.assignType(untpd.ByNameTypeTree(result), result) + + def LambdaTypeTree(tparams: List[TypeDef], body: Tree)(using Context): LambdaTypeTree = + ta.assignType(untpd.LambdaTypeTree(tparams, body), tparams, body) + + def MatchTypeTree(bound: Tree, selector: Tree, cases: List[CaseDef])(using Context): MatchTypeTree = + ta.assignType(untpd.MatchTypeTree(bound, selector, cases), bound, selector, cases) + + def TypeBoundsTree(lo: Tree, hi: Tree, alias: Tree = EmptyTree)(using Context): TypeBoundsTree = + ta.assignType(untpd.TypeBoundsTree(lo, hi, alias), lo, hi, alias) + + def Bind(sym: Symbol, body: Tree)(using Context): Bind = + ta.assignType(untpd.Bind(sym.name, body), sym) + + /** A pattern corresponding to `sym: tpe` */ + def BindTyped(sym: TermSymbol, tpe: Type)(using Context): Bind = + Bind(sym, Typed(Underscore(tpe), TypeTree(tpe))) + + def Alternative(trees: List[Tree])(using Context): Alternative = + ta.assignType(untpd.Alternative(trees), 
trees) + + def UnApply(fun: Tree, implicits: List[Tree], patterns: List[Tree], proto: Type)(using Context): UnApply = { + assert(fun.isInstanceOf[RefTree] || fun.isInstanceOf[GenericApply]) + ta.assignType(untpd.UnApply(fun, implicits, patterns), proto) + } + + def ValDef(sym: TermSymbol, rhs: LazyTree = EmptyTree, inferred: Boolean = false)(using Context): ValDef = + ta.assignType(untpd.ValDef(sym.name, TypeTree(sym.info, inferred), rhs), sym) + + def SyntheticValDef(name: TermName, rhs: Tree, flags: FlagSet = EmptyFlags)(using Context): ValDef = + ValDef(newSymbol(ctx.owner, name, Synthetic | flags, rhs.tpe.widen, coord = rhs.span), rhs) + + def DefDef(sym: TermSymbol, paramss: List[List[Symbol]], + resultType: Type, rhs: Tree)(using Context): DefDef = + sym.setParamss(paramss) + ta.assignType( + untpd.DefDef( + sym.name, + paramss.map { + case TypeSymbols(params) => params.map(param => TypeDef(param).withSpan(param.span)) + case TermSymbols(params) => params.map(param => ValDef(param).withSpan(param.span)) + case _ => unreachable() + }, + TypeTree(resultType), + rhs), + sym) + + def DefDef(sym: TermSymbol, rhs: Tree = EmptyTree)(using Context): DefDef = + ta.assignType(DefDef(sym, Function.const(rhs) _), sym) + + /** A DefDef with given method symbol `sym`. + * @rhsFn A function from parameter references + * to the method's right-hand side. + * Parameter symbols are taken from the `rawParamss` field of `sym`, or + * are freshly generated if `rawParamss` is empty. 
+ */ + def DefDef(sym: TermSymbol, rhsFn: List[List[Tree]] => Tree)(using Context): DefDef = + + // Map method type `tp` with remaining parameters stored in rawParamss to + // final result type and all (given or synthesized) parameters + def recur(tp: Type, remaining: List[List[Symbol]]): (Type, List[List[Symbol]]) = tp match + case tp: PolyType => + val (tparams: List[TypeSymbol], remaining1) = remaining match + case tparams :: remaining1 => + assert(tparams.hasSameLengthAs(tp.paramNames) && tparams.head.isType) + (tparams.asInstanceOf[List[TypeSymbol]], remaining1) + case nil => + (newTypeParams(sym, tp.paramNames, EmptyFlags, tp.instantiateParamInfos(_)), Nil) + val (rtp, paramss) = recur(tp.instantiate(tparams.map(_.typeRef)), remaining1) + (rtp, tparams :: paramss) + case tp: MethodType => + val isParamDependent = tp.isParamDependent + val previousParamRefs: ListBuffer[TermRef] = + // It is ok to assign `null` here. + // If `isParamDependent == false`, the value of `previousParamRefs` is not used. 
+ if isParamDependent then mutable.ListBuffer[TermRef]() else (null: ListBuffer[TermRef] | Null).uncheckedNN + + def valueParam(name: TermName, origInfo: Type): TermSymbol = + val maybeImplicit = + if tp.isContextualMethod then Given + else if tp.isImplicitMethod then Implicit + else EmptyFlags + val maybeErased = if tp.isErasedMethod then Erased else EmptyFlags + + def makeSym(info: Type) = newSymbol(sym, name, TermParam | maybeImplicit | maybeErased, info, coord = sym.coord) + + if isParamDependent then + val sym = makeSym(origInfo.substParams(tp, previousParamRefs.toList)) + previousParamRefs += sym.termRef + sym + else makeSym(origInfo) + end valueParam + + val (vparams: List[TermSymbol], remaining1) = + if tp.paramNames.isEmpty then (Nil, remaining) + else remaining match + case vparams :: remaining1 => + assert(vparams.hasSameLengthAs(tp.paramNames) && vparams.head.isTerm) + (vparams.asInstanceOf[List[TermSymbol]], remaining1) + case nil => + (tp.paramNames.lazyZip(tp.paramInfos).map(valueParam), Nil) + val (rtp, paramss) = recur(tp.instantiate(vparams.map(_.termRef)), remaining1) + (rtp, vparams :: paramss) + case _ => + assert(remaining.isEmpty) + (tp.widenExpr, Nil) + end recur + + val (rtp, paramss) = recur(sym.info, sym.rawParamss) + DefDef(sym, paramss, rtp, rhsFn(paramss.nestedMap(ref))) + end DefDef + + def TypeDef(sym: TypeSymbol)(using Context): TypeDef = + ta.assignType(untpd.TypeDef(sym.name, TypeTree(sym.info)), sym) + + def ClassDef(cls: ClassSymbol, constr: DefDef, body: List[Tree], superArgs: List[Tree] = Nil)(using Context): TypeDef = { + val firstParent :: otherParents = cls.info.parents: @unchecked + val superRef = + if (cls.is(Trait)) TypeTree(firstParent) + else { + def isApplicable(ctpe: Type): Boolean = ctpe match { + case ctpe: PolyType => + isApplicable(ctpe.instantiate(firstParent.argTypes)) + case ctpe: MethodType => + (superArgs corresponds ctpe.paramInfos)(_.tpe <:< _) + case _ => + false + } + val constr = 
firstParent.decl(nme.CONSTRUCTOR).suchThat(constr => isApplicable(constr.info)) + New(firstParent, constr.symbol.asTerm, superArgs) + } + ClassDefWithParents(cls, constr, superRef :: otherParents.map(TypeTree(_)), body) + } + + def ClassDefWithParents(cls: ClassSymbol, constr: DefDef, parents: List[Tree], body: List[Tree])(using Context): TypeDef = { + val selfType = + if (cls.classInfo.selfInfo ne NoType) ValDef(newSelfSym(cls)) + else EmptyValDef + def isOwnTypeParam(stat: Tree) = + stat.symbol.is(TypeParam) && stat.symbol.owner == cls + val bodyTypeParams = body filter isOwnTypeParam map (_.symbol) + val newTypeParams = + for (tparam <- cls.typeParams if !(bodyTypeParams contains tparam)) + yield TypeDef(tparam) + val findLocalDummy = FindLocalDummyAccumulator(cls) + val localDummy = body.foldLeft(NoSymbol: Symbol)(findLocalDummy.apply) + .orElse(newLocalDummy(cls)) + val impl = untpd.Template(constr, parents, Nil, selfType, newTypeParams ++ body) + .withType(localDummy.termRef) + ta.assignType(untpd.TypeDef(cls.name, impl), cls) + } + + /** An anonymous class + * + * new parents { forwarders } + * + * where `forwarders` contains forwarders for all functions in `fns`. + * @param parents a non-empty list of class types + * @param fns a non-empty of functions for which forwarders should be defined in the class. + * The class has the same owner as the first function in `fns`. + * Its position is the union of all functions in `fns`. 
+ */ + def AnonClass(parents: List[Type], fns: List[TermSymbol], methNames: List[TermName])(using Context): Block = { + AnonClass(fns.head.owner, parents, fns.map(_.span).reduceLeft(_ union _)) { cls => + def forwarder(fn: TermSymbol, name: TermName) = { + val fwdMeth = fn.copy(cls, name, Synthetic | Method | Final).entered.asTerm + for overridden <- fwdMeth.allOverriddenSymbols do + if overridden.is(Extension) then fwdMeth.setFlag(Extension) + if !overridden.is(Deferred) then fwdMeth.setFlag(Override) + DefDef(fwdMeth, ref(fn).appliedToArgss(_)) + } + fns.lazyZip(methNames).map(forwarder) + } + } + + /** An anonymous class + * + * new parents { body } + * + * with the specified owner and position. + */ + def AnonClass(owner: Symbol, parents: List[Type], coord: Coord)(body: ClassSymbol => List[Tree])(using Context): Block = + val parents1 = + if (parents.head.classSymbol.is(Trait)) { + val head = parents.head.parents.head + if (head.isRef(defn.AnyClass)) defn.AnyRefType :: parents else head :: parents + } + else parents + val cls = newNormalizedClassSymbol(owner, tpnme.ANON_CLASS, Synthetic | Final, parents1, coord = coord) + val constr = newConstructor(cls, Synthetic, Nil, Nil).entered + val cdef = ClassDef(cls, DefDef(constr), body(cls)) + Block(cdef :: Nil, New(cls.typeRef, Nil)) + + def Import(expr: Tree, selectors: List[untpd.ImportSelector])(using Context): Import = + ta.assignType(untpd.Import(expr, selectors), newImportSymbol(ctx.owner, expr)) + + def Export(expr: Tree, selectors: List[untpd.ImportSelector])(using Context): Export = + ta.assignType(untpd.Export(expr, selectors)) + + def PackageDef(pid: RefTree, stats: List[Tree])(using Context): PackageDef = + ta.assignType(untpd.PackageDef(pid, stats), pid) + + def Annotated(arg: Tree, annot: Tree)(using Context): Annotated = + ta.assignType(untpd.Annotated(arg, annot), arg, annot) + + def Throw(expr: Tree)(using Context): Tree = + ref(defn.throwMethod).appliedTo(expr) + + def Hole(isTermHole: Boolean, 
idx: Int, args: List[Tree], content: Tree, tpt: Tree)(using Context): Hole = + ta.assignType(untpd.Hole(isTermHole, idx, args, content, tpt), tpt) + + // ------ Making references ------------------------------------------------------ + + def prefixIsElidable(tp: NamedType)(using Context): Boolean = { + val typeIsElidable = tp.prefix match { + case pre: ThisType => + tp.isType || + pre.cls.isStaticOwner || + tp.symbol.isParamOrAccessor && !pre.cls.is(Trait) && ctx.owner.enclosingClass == pre.cls + // was ctx.owner.enclosingClass.derivesFrom(pre.cls) which was not tight enough + // and was spuriously triggered in case inner class would inherit from outer one + // eg anonymous TypeMap inside TypeMap.andThen + case pre: TermRef => + pre.symbol.is(Module) && pre.symbol.isStatic + case pre => + pre `eq` NoPrefix + } + typeIsElidable || + tp.symbol.is(JavaStatic) || + tp.symbol.hasAnnotation(defn.ScalaStaticAnnot) + } + + def needsSelect(tp: Type)(using Context): Boolean = tp match { + case tp: TermRef => !prefixIsElidable(tp) + case _ => false + } + + /** A tree representing the same reference as the given type */ + def ref(tp: NamedType, needLoad: Boolean = true)(using Context): Tree = + if (tp.isType) TypeTree(tp) + else if (prefixIsElidable(tp)) Ident(tp) + else if (tp.symbol.is(Module) && ctx.owner.isContainedIn(tp.symbol.moduleClass)) + followOuterLinks(This(tp.symbol.moduleClass.asClass)) + else if (tp.symbol hasAnnotation defn.ScalaStaticAnnot) + Ident(tp) + else + val pre = tp.prefix + if (pre.isSingleton) followOuterLinks(singleton(pre.dealias, needLoad)).select(tp) + else + val res = Select(TypeTree(pre), tp) + if needLoad && !res.symbol.isStatic then + throw new TypeError(em"cannot establish a reference to $res") + res + + def ref(sym: Symbol)(using Context): Tree = + ref(NamedType(sym.owner.thisType, sym.name, sym.denot)) + + private def followOuterLinks(t: Tree)(using Context) = t match { + case t: This if ctx.erasedTypes && !(t.symbol == 
ctx.owner.enclosingClass || t.symbol.isStaticOwner) => + // after erasure outer paths should be respected + ExplicitOuter.OuterOps(ctx).path(toCls = t.tpe.classSymbol) + case t => + t + } + + def singleton(tp: Type, needLoad: Boolean = true)(using Context): Tree = tp.dealias match { + case tp: TermRef => ref(tp, needLoad) + case tp: ThisType => This(tp.cls) + case tp: SkolemType => singleton(tp.narrow, needLoad) + case SuperType(qual, _) => singleton(qual, needLoad) + case ConstantType(value) => Literal(value) + } + + /** A path that corresponds to the given type `tp`. Error if `tp` is not a refinement + * of an addressable singleton type. + */ + def pathFor(tp: Type)(using Context): Tree = { + def recur(tp: Type): Tree = tp match { + case tp: NamedType => + tp.info match { + case TypeAlias(alias) => recur(alias) + case _: TypeBounds => EmptyTree + case _ => singleton(tp) + } + case tp: TypeProxy => recur(tp.superType) + case _ => EmptyTree + } + recur(tp).orElse { + report.error(em"$tp is not an addressable singleton type") + TypeTree(tp) + } + } + + /** A tree representing a `newXYZArray` operation of the right + * kind for the given element type in `elemTpe`. No type arguments or + * `length` arguments are given. + */ + def newArray(elemTpe: Type, returnTpe: Type, span: Span, dims: JavaSeqLiteral)(using Context): Tree = { + val elemClass = elemTpe.classSymbol + def newArr = + ref(defn.DottyArraysModule).select(defn.newArrayMethod).withSpan(span) + + if (!ctx.erasedTypes) { + assert(!TypeErasure.isGeneric(elemTpe), elemTpe) //needs to be done during typer. 
See Applications.convertNewGenericArray + newArr.appliedToTypeTrees(TypeTree(returnTpe) :: Nil).appliedToTermArgs(clsOf(elemTpe) :: clsOf(returnTpe) :: dims :: Nil).withSpan(span) + } + else // after erasure + newArr.appliedToTermArgs(clsOf(elemTpe) :: clsOf(returnTpe) :: dims :: Nil).withSpan(span) + } + + /** The wrapped array method name for an array of type elemtp */ + def wrapArrayMethodName(elemtp: Type)(using Context): TermName = { + val elemCls = elemtp.classSymbol + if (elemCls.isPrimitiveValueClass) nme.wrapXArray(elemCls.name) + else if (elemCls.derivesFrom(defn.ObjectClass) && !elemCls.isNotRuntimeClass) nme.wrapRefArray + else nme.genericWrapArray + } + + /** A tree representing a `wrapXYZArray(tree)` operation of the right + * kind for the given element type in `elemTpe`. + */ + def wrapArray(tree: Tree, elemtp: Type)(using Context): Tree = + val wrapper = ref(defn.getWrapVarargsArrayModule) + .select(wrapArrayMethodName(elemtp)) + .appliedToTypes(if (elemtp.isPrimitiveValueType) Nil else elemtp :: Nil) + val actualElem = wrapper.tpe.widen.firstParamTypes.head + wrapper.appliedTo(tree.ensureConforms(actualElem)) + + // ------ Creating typed equivalents of trees that exist only in untyped form ------- + + /** new C(args), calling the primary constructor of C */ + def New(tp: Type, args: List[Tree])(using Context): Apply = + New(tp, tp.dealias.typeSymbol.primaryConstructor.asTerm, args) + + /** new C(args), calling given constructor `constr` of C */ + def New(tp: Type, constr: TermSymbol, args: List[Tree])(using Context): Apply = { + val targs = tp.argTypes + val tycon = tp.typeConstructor + New(tycon) + .select(TermRef(tycon, constr)) + .appliedToTypes(targs) + .appliedToTermArgs(args) + } + + /** An object def + * + * object obs extends parents { decls } + * + * gets expanded to + * + * val obj = new obj$ + * class obj$ extends parents { this: obj.type => decls } + * + * (The following no longer applies: + * What's interesting here is that the block 
is well typed + * (because class obj$ is hoistable), but the type of the `obj` val is + * not expressible. What needs to happen in general when + * inferring the type of a val from its RHS, is: if the type contains + * a class that has the val itself as owner, then that class + * is remapped to have the val's owner as owner. Remapping could be + * done by cloning the class with the new owner and substituting + * everywhere in the tree. We know that remapping is safe + * because the only way a local class can appear in the RHS of a val is + * by being hoisted outside of a block, and the necessary checks are + * done at this point already. + * + * On the other hand, for method result type inference, if the type of + * the RHS of a method contains a class owned by the method, this would be + * an error.) + */ + def ModuleDef(sym: TermSymbol, body: List[Tree])(using Context): tpd.Thicket = { + val modcls = sym.moduleClass.asClass + val constrSym = modcls.primaryConstructor orElse newDefaultConstructor(modcls).entered + val constr = DefDef(constrSym.asTerm, EmptyTree) + val clsdef = ClassDef(modcls, constr, body) + val valdef = ValDef(sym, New(modcls.typeRef).select(constrSym).appliedToNone) + Thicket(valdef, clsdef) + } + + /** A `_` with given type */ + def Underscore(tp: Type)(using Context): Ident = untpd.Ident(nme.WILDCARD).withType(tp) + + def defaultValue(tpe: Type)(using Context): Tree = { + val tpw = tpe.widen + + if (tpw isRef defn.IntClass) Literal(Constant(0)) + else if (tpw isRef defn.LongClass) Literal(Constant(0L)) + else if (tpw isRef defn.BooleanClass) Literal(Constant(false)) + else if (tpw isRef defn.CharClass) Literal(Constant('\u0000')) + else if (tpw isRef defn.FloatClass) Literal(Constant(0f)) + else if (tpw isRef defn.DoubleClass) Literal(Constant(0d)) + else if (tpw isRef defn.ByteClass) Literal(Constant(0.toByte)) + else if (tpw isRef defn.ShortClass) Literal(Constant(0.toShort)) + else 
nullLiteral.select(defn.Any_asInstanceOf).appliedToType(tpe) + } + + private class FindLocalDummyAccumulator(cls: ClassSymbol)(using Context) extends TreeAccumulator[Symbol] { + def apply(sym: Symbol, tree: Tree)(using Context) = + if (sym.exists) sym + else if (tree.isDef) { + val owner = tree.symbol.owner + if (owner.isLocalDummy && owner.owner == cls) owner + else if (owner == cls) foldOver(sym, tree) + else sym + } + else foldOver(sym, tree) + } + + /** The owner to be used in a local context when traversing a tree */ + def localOwner(tree: Tree)(using Context): Symbol = + val sym = tree.symbol + (if sym.is(PackageVal) then sym.moduleClass else sym).orElse(ctx.owner) + + /** The local context to use when traversing trees */ + def localCtx(tree: Tree)(using Context): Context = ctx.withOwner(localOwner(tree)) + + override val cpy: TypedTreeCopier = // Type ascription needed to pick up any new members in TreeCopier (currently there are none) + TypedTreeCopier() + + val cpyBetweenPhases: TimeTravellingTreeCopier = TimeTravellingTreeCopier() + + class TypedTreeCopier extends TreeCopier { + def postProcess(tree: Tree, copied: untpd.Tree): copied.ThisTree[Type] = + copied.withTypeUnchecked(tree.tpe) + def postProcess(tree: Tree, copied: untpd.MemberDef): copied.ThisTree[Type] = + copied.withTypeUnchecked(tree.tpe) + + protected val untpdCpy = untpd.cpy + + override def Select(tree: Tree)(qualifier: Tree, name: Name)(using Context): Select = { + val tree1 = untpdCpy.Select(tree)(qualifier, name) + tree match { + case tree: Select if qualifier.tpe eq tree.qualifier.tpe => + tree1.withTypeUnchecked(tree.tpe) + case _ => + val tree2: Select = tree.tpe match { + case tpe: NamedType => + val qualType = qualifier.tpe.widenIfUnstable + if qualType.isExactlyNothing then tree1.withTypeUnchecked(tree.tpe) + else tree1.withType(tpe.derivedSelect(qualType)) + case _ => tree1.withTypeUnchecked(tree.tpe) + } + ConstFold.Select(tree2) + } + } + + override def Apply(tree: Tree)(fun: 
Tree, args: List[Tree])(using Context): Apply = { + val tree1 = untpdCpy.Apply(tree)(fun, args) + tree match { + case tree: Apply + if (fun.tpe eq tree.fun.tpe) && sameTypes(args, tree.args) => + tree1.withTypeUnchecked(tree.tpe) + case _ => ta.assignType(tree1, fun, args) + } + } + + override def TypeApply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): TypeApply = { + val tree1 = untpdCpy.TypeApply(tree)(fun, args) + tree match { + case tree: TypeApply + if (fun.tpe eq tree.fun.tpe) && sameTypes(args, tree.args) => + tree1.withTypeUnchecked(tree.tpe) + case _ => ta.assignType(tree1, fun, args) + } + } + + override def Literal(tree: Tree)(const: Constant)(using Context): Literal = + ta.assignType(untpdCpy.Literal(tree)(const)) + + override def New(tree: Tree)(tpt: Tree)(using Context): New = + ta.assignType(untpdCpy.New(tree)(tpt), tpt) + + override def Typed(tree: Tree)(expr: Tree, tpt: Tree)(using Context): Typed = + ta.assignType(untpdCpy.Typed(tree)(expr, tpt), tpt) + + override def NamedArg(tree: Tree)(name: Name, arg: Tree)(using Context): NamedArg = + ta.assignType(untpdCpy.NamedArg(tree)(name, arg), arg) + + override def Assign(tree: Tree)(lhs: Tree, rhs: Tree)(using Context): Assign = + ta.assignType(untpdCpy.Assign(tree)(lhs, rhs)) + + override def Block(tree: Tree)(stats: List[Tree], expr: Tree)(using Context): Block = { + val tree1 = untpdCpy.Block(tree)(stats, expr) + tree match { + case tree: Block if (expr.tpe eq tree.expr.tpe) && (expr.tpe eq tree.tpe) => + // The last guard is a conservative check: if `tree.tpe` is different from `expr.tpe`, then + // it was computed from widening `expr.tpe`, and tree transforms might cause `expr.tpe.widen` + // to change even if `expr.tpe` itself didn't change, e.g: + // { val s = ...; s } + // If the type of `s` changed, then the type of the block might have changed, even though `expr.tpe` + // will still be `TermRef(NoPrefix, s)` + tree1.withTypeUnchecked(tree.tpe) + case _ => ta.assignType(tree1, 
stats, expr) + } + } + + override def If(tree: Tree)(cond: Tree, thenp: Tree, elsep: Tree)(using Context): If = { + val tree1 = untpdCpy.If(tree)(cond, thenp, elsep) + tree match { + case tree: If if (thenp.tpe eq tree.thenp.tpe) && (elsep.tpe eq tree.elsep.tpe) && + ((tree.tpe eq thenp.tpe) || (tree.tpe eq elsep.tpe)) => + // The last guard is a conservative check similar to the one done in `Block` above, + // if `tree.tpe` is not identical to the type of one of its branch, it might have been + // computed from the widened type of the branches, so the same reasoning than + // in `Block` applies. + tree1.withTypeUnchecked(tree.tpe) + case _ => ta.assignType(tree1, thenp, elsep) + } + } + + override def Closure(tree: Tree)(env: List[Tree], meth: Tree, tpt: Tree)(using Context): Closure = { + val tree1 = untpdCpy.Closure(tree)(env, meth, tpt) + tree match { + case tree: Closure if sameTypes(env, tree.env) && (meth.tpe eq tree.meth.tpe) && (tpt.tpe eq tree.tpt.tpe) => + tree1.withTypeUnchecked(tree.tpe) + case _ => ta.assignType(tree1, meth, tpt) + } + } + + override def Match(tree: Tree)(selector: Tree, cases: List[CaseDef])(using Context): Match = { + val tree1 = untpdCpy.Match(tree)(selector, cases) + tree match { + case tree: Match if sameTypes(cases, tree.cases) => tree1.withTypeUnchecked(tree.tpe) + case _ => ta.assignType(tree1, selector, cases) + } + } + + override def CaseDef(tree: Tree)(pat: Tree, guard: Tree, body: Tree)(using Context): CaseDef = { + val tree1 = untpdCpy.CaseDef(tree)(pat, guard, body) + tree match { + case tree: CaseDef if body.tpe eq tree.body.tpe => tree1.withTypeUnchecked(tree.tpe) + case _ => ta.assignType(tree1, pat, body) + } + } + + override def Labeled(tree: Tree)(bind: Bind, expr: Tree)(using Context): Labeled = + ta.assignType(untpdCpy.Labeled(tree)(bind, expr)) + + override def Return(tree: Tree)(expr: Tree, from: Tree)(using Context): Return = + ta.assignType(untpdCpy.Return(tree)(expr, from)) + + override def WhileDo(tree: 
Tree)(cond: Tree, body: Tree)(using Context): WhileDo = + ta.assignType(untpdCpy.WhileDo(tree)(cond, body)) + + override def Try(tree: Tree)(expr: Tree, cases: List[CaseDef], finalizer: Tree)(using Context): Try = { + val tree1 = untpdCpy.Try(tree)(expr, cases, finalizer) + tree match { + case tree: Try if (expr.tpe eq tree.expr.tpe) && sameTypes(cases, tree.cases) => tree1.withTypeUnchecked(tree.tpe) + case _ => ta.assignType(tree1, expr, cases) + } + } + + override def Inlined(tree: Tree)(call: Tree, bindings: List[MemberDef], expansion: Tree)(using Context): Inlined = { + val tree1 = untpdCpy.Inlined(tree)(call, bindings, expansion) + tree match { + case tree: Inlined if sameTypes(bindings, tree.bindings) && (expansion.tpe eq tree.expansion.tpe) => + tree1.withTypeUnchecked(tree.tpe) + case _ => ta.assignType(tree1, bindings, expansion) + } + } + + override def SeqLiteral(tree: Tree)(elems: List[Tree], elemtpt: Tree)(using Context): SeqLiteral = { + val tree1 = untpdCpy.SeqLiteral(tree)(elems, elemtpt) + tree match { + case tree: SeqLiteral + if sameTypes(elems, tree.elems) && (elemtpt.tpe eq tree.elemtpt.tpe) => + tree1.withTypeUnchecked(tree.tpe) + case _ => + ta.assignType(tree1, elems, elemtpt) + } + } + + override def Annotated(tree: Tree)(arg: Tree, annot: Tree)(using Context): Annotated = { + val tree1 = untpdCpy.Annotated(tree)(arg, annot) + tree match { + case tree: Annotated if (arg.tpe eq tree.arg.tpe) && (annot eq tree.annot) => tree1.withTypeUnchecked(tree.tpe) + case _ => ta.assignType(tree1, arg, annot) + } + } + + override def If(tree: If)(cond: Tree = tree.cond, thenp: Tree = tree.thenp, elsep: Tree = tree.elsep)(using Context): If = + If(tree: Tree)(cond, thenp, elsep) + override def Closure(tree: Closure)(env: List[Tree] = tree.env, meth: Tree = tree.meth, tpt: Tree = tree.tpt)(using Context): Closure = + Closure(tree: Tree)(env, meth, tpt) + override def CaseDef(tree: CaseDef)(pat: Tree = tree.pat, guard: Tree = tree.guard, body: Tree = 
tree.body)(using Context): CaseDef = + CaseDef(tree: Tree)(pat, guard, body) + override def Try(tree: Try)(expr: Tree = tree.expr, cases: List[CaseDef] = tree.cases, finalizer: Tree = tree.finalizer)(using Context): Try = + Try(tree: Tree)(expr, cases, finalizer) + } + + class TimeTravellingTreeCopier extends TypedTreeCopier { + override def Apply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): Apply = + tree match + case tree: Apply + if (tree.fun eq fun) && (tree.args eq args) + && tree.tpe.isInstanceOf[ConstantType] + && isPureExpr(tree) => tree + case _ => + ta.assignType(untpdCpy.Apply(tree)(fun, args), fun, args) + // Note: Reassigning the original type if `fun` and `args` have the same types as before + // does not work here in general: The computed type depends on the widened function type, not + // the function type itself. A tree transform may keep the function type the + // same but its widened type might change. + // However, we keep constant types of pure expressions. This uses the underlying assumptions + // that pure functions yielding a constant will not change in later phases. 
+ + override def TypeApply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): TypeApply = + ta.assignType(untpdCpy.TypeApply(tree)(fun, args), fun, args) + // Same remark as for Apply + + override def Closure(tree: Tree)(env: List[Tree], meth: Tree, tpt: Tree)(using Context): Closure = + ta.assignType(untpdCpy.Closure(tree)(env, meth, tpt), meth, tpt) + + override def Closure(tree: Closure)(env: List[Tree] = tree.env, meth: Tree = tree.meth, tpt: Tree = tree.tpt)(using Context): Closure = + Closure(tree: Tree)(env, meth, tpt) + } + + override def skipTransform(tree: Tree)(using Context): Boolean = tree.tpe.isError + + implicit class TreeOps[ThisTree <: tpd.Tree](private val tree: ThisTree) extends AnyVal { + + def isValue(using Context): Boolean = + tree.isTerm && tree.tpe.widen.isValueType + + def isValueOrPattern(using Context): Boolean = + tree.isValue || tree.isPattern + + def isValueType: Boolean = + tree.isType && tree.tpe.isValueType + + def isInstantiation: Boolean = tree match { + case Apply(Select(New(_), nme.CONSTRUCTOR), _) => true + case _ => false + } + + def shallowFold[T](z: T)(op: (T, tpd.Tree) => T)(using Context): T = + ShallowFolder(op).apply(z, tree) + + def deepFold[T](z: T)(op: (T, tpd.Tree) => T)(using Context): T = + DeepFolder(op).apply(z, tree) + + def find[T](pred: (tpd.Tree) => Boolean)(using Context): Option[tpd.Tree] = + shallowFold[Option[tpd.Tree]](None)((accum, tree) => if (pred(tree)) Some(tree) else accum) + + def subst(from: List[Symbol], to: List[Symbol])(using Context): ThisTree = + TreeTypeMap(substFrom = from, substTo = to).apply(tree) + + /** Change owner from `from` to `to`. If `from` is a weak owner, also change its + * owner to `to`, and continue until a non-weak owner is reached. 
+ */ + def changeOwner(from: Symbol, to: Symbol)(using Context): ThisTree = { + @tailrec def loop(from: Symbol, froms: List[Symbol], tos: List[Symbol]): ThisTree = + if (from.isWeakOwner && !from.owner.isClass) + loop(from.owner, from :: froms, to :: tos) + else + //println(i"change owner ${from :: froms}%, % ==> $tos of $tree") + TreeTypeMap(oldOwners = from :: froms, newOwners = tos).apply(tree) + if (from == to) tree else loop(from, Nil, to :: Nil) + } + + /** + * Set the owner of every definition in this tree which is not itself contained in this + * tree to be `newowner` + */ + def changeNonLocalOwners(newOwner: Symbol)(using Context): Tree = { + val ownerAcc = new TreeAccumulator[immutable.Set[Symbol]] { + def apply(ss: immutable.Set[Symbol], tree: Tree)(using Context) = tree match { + case tree: DefTree => + val sym = tree.symbol + if sym.exists && !sym.owner.is(Package) then ss + sym.owner else ss + case _ => + foldOver(ss, tree) + } + } + val owners = ownerAcc(immutable.Set.empty[Symbol], tree).toList + val newOwners = List.fill(owners.size)(newOwner) + TreeTypeMap(oldOwners = owners, newOwners = newOwners).apply(tree) + } + + /** After phase `trans`, set the owner of every definition in this tree that was formerly + * owner by `from` to `to`. 
+ */ + def changeOwnerAfter(from: Symbol, to: Symbol, trans: DenotTransformer)(using Context): ThisTree = + if (ctx.phase == trans.next) { + val traverser = new TreeTraverser { + def traverse(tree: Tree)(using Context) = tree match { + case tree: DefTree => + val sym = tree.symbol + val prevDenot = atPhase(trans)(sym.denot) + if (prevDenot.effectiveOwner == from.skipWeakOwner) { + val d = sym.copySymDenotation(owner = to) + d.installAfter(trans) + d.transformAfter(trans, d => if (d.owner eq from) d.copySymDenotation(owner = to) else d) + } + if (sym.isWeakOwner) traverseChildren(tree) + case _ => + traverseChildren(tree) + } + } + traverser.traverse(tree) + tree + } + else atPhase(trans.next)(changeOwnerAfter(from, to, trans)) + + /** A select node with the given selector name and a computed type */ + def select(name: Name)(using Context): Select = + Select(tree, name) + + /** A select node with the given selector name such that the designated + * member satisfies predicate `p`. Useful for disambiguating overloaded members. + */ + def select(name: Name, p: Symbol => Boolean)(using Context): Select = + select(tree.tpe.member(name).suchThat(p).symbol) + + /** A select node with the given type */ + def select(tp: NamedType)(using Context): Select = + untpd.Select(tree, tp.name).withType(tp) + + /** A select node that selects the given symbol. Note: Need to make sure this + * is in fact the symbol you would get when you select with the symbol's name, + * otherwise a data race may occur which would be flagged by -Yno-double-bindings. 
+ */ + def select(sym: Symbol)(using Context): Select = { + val tp = + if (sym.isType) { + assert(!sym.is(TypeParam)) + TypeRef(tree.tpe, sym.asType) + } + else + TermRef(tree.tpe, sym.name.asTermName, sym.denot.asSeenFrom(tree.tpe)) + untpd.Select(tree, sym.name).withType(tp) + } + + /** A select node with the given selector name and signature and a computed type */ + def selectWithSig(name: Name, sig: Signature, target: Name)(using Context): Tree = + untpd.SelectWithSig(tree, name, sig).withType(tree.tpe.select(name.asTermName, sig, target)) + + /** A select node with selector name and signature taken from `sym`. + * Note: Use this method instead of select(sym) if the referenced symbol + * might be overridden in the type of the qualifier prefix. See note + * on select(sym: Symbol). + */ + def selectWithSig(sym: Symbol)(using Context): Tree = + selectWithSig(sym.name, sym.signature, sym.targetName) + + /** A unary apply node with given argument: `tree(arg)` */ + def appliedTo(arg: Tree)(using Context): Apply = + appliedToTermArgs(arg :: Nil) + + /** An apply node with given arguments: `tree(arg, args0, ..., argsN)` */ + def appliedTo(arg: Tree, args: Tree*)(using Context): Apply = + appliedToTermArgs(arg :: args.toList) + + /** An apply node with given argument list `tree(args(0), ..., args(args.length - 1))` */ + def appliedToTermArgs(args: List[Tree])(using Context): Apply = + Apply(tree, args) + + /** An applied node that accepts only varargs as arguments */ + def appliedToVarargs(args: List[Tree], tpt: Tree)(using Context): Apply = + appliedTo(repeated(args, tpt)) + + /** An apply or type apply node with given argument list */ + def appliedToArgs(args: List[Tree])(using Context): GenericApply = args match + case arg :: args1 if arg.isType => TypeApply(tree, args) + case _ => Apply(tree, args) + + /** The current tree applied to given argument lists: + * `tree (argss(0)) ... 
(argss(argss.length -1))` + */ + def appliedToArgss(argss: List[List[Tree]])(using Context): Tree = + argss.foldLeft(tree: Tree)(_.appliedToArgs(_)) + + /** The current tree applied to (): `tree()` */ + def appliedToNone(using Context): Apply = Apply(tree, Nil) + + /** The current tree applied to given type argument: `tree[targ]` */ + def appliedToType(targ: Type)(using Context): Tree = + appliedToTypes(targ :: Nil) + + /** The current tree applied to given type arguments: `tree[targ0, ..., targN]` */ + def appliedToTypes(targs: List[Type])(using Context): Tree = + appliedToTypeTrees(targs map (TypeTree(_))) + + /** The current tree applied to given type argument: `tree[targ]` */ + def appliedToTypeTree(targ: Tree)(using Context): Tree = + appliedToTypeTrees(targ :: Nil) + + /** The current tree applied to given type argument list: `tree[targs(0), ..., targs(targs.length - 1)]` */ + def appliedToTypeTrees(targs: List[Tree])(using Context): Tree = + if targs.isEmpty then tree else TypeApply(tree, targs) + + /** Apply to `()` unless tree's widened type is parameterless */ + def ensureApplied(using Context): Tree = + if (tree.tpe.widen.isParameterless) tree else tree.appliedToNone + + /** `tree == that` */ + def equal(that: Tree)(using Context): Tree = + if (that.tpe.widen.isRef(defn.NothingClass)) + Literal(Constant(false)) + else + applyOverloaded(tree, nme.EQ, that :: Nil, Nil, defn.BooleanType) + + /** `tree.isInstanceOf[tp]`, with special treatment of singleton types */ + def isInstance(tp: Type)(using Context): Tree = tp.dealias match { + case ConstantType(c) if c.tag == StringTag => + singleton(tp).equal(tree) + case tp: SingletonType => + if tp.widen.derivesFrom(defn.ObjectClass) then + tree.ensureConforms(defn.ObjectType).select(defn.Object_eq).appliedTo(singleton(tp)) + else + singleton(tp).equal(tree) + case _ => + tree.select(defn.Any_isInstanceOf).appliedToType(tp) + } + + /** tree.asInstanceOf[`tp`] */ + def asInstance(tp: Type)(using Context): Tree = { 
+ assert(tp.isValueType, i"bad cast: $tree.asInstanceOf[$tp]") + tree.select(defn.Any_asInstanceOf).appliedToType(tp) + } + + /** cast tree to `tp`, assuming no exception is raised, i.e the operation is pure */ + def cast(tp: Type)(using Context): Tree = cast(TypeTree(tp)) + + /** cast tree to `tp`, assuming no exception is raised, i.e the operation is pure */ + def cast(tpt: TypeTree)(using Context): Tree = + assert(tpt.tpe.isValueType, i"bad cast: $tree.asInstanceOf[$tpt]") + tree.select(if (ctx.erasedTypes) defn.Any_asInstanceOf else defn.Any_typeCast) + .appliedToTypeTree(tpt) + + /** cast `tree` to `tp` (or its box/unbox/cast equivalent when after + * erasure and value and non-value types are mixed), + * unless tree's type already conforms to `tp`. + */ + def ensureConforms(tp: Type)(using Context): Tree = + if (tree.tpe <:< tp) tree + else if (!ctx.erasedTypes) cast(tp) + else Erasure.Boxing.adaptToType(tree, tp) + + /** `tree ne null` (might need a cast to be type correct) */ + def testNotNull(using Context): Tree = { + // If the receiver is of type `Nothing` or `Null`, add an ascription or cast + // so that the selection succeeds. + // e.g. `null.ne(null)` doesn't type, but `(null: AnyRef).ne(null)` does. + val receiver = + if tree.tpe.isBottomType then + if ctx.explicitNulls then tree.cast(defn.AnyRefType) + else Typed(tree, TypeTree(defn.AnyRefType)) + else tree.ensureConforms(defn.ObjectType) + // also need to cast the null literal to AnyRef in explicit nulls + val nullLit = if ctx.explicitNulls then nullLiteral.cast(defn.AnyRefType) else nullLiteral + receiver.select(defn.Object_ne).appliedTo(nullLit).withSpan(tree.span) + } + + /** If inititializer tree is `_`, the default value of its type, + * otherwise the tree itself. 
+ */ + def wildcardToDefault(using Context): Tree = + if (isWildcardArg(tree)) defaultValue(tree.tpe) else tree + + /** `this && that`, for boolean trees `this`, `that` */ + def and(that: Tree)(using Context): Tree = + tree.select(defn.Boolean_&&).appliedTo(that) + + /** `this || that`, for boolean trees `this`, `that` */ + def or(that: Tree)(using Context): Tree = + tree.select(defn.Boolean_||).appliedTo(that) + + /** The translation of `tree = rhs`. + * This is either the tree as an assignment, or a setter call. + */ + def becomes(rhs: Tree)(using Context): Tree = { + val sym = tree.symbol + if (sym.is(Method)) { + val setter = sym.setter.orElse { + assert(sym.name.isSetterName && sym.info.firstParamTypes.nonEmpty, sym) + sym + } + val qual = tree match { + case id: Ident => desugarIdentPrefix(id) + case Select(qual, _) => qual + } + qual.select(setter).appliedTo(rhs) + } + else Assign(tree, rhs) + } + + /** tree @annot + * + * works differently for type trees and term trees + */ + def annotated(annot: Tree)(using Context): Tree = + if (tree.isTerm) + Typed(tree, TypeTree(AnnotatedType(tree.tpe.widenIfUnstable, Annotation(annot)))) + else + Annotated(tree, annot) + + /** A synthetic select with that will be turned into an outer path by ExplicitOuter. + * @param levels How many outer levels to select + * @param tp The type of the destination of the outer path. 
+ */ + def outerSelect(levels: Int, tp: Type)(using Context): Tree = + untpd.Select(tree, OuterSelectName(EmptyTermName, levels)).withType(SkolemType(tp)) + + /** Replace Inlined nodes and InlineProxy references to underlying arguments */ + def underlyingArgument(using Context): Tree = { + val mapToUnderlying = new MapToUnderlying { + /** Should get the rhs of this binding + * Returns true if the symbol is a val or def generated by eta-expansion/inline + */ + override protected def skipLocal(sym: Symbol): Boolean = + sym.isOneOf(InlineProxy | Synthetic) + } + mapToUnderlying.transform(tree) + } + + /** Replace Ident nodes references to the underlying tree that defined them */ + def underlying(using Context): Tree = MapToUnderlying().transform(tree) + + // --- Higher order traversal methods ------------------------------- + + /** Apply `f` to each subtree of this tree */ + def foreachSubTree(f: Tree => Unit)(using Context): Unit = { + val traverser = new TreeTraverser { + def traverse(tree: Tree)(using Context) = foldOver(f(tree), tree) + } + traverser.traverse(tree) + } + + /** Is there a subtree of this tree that satisfies predicate `p`? */ + def existsSubTree(p: Tree => Boolean)(using Context): Boolean = { + val acc = new TreeAccumulator[Boolean] { + def apply(x: Boolean, t: Tree)(using Context) = x || p(t) || foldOver(x, t) + } + acc(false, tree) + } + + /** All subtrees of this tree that satisfy predicate `p`. 
*/ + def filterSubTrees(f: Tree => Boolean)(using Context): List[Tree] = { + val buf = mutable.ListBuffer[Tree]() + foreachSubTree { tree => if (f(tree)) buf += tree } + buf.toList + } + + /** Set this tree as the `defTree` of its symbol and return this tree */ + def setDefTree(using Context): ThisTree = { + val sym = tree.symbol + if (sym.exists) sym.defTree = tree + tree + } + + def etaExpandCFT(using Context): Tree = + def expand(target: Tree, tp: Type)(using Context): Tree = tp match + case defn.ContextFunctionType(argTypes, resType, isErased) => + val anonFun = newAnonFun( + ctx.owner, + MethodType.companion(isContextual = true, isErased = isErased)(argTypes, resType), + coord = ctx.owner.coord) + def lambdaBody(refss: List[List[Tree]]) = + expand(target.select(nme.apply).appliedToArgss(refss), resType)( + using ctx.withOwner(anonFun)) + Closure(anonFun, lambdaBody) + case _ => + target + expand(tree, tree.tpe.widen) + } + + inline val MapRecursionLimit = 10 + + extension (trees: List[Tree]) + + /** A map that expands to a recursive function. It's equivalent to + * + * flatten(trees.mapConserve(op)) + * + * and falls back to it after `MaxRecursionLimit` recursions. + * Before that it uses a simpler method that uses stackspace + * instead of heap. + * Note `op` is duplicated in the generated code, so it should be + * kept small. 
+ */ + inline def mapInline(inline op: Tree => Tree): List[Tree] = + def recur(trees: List[Tree], count: Int): List[Tree] = + if count > MapRecursionLimit then + // use a slower implementation that avoids stack overflows + flatten(trees.mapConserve(op)) + else trees match + case tree :: rest => + val tree1 = op(tree) + val rest1 = recur(rest, count + 1) + if (tree1 eq tree) && (rest1 eq rest) then trees + else tree1 match + case Thicket(elems1) => elems1 ::: rest1 + case _ => tree1 :: rest1 + case nil => nil + recur(trees, 0) + + /** Transform statements while maintaining import contexts and expression contexts + * in the same way as Typer does. The code addresses additional concerns: + * - be tail-recursive where possible + * - don't re-allocate trees where nothing has changed + */ + inline def mapStatements[T]( + exprOwner: Symbol, + inline op: Tree => Context ?=> Tree, + inline wrapResult: List[Tree] => Context ?=> T)(using Context): T = + @tailrec + def loop(mapped: mutable.ListBuffer[Tree] | Null, unchanged: List[Tree], pending: List[Tree])(using Context): T = + pending match + case stat :: rest => + val statCtx = stat match + case _: DefTree | _: ImportOrExport => ctx + case _ => ctx.exprContext(stat, exprOwner) + val stat1 = op(stat)(using statCtx) + val restCtx = stat match + case stat: Import => ctx.importContext(stat, stat.symbol) + case _ => ctx + if stat1 eq stat then + loop(mapped, unchanged, rest)(using restCtx) + else + val buf = if mapped == null then new mutable.ListBuffer[Tree] else mapped + var xc = unchanged + while xc ne pending do + buf += xc.head + xc = xc.tail + stat1 match + case Thicket(stats1) => buf ++= stats1 + case _ => buf += stat1 + loop(buf, rest, rest)(using restCtx) + case nil => + wrapResult( + if mapped == null then unchanged + else mapped.prependToList(unchanged)) + + loop(null, trees, trees) + end mapStatements + end extension + + /** A treemap that generates the same contexts as the original typer for statements. 
+ * This means: + * - statements that are not definitions get the exprOwner as owner + * - imports are reflected in the contexts of subsequent statements + */ + class TreeMapWithPreciseStatContexts(cpy: TreeCopier = tpd.cpy) extends TreeMap(cpy): + def transformStats[T](trees: List[Tree], exprOwner: Symbol, wrapResult: List[Tree] => Context ?=> T)(using Context): T = + trees.mapStatements(exprOwner, transform(_), wrapResult) + final override def transformStats(trees: List[Tree], exprOwner: Symbol)(using Context): List[Tree] = + transformStats(trees, exprOwner, sameStats) + override def transformBlock(blk: Block)(using Context) = + transformStats(blk.stats, ctx.owner, + stats1 => ctx ?=> cpy.Block(blk)(stats1, transform(blk.expr))) + + val sameStats: List[Tree] => Context ?=> List[Tree] = stats => stats + + /** Map Inlined nodes, NamedArgs, Blocks with no statements and local references to underlying arguments. + * Also drops Inline and Block with no statements. + */ + private class MapToUnderlying extends TreeMap { + override def transform(tree: Tree)(using Context): Tree = tree match { + case tree: Ident if isBinding(tree.symbol) && skipLocal(tree.symbol) => + tree.symbol.defTree match { + case defTree: ValOrDefDef => + val rhs = defTree.rhs + assert(!rhs.isEmpty) + transform(rhs) + case _ => tree + } + case Inlined(_, Nil, arg) => transform(arg) + case Block(Nil, arg) => transform(arg) + case NamedArg(_, arg) => transform(arg) + case tree => super.transform(tree) + } + + /** Should get the rhs of this binding */ + protected def skipLocal(sym: Symbol): Boolean = true + + /** Is this a symbol that of a local val or parameterless def for which we could get the rhs */ + private def isBinding(sym: Symbol)(using Context): Boolean = + sym.isTerm && !sym.is(Param) && !sym.owner.isClass && + !(sym.is(Method) && sym.info.isInstanceOf[MethodOrPoly]) // if is a method it is parameterless + } + + extension (xs: List[tpd.Tree]) + def tpes: List[Type] = xs match { + case x :: 
xs1 => x.tpe :: xs1.tpes + case nil => Nil + } + + /** A trait for loaders that compute trees. Currently implemented just by DottyUnpickler. */ + trait TreeProvider { + protected def computeRootTrees(using Context): List[Tree] + + private var myTrees: List[Tree] | Null = _ + + /** Get trees defined by this provider. Cache them if -Yretain-trees is set. */ + def rootTrees(using Context): List[Tree] = + if (ctx.settings.YretainTrees.value) { + if (myTrees == null) myTrees = computeRootTrees + myTrees.uncheckedNN + } + else computeRootTrees + + /** Get first tree defined by this provider, or EmptyTree if none exists */ + def tree(using Context): Tree = + rootTrees.headOption.getOrElse(EmptyTree) + + /** Is it possible that the tree to load contains a definition of or reference to `id`? */ + def mightContain(id: String)(using Context): Boolean = true + } + + // convert a numeric with a toXXX method + def primitiveConversion(tree: Tree, numericCls: Symbol)(using Context): Tree = { + val mname = "to".concat(numericCls.name) + val conversion = tree.tpe member(mname) + if (conversion.symbol.exists) + tree.select(conversion.symbol.termRef).ensureApplied + else if (tree.tpe.widen isRef numericCls) + tree + else { + report.warning(i"conversion from ${tree.tpe.widen} to ${numericCls.typeRef} will always fail at runtime.") + Throw(New(defn.ClassCastExceptionClass.typeRef, Nil)).withSpan(tree.span) + } + } + + /** A tree that corresponds to `Predef.classOf[$tp]` in source */ + def clsOf(tp: Type)(using Context): Tree = + if ctx.erasedTypes && !tp.isRef(defn.UnitClass) then + Literal(Constant(TypeErasure.erasure(tp))) + else + Literal(Constant(tp)) + + @tailrec + def sameTypes(trees: List[tpd.Tree], trees1: List[tpd.Tree]): Boolean = + if (trees.isEmpty) trees1.isEmpty + else if (trees1.isEmpty) trees.isEmpty + else (trees.head.tpe eq trees1.head.tpe) && sameTypes(trees.tail, trees1.tail) + + /** If `tree`'s purity level is less than `level`, let-bind it so that it gets evaluated
+ * only once. I.e. produce a + * + * { val x = 'tree ; ~within('x) } + * + * instead of otherwise + * + * ~within('tree) + */ + def letBindUnless(level: TreeInfo.PurityLevel, tree: Tree)(within: Tree => Tree)(using Context): Tree = + if (exprPurity(tree) >= level) within(tree) + else { + val vdef = SyntheticValDef(TempResultName.fresh(), tree) + Block(vdef :: Nil, within(Ident(vdef.namedType))) + } + + /** Let bind `tree` unless `tree` is at least idempotent */ + def evalOnce(tree: Tree)(within: Tree => Tree)(using Context): Tree = + letBindUnless(TreeInfo.Idempotent, tree)(within) + + def runtimeCall(name: TermName, args: List[Tree])(using Context): Tree = + Ident(defn.ScalaRuntimeModule.requiredMethod(name).termRef).appliedToTermArgs(args) + + /** An extractor that pulls out type arguments */ + object MaybePoly: + def unapply(tree: Tree): Option[(Tree, List[Tree])] = tree match + case TypeApply(tree, targs) => Some(tree, targs) + case _ => Some(tree, Nil) + + object TypeArgs: + def unapply(ts: List[Tree]): Option[List[Tree]] = + if ts.nonEmpty && ts.head.isType then Some(ts) else None + + /** Split argument clauses into a leading type argument clause if it exists and + * remaining clauses + */ + def splitArgs(argss: List[List[Tree]]): (List[Tree], List[List[Tree]]) = argss match + case TypeArgs(targs) :: argss1 => (targs, argss1) + case _ => (Nil, argss) + + def joinArgs(targs: List[Tree], argss: List[List[Tree]]): List[List[Tree]] = + if targs.isEmpty then argss else targs :: argss + + /** A key to be used in a context property that tracks enclosing inlined calls */ + private val InlinedCalls = Property.Key[List[Tree]]() + + /** A key to be used in a context property that tracks the number of inlined trees */ + private val InlinedTrees = Property.Key[Counter]() + final class Counter { + var count: Int = 0 + } + + /** Record an enclosing inlined call. + * EmptyTree calls (for parameters) cancel the next-enclosing call in the list instead of being added to it. 
+ * We assume parameters are never nested inside parameters. + */ + override def inlineContext(call: Tree)(using Context): Context = { + // We assume enclosingInlineds is already normalized, and only process the new call with the head. + val oldIC = enclosingInlineds + + val newIC = + if call.isEmpty then + oldIC match + case t1 :: ts2 => ts2 + case _ => oldIC + else + call :: oldIC + + val ctx1 = ctx.fresh.setProperty(InlinedCalls, newIC) + if oldIC.isEmpty then ctx1.setProperty(InlinedTrees, new Counter) else ctx1 + } + + /** All enclosing calls that are currently inlined, from innermost to outermost. + */ + def enclosingInlineds(using Context): List[Tree] = + ctx.property(InlinedCalls).getOrElse(Nil) + + /** Record inlined trees */ + def addInlinedTrees(n: Int)(using Context): Unit = + ctx.property(InlinedTrees).foreach(_.count += n) + + /** Check if the limit on the number of inlined trees has been reached */ + def reachedInlinedTreesLimit(using Context): Boolean = + ctx.property(InlinedTrees) match + case Some(c) => c.count > ctx.settings.XmaxInlinedTrees.value + case None => false + + /** The source file where the symbol of the `inline` method referred to by `call` + * is defined + */ + def sourceFile(call: Tree)(using Context): SourceFile = call.symbol.source + + /** Desugar identifier into a select node. Return the tree itself if not possible */ + def desugarIdent(tree: Ident)(using Context): RefTree = { + val qual = desugarIdentPrefix(tree) + if (qual.isEmpty) tree + else qual.select(tree.symbol) + } + + /** Recover identifier prefix (e.g. 
this) if it exists */ + def desugarIdentPrefix(tree: Ident)(using Context): Tree = tree.tpe match { + case TermRef(prefix: TermRef, _) => + prefix.info match + case mt: MethodType if mt.paramInfos.isEmpty && mt.resultType.typeSymbol.is(Module) => + ref(mt.resultType.typeSymbol.sourceModule) + case _ => + ref(prefix) + case TermRef(prefix: ThisType, _) => + This(prefix.cls) + case _ => + EmptyTree + } + + /** + * The symbols that are imported with `expr.name` + * + * @param expr The base of the import statement + * @param name The name that is being imported. + * @return All the symbols that would be imported with `expr.name`. + */ + def importedSymbols(expr: Tree, name: Name)(using Context): List[Symbol] = { + def lookup(name: Name): Symbol = expr.tpe.member(name).symbol + val symbols = + List(lookup(name.toTermName), + lookup(name.toTypeName), + lookup(name.moduleClassName), + lookup(name.sourceModuleName)) + + symbols.map(_.sourceSymbol).filter(_.exists).distinct + } + + /** + * All the symbols that are imported by the first selector of `imp` that matches + * `selectorPredicate`. + * + * @param imp The import statement to analyze + * @param selectorPredicate A test to find the selector to use. + * @return The symbols imported. + */ + def importedSymbols(imp: Import, + selectorPredicate: untpd.ImportSelector -> Boolean = util.common.alwaysTrue) + (using Context): List[Symbol] = + imp.selectors.find(selectorPredicate) match + case Some(sel) => importedSymbols(imp.expr, sel.name) + case _ => Nil + + /** + * The list of select trees that resolve to the same symbols as the ones that are imported + * by `imp`. + */ + def importSelections(imp: Import)(using Context): List[Select] = { + def imported(sym: Symbol, id: untpd.Ident, rename: Option[untpd.Ident]): List[Select] = { + // Give a zero-extent position to the qualifier to prevent it from being included several + // times in results in the language server. 
+ val noPosExpr = focusPositions(imp.expr)
+ val selectTree = Select(noPosExpr, sym.name).withSpan(id.span)
+ rename match {
+ case None =>
+ selectTree :: Nil
+ case Some(rename) =>
+ // Get the type of the symbol that is actually selected, and construct a select
+ // node with the new name and the type of the real symbol.
+ val name = if (sym.name.isTypeName) rename.name.toTypeName else rename.name
+ val actual = Select(noPosExpr, sym.name)
+ val renameTree = Select(noPosExpr, name).withSpan(rename.span).withType(actual.tpe)
+ selectTree :: renameTree :: Nil
+ }
+ }
+
+ imp.selectors.flatMap { sel =>
+ if sel.isWildcard then Nil
+ else
+ val renamedOpt = sel.renamed match
+ case renamed: untpd.Ident => Some(renamed)
+ case untpd.EmptyTree => None
+ importedSymbols(imp.expr, sel.name).flatMap { sym =>
+ imported(sym, sel.imported, renamedOpt)
+ }
+ }
+ }
+
+ /** Creates the tuple type tree representation of the type trees in `elems` */
+ def tupleTypeTree(elems: List[Tree])(using Context): Tree = {
+ val arity = elems.length
+ if arity <= Definitions.MaxTupleArity then
+ val tupleTp = defn.TupleType(arity)
+ if tupleTp != null then
+ AppliedTypeTree(TypeTree(tupleTp), elems)
+ else nestedPairsTypeTree(elems)
+ else nestedPairsTypeTree(elems)
+ }
+
+ /** Creates the nested pairs type tree representation of the type trees in `ts` */
+ def nestedPairsTypeTree(ts: List[Tree])(using Context): Tree =
+ ts.foldRight[Tree](TypeTree(defn.EmptyTupleModule.termRef))((x, acc) => AppliedTypeTree(TypeTree(defn.PairClass.typeRef), x :: acc :: Nil))
+
+ /** Replaces all positions in `tree` with zero-extent positions */
+ private def focusPositions(tree: Tree)(using Context): Tree = {
+ val transformer = new tpd.TreeMap {
+ override def transform(tree: Tree)(using Context): Tree =
+ super.transform(tree).withSpan(tree.span.focus)
+ }
+ transformer.transform(tree)
+ }
+
+ /** Convert a list of trees to a vararg-compatible tree.
+ * Used to make arguments for methods that accept varargs. 
+ */ + def repeated(trees: List[Tree], tpt: Tree)(using Context): Tree = + ctx.typeAssigner.arrayToRepeated(JavaSeqLiteral(trees, tpt)) + + /** Create a tree representing a list containing all + * the elements of the argument list. A "list of tree to + * tree of list" conversion. + * + * @param trees the elements the list represented by + * the resulting tree should contain. + * @param tpe the type of the elements of the resulting list. + * + */ + def mkList(trees: List[Tree], tpt: Tree)(using Context): Tree = + ref(defn.ListModule).select(nme.apply) + .appliedToTypeTree(tpt) + .appliedToVarargs(trees, tpt) + + + protected def FunProto(args: List[Tree], resType: Type)(using Context) = + ProtoTypes.FunProtoTyped(args, resType)(ctx.typer, ApplyKind.Regular) +} diff --git a/tests/pos-with-compiler-cc/dotc/ast/untpd.scala b/tests/pos-with-compiler-cc/dotc/ast/untpd.scala new file mode 100644 index 000000000000..eb729d33a091 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/untpd.scala @@ -0,0 +1,819 @@ +package dotty.tools +package dotc +package ast + +import core._ +import Types._, Contexts._, Constants._, Names._, Flags._ +import dotty.tools.dotc.typer.ProtoTypes +import Symbols._, StdNames._, Trees._ +import util.{Property, SourceFile, NoSource} +import util.Spans.Span +import annotation.constructorOnly +import annotation.internal.sharable +import Decorators._ +import annotation.retains +import language.experimental.pureFunctions + +object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { + + // ----- Tree cases that exist in untyped form only ------------------ + + abstract class OpTree(implicit @constructorOnly src: SourceFile) extends Tree { + def op: Ident + override def isTerm: Boolean = op.isTerm + override def isType: Boolean = op.isType + } + + /** A typed subtree of an untyped tree needs to be wrapped in a TypedSplice + * @param owner The current owner at the time the tree was defined + * @param isExtensionReceiver The splice was created 
from the receiver `e` in an extension + * method call `e.f(...)` + */ + abstract case class TypedSplice(splice: tpd.Tree)(val owner: Symbol, val isExtensionReceiver: Boolean)(implicit @constructorOnly src: SourceFile) extends ProxyTree { + def forwardTo: tpd.Tree = splice + override def toString = + def ext = if isExtensionReceiver then ", isExtensionReceiver = true" else "" + s"TypedSplice($splice$ext)" + } + + object TypedSplice { + def apply(tree: tpd.Tree, isExtensionReceiver: Boolean = false)(using Context): TypedSplice = + new TypedSplice(tree)(ctx.owner, isExtensionReceiver) {} + } + + /** mods object name impl */ + case class ModuleDef(name: TermName, impl: Template)(implicit @constructorOnly src: SourceFile) + extends MemberDef { + type ThisTree[-T >: Untyped] <: Trees.NameTree[T] with Trees.MemberDef[T] with ModuleDef + def withName(name: Name)(using Context): ModuleDef = cpy.ModuleDef(this)(name.toTermName, impl) + } + + /** An untyped template with a derives clause. Derived parents are added to the end + * of the `parents` list. `derivedCount` keeps track of how many there are. 
+ * This representation was chosen because it balances two concerns: + * - maximize overlap between DerivingTemplate and Template for code streamlining + * - keep invariant that elements of untyped trees align with source positions + */ + class DerivingTemplate(constr: DefDef, parentsOrDerived: List[Tree], self: ValDef, preBody: LazyTreeList, derivedCount: Int)(implicit @constructorOnly src: SourceFile) + extends Template(constr, parentsOrDerived, self, preBody) { + override val parents = parentsOrDerived.dropRight(derivedCount) + override val derived = parentsOrDerived.takeRight(derivedCount) + } + + case class ParsedTry(expr: Tree, handler: Tree, finalizer: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree + + case class SymbolLit(str: String)(implicit @constructorOnly src: SourceFile) extends TermTree + + /** An interpolated string + * @param segments a list of two element tickets consisting of string literal and argument tree, + * possibly with a simple string literal as last element of the list + */ + case class InterpolatedString(id: TermName, segments: List[Tree])(implicit @constructorOnly src: SourceFile) + extends TermTree + + /** A function type or closure */ + case class Function(args: List[Tree], body: Tree)(implicit @constructorOnly src: SourceFile) extends Tree { + override def isTerm: Boolean = body.isTerm + override def isType: Boolean = body.isType + } + + /** A function type or closure with `implicit`, `erased`, or `given` modifiers */ + class FunctionWithMods(args: List[Tree], body: Tree, val mods: Modifiers)(implicit @constructorOnly src: SourceFile) + extends Function(args, body) + + /** A polymorphic function type */ + case class PolyFunction(targs: List[Tree], body: Tree)(implicit @constructorOnly src: SourceFile) extends Tree { + override def isTerm = body.isTerm + override def isType = body.isType + } + + /** A function created from a wildcard expression + * @param placeholderParams a list of definitions of synthetic 
parameters. + * @param body the function body where wildcards are replaced by + * references to synthetic parameters. + * This is equivalent to Function, except that forms a special case for the overlapping + * positions tests. + */ + class WildcardFunction(placeholderParams: List[ValDef], body: Tree)(implicit @constructorOnly src: SourceFile) + extends Function(placeholderParams, body) + + case class InfixOp(left: Tree, op: Ident, right: Tree)(implicit @constructorOnly src: SourceFile) extends OpTree + case class PostfixOp(od: Tree, op: Ident)(implicit @constructorOnly src: SourceFile) extends OpTree + case class PrefixOp(op: Ident, od: Tree)(implicit @constructorOnly src: SourceFile) extends OpTree + case class Parens(t: Tree)(implicit @constructorOnly src: SourceFile) extends ProxyTree { + def forwardTo: Tree = t + } + case class Tuple(trees: List[Tree])(implicit @constructorOnly src: SourceFile) extends Tree { + override def isTerm: Boolean = trees.isEmpty || trees.head.isTerm + override def isType: Boolean = !isTerm + } + case class Throw(expr: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree + case class Quote(quoted: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree + case class Splice(expr: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree { + def isInBraces: Boolean = span.end != expr.span.end + } + case class ForYield(enums: List[Tree], expr: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree + case class ForDo(enums: List[Tree], body: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree + case class GenFrom(pat: Tree, expr: Tree, checkMode: GenCheckMode)(implicit @constructorOnly src: SourceFile) extends Tree + case class GenAlias(pat: Tree, expr: Tree)(implicit @constructorOnly src: SourceFile) extends Tree + case class ContextBounds(bounds: TypeBoundsTree, cxBounds: List[Tree])(implicit @constructorOnly src: SourceFile) extends TypTree + case class PatDef(mods: Modifiers, 
pats: List[Tree], tpt: Tree, rhs: Tree)(implicit @constructorOnly src: SourceFile) extends DefTree + case class ExtMethods(paramss: List[ParamClause], methods: List[Tree])(implicit @constructorOnly src: SourceFile) extends Tree + case class MacroTree(expr: Tree)(implicit @constructorOnly src: SourceFile) extends Tree + + case class ImportSelector(imported: Ident, renamed: Tree = EmptyTree, bound: Tree = EmptyTree)(implicit @constructorOnly src: SourceFile) extends Tree { + // TODO: Make bound a typed tree? + + /** It's a `given` selector */ + val isGiven: Boolean = imported.name.isEmpty + + /** It's a `given` or `_` selector */ + val isWildcard: Boolean = isGiven || imported.name == nme.WILDCARD + + /** The imported name, EmptyTermName if it's a given selector */ + val name: TermName = imported.name.asInstanceOf[TermName] + + /** The renamed part (which might be `_`), if present, or `name`, if missing */ + val rename: TermName = renamed match + case Ident(rename: TermName) => rename + case _ => name + } + + case class Number(digits: String, kind: NumberKind)(implicit @constructorOnly src: SourceFile) extends TermTree + + enum NumberKind { + case Whole(radix: Int) + case Decimal + case Floating + } + + /** {x1, ..., xN} T (only relevant under captureChecking) */ + case class CapturingTypeTree(refs: List[Tree], parent: Tree)(implicit @constructorOnly src: SourceFile) extends TypTree + + /** Short-lived usage in typer, does not need copy/transform/fold infrastructure */ + case class DependentTypeTree(tp: List[Symbol] -> Type)(implicit @constructorOnly src: SourceFile) extends Tree + + @sharable object EmptyTypeIdent extends Ident(tpnme.EMPTY)(NoSource) with WithoutTypeOrPos[Untyped] { + override def isEmpty: Boolean = true + } + + def WildcardTypeBoundsTree()(using src: SourceFile): TypeBoundsTree = TypeBoundsTree(EmptyTree, EmptyTree, EmptyTree) + object WildcardTypeBoundsTree: + def unapply(tree: untpd.Tree): Boolean = tree match + case TypeBoundsTree(EmptyTree, 
EmptyTree, _) => true + case _ => false + + + /** A block generated by the XML parser, only treated specially by + * `Positioned#checkPos` */ + class XMLBlock(stats: List[Tree], expr: Tree)(implicit @constructorOnly src: SourceFile) extends Block(stats, expr) + + /** An enum to control checking or filtering of patterns in GenFrom trees */ + enum GenCheckMode { + case Ignore // neither filter nor check since filtering was done before + case Check // check that pattern is irrefutable + case CheckAndFilter // both check and filter (transitional period starting with 3.2) + case FilterNow // filter out non-matching elements if we are not in 3.2 or later + case FilterAlways // filter out non-matching elements since pattern is prefixed by `case` + } + + // ----- Modifiers ----------------------------------------------------- + /** Mod is intended to record syntactic information about modifiers, it's + * NOT a replacement of FlagSet. + * + * For any query about semantic information, check `flags` instead. 
+ */ + sealed abstract class Mod(val flags: FlagSet)(implicit @constructorOnly src: SourceFile) + extends Positioned + + object Mod { + case class Private()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Private) + + case class Protected()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Protected) + + case class Var()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Mutable) + + case class Implicit()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Implicit) + + case class Given()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Given) + + case class Erased()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Erased) + + case class Final()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Final) + + case class Sealed()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Sealed) + + case class Opaque()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Opaque) + + case class Open()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Open) + + case class Override()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Override) + + case class Abstract()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Abstract) + + case class Lazy()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Lazy) + + case class Inline()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Inline) + + case class Transparent()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Transparent) + + case class Infix()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Infix) + + /** Used under pureFunctions to mark impure function types `A => B` in `FunctionWithMods` */ + case class Impure()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Impure) + } + + /** Modifiers and annotations for definitions + * + * @param flags The set flags + * @param privateWithin If a private or protected has is followed by a + * qualifier 
[q], the name q, "" as a typename otherwise.
+ * @param annotations The annotations preceding the modifiers
+ */
+ case class Modifiers (
+ flags: FlagSet = EmptyFlags,
+ privateWithin: TypeName = tpnme.EMPTY,
+ annotations: List[Tree] = Nil,
+ mods: List[Mod] = Nil) {
+
+ def is(flag: Flag): Boolean = flags.is(flag)
+ def is(flag: Flag, butNot: FlagSet): Boolean = flags.is(flag, butNot = butNot)
+ def isOneOf(fs: FlagSet): Boolean = flags.isOneOf(fs)
+ def isOneOf(fs: FlagSet, butNot: FlagSet): Boolean = flags.isOneOf(fs, butNot = butNot)
+ def isAllOf(fc: FlagSet): Boolean = flags.isAllOf(fc)
+
+ def | (fs: FlagSet): Modifiers = withFlags(flags | fs)
+ def & (fs: FlagSet): Modifiers = withFlags(flags & fs)
+ def &~(fs: FlagSet): Modifiers = withFlags(flags &~ fs)
+
+ def toTypeFlags: Modifiers = withFlags(flags.toTypeFlags)
+ def toTermFlags: Modifiers = withFlags(flags.toTermFlags)
+
+ def withFlags(flags: FlagSet): Modifiers =
+ if (this.flags == flags) this
+ else copy(flags = flags)
+
+ def withoutFlags(flags: FlagSet): Modifiers =
+ if (this.isOneOf(flags))
+ Modifiers(this.flags &~ flags, this.privateWithin, this.annotations, this.mods.filterNot(_.flags.isOneOf(flags)))
+ else this
+
+ def withAddedMod(mod: Mod): Modifiers =
+ if (mods.exists(_ eq mod)) this
+ else withMods(mods :+ mod)
+
+ private def compatible(flags1: FlagSet, flags2: FlagSet): Boolean =
+ flags1.isEmpty || flags2.isEmpty
+ || flags1.isTermFlags && flags2.isTermFlags
+ || flags1.isTypeFlags && flags2.isTypeFlags
+
+ /** Add `flags` to this modifier set, checking that there are no type/term conflicts.
+ * If there are conflicts, issue an error and return the modifiers consisting of
+ * the added flags only. The reason to do it this way is that the added flags usually
+ * describe the core of a construct whereas the existing set are the modifiers
+ * given in the source. 
+ */ + def withAddedFlags(flags: FlagSet, span: Span)(using Context): Modifiers = + if this.flags.isAllOf(flags) then this + else if compatible(this.flags, flags) then this | flags + else + val what = if flags.isTermFlags then "values" else "types" + report.error(em"${(flags & ModifierFlags).flagsString} $what cannot be ${this.flags.flagsString}", ctx.source.atSpan(span)) + Modifiers(flags) + + /** Modifiers with given list of Mods. It is checked that + * all modifiers are already accounted for in `flags` and `privateWithin`. + */ + def withMods(ms: List[Mod]): Modifiers = + if (mods eq ms) this + else { + if (ms.nonEmpty) + for (m <- ms) + assert(flags.isAllOf(m.flags) + || m.isInstanceOf[Mod.Private] && !privateWithin.isEmpty + || (m.isInstanceOf[Mod.Abstract] || m.isInstanceOf[Mod.Override]) && flags.is(AbsOverride), + s"unaccounted modifier: $m in $this with flags ${flags.flagsString} when adding $ms") + copy(mods = ms) + } + + def withAddedAnnotation(annot: Tree): Modifiers = + if (annotations.exists(_ eq annot)) this + else withAnnotations(annotations :+ annot) + + def withAnnotations(annots: List[Tree]): Modifiers = + if (annots eq annotations) this + else copy(annotations = annots) + + def withPrivateWithin(pw: TypeName): Modifiers = + if (pw.isEmpty) this + else copy(privateWithin = pw) + + def hasFlags: Boolean = flags != EmptyFlags + def hasAnnotations: Boolean = annotations.nonEmpty + def hasPrivateWithin: Boolean = privateWithin != tpnme.EMPTY + def hasMod(cls: Class[?]) = mods.exists(_.getClass == cls) + + private def isEnum = is(Enum, butNot = JavaDefined) + + def isEnumCase: Boolean = isEnum && is(Case) + def isEnumClass: Boolean = isEnum && !is(Case) + } + + @sharable val EmptyModifiers: Modifiers = Modifiers() + + // ----- TypeTrees that refer to other tree's symbols ------------------- + + /** A type tree that gets its type from some other tree's symbol. Enters the + * type tree in the References attachment of the `from` tree as a side effect. 
+ */ + abstract class DerivedTypeTree(implicit @constructorOnly src: SourceFile) extends TypeTree { + + private var myWatched: Tree = EmptyTree + + /** The watched tree; used only for printing */ + def watched: Tree = myWatched + + /** Install the derived type tree as a dependency on `original` */ + def watching(original: DefTree): this.type = { + myWatched = original + val existing = original.attachmentOrElse(References, Nil) + original.putAttachment(References, this :: existing) + this + } + + /** Install the derived type tree as a dependency on `sym` */ + def watching(sym: Symbol): this.type = withAttachment(OriginalSymbol, sym) + + /** A hook to ensure that all necessary symbols are completed so that + * OriginalSymbol attachments are propagated to this tree + */ + def ensureCompletions(using Context): Unit = () + + /** The method that computes the tree with the derived type */ + def derivedTree(originalSym: Symbol)(using Context): tpd.Tree + } + + /** Property key containing TypeTrees whose type is computed + * from the symbol in this type. These type trees have marker trees + * TypeRefOfSym or InfoOfSym as their originals. + */ + val References: Property.Key[List[DerivedTypeTree]] = Property.Key() + + /** Property key for TypeTrees marked with TypeRefOfSym or InfoOfSym + * which contains the symbol of the original tree from which this + * TypeTree is derived. 
+ */ + val OriginalSymbol: Property.Key[Symbol] = Property.Key() + + /** Property key for contextual Apply trees of the form `fn given arg` */ + val KindOfApply: Property.StickyKey[ApplyKind] = Property.StickyKey() + + // ------ Creation methods for untyped only ----------------- + + def Ident(name: Name)(implicit src: SourceFile): Ident = new Ident(name) + def SearchFailureIdent(name: Name, explanation: -> String)(implicit src: SourceFile): SearchFailureIdent = new SearchFailureIdent(name, explanation) + def Select(qualifier: Tree, name: Name)(implicit src: SourceFile): Select = new Select(qualifier, name) + def SelectWithSig(qualifier: Tree, name: Name, sig: Signature)(implicit src: SourceFile): Select = new SelectWithSig(qualifier, name, sig) + def This(qual: Ident)(implicit src: SourceFile): This = new This(qual) + def Super(qual: Tree, mix: Ident)(implicit src: SourceFile): Super = new Super(qual, mix) + def Apply(fun: Tree, args: List[Tree])(implicit src: SourceFile): Apply = new Apply(fun, args) + def TypeApply(fun: Tree, args: List[Tree])(implicit src: SourceFile): TypeApply = new TypeApply(fun, args) + def Literal(const: Constant)(implicit src: SourceFile): Literal = new Literal(const) + def New(tpt: Tree)(implicit src: SourceFile): New = new New(tpt) + def Typed(expr: Tree, tpt: Tree)(implicit src: SourceFile): Typed = new Typed(expr, tpt) + def NamedArg(name: Name, arg: Tree)(implicit src: SourceFile): NamedArg = new NamedArg(name, arg) + def Assign(lhs: Tree, rhs: Tree)(implicit src: SourceFile): Assign = new Assign(lhs, rhs) + def Block(stats: List[Tree], expr: Tree)(implicit src: SourceFile): Block = new Block(stats, expr) + def If(cond: Tree, thenp: Tree, elsep: Tree)(implicit src: SourceFile): If = new If(cond, thenp, elsep) + def InlineIf(cond: Tree, thenp: Tree, elsep: Tree)(implicit src: SourceFile): If = new InlineIf(cond, thenp, elsep) + def Closure(env: List[Tree], meth: Tree, tpt: Tree)(implicit src: SourceFile): Closure = new Closure(env, 
meth, tpt) + def Match(selector: Tree, cases: List[CaseDef])(implicit src: SourceFile): Match = new Match(selector, cases) + def InlineMatch(selector: Tree, cases: List[CaseDef])(implicit src: SourceFile): Match = new InlineMatch(selector, cases) + def CaseDef(pat: Tree, guard: Tree, body: Tree)(implicit src: SourceFile): CaseDef = new CaseDef(pat, guard, body) + def Labeled(bind: Bind, expr: Tree)(implicit src: SourceFile): Labeled = new Labeled(bind, expr) + def Return(expr: Tree, from: Tree)(implicit src: SourceFile): Return = new Return(expr, from) + def WhileDo(cond: Tree, body: Tree)(implicit src: SourceFile): WhileDo = new WhileDo(cond, body) + def Try(expr: Tree, cases: List[CaseDef], finalizer: Tree)(implicit src: SourceFile): Try = new Try(expr, cases, finalizer) + def SeqLiteral(elems: List[Tree], elemtpt: Tree)(implicit src: SourceFile): SeqLiteral = new SeqLiteral(elems, elemtpt) + def JavaSeqLiteral(elems: List[Tree], elemtpt: Tree)(implicit src: SourceFile): JavaSeqLiteral = new JavaSeqLiteral(elems, elemtpt) + def Inlined(call: tpd.Tree, bindings: List[MemberDef], expansion: Tree)(implicit src: SourceFile): Inlined = new Inlined(call, bindings, expansion) + def TypeTree()(implicit src: SourceFile): TypeTree = new TypeTree() + def InferredTypeTree()(implicit src: SourceFile): TypeTree = new InferredTypeTree() + def SingletonTypeTree(ref: Tree)(implicit src: SourceFile): SingletonTypeTree = new SingletonTypeTree(ref) + def RefinedTypeTree(tpt: Tree, refinements: List[Tree])(implicit src: SourceFile): RefinedTypeTree = new RefinedTypeTree(tpt, refinements) + def AppliedTypeTree(tpt: Tree, args: List[Tree])(implicit src: SourceFile): AppliedTypeTree = new AppliedTypeTree(tpt, args) + def LambdaTypeTree(tparams: List[TypeDef], body: Tree)(implicit src: SourceFile): LambdaTypeTree = new LambdaTypeTree(tparams, body) + def TermLambdaTypeTree(params: List[ValDef], body: Tree)(implicit src: SourceFile): TermLambdaTypeTree = new TermLambdaTypeTree(params, 
body) + def MatchTypeTree(bound: Tree, selector: Tree, cases: List[CaseDef])(implicit src: SourceFile): MatchTypeTree = new MatchTypeTree(bound, selector, cases) + def ByNameTypeTree(result: Tree)(implicit src: SourceFile): ByNameTypeTree = new ByNameTypeTree(result) + def TypeBoundsTree(lo: Tree, hi: Tree, alias: Tree = EmptyTree)(implicit src: SourceFile): TypeBoundsTree = new TypeBoundsTree(lo, hi, alias) + def Bind(name: Name, body: Tree)(implicit src: SourceFile): Bind = new Bind(name, body) + def Alternative(trees: List[Tree])(implicit src: SourceFile): Alternative = new Alternative(trees) + def UnApply(fun: Tree, implicits: List[Tree], patterns: List[Tree])(implicit src: SourceFile): UnApply = new UnApply(fun, implicits, patterns) + def ValDef(name: TermName, tpt: Tree, rhs: LazyTree)(implicit src: SourceFile): ValDef = new ValDef(name, tpt, rhs) + def DefDef(name: TermName, paramss: List[ParamClause], tpt: Tree, rhs: LazyTree)(implicit src: SourceFile): DefDef = new DefDef(name, paramss, tpt, rhs) + def TypeDef(name: TypeName, rhs: Tree)(implicit src: SourceFile): TypeDef = new TypeDef(name, rhs) + def Template(constr: DefDef, parents: List[Tree], derived: List[Tree], self: ValDef, body: LazyTreeList)(implicit src: SourceFile): Template = + if (derived.isEmpty) new Template(constr, parents, self, body) + else new DerivingTemplate(constr, parents ++ derived, self, body, derived.length) + def Import(expr: Tree, selectors: List[ImportSelector])(implicit src: SourceFile): Import = new Import(expr, selectors) + def Export(expr: Tree, selectors: List[ImportSelector])(implicit src: SourceFile): Export = new Export(expr, selectors) + def PackageDef(pid: RefTree, stats: List[Tree])(implicit src: SourceFile): PackageDef = new PackageDef(pid, stats) + def Annotated(arg: Tree, annot: Tree)(implicit src: SourceFile): Annotated = new Annotated(arg, annot) + def Hole(isTermHole: Boolean, idx: Int, args: List[Tree], content: Tree, tpt: Tree)(implicit src: SourceFile): Hole 
= new Hole(isTermHole, idx, args, content, tpt)
+
+ // ------ Additional creation methods for untyped only -----------------
+
+ /** new T(args1)...(args_n)
+ * ==>
+ * new T.[Ts](args1)...(args_n)
+ *
+ * where `Ts` are the class type arguments of `T` or its class type alias.
+ * Note: we also keep any type arguments as parts of `T`. This is necessary to allow
+ * navigation into these arguments from the IDE, and to do the right thing in
+ * PrepareInlineable.
+ */
+ def New(tpt: Tree, argss: List[List[Tree]])(using Context): Tree =
+ ensureApplied(argss.foldLeft(makeNew(tpt))(Apply(_, _)))
+
+ /** A new expression with constructor and possibly type arguments. See
+ * `New(tpt, argss)` for details.
+ */
+ def makeNew(tpt: Tree)(using Context): Tree = {
+ val (tycon, targs) = tpt match {
+ case AppliedTypeTree(tycon, targs) =>
+ (tycon, targs)
+ case TypedSplice(tpt1: tpd.Tree) =>
+ val argTypes = tpt1.tpe.dealias.argTypesLo
+ def wrap(tpe: Type) = TypeTree(tpe).withSpan(tpt.span)
+ (tpt, argTypes.map(wrap))
+ case _ =>
+ (tpt, Nil)
+ }
+ val nu: Tree = Select(New(tycon), nme.CONSTRUCTOR)
+ if (targs.nonEmpty) TypeApply(nu, targs) else nu
+ }
+
+ def Block(stat: Tree, expr: Tree)(implicit src: SourceFile): Block =
+ Block(stat :: Nil, expr)
+
+ def Apply(fn: Tree, arg: Tree)(implicit src: SourceFile): Apply =
+ Apply(fn, arg :: Nil)
+
+ def ensureApplied(tpt: Tree)(implicit src: SourceFile): Tree = tpt match {
+ case _: Apply => tpt
+ case _ => Apply(tpt, Nil)
+ }
+
+ def AppliedTypeTree(tpt: Tree, arg: Tree)(implicit src: SourceFile): AppliedTypeTree =
+ AppliedTypeTree(tpt, arg :: Nil)
+
+ def TypeTree(tpe: Type)(using Context): TypedSplice =
+ TypedSplice(TypeTree().withTypeUnchecked(tpe))
+
+ def InferredTypeTree(tpe: Type)(using Context): TypedSplice =
+ TypedSplice(new InferredTypeTree().withTypeUnchecked(tpe))
+
+ def unitLiteral(implicit src: SourceFile): Literal = Literal(Constant(()))
+
+ def ref(tp: NamedType)(using Context): Tree =
+ 
TypedSplice(tpd.ref(tp)) + + def ref(sym: Symbol)(using Context): Tree = + TypedSplice(tpd.ref(sym)) + + def rawRef(tp: NamedType)(using Context): Tree = + if tp.typeParams.isEmpty then ref(tp) + else AppliedTypeTree(ref(tp), tp.typeParams.map(_ => WildcardTypeBoundsTree())) + + def rootDot(name: Name)(implicit src: SourceFile): Select = Select(Ident(nme.ROOTPKG), name) + def scalaDot(name: Name)(implicit src: SourceFile): Select = Select(rootDot(nme.scala), name) + def scalaAnnotationDot(name: Name)(using SourceFile): Select = Select(scalaDot(nme.annotation), name) + def scalaRuntimeDot(name: Name)(using SourceFile): Select = Select(scalaDot(nme.runtime), name) + def scalaUnit(implicit src: SourceFile): Select = scalaDot(tpnme.Unit) + def scalaAny(implicit src: SourceFile): Select = scalaDot(tpnme.Any) + def javaDotLangDot(name: Name)(implicit src: SourceFile): Select = Select(Select(Ident(nme.java), nme.lang), name) + + def captureRoot(using Context): Select = + Select(scalaDot(nme.caps), nme.CAPTURE_ROOT) + + def makeConstructor(tparams: List[TypeDef], vparamss: List[List[ValDef]], rhs: Tree = EmptyTree)(using Context): DefDef = + DefDef(nme.CONSTRUCTOR, joinParams(tparams, vparamss), TypeTree(), rhs) + + def emptyConstructor(using Context): DefDef = + makeConstructor(Nil, Nil) + + def makeSelfDef(name: TermName, tpt: Tree)(using Context): ValDef = + ValDef(name, tpt, EmptyTree).withFlags(PrivateLocal) + + def makeTupleOrParens(ts: List[Tree])(using Context): Tree = ts match { + case t :: Nil => Parens(t) + case _ => Tuple(ts) + } + + def makeTuple(ts: List[Tree])(using Context): Tree = ts match { + case t :: Nil => t + case _ => Tuple(ts) + } + + def makeAndType(left: Tree, right: Tree)(using Context): AppliedTypeTree = + AppliedTypeTree(ref(defn.andType.typeRef), left :: right :: Nil) + + def makeParameter(pname: TermName, tpe: Tree, mods: Modifiers, isBackquoted: Boolean = false)(using Context): ValDef = { + val vdef = ValDef(pname, tpe, EmptyTree) + if 
(isBackquoted) vdef.pushAttachment(Backquoted, ()) + vdef.withMods(mods | Param) + } + + def makeSyntheticParameter(n: Int = 1, tpt: Tree | Null = null, flags: FlagSet = SyntheticTermParam)(using Context): ValDef = + ValDef(nme.syntheticParamName(n), if (tpt == null) TypeTree() else tpt, EmptyTree) + .withFlags(flags) + + def lambdaAbstract(params: List[ValDef] | List[TypeDef], tpt: Tree)(using Context): Tree = + params match + case Nil => tpt + case (vd: ValDef) :: _ => TermLambdaTypeTree(params.asInstanceOf[List[ValDef]], tpt) + case _ => LambdaTypeTree(params.asInstanceOf[List[TypeDef]], tpt) + + def lambdaAbstractAll(paramss: List[List[ValDef] | List[TypeDef]], tpt: Tree)(using Context): Tree = + paramss.foldRight(tpt)(lambdaAbstract) + + /** A reference to given definition. If definition is a repeated + * parameter, the reference will be a repeated argument. + */ + def refOfDef(tree: MemberDef)(using Context): Tree = tree match { + case ValDef(_, PostfixOp(_, Ident(tpnme.raw.STAR)), _) => repeated(Ident(tree.name)) + case _ => Ident(tree.name) + } + + /** A repeated argument such as `arg: _*` */ + def repeated(arg: Tree)(using Context): Typed = Typed(arg, Ident(tpnme.WILDCARD_STAR)) + + +// --------- Copier/Transformer/Accumulator classes for untyped trees ----- + + def localCtx(tree: Tree)(using Context): Context = ctx + + override val cpy: UntypedTreeCopier = UntypedTreeCopier() + + class UntypedTreeCopier extends TreeCopier { + + def postProcess(tree: Tree, copied: Tree): copied.ThisTree[Untyped] = + copied.asInstanceOf[copied.ThisTree[Untyped]] + + def postProcess(tree: Tree, copied: MemberDef): copied.ThisTree[Untyped] = { + tree match { + case tree: MemberDef => copied.withMods(tree.rawMods) + case _ => copied + } + }.asInstanceOf[copied.ThisTree[Untyped]] + + def ModuleDef(tree: Tree)(name: TermName, impl: Template)(using Context): ModuleDef = tree match { + case tree: ModuleDef if (name eq tree.name) && (impl eq tree.impl) => tree + case _ => 
finalize(tree, untpd.ModuleDef(name, impl)(tree.source)) + } + def ParsedTry(tree: Tree)(expr: Tree, handler: Tree, finalizer: Tree)(using Context): TermTree = tree match { + case tree: ParsedTry if (expr eq tree.expr) && (handler eq tree.handler) && (finalizer eq tree.finalizer) => tree + case _ => finalize(tree, untpd.ParsedTry(expr, handler, finalizer)(tree.source)) + } + def SymbolLit(tree: Tree)(str: String)(using Context): TermTree = tree match { + case tree: SymbolLit if str == tree.str => tree + case _ => finalize(tree, untpd.SymbolLit(str)(tree.source)) + } + def InterpolatedString(tree: Tree)(id: TermName, segments: List[Tree])(using Context): TermTree = tree match { + case tree: InterpolatedString if (id eq tree.id) && (segments eq tree.segments) => tree + case _ => finalize(tree, untpd.InterpolatedString(id, segments)(tree.source)) + } + def Function(tree: Tree)(args: List[Tree], body: Tree)(using Context): Tree = tree match { + case tree: Function if (args eq tree.args) && (body eq tree.body) => tree + case _ => finalize(tree, untpd.Function(args, body)(tree.source)) + } + def PolyFunction(tree: Tree)(targs: List[Tree], body: Tree)(using Context): Tree = tree match { + case tree: PolyFunction if (targs eq tree.targs) && (body eq tree.body) => tree + case _ => finalize(tree, untpd.PolyFunction(targs, body)(tree.source)) + } + def InfixOp(tree: Tree)(left: Tree, op: Ident, right: Tree)(using Context): Tree = tree match { + case tree: InfixOp if (left eq tree.left) && (op eq tree.op) && (right eq tree.right) => tree + case _ => finalize(tree, untpd.InfixOp(left, op, right)(tree.source)) + } + def PostfixOp(tree: Tree)(od: Tree, op: Ident)(using Context): Tree = tree match { + case tree: PostfixOp if (od eq tree.od) && (op eq tree.op) => tree + case _ => finalize(tree, untpd.PostfixOp(od, op)(tree.source)) + } + def PrefixOp(tree: Tree)(op: Ident, od: Tree)(using Context): Tree = tree match { + case tree: PrefixOp if (op eq tree.op) && (od eq tree.od) => 
tree + case _ => finalize(tree, untpd.PrefixOp(op, od)(tree.source)) + } + def Parens(tree: Tree)(t: Tree)(using Context): ProxyTree = tree match { + case tree: Parens if t eq tree.t => tree + case _ => finalize(tree, untpd.Parens(t)(tree.source)) + } + def Tuple(tree: Tree)(trees: List[Tree])(using Context): Tree = tree match { + case tree: Tuple if trees eq tree.trees => tree + case _ => finalize(tree, untpd.Tuple(trees)(tree.source)) + } + def Throw(tree: Tree)(expr: Tree)(using Context): TermTree = tree match { + case tree: Throw if expr eq tree.expr => tree + case _ => finalize(tree, untpd.Throw(expr)(tree.source)) + } + def Quote(tree: Tree)(quoted: Tree)(using Context): Tree = tree match { + case tree: Quote if quoted eq tree.quoted => tree + case _ => finalize(tree, untpd.Quote(quoted)(tree.source)) + } + def Splice(tree: Tree)(expr: Tree)(using Context): Tree = tree match { + case tree: Splice if expr eq tree.expr => tree + case _ => finalize(tree, untpd.Splice(expr)(tree.source)) + } + def ForYield(tree: Tree)(enums: List[Tree], expr: Tree)(using Context): TermTree = tree match { + case tree: ForYield if (enums eq tree.enums) && (expr eq tree.expr) => tree + case _ => finalize(tree, untpd.ForYield(enums, expr)(tree.source)) + } + def ForDo(tree: Tree)(enums: List[Tree], body: Tree)(using Context): TermTree = tree match { + case tree: ForDo if (enums eq tree.enums) && (body eq tree.body) => tree + case _ => finalize(tree, untpd.ForDo(enums, body)(tree.source)) + } + def GenFrom(tree: Tree)(pat: Tree, expr: Tree, checkMode: GenCheckMode)(using Context): Tree = tree match { + case tree: GenFrom if (pat eq tree.pat) && (expr eq tree.expr) && (checkMode == tree.checkMode) => tree + case _ => finalize(tree, untpd.GenFrom(pat, expr, checkMode)(tree.source)) + } + def GenAlias(tree: Tree)(pat: Tree, expr: Tree)(using Context): Tree = tree match { + case tree: GenAlias if (pat eq tree.pat) && (expr eq tree.expr) => tree + case _ => finalize(tree, 
untpd.GenAlias(pat, expr)(tree.source)) + } + def ContextBounds(tree: Tree)(bounds: TypeBoundsTree, cxBounds: List[Tree])(using Context): TypTree = tree match { + case tree: ContextBounds if (bounds eq tree.bounds) && (cxBounds eq tree.cxBounds) => tree + case _ => finalize(tree, untpd.ContextBounds(bounds, cxBounds)(tree.source)) + } + def PatDef(tree: Tree)(mods: Modifiers, pats: List[Tree], tpt: Tree, rhs: Tree)(using Context): Tree = tree match { + case tree: PatDef if (mods eq tree.mods) && (pats eq tree.pats) && (tpt eq tree.tpt) && (rhs eq tree.rhs) => tree + case _ => finalize(tree, untpd.PatDef(mods, pats, tpt, rhs)(tree.source)) + } + def ExtMethods(tree: Tree)(paramss: List[ParamClause], methods: List[Tree])(using Context): Tree = tree match + case tree: ExtMethods if (paramss eq tree.paramss) && (methods == tree.methods) => tree + case _ => finalize(tree, untpd.ExtMethods(paramss, methods)(tree.source)) + def ImportSelector(tree: Tree)(imported: Ident, renamed: Tree, bound: Tree)(using Context): Tree = tree match { + case tree: ImportSelector if (imported eq tree.imported) && (renamed eq tree.renamed) && (bound eq tree.bound) => tree + case _ => finalize(tree, untpd.ImportSelector(imported, renamed, bound)(tree.source)) + } + def Number(tree: Tree)(digits: String, kind: NumberKind)(using Context): Tree = tree match { + case tree: Number if (digits == tree.digits) && (kind == tree.kind) => tree + case _ => finalize(tree, untpd.Number(digits, kind)) + } + def CapturingTypeTree(tree: Tree)(refs: List[Tree], parent: Tree)(using Context): Tree = tree match + case tree: CapturingTypeTree if (refs eq tree.refs) && (parent eq tree.parent) => tree + case _ => finalize(tree, untpd.CapturingTypeTree(refs, parent)) + + def TypedSplice(tree: Tree)(splice: tpd.Tree)(using Context): ProxyTree = tree match { + case tree: TypedSplice if splice `eq` tree.splice => tree + case _ => finalize(tree, untpd.TypedSplice(splice)(using ctx)) + } + def MacroTree(tree: Tree)(expr: 
Tree)(using Context): Tree = tree match { + case tree: MacroTree if expr `eq` tree.expr => tree + case _ => finalize(tree, untpd.MacroTree(expr)(tree.source)) + } + } + + abstract class UntypedTreeMap(cpy: UntypedTreeCopier = untpd.cpy) extends TreeMap(cpy) { + override def transformMoreCases(tree: Tree)(using Context): Tree = tree match { + case ModuleDef(name, impl) => + cpy.ModuleDef(tree)(name, transformSub(impl)) + case tree: DerivingTemplate => + cpy.Template(tree)(transformSub(tree.constr), transform(tree.parents), + transform(tree.derived), transformSub(tree.self), transformStats(tree.body, tree.symbol)) + case ParsedTry(expr, handler, finalizer) => + cpy.ParsedTry(tree)(transform(expr), transform(handler), transform(finalizer)) + case SymbolLit(str) => + cpy.SymbolLit(tree)(str) + case InterpolatedString(id, segments) => + cpy.InterpolatedString(tree)(id, segments.mapConserve(transform)) + case Function(args, body) => + cpy.Function(tree)(transform(args), transform(body)) + case PolyFunction(targs, body) => + cpy.PolyFunction(tree)(transform(targs), transform(body)) + case InfixOp(left, op, right) => + cpy.InfixOp(tree)(transform(left), op, transform(right)) + case PostfixOp(od, op) => + cpy.PostfixOp(tree)(transform(od), op) + case PrefixOp(op, od) => + cpy.PrefixOp(tree)(op, transform(od)) + case Parens(t) => + cpy.Parens(tree)(transform(t)) + case Tuple(trees) => + cpy.Tuple(tree)(transform(trees)) + case Throw(expr) => + cpy.Throw(tree)(transform(expr)) + case Quote(t) => + cpy.Quote(tree)(transform(t)) + case Splice(expr) => + cpy.Splice(tree)(transform(expr)) + case ForYield(enums, expr) => + cpy.ForYield(tree)(transform(enums), transform(expr)) + case ForDo(enums, body) => + cpy.ForDo(tree)(transform(enums), transform(body)) + case GenFrom(pat, expr, checkMode) => + cpy.GenFrom(tree)(transform(pat), transform(expr), checkMode) + case GenAlias(pat, expr) => + cpy.GenAlias(tree)(transform(pat), transform(expr)) + case ContextBounds(bounds, cxBounds) 
=> + cpy.ContextBounds(tree)(transformSub(bounds), transform(cxBounds)) + case PatDef(mods, pats, tpt, rhs) => + cpy.PatDef(tree)(mods, transform(pats), transform(tpt), transform(rhs)) + case ExtMethods(paramss, methods) => + cpy.ExtMethods(tree)(transformParamss(paramss), transformSub(methods)) + case ImportSelector(imported, renamed, bound) => + cpy.ImportSelector(tree)(transformSub(imported), transform(renamed), transform(bound)) + case Number(_, _) | TypedSplice(_) => + tree + case MacroTree(expr) => + cpy.MacroTree(tree)(transform(expr)) + case CapturingTypeTree(refs, parent) => + cpy.CapturingTypeTree(tree)(transform(refs), transform(parent)) + case _ => + super.transformMoreCases(tree) + } + } + + abstract class UntypedTreeAccumulator[X] extends TreeAccumulator[X] { + self: UntypedTreeAccumulator[X] @retains(caps.*) => + override def foldMoreCases(x: X, tree: Tree)(using Context): X = tree match { + case ModuleDef(name, impl) => + this(x, impl) + case tree: DerivingTemplate => + this(this(this(this(this(x, tree.constr), tree.parents), tree.derived), tree.self), tree.body) + case ParsedTry(expr, handler, finalizer) => + this(this(this(x, expr), handler), finalizer) + case SymbolLit(str) => + x + case InterpolatedString(id, segments) => + this(x, segments) + case Function(args, body) => + this(this(x, args), body) + case PolyFunction(targs, body) => + this(this(x, targs), body) + case InfixOp(left, op, right) => + this(this(this(x, left), op), right) + case PostfixOp(od, op) => + this(this(x, od), op) + case PrefixOp(op, od) => + this(this(x, op), od) + case Parens(t) => + this(x, t) + case Tuple(trees) => + this(x, trees) + case Throw(expr) => + this(x, expr) + case Quote(t) => + this(x, t) + case Splice(expr) => + this(x, expr) + case ForYield(enums, expr) => + this(this(x, enums), expr) + case ForDo(enums, body) => + this(this(x, enums), body) + case GenFrom(pat, expr, _) => + this(this(x, pat), expr) + case GenAlias(pat, expr) => + this(this(x, pat), expr) 
+ case ContextBounds(bounds, cxBounds) => + this(this(x, bounds), cxBounds) + case PatDef(mods, pats, tpt, rhs) => + this(this(this(x, pats), tpt), rhs) + case ExtMethods(paramss, methods) => + this(paramss.foldLeft(x)(apply), methods) + case ImportSelector(imported, renamed, bound) => + this(this(this(x, imported), renamed), bound) + case Number(_, _) => + x + case TypedSplice(splice) => + this(x, splice) + case MacroTree(expr) => + this(x, expr) + case CapturingTypeTree(refs, parent) => + this(this(x, refs), parent) + case _ => + super.foldMoreCases(x, tree) + } + } + + abstract class UntypedTreeTraverser extends UntypedTreeAccumulator[Unit] { + def traverse(tree: Tree)(using Context): Unit + def apply(x: Unit, tree: Tree)(using Context): Unit = traverse(tree) + protected def traverseChildren(tree: Tree)(using Context): Unit = foldOver((), tree) + } + + /** Fold `f` over all tree nodes, in depth-first, prefix order */ + class UntypedDeepFolder[X](f: (X, Tree) => X) extends UntypedTreeAccumulator[X] { + def apply(x: X, tree: Tree)(using Context): X = foldOver(f(x, tree), tree) + } + + /** Is there a subtree of this tree that satisfies predicate `p`? 
*/ + extension (tree: Tree) def existsSubTree(p: Tree => Boolean)(using Context): Boolean = { + val acc = new UntypedTreeAccumulator[Boolean] { + def apply(x: Boolean, t: Tree)(using Context) = x || p(t) || foldOver(x, t) + } + acc(false, tree) + } + + protected def FunProto(args: List[Tree], resType: Type)(using Context) = + ProtoTypes.FunProto(args, resType)(ctx.typer, ApplyKind.Regular) +} diff --git a/tests/pos-with-compiler-cc/dotc/cc/BoxedTypeCache.scala b/tests/pos-with-compiler-cc/dotc/cc/BoxedTypeCache.scala new file mode 100644 index 000000000000..56b3f5ba5047 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/cc/BoxedTypeCache.scala @@ -0,0 +1,19 @@ +package dotty.tools +package dotc +package cc + +import core.* +import Types.*, Symbols.*, Contexts.* + +/** A one-element cache for the boxed version of an unboxed capturing type */ +class BoxedTypeCache: + private var boxed: Type = compiletime.uninitialized + private var unboxed: Type = NoType + + def apply(tp: AnnotatedType)(using Context): Type = + if tp ne unboxed then + unboxed = tp + val CapturingType(parent, refs) = tp: @unchecked + boxed = CapturingType(parent, refs, boxed = true) + boxed +end BoxedTypeCache \ No newline at end of file diff --git a/tests/pos-with-compiler-cc/dotc/cc/CaptureAnnotation.scala b/tests/pos-with-compiler-cc/dotc/cc/CaptureAnnotation.scala new file mode 100644 index 000000000000..fd89159e2076 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/cc/CaptureAnnotation.scala @@ -0,0 +1,76 @@ +package dotty.tools +package dotc +package cc + +import core.* +import Types.*, Symbols.*, Contexts.*, Annotations.* +import ast.Trees.* +import ast.{tpd, untpd} +import Decorators.* +import config.Printers.capt +import printing.Printer +import printing.Texts.Text + +/** An annotation representing a capture set and whether it is boxed. + * It simulates a normal @retains annotation except that it is more efficient, + * supports variables as capture sets, and adds a `boxed` flag. 
+ * These annotations are created during capture checking. Before that + * there are only regular @retains and @retainsByName annotations. + * @param refs the capture set + * @param boxed whether the type carrying the annotation is boxed + * @param cls the underlying class (either annotation.retains or annotation.retainsByName) + */ +case class CaptureAnnotation(refs: CaptureSet, boxed: Boolean)(cls: Symbol) extends Annotation: + import CaptureAnnotation.* + import tpd.* + + /** A cache for boxed version of a capturing type with this annotation */ + val boxedType = BoxedTypeCache() + + /** Reconstitute annotation tree from capture set */ + override def tree(using Context) = + val elems = refs.elems.toList.map { + case cr: TermRef => ref(cr) + case cr: TermParamRef => untpd.Ident(cr.paramName).withType(cr) + case cr: ThisType => This(cr.cls) + } + val arg = repeated(elems, TypeTree(defn.AnyType)) + New(symbol.typeRef, arg :: Nil) + + override def symbol(using Context) = cls + + override def derivedAnnotation(tree: Tree)(using Context): Annotation = this + + def derivedAnnotation(refs: CaptureSet, boxed: Boolean)(using Context): Annotation = + if (this.refs eq refs) && (this.boxed == boxed) then this + else CaptureAnnotation(refs, boxed)(cls) + + override def sameAnnotation(that: Annotation)(using Context): Boolean = that match + case CaptureAnnotation(refs, boxed) => + this.refs == refs && this.boxed == boxed && this.symbol == that.symbol + case _ => false + + override def mapWith(tm: TypeMap)(using Context) = + val elems = refs.elems.toList + val elems1 = elems.mapConserve(tm) + if elems1 eq elems then this + else if elems1.forall(_.isInstanceOf[CaptureRef]) + then derivedAnnotation(CaptureSet(elems1.asInstanceOf[List[CaptureRef]]*), boxed) + else EmptyAnnotation + + override def refersToParamOf(tl: TermLambda)(using Context): Boolean = + refs.elems.exists { + case TermParamRef(tl1, _) => tl eq tl1 + case _ => false + } + + override def toText(printer: Printer): 
Text = refs.toText(printer) + + override def hash: Int = + (refs.hashCode << 1) | (if boxed then 1 else 0) + + override def eql(that: Annotation) = that match + case that: CaptureAnnotation => (this.refs eq that.refs) && (this.boxed == that.boxed) + case _ => false + +end CaptureAnnotation diff --git a/tests/pos-with-compiler-cc/dotc/cc/CaptureOps.scala b/tests/pos-with-compiler-cc/dotc/cc/CaptureOps.scala new file mode 100644 index 000000000000..3dfd1324ae1e --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/cc/CaptureOps.scala @@ -0,0 +1,253 @@ +package dotty.tools +package dotc +package cc + +import core.* +import Types.*, Symbols.*, Contexts.*, Annotations.*, Flags.* +import ast.{tpd, untpd} +import Decorators.*, NameOps.* +import config.Printers.capt +import util.Property.Key +import tpd.* +import config.Feature + +private val Captures: Key[CaptureSet] = Key() +private val BoxedType: Key[BoxedTypeCache] = Key() + +/** The arguments of a @retains or @retainsByName annotation */ +private[cc] def retainedElems(tree: Tree)(using Context): List[Tree] = tree match + case Apply(_, Typed(SeqLiteral(elems, _), _) :: Nil) => elems + case _ => Nil + +/** An exception thrown if a @retains argument is not syntactically a CaptureRef */ +class IllegalCaptureRef(tpe: Type) extends Exception + +extension (tree: Tree) + + /** Map tree with CaptureRef type to its type, throw IllegalCaptureRef otherwise */ + def toCaptureRef(using Context): CaptureRef = tree.tpe match + case ref: CaptureRef => ref + case tpe => throw IllegalCaptureRef(tpe) + + /** Convert a @retains or @retainsByName annotation tree to the capture set it represents. + * For efficiency, the result is cached as an Attachment on the tree. 
+ */ + def toCaptureSet(using Context): CaptureSet = + tree.getAttachment(Captures) match + case Some(refs) => refs + case None => + val refs = CaptureSet(retainedElems(tree).map(_.toCaptureRef)*) + .showing(i"toCaptureSet $tree --> $result", capt) + tree.putAttachment(Captures, refs) + refs + + /** Under pureFunctions, add a @retainsByName(*)` annotation to the argument of + * a by name parameter type, turning the latter into an impure by name parameter type. + */ + def adaptByNameArgUnderPureFuns(using Context): Tree = + if Feature.pureFunsEnabledSomewhere then + val rbn = defn.RetainsByNameAnnot + Annotated(tree, + New(rbn.typeRef).select(rbn.primaryConstructor).appliedTo( + Typed( + SeqLiteral(ref(defn.captureRoot) :: Nil, TypeTree(defn.AnyType)), + TypeTree(defn.RepeatedParamType.appliedTo(defn.AnyType)) + ) + ) + ) + else tree + +extension (tp: Type) + + /** @pre `tp` is a CapturingType */ + def derivedCapturingType(parent: Type, refs: CaptureSet)(using Context): Type = tp match + case tp @ CapturingType(p, r) => + if (parent eq p) && (refs eq r) then tp + else CapturingType(parent, refs, tp.isBoxed) + + /** If this is a unboxed capturing type with nonempty capture set, its boxed version. + * Or, if type is a TypeBounds of capturing types, the version where the bounds are boxed. + * The identity for all other types. 
+ */ + def boxed(using Context): Type = tp.dealias match + case tp @ CapturingType(parent, refs) if !tp.isBoxed && !refs.isAlwaysEmpty => + tp.annot match + case ann: CaptureAnnotation => + ann.boxedType(tp) + case ann => + ann.tree.getAttachment(BoxedType) match + case None => ann.tree.putAttachment(BoxedType, BoxedTypeCache()) + case _ => + ann.tree.attachment(BoxedType)(tp) + case tp: RealTypeBounds => + tp.derivedTypeBounds(tp.lo.boxed, tp.hi.boxed) + case _ => + tp + + /** If `sym` is a type parameter, the boxed version of `tp`, otherwise `tp` */ + def boxedIfTypeParam(sym: Symbol)(using Context) = + if sym.is(TypeParam) then tp.boxed else tp + + /** The boxed version of `tp`, unless `tycon` is a function symbol */ + def boxedUnlessFun(tycon: Type)(using Context) = + if ctx.phase != Phases.checkCapturesPhase || defn.isFunctionSymbol(tycon.typeSymbol) + then tp + else tp.boxed + + /** The capture set consisting of all top-level captures of `tp` that appear under a box. + * Unlike for `boxed` this also considers parents of capture types, unions and + * intersections, and type proxies other than abstract types. + */ + def boxedCaptureSet(using Context): CaptureSet = + def getBoxed(tp: Type): CaptureSet = tp match + case tp @ CapturingType(parent, refs) => + val pcs = getBoxed(parent) + if tp.isBoxed then refs ++ pcs else pcs + case tp: TypeRef if tp.symbol.isAbstractType => CaptureSet.empty + case tp: TypeProxy => getBoxed(tp.superType) + case tp: AndType => getBoxed(tp.tp1) ** getBoxed(tp.tp2) + case tp: OrType => getBoxed(tp.tp1) ++ getBoxed(tp.tp2) + case _ => CaptureSet.empty + getBoxed(tp) + + /** Is the boxedCaptureSet of this type nonempty? */ + def isBoxedCapturing(using Context) = !tp.boxedCaptureSet.isAlwaysEmpty + + /** If this type is a capturing type, the version with boxed statues as given by `boxed`. + * If it is a TermRef of a capturing type, and the box status flips, widen to a capturing + * type that captures the TermRef. 
+ */ + def forceBoxStatus(boxed: Boolean)(using Context): Type = tp.widenDealias match + case tp @ CapturingType(parent, refs) if tp.isBoxed != boxed => + val refs1 = tp match + case ref: CaptureRef if ref.isTracked => ref.singletonCaptureSet + case _ => refs + CapturingType(parent, refs1, boxed) + case _ => + tp + + /** Map capturing type to their parents. Capturing types accessible + * via dealising are also stripped. + */ + def stripCapturing(using Context): Type = tp.dealiasKeepAnnots match + case CapturingType(parent, _) => + parent.stripCapturing + case atd @ AnnotatedType(parent, annot) => + atd.derivedAnnotatedType(parent.stripCapturing, annot) + case _ => + tp + + /** Under pureFunctions, map regular function type to impure function type + */ + def adaptFunctionTypeUnderPureFuns(using Context): Type = tp match + case AppliedType(fn, args) + if Feature.pureFunsEnabledSomewhere && defn.isFunctionClass(fn.typeSymbol) => + val fname = fn.typeSymbol.name + defn.FunctionType( + fname.functionArity, + isContextual = fname.isContextFunction, + isErased = fname.isErasedFunction, + isImpure = true).appliedTo(args) + case _ => + tp + + /** Under pureFunctions, add a @retainsByName(*)` annotation to the argument of + * a by name parameter type, turning the latter into an impure by name parameter type. + */ + def adaptByNameArgUnderPureFuns(using Context): Type = + if Feature.pureFunsEnabledSomewhere then + AnnotatedType(tp, + CaptureAnnotation(CaptureSet.universal, boxed = false)(defn.RetainsByNameAnnot)) + else + tp + + def isCapturingType(using Context): Boolean = + tp match + case CapturingType(_, _) => true + case _ => false + + /** Is type known to be always pure by its class structure, + * so that adding a capture set to it would not make sense? 
 + */ + def isAlwaysPure(using Context): Boolean = tp.dealias match + case tp: (TypeRef | AppliedType) => + val sym = tp.typeSymbol + if sym.isClass then sym.isPureClass + else tp.superType.isAlwaysPure + case CapturingType(parent, refs) => + parent.isAlwaysPure || refs.isAlwaysEmpty + case tp: TypeProxy => + tp.superType.isAlwaysPure + case tp: AndType => + tp.tp1.isAlwaysPure || tp.tp2.isAlwaysPure + case tp: OrType => + tp.tp1.isAlwaysPure && tp.tp2.isAlwaysPure + case _ => + false + +extension (sym: Symbol) + + /** A class is pure if: + * - one of its base types has an explicitly declared self type with an empty capture set + * - or it is a value class + * - or it is Nothing or Null + */ + def isPureClass(using Context): Boolean = sym match + case cls: ClassSymbol => + val AnyValClass = defn.AnyValClass + cls.baseClasses.exists(bc => + bc == AnyValClass + || { + val selfType = bc.givenSelfType + selfType.exists && selfType.captureSet.isAlwaysEmpty + }) + || cls == defn.NothingClass + || cls == defn.NullClass + case _ => + false + + /** Does this symbol allow results carrying the universal capability? + * Currently this is true only for function type applies (since their + * results are unboxed) and `erasedValue` since this function is magic in + * that it allows to conjure global capabilities from nothing (aside: can we find a + * more controlled way to achieve this?). + * But it could be generalized to other functions so that they can take capability + * classes as arguments. + */ + def allowsRootCapture(using Context): Boolean = + sym == defn.Compiletime_erasedValue + || defn.isFunctionClass(sym.maybeOwner) + + /** When applying `sym`, would the result type be unboxed? + * This is the case if the result type contains a top-level reference to an enclosing + * class or method type parameter and the method does not allow root capture. + * If the type parameter is instantiated to a boxed type, that type would + * have to be unboxed in the method's result. 
+ */ + def unboxesResult(using Context): Boolean = + def containsEnclTypeParam(tp: Type): Boolean = tp.strippedDealias match + case tp @ TypeRef(pre: ThisType, _) => tp.symbol.is(Param) + case tp: TypeParamRef => true + case tp: AndOrType => containsEnclTypeParam(tp.tp1) || containsEnclTypeParam(tp.tp2) + case tp: RefinedType => containsEnclTypeParam(tp.parent) || containsEnclTypeParam(tp.refinedInfo) + case _ => false + containsEnclTypeParam(sym.info.finalResultType) + && !sym.allowsRootCapture + && sym != defn.Caps_unsafeBox + && sym != defn.Caps_unsafeUnbox + +extension (tp: AnnotatedType) + /** Is this a boxed capturing type? */ + def isBoxed(using Context): Boolean = tp.annot match + case ann: CaptureAnnotation => ann.boxed + case _ => false + +extension (ts: List[Type]) + /** Equivalent to ts.mapconserve(_.boxedUnlessFun(tycon)) but more efficient where + * it is the identity. + */ + def boxedUnlessFun(tycon: Type)(using Context) = + if ctx.phase != Phases.checkCapturesPhase || defn.isFunctionClass(tycon.typeSymbol) + then ts + else ts.mapconserve(_.boxed) + diff --git a/tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala b/tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala new file mode 100644 index 000000000000..48ff614f2910 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala @@ -0,0 +1,906 @@ +package dotty.tools +package dotc +package cc + +import core.* +import Types.*, Symbols.*, Flags.*, Contexts.*, Decorators.* +import config.Printers.capt +import Annotations.Annotation +import annotation.threadUnsafe +import annotation.constructorOnly +import annotation.internal.sharable +import reporting.trace +import printing.{Showable, Printer} +import printing.Texts.* +import util.{SimpleIdentitySet, Property} +import util.common.alwaysTrue +import scala.collection.mutable +import config.Config.ccAllowUnsoundMaps +import language.experimental.pureFunctions + +/** A class for capture sets. Capture sets can be constants or variables. 
 + * Capture sets support inclusion constraints <:< where <:< is subcapturing. + * + * They also allow + * - mapping with functions from elements to capture sets + * - filtering with predicates on elements + * - intersecting two capture sets + * + * That is, constraints can be of the forms + * + * cs1 <:< cs2 + * cs1 = ∪ {f(x) | x ∈ cs2} where f is a function from capture references to capture sets. + * cs1 = ∪ {x | x ∈ cs2, p(x)} where p is a predicate on capture references + * cs1 = cs2 ∩ cs3 + * + * We call the resulting constraint system "monadic set constraints". + * To support capture propagation across maps, mappings are supported only + * if the mapped function is either a bijection or if it is idempotent + * on capture references (c.f. doc comment on `map` below). + */ +sealed abstract class CaptureSet extends Showable, caps.Pure: + import CaptureSet.* + + /** The elements of this capture set. For capture variables, + * the elements known so far. + */ + def elems: Refs + + /** Is this capture set constant (i.e. not an unsolved capture variable)? + * Solved capture variables count as constant. + */ + def isConst: Boolean + + /** Is this capture set always empty? For unsolved capture variables, returns + * always false. + */ + def isAlwaysEmpty: Boolean + + /** Is this capture set definitely non-empty? */ + final def isNotEmpty: Boolean = !elems.isEmpty + + /** Convert to Const. @pre: isConst */ + def asConst: Const = this match + case c: Const => c + case v: Var => + assert(v.isConst) + Const(v.elems) + + /** Cast to variable. @pre: !isConst */ + def asVar: Var = + assert(!isConst) + asInstanceOf[Var] + + /** Does this capture set contain the root reference `*` as element? */ + final def isUniversal(using Context) = + elems.exists { + case ref: TermRef => ref.symbol == defn.captureRoot + case _ => false + } + + /** Add new elements to this capture set if allowed. + * @pre `newElems` is not empty and does not overlap with `this.elems`. 
+ * Constant capture sets never allow to add new elements. + * Variables allow it if and only if the new elements can be included + * in all their dependent sets. + * @param origin The set where the elements come from, or `empty` if not known. + * @return CompareResult.OK if elements were added, or a conflicting + * capture set that prevents addition otherwise. + */ + protected def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult + + /** If this is a variable, add `cs` as a dependent set */ + protected def addDependent(cs: CaptureSet)(using Context, VarState): CompareResult + + /** If `cs` is a variable, add this capture set as one of its dependent sets */ + protected def addAsDependentTo(cs: CaptureSet)(using Context): this.type = + cs.addDependent(this)(using ctx, UnrecordedState) + this + + /** Try to include all references of `elems` that are not yet accounted for by this + * capture set. Inclusion is via `addNewElems`. + * @param origin The set where the elements come from, or `empty` if not known. + * @return CompareResult.OK if all unaccounted elements could be added, + * capture set that prevents addition otherwise. 
+ */ + protected final def tryInclude(elems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = + val unaccounted = elems.filter(!accountsFor(_)) + if unaccounted.isEmpty then CompareResult.OK + else addNewElems(unaccounted, origin) + + /** Equivalent to `tryInclude({elem}, origin)`, but more efficient */ + protected final def tryInclude(elem: CaptureRef, origin: CaptureSet)(using Context, VarState): CompareResult = + if accountsFor(elem) then CompareResult.OK + else addNewElems(elem.singletonCaptureSet.elems, origin) + + /* x subsumes y if x is the same as y, or x is a this reference and y refers to a field of x */ + extension (x: CaptureRef) private def subsumes(y: CaptureRef) = + (x eq y) + || y.match + case y: TermRef => y.prefix eq x + case _ => false + + /** {x} <:< this where <:< is subcapturing, but treating all variables + * as frozen. + */ + def accountsFor(x: CaptureRef)(using Context): Boolean = + reporting.trace(i"$this accountsFor $x, ${x.captureSetOfInfo}?", show = true) { + elems.exists(_.subsumes(x)) + || !x.isRootCapability && x.captureSetOfInfo.subCaptures(this, frozen = true).isOK + } + + /** A more optimistic version of accountsFor, which does not take variable supersets + * of the `x` reference into account. A set might account for `x` if it accounts + * for `x` in a state where we assume all supersets of `x` have just the elements + * known at this point. On the other hand if x's capture set has no known elements, + * a set `cs` might account for `x` only if it subsumes `x` or it contains the + * root capability `*`. 
+ */ + def mightAccountFor(x: CaptureRef)(using Context): Boolean = + reporting.trace(i"$this mightAccountFor $x, ${x.captureSetOfInfo}?", show = true) { + elems.exists(elem => elem.subsumes(x) || elem.isRootCapability) + || !x.isRootCapability + && { + val elems = x.captureSetOfInfo.elems + !elems.isEmpty && elems.forall(mightAccountFor) + } + } + + /** A more optimistic version of subCaptures used to choose one of two typing rules + * for selections and applications. `cs1 mightSubcapture cs2` if `cs2` might account for + * every element currently known to be in `cs1`. + */ + def mightSubcapture(that: CaptureSet)(using Context): Boolean = + elems.forall(that.mightAccountFor) + + /** The subcapturing test. + * @param frozen if true, no new variables or dependent sets are allowed to + * be added when making this test. An attempt to add either + * will result in failure. + */ + final def subCaptures(that: CaptureSet, frozen: Boolean)(using Context): CompareResult = + subCaptures(that)(using ctx, if frozen then FrozenState else VarState()) + + /** The subcapturing test, using a given VarState */ + private def subCaptures(that: CaptureSet)(using Context, VarState): CompareResult = + def recur(elems: List[CaptureRef]): CompareResult = elems match + case elem :: elems1 => + var result = that.tryInclude(elem, this) + if !result.isOK && !elem.isRootCapability && summon[VarState] != FrozenState then + result = elem.captureSetOfInfo.subCaptures(that) + if result.isOK then + recur(elems1) + else + varState.rollBack() + result + case Nil => + addDependent(that) + recur(elems.toList) + .showing(i"subcaptures $this <:< $that = ${result.show}", capt) + + /** Two capture sets are considered =:= equal if they mutually subcapture each other + * in a frozen state. 
+ */ + def =:= (that: CaptureSet)(using Context): Boolean = + this.subCaptures(that, frozen = true).isOK + && that.subCaptures(this, frozen = true).isOK + + /** The smallest capture set (via <:<) that is a superset of both + * `this` and `that` + */ + def ++ (that: CaptureSet)(using Context): CaptureSet = + if this.subCaptures(that, frozen = true).isOK then that + else if that.subCaptures(this, frozen = true).isOK then this + else if this.isConst && that.isConst then Const(this.elems ++ that.elems) + else Var(this.elems ++ that.elems).addAsDependentTo(this).addAsDependentTo(that) + + /** The smallest superset (via <:<) of this capture set that also contains `ref`. + */ + def + (ref: CaptureRef)(using Context): CaptureSet = + this ++ ref.singletonCaptureSet + + /** The largest capture set (via <:<) that is a subset of both `this` and `that` + */ + def **(that: CaptureSet)(using Context): CaptureSet = + if this.subCaptures(that, frozen = true).isOK then this + else if that.subCaptures(this, frozen = true).isOK then that + else if this.isConst && that.isConst then Const(elemIntersection(this, that)) + else Intersected(this, that) + + /** The largest subset (via <:<) of this capture set that does not account for + * any of the elements in the constant capture set `that` + */ + def -- (that: CaptureSet.Const)(using Context): CaptureSet = + val elems1 = elems.filter(!that.accountsFor(_)) + if elems1.size == elems.size then this + else if this.isConst then Const(elems1) + else Diff(asVar, that) + + /** The largest subset (via <:<) of this capture set that does not account for `ref` */ + def - (ref: CaptureRef)(using Context): CaptureSet = + this -- ref.singletonCaptureSet + + /** The largest subset (via <:<) of this capture set that only contains elements + * for which `p` is true. 
+ */ + def filter(p: CaptureRef -> Boolean)(using Context): CaptureSet = + if this.isConst then + val elems1 = elems.filter(p) + if elems1 == elems then this + else Const(elems.filter(p)) + else Filtered(asVar, p) + + /** Capture set obtained by applying `tm` to all elements of the current capture set + * and joining the results. If the current capture set is a variable, the same + * transformation is applied to all future additions of new elements. + * + * Note: We have a problem how we handle the situation where we have a mapped set + * + * cs2 = tm(cs1) + * + * and then the propagation solver adds a new element `x` to `cs2`. What do we + * know in this case about `cs1`? We can answer this question in a sound way only + * if `tm` is a bijection on capture references or it is idempotent on capture references. + * (see definition in IdempotentCapRefMap). + * If `tm` is a bijection we know that `tm^-1(x)` must be in `cs1`. If `tm` is idempotent + * one possible solution is that `x` is in `cs1`, which is what we assume in this case. + * That strategy is sound but not complete. + * + * If `tm` is some other map, we don't know how to handle this case. For now, + * we simply refuse to handle other maps. If they do need to be handled, + * `OtherMapped` provides some approximation to a solution, but it is neither + * sound nor complete. 
+ */ + def map(tm: TypeMap)(using Context): CaptureSet = tm match + case tm: BiTypeMap => + val mappedElems = elems.map(tm.forward) + if isConst then + if mappedElems == elems then this + else Const(mappedElems) + else BiMapped(asVar, tm, mappedElems) + case tm: IdentityCaptRefMap => + this + case _ => + val mapped = mapRefs(elems, tm, tm.variance) + if isConst then + if mapped.isConst && mapped.elems == elems then this + else mapped + else Mapped(asVar, tm, tm.variance, mapped) + + /** A mapping resulting from substituting parameters of a BindingType to a list of types */ + def substParams(tl: BindingType, to: List[Type])(using Context) = + map(Substituters.SubstParamsMap(tl, to)) + + /** Invoke handler if this set has (or later aquires) the root capability `*` */ + def disallowRootCapability(handler: () => Context ?=> Unit)(using Context): this.type = + if isUniversal then handler() + this + + /** An upper approximation of this capture set, i.e. a constant set that is + * subcaptured by this set. If the current set is a variable + * it is the intersection of all upper approximations of known supersets + * of the variable. + * The upper approximation is meaningful only if it is constant. If not, + * `upperApprox` can return an arbitrary capture set variable. + * `upperApprox` is used in `solve`. + */ + protected def upperApprox(origin: CaptureSet)(using Context): CaptureSet + + /** Assuming set this set dependds on was just solved to be constant, propagate this info + * to this set. This might result in the set being solved to be constant + * itself. + */ + protected def propagateSolved()(using Context): Unit = () + + /** This capture set with a description that tells where it comes from */ + def withDescription(description: String): CaptureSet + + /** The provided description (using `withDescription`) for this capture set or else "" */ + def description: String + + /** A regular @retains or @retainsByName annotation with the elements of this set as arguments. 
*/ + def toRegularAnnotation(cls: Symbol)(using Context): Annotation = + Annotation(CaptureAnnotation(this, boxed = false)(cls).tree) + + override def toText(printer: Printer): Text = + Str("{") ~ Text(elems.toList.map(printer.toTextCaptureRef), ", ") ~ Str("}") ~~ description + +object CaptureSet: + type Refs = SimpleIdentitySet[CaptureRef] + type Vars = SimpleIdentitySet[Var] + type Deps = SimpleIdentitySet[CaptureSet] + + @sharable private var varId = 0 + + /** If set to `true`, capture stack traces that tell us where sets are created */ + private final val debugSets = false + + private val emptySet = SimpleIdentitySet.empty + + /** The empty capture set `{}` */ + val empty: CaptureSet.Const = Const(emptySet) + + /** The universal capture set `{*}` */ + def universal(using Context): CaptureSet = + defn.captureRoot.termRef.singletonCaptureSet + + /** Used as a recursion brake */ + @sharable private[dotc] val Pending = Const(SimpleIdentitySet.empty) + + /** The empty capture set with a description that says it's the elf type of an + * exception class. 
+ */ + val emptyOfException: CaptureSet.Const = Const(emptySet, "of an exception class") + + def apply(elems: CaptureRef*)(using Context): CaptureSet.Const = + if elems.isEmpty then empty + else Const(SimpleIdentitySet(elems.map(_.normalizedRef)*)) + + def apply(elems: Refs)(using Context): CaptureSet.Const = + if elems.isEmpty then empty else Const(elems) + + /** The subclass of constant capture sets with given elements `elems` */ + class Const private[CaptureSet] (val elems: Refs, val description: String = "") extends CaptureSet: + def isConst = true + def isAlwaysEmpty = elems.isEmpty + + def addNewElems(elems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = + CompareResult.fail(this) + + def addDependent(cs: CaptureSet)(using Context, VarState) = CompareResult.OK + + def upperApprox(origin: CaptureSet)(using Context): CaptureSet = this + + def withDescription(description: String): Const = Const(elems, description) + + override def toString = elems.toString + end Const + + /** The subclass of captureset variables with given initial elements */ + class Var(initialElems: Refs = emptySet) extends CaptureSet: + + /** A unique identification number for diagnostics */ + val id = + varId += 1 + varId + + /** A variable is solved if it is aproximated to a from-then-on constant set. */ + private var isSolved: Boolean = false + + /** The elements currently known to be in the set */ + var elems: Refs = initialElems + + /** The sets currently known to be dependent sets (i.e. new additions to this set + * are propagated to these dependent sets.) + */ + var deps: Deps = emptySet + + def isConst = isSolved + def isAlwaysEmpty = false + + /** A handler to be invoked if the root reference `*` is added to this set + * The handler is pure in the sense that it will only output diagnostics. 
+ */ + var rootAddedHandler: () -> Context ?-> Unit = () => () + + var description: String = "" + + /** Record current elements in given VarState provided it does not yet + * contain an entry for this variable. + */ + private def recordElemsState()(using VarState): Boolean = + varState.getElems(this) match + case None => varState.putElems(this, elems) + case _ => true + + /** Record current dependent sets in given VarState provided it does not yet + * contain an entry for this variable. + */ + private[CaptureSet] def recordDepsState()(using VarState): Boolean = + varState.getDeps(this) match + case None => varState.putDeps(this, deps) + case _ => true + + /** Reset elements to what was recorded in `state` */ + def resetElems()(using state: VarState): Unit = + elems = state.elems(this) + + /** Reset dependent sets to what was recorded in `state` */ + def resetDeps()(using state: VarState): Unit = + deps = state.deps(this) + + def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = + if !isConst && recordElemsState() then + elems ++= newElems + if isUniversal then rootAddedHandler() + // assert(id != 2 || elems.size != 2, this) + (CompareResult.OK /: deps) { (r, dep) => + r.andAlso(dep.tryInclude(newElems, this)) + } + else // fail if variable is solved or given VarState is frozen + CompareResult.fail(this) + + def addDependent(cs: CaptureSet)(using Context, VarState): CompareResult = + if (cs eq this) || cs.isUniversal || isConst then + CompareResult.OK + else if recordDepsState() then + deps += cs + CompareResult.OK + else + CompareResult.fail(this) + + override def disallowRootCapability(handler: () -> Context ?-> Unit)(using Context): this.type = + rootAddedHandler = handler + super.disallowRootCapability(handler) + + private var computingApprox = false + + /** Roughly: the intersection of all constant known supersets of this set. + * The aim is to find an as-good-as-possible constant set that is a superset + * of this set. 
The universal set {*} is a sound fallback. + */ + final def upperApprox(origin: CaptureSet)(using Context): CaptureSet = + if computingApprox then universal + else if isConst then this + else + computingApprox = true + try computeApprox(origin).ensuring(_.isConst) + finally computingApprox = false + + /** The intersection of all upper approximations of dependent sets */ + protected def computeApprox(origin: CaptureSet)(using Context): CaptureSet = + (universal /: deps) { (acc, sup) => acc ** sup.upperApprox(this) } + + /** Widen the variable's elements to its upper approximation and + * mark it as constant from now on. This is used for contra-variant type variables + * in the results of defs and vals. + */ + def solve()(using Context): Unit = + if !isConst then + val approx = upperApprox(empty) + //println(i"solving var $this $approx ${approx.isConst} deps = ${deps.toList}") + val newElems = approx.elems -- elems + if newElems.isEmpty || addNewElems(newElems, empty)(using ctx, VarState()).isOK then + markSolved() + + /** Mark set as solved and propagate this info to all dependent sets */ + def markSolved()(using Context): Unit = + isSolved = true + deps.foreach(_.propagateSolved()) + + def withDescription(description: String): this.type = + this.description = + if this.description.isEmpty then description + else s"${this.description} and $description" + this + + /** Used for diagnostics and debugging: A string that traces the creation + * history of a variable by following source links. Each variable on the + * path is characterized by the variable's id and the first letter of the + * variable's class name. The path ends in a plain variable with letter `V` that + * is not derived from some other variable. 
+ */ + protected def ids(using Context): String = + val trail = this.match + case dv: DerivedVar => dv.source.ids + case _ => "" + s"$id${getClass.getSimpleName.nn.take(1)}$trail" + + /** Adds variables to the ShownVars context property if that exists, which + * establishes a record of all variables printed in an error message. + * Prints variables wih ids under -Ycc-debug. + */ + override def toText(printer: Printer): Text = inContext(printer.printerContext) { + for vars <- ctx.property(ShownVars) do vars += this + super.toText(printer) ~ (Str(ids) provided !isConst && ctx.settings.YccDebug.value) + } + + override def toString = s"Var$id$elems" + end Var + + /** A variable that is derived from some other variable via a map or filter. */ + abstract class DerivedVar(initialElems: Refs)(using @constructorOnly ctx: Context) + extends Var(initialElems): + + // For debugging: A trace where a set was created. Note that logically it would make more + // sense to place this variable in Mapped, but that runs afoul of the initializatuon checker. + val stack = if debugSets && this.isInstanceOf[Mapped] then (new Throwable).getStackTrace().nn.take(20) else null + + /** The variable from which this variable is derived */ + def source: Var + + addAsDependentTo(source) + + override def propagateSolved()(using Context) = + if source.isConst && !isConst then markSolved() + end DerivedVar + + /** A variable that changes when `source` changes, where all additional new elements are mapped + * using ∪ { tm(x) | x <- source.elems }. + * @param source the original set that is mapped + * @param tm the type map, which is assumed to be idempotent on capture refs + * (except if ccUnsoundMaps is enabled) + * @param variance the assumed variance with which types with capturesets of size >= 2 are approximated + * (i.e. co: full capture set, contra: empty set, nonvariant is not allowed.) + * @param initial The initial mappings of source's elements at the point the Mapped set is created. 
+ */ + class Mapped private[CaptureSet] + (val source: Var, tm: TypeMap, variance: Int, initial: CaptureSet)(using @constructorOnly ctx: Context) + extends DerivedVar(initial.elems): + addAsDependentTo(initial) // initial mappings could change by propagation + + private def mapIsIdempotent = tm.isInstanceOf[IdempotentCaptRefMap] + + assert(ccAllowUnsoundMaps || mapIsIdempotent, tm.getClass) + + private def whereCreated(using Context): String = + if stack == null then "" + else i""" + |Stack trace of variable creation:" + |${stack.mkString("\n")}""" + + override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = + val added = + if origin eq source then // elements have to be mapped + mapRefs(newElems, tm, variance) + else + // elements are added by subcapturing propagation with this Mapped set + // as superset; no mapping is necessary or allowed. + Const(newElems) + super.addNewElems(added.elems, origin) + .andAlso { + if added.isConst then CompareResult.OK + else if added.asVar.recordDepsState() then { addAsDependentTo(added); CompareResult.OK } + else CompareResult.fail(this) + } + .andAlso { + if (origin ne source) && mapIsIdempotent then + // `tm` is idempotent, propagate back elems from image set. + // This is sound, since we know that for `r in newElems: tm(r) = r`, hence + // `r` is _one_ possible solution in `source` that would make an `r` appear in this set. + // It's not necessarily the only possible solution, so the scheme is incomplete. + source.tryInclude(newElems, this) + else if !mapIsIdempotent && variance <= 0 && !origin.isConst && (origin ne initial) && (origin ne source) then + // The map is neither a BiTypeMap nor an idempotent type map. + // In that case there's no much we can do. + // The scheme then does not propagate added elements back to source and rejects adding + // elements from variable sources in contra- and non-variant positions. 
In essence, + // we approximate types resulting from such maps by returning a possible super type + // from the actual type. But this is neither sound nor complete. + report.warning(i"trying to add elems ${CaptureSet(newElems)} from unrecognized source $origin of mapped set $this$whereCreated") + CompareResult.fail(this) + else + CompareResult.OK + } + + override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = + if source eq origin then + // it's a mapping of origin, so not a superset of `origin`, + // therefore don't contribute to the intersection. + universal + else + source.upperApprox(this).map(tm) + + override def propagateSolved()(using Context) = + if initial.isConst then super.propagateSolved() + + override def toString = s"Mapped$id($source, elems = $elems)" + end Mapped + + /** A mapping where the type map is required to be a bijection. + * Parameters as in Mapped. + */ + final class BiMapped private[CaptureSet] + (val source: Var, bimap: BiTypeMap, initialElems: Refs)(using @constructorOnly ctx: Context) + extends DerivedVar(initialElems): + + override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = + if origin eq source then + super.addNewElems(newElems.map(bimap.forward), origin) + else + super.addNewElems(newElems, origin) + .andAlso { + source.tryInclude(newElems.map(bimap.backward), this) + .showing(i"propagating new elems ${CaptureSet(newElems)} backward from $this to $source", capt) + } + + /** For a BiTypeMap, supertypes of the mapped type also constrain + * the source via the inverse type mapping and vice versa. That is, if + * B = f(A) and B <: C, then A <: f^-1(C), so C should flow into + * the upper approximation of A. + * Conversely if A <: C2, then we also know that B <: f(C2). + * These situations are modeled by the two branches of the conditional below. 
+ */ + override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = + val supApprox = super.computeApprox(this) + if source eq origin then supApprox.map(bimap.inverseTypeMap) + else source.upperApprox(this).map(bimap) ** supApprox + + override def toString = s"BiMapped$id($source, elems = $elems)" + end BiMapped + + /** A variable with elements given at any time as { x <- source.elems | p(x) } */ + class Filtered private[CaptureSet] + (val source: Var, p: CaptureRef -> Boolean)(using @constructorOnly ctx: Context) + extends DerivedVar(source.elems.filter(p)): + + override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = + val filtered = newElems.filter(p) + if origin eq source then + super.addNewElems(filtered, origin) + else + // Filtered elements have to be back-propagated to source. + // Elements that don't satisfy `p` are not allowed. + super.addNewElems(newElems, origin) + .andAlso { + if filtered.size == newElems.size then source.tryInclude(newElems, this) + else CompareResult.fail(this) + } + + override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = + if source eq origin then + // it's a filter of origin, so not a superset of `origin`, + // therefore don't contribute to the intersection. 
+ universal + else + source.upperApprox(this).filter(p) + + override def toString = s"${getClass.getSimpleName}$id($source, elems = $elems)" + end Filtered + + /** A variable with elements given at any time as { x <- source.elems | !other.accountsFor(x) } */ + class Diff(source: Var, other: Const)(using Context) + extends Filtered(source, !other.accountsFor(_)) + + class Intersected(cs1: CaptureSet, cs2: CaptureSet)(using Context) + extends Var(elemIntersection(cs1, cs2)): + addAsDependentTo(cs1) + addAsDependentTo(cs2) + deps += cs1 + deps += cs2 + + override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = + val added = + if origin eq cs1 then newElems.filter(cs2.accountsFor) + else if origin eq cs2 then newElems.filter(cs1.accountsFor) + else newElems + // If origin is not cs1 or cs2, then newElems will be propagated to + // cs1, cs2 since they are in deps. + super.addNewElems(added, origin) + + override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = + if (origin eq cs1) || (origin eq cs2) then + // it's a combination of origin with some other set, so not a superset of `origin`, + // therefore don't contribute to the intersection. + universal + else + CaptureSet(elemIntersection(cs1.upperApprox(this), cs2.upperApprox(this))) + + override def propagateSolved()(using Context) = + if cs1.isConst && cs2.isConst && !isConst then markSolved() + end Intersected + + def elemIntersection(cs1: CaptureSet, cs2: CaptureSet)(using Context): Refs = + cs1.elems.filter(cs2.mightAccountFor) ++ cs2.elems.filter(cs1.mightAccountFor) + + /** Extrapolate tm(r) according to `variance`. Let r1 be the result of tm(r). 
+ * - If r1 is a tracked CaptureRef, return {r1} + * - If r1 has an empty capture set, return {} + * - Otherwise, + * - if the variance is covariant, return r1's capture set + * - if the variance is contravariant, return {} + * - Otherwise assertion failure + */ + def extrapolateCaptureRef(r: CaptureRef, tm: TypeMap, variance: Int)(using Context): CaptureSet = + val r1 = tm(r) + val upper = r1.captureSet + def isExact = + upper.isAlwaysEmpty || upper.isConst && upper.elems.size == 1 && upper.elems.contains(r1) + if variance > 0 || isExact then upper + else if variance < 0 then CaptureSet.empty + else assert(false, i"trying to add $upper from $r via ${tm.getClass} in a non-variant setting") + + /** Apply `f` to each element in `xs`, and join result sets with `++` */ + def mapRefs(xs: Refs, f: CaptureRef => CaptureSet)(using Context): CaptureSet = + ((empty: CaptureSet) /: xs)((cs, x) => cs ++ f(x)) + + /** Apply extrapolated `tm` to each element in `xs`, and join result sets with `++` */ + def mapRefs(xs: Refs, tm: TypeMap, variance: Int)(using Context): CaptureSet = + mapRefs(xs, extrapolateCaptureRef(_, tm, variance)) + + /** Return true iff + * - arg1 is a TypeBounds >: CL T <: CH T of two capturing types with equal parents. + * - arg2 is a capturing type CA U + * - CH <: CA <: CL + * In other words, we can unify CL, CH and CA. + */ + def subCapturesRange(arg1: TypeBounds, arg2: Type)(using Context): Boolean = arg1 match + case TypeBounds(CapturingType(lo, loRefs), CapturingType(hi, hiRefs)) if lo =:= hi => + given VarState = VarState() + val cs2 = arg2.captureSet + hiRefs.subCaptures(cs2).isOK && cs2.subCaptures(loRefs).isOK + case _ => + false + + /** A TypeMap with the property that every capture reference in the image + * of the map is mapped to itself. I.e. for all capture references r1, r2, + * if M(r1) == r2 then M(r2) == r2. 
+ */ + trait IdempotentCaptRefMap extends TypeMap + + /** A TypeMap that is the identity on capture references */ + trait IdentityCaptRefMap extends TypeMap + + type CompareResult = CompareResult.TYPE + + /** The result of subcapturing comparisons is an opaque type CompareResult.TYPE. + * This is either OK, indicating success, or + * another capture set, indicating failure. The failure capture set + * is the one that did not allow propagaton of elements into it. + */ + object CompareResult: + opaque type TYPE = CaptureSet + val OK: TYPE = Const(emptySet) + def fail(cs: CaptureSet): TYPE = cs + + extension (result: TYPE) + /** The result is OK */ + def isOK: Boolean = result eq OK + /** If not isOK, the blocking capture set */ + def blocking: CaptureSet = result + inline def andAlso(op: Context ?=> TYPE)(using Context): TYPE = if result.isOK then op else result + def show(using Context): String = if result.isOK then "OK" else i"$result" + end CompareResult + + /** A VarState serves as a snapshot mechanism that can undo + * additions of elements or super sets if an operation fails + */ + class VarState: + + /** A map from captureset variables to their elements at the time of the snapshot. */ + private val elemsMap: util.EqHashMap[Var, Refs] = new util.EqHashMap + + /** A map from captureset variables to their dependent sets at the time of the snapshot. */ + private val depsMap: util.EqHashMap[Var, Deps] = new util.EqHashMap + + /** The recorded elements of `v` (it's required that a recording was made) */ + def elems(v: Var): Refs = elemsMap(v) + + /** Optionally the recorded elements of `v`, None if nothing was recorded for `v` */ + def getElems(v: Var): Option[Refs] = elemsMap.get(v) + + /** Record elements, return whether this was allowed. + * By default, recording is allowed but the special state FrozenState + * overrides this. 
+ */ + def putElems(v: Var, elems: Refs): Boolean = { elemsMap(v) = elems; true } + + /** The recorded dependent sets of `v` (it's required that a recording was made) */ + def deps(v: Var): Deps = depsMap(v) + + /** Optionally the recorded dependent sets of `v`, None if nothing was recorded for `v` */ + def getDeps(v: Var): Option[Deps] = depsMap.get(v) + + /** Record dependent sets, return whether this was allowed. + * By default, recording is allowed but the special state FrozenState + * overrides this. + */ + def putDeps(v: Var, deps: Deps): Boolean = { depsMap(v) = deps; true } + + /** Roll back global state to what was recorded in this VarState */ + def rollBack(): Unit = + elemsMap.keysIterator.foreach(_.resetElems()(using this)) + depsMap.keysIterator.foreach(_.resetDeps()(using this)) + end VarState + + /** A special state that does not allow to record elements or dependent sets. + * In effect this means that no new elements or dependent sets can be added + * in this state (since the previous state cannot be recorded in a snapshot) + */ + @sharable + object FrozenState extends VarState: + override def putElems(v: Var, refs: Refs) = false + override def putDeps(v: Var, deps: Deps) = false + override def rollBack(): Unit = () + + @sharable + /** A special state that turns off recording of elements. Used only + * in `addSub` to prevent cycles in recordings. 
+ */ + private object UnrecordedState extends VarState: + override def putElems(v: Var, refs: Refs) = true + override def putDeps(v: Var, deps: Deps) = true + override def rollBack(): Unit = () + + /** The current VarState, as passed by the implicit context */ + def varState(using state: VarState): VarState = state + + /* Not needed: + def ofClass(cinfo: ClassInfo, argTypes: List[Type])(using Context): CaptureSet = + CaptureSet.empty + def captureSetOf(tp: Type): CaptureSet = tp match + case tp: TypeRef if tp.symbol.is(ParamAccessor) => + def mapArg(accs: List[Symbol], tps: List[Type]): CaptureSet = accs match + case acc :: accs1 if tps.nonEmpty => + if acc == tp.symbol then tps.head.captureSet + else mapArg(accs1, tps.tail) + case _ => + empty + mapArg(cinfo.cls.paramAccessors, argTypes) + case _ => + tp.captureSet + val css = + for + parent <- cinfo.parents if parent.classSymbol == defn.RetainingClass + arg <- parent.argInfos + yield captureSetOf(arg) + css.foldLeft(empty)(_ ++ _) + */ + + /** The capture set of the type underlying a CaptureRef */ + def ofInfo(ref: CaptureRef)(using Context): CaptureSet = ref match + case ref: TermRef if ref.isRootCapability => ref.singletonCaptureSet + case _ => ofType(ref.underlying) + + /** Capture set of a type */ + def ofType(tp: Type)(using Context): CaptureSet = + def recur(tp: Type): CaptureSet = tp.dealias match + case tp: TermRef => + tp.captureSet + case tp: TermParamRef => + tp.captureSet + case _: TypeRef => + if tp.classSymbol.hasAnnotation(defn.CapabilityAnnot) then universal else empty + case _: TypeParamRef => + empty + case CapturingType(parent, refs) => + recur(parent) ++ refs + case AppliedType(tycon, args) => + val cs = recur(tycon) + tycon.typeParams match + case tparams @ (LambdaParam(tl, _) :: _) => cs.substParams(tl, args) + case _ => cs + case tp: TypeProxy => + recur(tp.underlying) + case AndType(tp1, tp2) => + recur(tp1) ** recur(tp2) + case OrType(tp1, tp2) => + recur(tp1) ++ recur(tp2) + case _ => + 
empty + recur(tp) + .showing(i"capture set of $tp = $result", capt) + + private val ShownVars: Property.Key[mutable.Set[Var]] = Property.Key() + + /** Perform `op`. Under -Ycc-debug, collect and print info about all variables reachable + * via `(_.deps)*` from the variables that were shown in `op`. + */ + def withCaptureSetsExplained[T](op: Context ?=> T)(using ctx: Context): T = + if ctx.settings.YccDebug.value then + val shownVars = mutable.Set[Var]() + inContext(ctx.withProperty(ShownVars, Some(shownVars))) { + try op + finally + val reachable = mutable.Set[Var]() + val todo = mutable.Queue[Var]() ++= shownVars + def incl(cv: Var): Unit = + if !reachable.contains(cv) then todo += cv + while todo.nonEmpty do + val cv = todo.dequeue() + if !reachable.contains(cv) then + reachable += cv + cv.deps.foreach { + case cv: Var => incl(cv) + case _ => + } + cv match + case cv: DerivedVar => incl(cv.source) + case _ => + val allVars = reachable.toArray.sortBy(_.id) + println(i"Capture set dependencies:") + for cv <- allVars do + println(i" ${cv.show.padTo(20, ' ')} :: ${cv.deps.toList}%, %") + } + else op +end CaptureSet diff --git a/tests/pos-with-compiler-cc/dotc/cc/CapturingType.scala b/tests/pos-with-compiler-cc/dotc/cc/CapturingType.scala new file mode 100644 index 000000000000..e9862f1f20b8 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/cc/CapturingType.scala @@ -0,0 +1,72 @@ +package dotty.tools +package dotc +package cc + +import core.* +import Types.*, Symbols.*, Contexts.* + +/** A (possibly boxed) capturing type. This is internally represented as an annotated type with a @retains + * or @retainsByName annotation, but the extractor will succeed only at phase CheckCaptures. + * That way, we can ignore caturing information until phase CheckCaptures since it is + * wrapped in a plain annotation. + * + * The same trick does not work for the boxing information. 
Boxing is context dependent, so + * we have to add that information in the Setup step preceding CheckCaptures. Boxes are + * added for all type arguments of methods. For type arguments of applied types a different + * strategy is used where we box arguments of applied types that are not functions when + * accessing the argument. + * + * An alternative strategy would add boxes also to arguments of applied types during setup. + * But this would have to be done for all possibly accessibly types from the compiled units + * as well as their dependencies. It's difficult to do this in a DenotationTransformer without + * accidentally forcing symbol infos. That's why this alternative was not implemented. + * If we would go back on this it would make sense to also treat captuyring types different + * from annotations and to generate them all during Setup and in DenotationTransformers. + */ +object CapturingType: + + /** Smart constructor that drops empty capture sets and fuses compatible capturiong types. + * An outer type capturing type A can be fused with an inner capturing type B if their + * boxing status is the same or if A is boxed. + */ + def apply(parent: Type, refs: CaptureSet, boxed: Boolean = false)(using Context): Type = + if refs.isAlwaysEmpty then parent + else parent match + case parent @ CapturingType(parent1, refs1) if boxed || !parent.isBoxed => + apply(parent1, refs ++ refs1, boxed) + case _ => + AnnotatedType(parent, CaptureAnnotation(refs, boxed)(defn.RetainsAnnot)) + + /** An extractor that succeeds only during CheckCapturingPhase. Boxing statis is + * returned separately by CaptureOps.isBoxed. 
+ */ + def unapply(tp: AnnotatedType)(using Context): Option[(Type, CaptureSet)] = + if ctx.phase == Phases.checkCapturesPhase + && tp.annot.symbol == defn.RetainsAnnot + && !ctx.mode.is(Mode.IgnoreCaptures) + then + EventuallyCapturingType.unapply(tp) + else None + +end CapturingType + +/** An extractor for types that will be capturing types at phase CheckCaptures. Also + * included are types that indicate captures on enclosing call-by-name parameters + * before phase ElimByName. + */ +object EventuallyCapturingType: + + def unapply(tp: AnnotatedType)(using Context): Option[(Type, CaptureSet)] = + val sym = tp.annot.symbol + if sym == defn.RetainsAnnot || sym == defn.RetainsByNameAnnot then + tp.annot match + case ann: CaptureAnnotation => + Some((tp.parent, ann.refs)) + case ann => + try Some((tp.parent, ann.tree.toCaptureSet)) + catch case ex: IllegalCaptureRef => None + else None + +end EventuallyCapturingType + + diff --git a/tests/pos-with-compiler-cc/dotc/cc/CheckCaptures.scala b/tests/pos-with-compiler-cc/dotc/cc/CheckCaptures.scala new file mode 100644 index 000000000000..97f8e1eea405 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/cc/CheckCaptures.scala @@ -0,0 +1,949 @@ +package dotty.tools +package dotc +package cc + +import core.* +import Phases.*, DenotTransformers.*, SymDenotations.* +import Contexts.*, Names.*, Flags.*, Symbols.*, Decorators.* +import Types.*, StdNames.*, Denotations.* +import config.Printers.{capt, recheckr} +import config.{Config, Feature} +import ast.{tpd, untpd, Trees} +import Trees.* +import typer.RefChecks.{checkAllOverrides, checkSelfAgainstParents} +import typer.Checking.{checkBounds, checkAppliedTypesIn} +import util.{SimpleIdentitySet, EqHashMap, SrcPos} +import transform.SymUtils.* +import transform.{Recheck, PreRecheck} +import Recheck.* +import scala.collection.mutable +import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap} +import StdNames.nme +import NameKinds.DefaultGetterName +import reporting.trace 
+import language.experimental.pureFunctions
+
+/** The capture checker */
+object CheckCaptures:
+  import ast.tpd.*
+
+  class Pre extends PreRecheck, SymTransformer:
+
+    override def isEnabled(using Context) = true
+
+    /** Reset `private` flags of parameter accessors so that we can refine them
+     *  in Setup if they have non-empty capture sets. Special handling of some
+     *  symbols defined for case classes.
+     */
+    def transformSym(sym: SymDenotation)(using Context): SymDenotation =
+      if sym.isAllOf(PrivateParamAccessor) && !sym.hasAnnotation(defn.ConstructorOnlyAnnot) then
+        sym.copySymDenotation(initFlags = sym.flags &~ Private | Recheck.ResetPrivate)
+      else if Synthetics.needsTransform(sym) then
+        Synthetics.transformToCC(sym)
+      else
+        sym
+  end Pre
+
+  /** A class describing environments.
+   *  @param owner          the current owner
+   *  @param nestedInOwner  true if the environment is a temporary one nested in the owner's environment,
+   *                        and does not have a different actual owner symbol (this happens when doing box adaptation).
+   *  @param captured       the capture set containing all references to tracked free variables outside of boxes
+   *  @param isBoxed        true if the environment is inside a box (in which case references are not counted)
+   *  @param outer0         the next enclosing environment
+   */
+  case class Env(
+    owner: Symbol,
+    nestedInOwner: Boolean,
+    captured: CaptureSet,
+    isBoxed: Boolean,
+    outer0: Env | Null
+  ):
+    def outer = outer0.nn
+
+    def isOutermost = outer0 == null
+
+    /** If an environment is open it tracks free references */
+    def isOpen = !captured.isAlwaysEmpty && !isBoxed
+  end Env
+
+  /** Similar to normal substParams, but this is an approximating type map that
+   *  maps parameters in contravariant capture sets to the empty set.
+   *  TODO: check what happens with non-variant.
+ */ + final class SubstParamsMap(from: BindingType, to: List[Type])(using Context) + extends ApproximatingTypeMap, IdempotentCaptRefMap: + def apply(tp: Type): Type = tp match + case tp: ParamRef => + if tp.binder == from then to(tp.paramNum) else tp + case tp: NamedType => + if tp.prefix `eq` NoPrefix then tp + else tp.derivedSelect(apply(tp.prefix)) + case _: ThisType => + tp + case _ => + mapOver(tp) + + /** Check that a @retains annotation only mentions references that can be tracked. + * This check is performed at Typer. + */ + def checkWellformed(ann: Tree)(using Context): Unit = + for elem <- retainedElems(ann) do + elem.tpe match + case ref: CaptureRef => + if !ref.canBeTracked then + report.error(em"$elem cannot be tracked since it is not a parameter or local value", elem.srcPos) + case tpe => + report.error(em"$elem: $tpe is not a legal element of a capture set", elem.srcPos) + + /** If `tp` is a capturing type, check that all references it mentions have non-empty + * capture sets. Also: warn about redundant capture annotations. + * This check is performed after capture sets are computed in phase cc. + */ + def checkWellformedPost(tp: Type, pos: SrcPos)(using Context): Unit = tp match + case CapturingType(parent, refs) => + for ref <- refs.elems do + if ref.captureSetOfInfo.elems.isEmpty then + report.error(em"$ref cannot be tracked since its capture set is empty", pos) + else if parent.captureSet.accountsFor(ref) then + report.warning(em"redundant capture: $parent already accounts for $ref", pos) + case _ => + + /** Warn if `ann`, which is a tree of a @retains annotation, defines some elements that + * are already accounted for by other elements of the same annotation. + * Note: We need to perform the check on the original annotation rather than its + * capture set since the conversion to a capture set already eliminates redundant elements. 
+ */ + def warnIfRedundantCaptureSet(ann: Tree)(using Context): Unit = + // The lists `elems(i) :: prev.reverse :: elems(0),...,elems(i-1),elems(i+1),elems(n)` + // where `n == elems.length-1`, i <- 0..n`. + // I.e. + // choices(Nil, elems) = [[elems(i), elems(0), ..., elems(i-1), elems(i+1), .... elems(n)] | i <- 0..n] + def choices(prev: List[Tree], elems: List[Tree]): List[List[Tree]] = elems match + case Nil => Nil + case elem :: elems => + List(elem :: (prev reverse_::: elems)) ++ choices(elem :: prev, elems) + for case first :: others <- choices(Nil, retainedElems(ann)) do + val firstRef = first.toCaptureRef + val remaining = CaptureSet(others.map(_.toCaptureRef)*) + if remaining.accountsFor(firstRef) then + report.warning(em"redundant capture: $remaining already accounts for $firstRef", ann.srcPos) + +class CheckCaptures extends Recheck, SymTransformer: + thisPhase => + + import ast.tpd.* + import CheckCaptures.* + + def phaseName: String = "cc" + override def isEnabled(using Context) = true + + def newRechecker()(using Context) = CaptureChecker(ctx) + + override def run(using Context): Unit = + if Feature.ccEnabled then + checkOverrides.traverse(ctx.compilationUnit.tpdTree) + super.run + + override def transformSym(sym: SymDenotation)(using Context): SymDenotation = + if Synthetics.needsTransform(sym) then Synthetics.transformFromCC(sym) + else super.transformSym(sym) + + /** Check overrides again, taking capture sets into account. + * TODO: Can we avoid doing overrides checks twice? + * We need to do them here since only at this phase CaptureTypes are relevant + * But maybe we can then elide the check during the RefChecks phase under captureChecking? 
+   */
+  def checkOverrides = new TreeTraverser:
+    def traverse(t: Tree)(using Context) =
+      t match
+        case t: Template => checkAllOverrides(ctx.owner.asClass)
+        case _ =>
+      traverseChildren(t)
+
+  class CaptureChecker(ictx: Context) extends Rechecker(ictx):
+    import ast.tpd.*
+
+    override def keepType(tree: Tree) =
+      super.keepType(tree)
+      || tree.isInstanceOf[Try]  // type of `try` needs to be checked for * escapes
+
+    /** Instantiate capture set variables appearing contra-variantly to their
+     *  upper approximation.
+     */
+    private def interpolator(startingVariance: Int = 1)(using Context) = new TypeTraverser:
+      variance = startingVariance
+      override def traverse(t: Type) =
+        t match
+          case CapturingType(parent, refs: CaptureSet.Var) =>
+            if variance < 0 then
+              capt.println(i"solving $t")
+              refs.solve()
+            traverse(parent)
+          case t @ RefinedType(_, nme.apply, rinfo) if defn.isFunctionOrPolyType(t) =>
+            traverse(rinfo)
+          case tp: TypeVar =>
+            // deliberately empty: don't descend into type variables
+          case tp: TypeRef =>
+            traverse(tp.prefix)
+          case _ =>
+            traverseChildren(t)
+
+    /** If `tpt` is an inferred type, interpolate capture set variables appearing contra-
+     *  variantly in it.
+ */ + private def interpolateVarsIn(tpt: Tree)(using Context): Unit = + if tpt.isInstanceOf[InferredTypeTree] then + interpolator().traverse(tpt.knownType) + .showing(i"solved vars in ${tpt.knownType}", capt) + + /** Assert subcapturing `cs1 <: cs2` */ + def assertSub(cs1: CaptureSet, cs2: CaptureSet)(using Context) = + assert(cs1.subCaptures(cs2, frozen = false).isOK, i"$cs1 is not a subset of $cs2") + + /** Check subcapturing `{elem} <: cs`, report error on failure */ + def checkElem(elem: CaptureRef, cs: CaptureSet, pos: SrcPos)(using Context) = + val res = elem.singletonCaptureSet.subCaptures(cs, frozen = false) + if !res.isOK then + report.error(i"$elem cannot be referenced here; it is not included in the allowed capture set ${res.blocking}", pos) + + /** Check subcapturing `cs1 <: cs2`, report error on failure */ + def checkSubset(cs1: CaptureSet, cs2: CaptureSet, pos: SrcPos)(using Context) = + val res = cs1.subCaptures(cs2, frozen = false) + if !res.isOK then + def header = + if cs1.elems.size == 1 then i"reference ${cs1.elems.toList}%, % is not" + else i"references $cs1 are not all" + report.error(i"$header included in allowed capture set ${res.blocking}", pos) + + /** The current environment */ + private var curEnv: Env = Env(NoSymbol, nestedInOwner = false, CaptureSet.empty, isBoxed = false, null) + + private val myCapturedVars: util.EqHashMap[Symbol, CaptureSet] = EqHashMap() + + /** If `sym` is a class or method nested inside a term, a capture set variable representing + * the captured variables of the environment associated with `sym`. 
+ */ + def capturedVars(sym: Symbol)(using Context) = + myCapturedVars.getOrElseUpdate(sym, + if sym.ownersIterator.exists(_.isTerm) then CaptureSet.Var() + else CaptureSet.empty) + + /** For all nested environments up to `limit` perform `op` */ + def forallOuterEnvsUpTo(limit: Symbol)(op: Env => Unit)(using Context): Unit = + def recur(env: Env): Unit = + if env.isOpen && env.owner != limit then + op(env) + if !env.isOutermost then + var nextEnv = env.outer + if env.owner.isConstructor then + if nextEnv.owner != limit && !nextEnv.isOutermost then + recur(nextEnv.outer) + else recur(nextEnv) + recur(curEnv) + + /** Include `sym` in the capture sets of all enclosing environments nested in the + * the environment in which `sym` is defined. + */ + def markFree(sym: Symbol, pos: SrcPos)(using Context): Unit = + if sym.exists then + val ref = sym.termRef + if ref.isTracked then + forallOuterEnvsUpTo(sym.enclosure) { env => + capt.println(i"Mark $sym with cs ${ref.captureSet} free in ${env.owner}") + checkElem(ref, env.captured, pos) + } + + /** Make sure (projected) `cs` is a subset of the capture sets of all enclosing + * environments. 
At each stage, only include references from `cs` that are outside
+   *  the environment's owner
+   */
+    def markFree(cs: CaptureSet, pos: SrcPos)(using Context): Unit =
+      if !cs.isAlwaysEmpty then
+        forallOuterEnvsUpTo(ctx.owner.topLevelClass) { env =>
+          val included = cs.filter {
+            case ref: TermRef =>
+              (env.nestedInOwner || env.owner != ref.symbol.owner)
+              && env.owner.isContainedIn(ref.symbol.owner)
+            case ref: ThisType =>
+              (env.nestedInOwner || env.owner != ref.cls)
+              && env.owner.isContainedIn(ref.cls)
+            case _ => false
+          }
+          capt.println(i"Include call capture $included in ${env.owner}")
+          checkSubset(included, env.captured, pos)
+        }
+
+    /** Include references captured by the called method in the current environment stack */
+    def includeCallCaptures(sym: Symbol, pos: SrcPos)(using Context): Unit =
+      if sym.exists && curEnv.isOpen then markFree(capturedVars(sym), pos)
+
+    override def recheckIdent(tree: Ident)(using Context): Type =
+      if tree.symbol.is(Method) then includeCallCaptures(tree.symbol, tree.srcPos)
+      else markFree(tree.symbol, tree.srcPos)
+      super.recheckIdent(tree)
+
+    /** A specialized implementation of the selection rule.
+     *
+     *  E |- f: Cf f { m: Cr R }
+     *  ------------------------
+     *  E |- f.m: C R
+     *
+     *  The implementation picks as `C` one of `{f}` or `Cr`, depending on the
+     *  outcome of a `mightSubcapture` test. It picks `{f}` if this might subcapture Cr
+     *  and Cr otherwise.
+     */
+    override def recheckSelection(tree: Select, qualType: Type, name: Name, pt: Type)(using Context) = {
+      def disambiguate(denot: Denotation): Denotation = denot match
+        case MultiDenotation(denot1, denot2) =>
+          // This case can arise when we try to merge multiple types that have different
+          // capture sets on some part. For instance an asSeenFrom might produce
+          // a bi-mapped capture set arising from a substitution. Applying the same substitution
+          // to the same type twice will nevertheless produce different capture sets which can
+          // lead to a failure in disambiguation since neither alternative is better than the
+          // other in a frozen constraint. An example test case is disambiguate-select.scala.
+          // We address the problem by disambiguating while ignoring all capture sets as a fallback.
+          withMode(Mode.IgnoreCaptures) {
+            disambiguate(denot1).meet(disambiguate(denot2), qualType)
+          }
+        case _ => denot
+
+      val selType = recheckSelection(tree, qualType, name, disambiguate)
+      val selCs = selType.widen.captureSet
+      if selCs.isAlwaysEmpty || selType.widen.isBoxedCapturing || qualType.isBoxedCapturing then
+        selType
+      else
+        val qualCs = qualType.captureSet
+        capt.println(i"intersect $qualType, ${selType.widen}, $qualCs, $selCs in $tree")
+        if qualCs.mightSubcapture(selCs)
+            && !selCs.mightSubcapture(qualCs)
+            && !pt.stripCapturing.isInstanceOf[SingletonType]
+        then
+          selType.widen.stripCapturing.capturing(qualCs)
+            .showing(i"alternate type for select $tree: $selType --> $result, $qualCs / $selCs", capt)
+        else
+          selType
+    }//.showing(i"recheck sel $tree, $qualType = $result")
+
+    /** A specialized implementation of the apply rule.
+     *
+     *  E |- f: Cf (Ra -> Cr Rr)
+     *  E |- a: Ca Ra
+     *  ------------------------
+     *  E |- f a: C Rr
+     *
+     *  The implementation picks as `C` one of `{f, a}` or `Cr`, depending on the
+     *  outcome of a `mightSubcapture` test. It picks `{f, a}` if this might subcapture Cr
+     *  and Cr otherwise.
+     */
+    override def recheckApply(tree: Apply, pt: Type)(using Context): Type =
+      val meth = tree.fun.symbol
+      includeCallCaptures(meth, tree.srcPos)
+      // Recheck the single argument of a `caps.unsafe*` marker method after
+      // transforming its type with `f`; the result becomes the type of the call.
+      def mapArgUsing(f: Type => Type) =
+        val arg :: Nil = tree.args: @unchecked
+        val argType0 = f(recheckStart(arg, pt))
+        val argType = super.recheckFinish(argType0, arg, pt)
+        super.recheckFinish(argType, tree, pt)
+
+      if meth == defn.Caps_unsafeBox then
+        mapArgUsing(_.forceBoxStatus(true))
+      else if meth == defn.Caps_unsafeUnbox then
+        mapArgUsing(_.forceBoxStatus(false))
+      else if meth == defn.Caps_unsafeBoxFunArg then
+        // box the parameter type of the unary function argument
+        mapArgUsing {
+          case defn.FunctionOf(paramtpe :: Nil, restpe, isContextual, isErased) =>
+            defn.FunctionOf(paramtpe.forceBoxStatus(true) :: Nil, restpe, isContextual, isErased)
+        }
+      else
+        super.recheckApply(tree, pt) match
+          case appType @ CapturingType(appType1, refs) =>
+            tree.fun match
+              case Select(qual, _)
+              if !tree.fun.symbol.isConstructor
+                  && !qual.tpe.isBoxedCapturing
+                  && !tree.args.exists(_.tpe.isBoxedCapturing)
+                  && qual.tpe.captureSet.mightSubcapture(refs)
+                  && tree.args.forall(_.tpe.captureSet.mightSubcapture(refs))
+              =>
+                // Narrow the result capture set to the captures of prefix and arguments
+                // when that is provably no worse than the declared result captures.
+                val callCaptures = tree.args.foldLeft(qual.tpe.captureSet)((cs, arg) =>
+                  cs ++ arg.tpe.captureSet)
+                appType.derivedCapturingType(appType1, callCaptures)
+                  .showing(i"narrow $tree: $appType, refs = $refs, qual = ${qual.tpe.captureSet} --> $result", capt)
+              case _ => appType
+          case appType => appType
+    end recheckApply
+
+    /** Handle an application of method `sym` with type `mt` to arguments of types `argTypes`.
+     *  This means:
+     *   - Instantiate result type with actual arguments
+     *   - If call is to a constructor:
+     *      - remember types of arguments corresponding to tracked
+     *        parameters in refinements.
+     *      - add capture set of instantiated class to capture set of result type.
+ */ + override def instantiate(mt: MethodType, argTypes: List[Type], sym: Symbol)(using Context): Type = + val ownType = + if mt.isResultDependent then SubstParamsMap(mt, argTypes)(mt.resType) + else mt.resType + + if sym.isConstructor then + val cls = sym.owner.asClass + + /** First half of result pair: + * Refine the type of a constructor call `new C(t_1, ..., t_n)` + * to C{val x_1: T_1, ..., x_m: T_m} where x_1, ..., x_m are the tracked + * parameters of C and T_1, ..., T_m are the types of the corresponding arguments. + * + * Second half: union of all capture sets of arguments to tracked parameters. + */ + def addParamArgRefinements(core: Type, initCs: CaptureSet): (Type, CaptureSet) = + mt.paramNames.lazyZip(argTypes).foldLeft((core, initCs)) { (acc, refine) => + val (core, allCaptures) = acc + val (getterName, argType) = refine + val getter = cls.info.member(getterName).suchThat(_.is(ParamAccessor)).symbol + if getter.termRef.isTracked && !getter.is(Private) + then (RefinedType(core, getterName, argType), allCaptures ++ argType.captureSet) + else (core, allCaptures) + } + + def augmentConstructorType(core: Type, initCs: CaptureSet): Type = core match + case core: MethodType => + // more parameters to follow; augment result type + core.derivedLambdaType(resType = augmentConstructorType(core.resType, initCs)) + case CapturingType(parent, refs) => + // can happen for curried constructors if instantiate of a previous step + // added capture set to result. 
+ augmentConstructorType(parent, initCs ++ refs) + case _ => + val (refined, cs) = addParamArgRefinements(core, initCs) + refined.capturing(cs) + + augmentConstructorType(ownType, CaptureSet.empty) match + case augmented: MethodType => + augmented + case augmented => + // add capture sets of class and constructor to final result of constructor call + augmented.capturing(capturedVars(cls) ++ capturedVars(sym)) + .showing(i"constr type $mt with $argTypes%, % in $cls = $result", capt) + else ownType + end instantiate + + override def recheckClosure(tree: Closure, pt: Type)(using Context): Type = + val cs = capturedVars(tree.meth.symbol) + capt.println(i"typing closure $tree with cvs $cs") + super.recheckClosure(tree, pt).capturing(cs) + .showing(i"rechecked $tree / $pt = $result", capt) + + /** Additionally to normal processing, update types of closures if the expected type + * is a function with only pure parameters. In that case, make the anonymous function + * also have the same parameters as the prototype. + * TODO: Develop a clearer rationale for this. + * TODO: Can we generalize this to arbitrary parameters? + * Currently some tests fail if we do this. (e.g. neg.../stackAlloc.scala, others) + */ + override def recheckBlock(block: Block, pt: Type)(using Context): Type = + block match + case closureDef(mdef) => + pt.dealias match + case defn.FunctionOf(ptformals, _, _, _) + if ptformals.nonEmpty && ptformals.forall(_.captureSet.isAlwaysEmpty) => + // Redo setup of the anonymous function so that formal parameters don't + // get capture sets. This is important to avoid false widenings to `*` + // when taking the base type of the actual closures's dependent function + // type so that it conforms to the expected non-dependent function type. + // See withLogFile.scala for a test case. + val meth = mdef.symbol + // First, undo the previous setup which installed a completer for `meth`. 
+ atPhase(preRecheckPhase.prev)(meth.denot.copySymDenotation()) + .installAfter(preRecheckPhase) + + // Next, update all parameter symbols to match expected formals + meth.paramSymss.head.lazyZip(ptformals).foreach { (psym, pformal) => + psym.updateInfoBetween(preRecheckPhase, thisPhase, pformal.mapExprType) + } + // Next, update types of parameter ValDefs + mdef.paramss.head.lazyZip(ptformals).foreach { (param, pformal) => + val ValDef(_, tpt, _) = param: @unchecked + tpt.rememberTypeAlways(pformal) + } + // Next, install a new completer reflecting the new parameters for the anonymous method + val mt = meth.info.asInstanceOf[MethodType] + val completer = new LazyType: + def complete(denot: SymDenotation)(using Context) = + denot.info = mt.companion(ptformals, mdef.tpt.knownType) + .showing(i"simplify info of $meth to $result", capt) + recheckDef(mdef, meth) + meth.updateInfoBetween(preRecheckPhase, thisPhase, completer) + case _ => + case _ => + super.recheckBlock(block, pt) + + override def recheckValDef(tree: ValDef, sym: Symbol)(using Context): Unit = + try + if !sym.is(Module) then // Modules are checked by checking the module class + super.recheckValDef(tree, sym) + finally + if !sym.is(Param) then + // Parameters with inferred types belong to anonymous methods. We need to wait + // for more info from the context, so we cannot interpolate. Note that we cannot + // expect to have all necessary info available at the point where the anonymous + // function is compiled since we do not propagate expected types into blocks. 
+ interpolateVarsIn(tree.tpt) + + override def recheckDefDef(tree: DefDef, sym: Symbol)(using Context): Unit = + if !Synthetics.isExcluded(sym) then + val saved = curEnv + val localSet = capturedVars(sym) + if !localSet.isAlwaysEmpty then curEnv = Env(sym, nestedInOwner = false, localSet, isBoxed = false, curEnv) + try super.recheckDefDef(tree, sym) + finally + interpolateVarsIn(tree.tpt) + curEnv = saved + + /** Class-specific capture set relations: + * 1. The capture set of a class includes the capture sets of its parents. + * 2. The capture set of the self type of a class includes the capture set of the class. + * 3. The capture set of the self type of a class includes the capture set of every class parameter, + * unless the parameter is marked @constructorOnly. + */ + override def recheckClassDef(tree: TypeDef, impl: Template, cls: ClassSymbol)(using Context): Type = + val saved = curEnv + val localSet = capturedVars(cls) + for parent <- impl.parents do // (1) + checkSubset(capturedVars(parent.tpe.classSymbol), localSet, parent.srcPos) + if !localSet.isAlwaysEmpty then curEnv = Env(cls, nestedInOwner = false, localSet, isBoxed = false, curEnv) + try + val thisSet = cls.classInfo.selfType.captureSet.withDescription(i"of the self type of $cls") + checkSubset(localSet, thisSet, tree.srcPos) // (2) + for param <- cls.paramGetters do + if !param.hasAnnotation(defn.ConstructorOnlyAnnot) then + checkSubset(param.termRef.captureSet, thisSet, param.srcPos) // (3) + if cls.derivesFrom(defn.ThrowableClass) then + checkSubset(thisSet, CaptureSet.emptyOfException, tree.srcPos) + super.recheckClassDef(tree, impl, cls) + finally + curEnv = saved + + /** If type is of the form `T @requiresCapability(x)`, + * mark `x` as free in the current environment. This is used to require the + * correct `CanThrow` capability when encountering a `throw`. 
+ */ + override def recheckTyped(tree: Typed)(using Context): Type = + tree.tpt.tpe match + case AnnotatedType(_, annot) if annot.symbol == defn.RequiresCapabilityAnnot => + annot.tree match + case Apply(_, cap :: Nil) => + markFree(cap.symbol, tree.srcPos) + case _ => + case _ => + super.recheckTyped(tree) + + /* Currently not needed, since capture checking takes place after ElimByName. + * Keep around in case we need to get back to it + def recheckByNameArg(tree: Tree, pt: Type)(using Context): Type = + val closureDef(mdef) = tree: @unchecked + val arg = mdef.rhs + val localSet = CaptureSet.Var() + curEnv = Env(mdef.symbol, localSet, isBoxed = false, curEnv) + val result = + try + inContext(ctx.withOwner(mdef.symbol)) { + recheckStart(arg, pt).capturing(localSet) + } + finally curEnv = curEnv.outer + recheckFinish(result, arg, pt) + */ + + /** If expected type `pt` is boxed and the tree is a function or a reference, + * don't propagate free variables. + * Otherwise, if the result type is boxed, simulate an unboxing by + * adding all references in the boxed capture set to the current environment. + */ + override def recheck(tree: Tree, pt: Type = WildcardType)(using Context): Type = + if tree.isTerm && pt.isBoxedCapturing then + val saved = curEnv + + tree match + case _: RefTree | closureDef(_) => + curEnv = Env(curEnv.owner, nestedInOwner = false, CaptureSet.Var(), isBoxed = true, curEnv) + case _ => + + try super.recheck(tree, pt) + finally curEnv = saved + else + val res = super.recheck(tree, pt) + if tree.isTerm then markFree(res.boxedCaptureSet, tree.srcPos) + res + + /** If `tree` is a reference or an application where the result type refers + * to an enclosing class or method parameter of the reference, check that the result type + * does not capture the universal capability. This is justified since the + * result type would have to be implicitly unboxed. + * TODO: Can we find a cleaner way to achieve this? 
Logically, this should be part + * of simulated boxing and unboxing. + */ + override def recheckFinish(tpe: Type, tree: Tree, pt: Type)(using Context): Type = + val typeToCheck = tree match + case _: Ident | _: Select | _: Apply | _: TypeApply if tree.symbol.unboxesResult => + tpe + case _: Try => + tpe + case _ => + NoType + def checkNotUniversal(tp: Type): Unit = tp.widenDealias match + case wtp @ CapturingType(parent, refs) => + refs.disallowRootCapability { () => + val kind = if tree.isInstanceOf[ValDef] then "mutable variable" else "expression" + report.error( + em"""The $kind's type $wtp is not allowed to capture the root capability `*`. + |This usually means that a capability persists longer than its allowed lifetime.""", + tree.srcPos) + } + checkNotUniversal(parent) + case _ => + checkNotUniversal(typeToCheck) + super.recheckFinish(tpe, tree, pt) + + /** Massage `actual` and `expected` types using the methods below before checking conformance */ + override def checkConformsExpr(actual: Type, expected: Type, tree: Tree)(using Context): Unit = + val expected1 = addOuterRefs(expected, actual) + val actual1 = adaptBoxed(actual, expected1, tree.srcPos) + //println(i"check conforms $actual1 <<< $expected1") + super.checkConformsExpr(actual1, expected1, tree) + + /** For the expected type, implement the rule outlined in #14390: + * - when checking an expression `a: Ca Ta` against an expected type `Ce Te`, + * - where the capture set `Ce` contains Cls.this, + * - and where and all method definitions enclosing `a` inside class `Cls` + * have only pure parameters, + * - add to `Ce` all references to variables or this-references in `Ca` + * that are outside `Cls`. These are all accessed through `Cls.this`, + * so we can assume they are already accounted for by `Ce` and adding + * them explicitly to `Ce` changes nothing. 
+ */ + private def addOuterRefs(expected: Type, actual: Type)(using Context): Type = + def isPure(info: Type): Boolean = info match + case info: PolyType => isPure(info.resType) + case info: MethodType => info.paramInfos.forall(_.captureSet.isAlwaysEmpty) && isPure(info.resType) + case _ => true + def isPureContext(owner: Symbol, limit: Symbol): Boolean = + if owner == limit then true + else if !owner.exists then false + else isPure(owner.info) && isPureContext(owner.owner, limit) + def augment(erefs: CaptureSet, arefs: CaptureSet): CaptureSet = + (erefs /: erefs.elems) { (erefs, eref) => + eref match + case eref: ThisType if isPureContext(ctx.owner, eref.cls) => + erefs ++ arefs.filter { + case aref: TermRef => eref.cls.isProperlyContainedIn(aref.symbol.owner) + case aref: ThisType => eref.cls.isProperlyContainedIn(aref.cls) + case _ => false + } + case _ => + erefs + } + expected match + case CapturingType(ecore, erefs) => + val erefs1 = augment(erefs, actual.captureSet) + if erefs1 ne erefs then + capt.println(i"augmented $expected from ${actual.captureSet} --> $erefs1") + expected.derivedCapturingType(ecore, erefs1) + case _ => + expected + + /** Adapt `actual` type to `expected` type by inserting boxing and unboxing conversions */ + def adaptBoxed(actual: Type, expected: Type, pos: SrcPos)(using Context): Type = + + /** Adapt function type `actual`, which is `aargs -> ares` (possibly with dependencies) + * to `expected` type. + * It returns the adapted type along with the additionally captured variable + * during adaptation. 
+ * @param reconstruct how to rebuild the adapted function type + */ + def adaptFun(actual: Type, aargs: List[Type], ares: Type, expected: Type, + covariant: Boolean, boxed: Boolean, + reconstruct: (List[Type], Type) => Type): (Type, CaptureSet) = + val saved = curEnv + curEnv = Env(curEnv.owner, nestedInOwner = true, CaptureSet.Var(), isBoxed = false, if boxed then null else curEnv) + + try + val (eargs, eres) = expected.dealias.stripCapturing match + case defn.FunctionOf(eargs, eres, _, _) => (eargs, eres) + case expected: MethodType => (expected.paramInfos, expected.resType) + case expected @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionType(expected) => (rinfo.paramInfos, rinfo.resType) + case _ => (aargs.map(_ => WildcardType), WildcardType) + val aargs1 = aargs.zipWithConserve(eargs) { (aarg, earg) => adapt(aarg, earg, !covariant) } + val ares1 = adapt(ares, eres, covariant) + + val resTp = + if (ares1 eq ares) && (aargs1 eq aargs) then actual + else reconstruct(aargs1, ares1) + + (resTp, curEnv.captured) + finally + curEnv = saved + + /** Adapt type function type `actual` to the expected type. 
+ * @see [[adaptFun]] + */ + def adaptTypeFun( + actual: Type, ares: Type, expected: Type, + covariant: Boolean, boxed: Boolean, + reconstruct: Type => Type): (Type, CaptureSet) = + val saved = curEnv + curEnv = Env(curEnv.owner, nestedInOwner = true, CaptureSet.Var(), isBoxed = false, if boxed then null else curEnv) + + try + val eres = expected.dealias.stripCapturing match + case RefinedType(_, _, rinfo: PolyType) => rinfo.resType + case expected: PolyType => expected.resType + case _ => WildcardType + + val ares1 = adapt(ares, eres, covariant) + + val resTp = + if ares1 eq ares then actual + else reconstruct(ares1) + + (resTp, curEnv.captured) + finally + curEnv = saved + end adaptTypeFun + + def adaptInfo(actual: Type, expected: Type, covariant: Boolean): String = + val arrow = if covariant then "~~>" else "<~~" + i"adapting $actual $arrow $expected" + + /** Destruct a capturing type `tp` to a tuple (cs, tp0, boxed), + * where `tp0` is not a capturing type. + * + * If `tp` is a nested capturing type, the return tuple always represents + * the innermost capturing type. The outer capture annotations can be + * reconstructed with the returned function. + */ + def destructCapturingType(tp: Type, reconstruct: Type -> Type = (x: Type) => x) // !cc! 
need monomorphic default argument + : (Type, CaptureSet, Boolean, Type -> Type) = + tp.dealias match + case tp @ CapturingType(parent, cs) => + if parent.dealias.isCapturingType then + destructCapturingType(parent, res => reconstruct(tp.derivedCapturingType(res, cs))) + else + (parent, cs, tp.isBoxed, reconstruct) + case actual => + (actual, CaptureSet(), false, reconstruct) + + def adapt(actual: Type, expected: Type, covariant: Boolean): Type = trace(adaptInfo(actual, expected, covariant), recheckr, show = true) { + if expected.isInstanceOf[WildcardType] then actual + else + val (parent, cs, actualIsBoxed, recon: (Type -> Type)) = destructCapturingType(actual) + + val needsAdaptation = actualIsBoxed != expected.isBoxedCapturing + val insertBox = needsAdaptation && covariant != actualIsBoxed + + val (parent1, cs1) = parent match { + case actual @ AppliedType(tycon, args) if defn.isNonRefinedFunction(actual) => + val (parent1, leaked) = adaptFun(parent, args.init, args.last, expected, covariant, insertBox, + (aargs1, ares1) => actual.derivedAppliedType(tycon, aargs1 :+ ares1)) + (parent1, leaked ++ cs) + case actual @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionType(actual) => + // TODO Find a way to combine handling of generic and dependent function types (here and elsewhere) + val (parent1, leaked) = adaptFun(parent, rinfo.paramInfos, rinfo.resType, expected, covariant, insertBox, + (aargs1, ares1) => + rinfo.derivedLambdaType(paramInfos = aargs1, resType = ares1) + .toFunctionType(isJava = false, alwaysDependent = true)) + (parent1, leaked ++ cs) + case actual: MethodType => + val (parent1, leaked) = adaptFun(parent, actual.paramInfos, actual.resType, expected, covariant, insertBox, + (aargs1, ares1) => + actual.derivedLambdaType(paramInfos = aargs1, resType = ares1)) + (parent1, leaked ++ cs) + case actual @ RefinedType(p, nme, rinfo: PolyType) if defn.isFunctionOrPolyType(actual) => + val (parent1, leaked) = adaptTypeFun(parent, rinfo.resType, 
expected, covariant, insertBox, + ares1 => + val rinfo1 = rinfo.derivedLambdaType(rinfo.paramNames, rinfo.paramInfos, ares1) + val actual1 = actual.derivedRefinedType(p, nme, rinfo1) + actual1 + ) + (parent1, leaked ++ cs) + case _ => + (parent, cs) + } + + if needsAdaptation then + val criticalSet = // the set which is not allowed to have `*` + if covariant then cs1 // can't box with `*` + else expected.captureSet // can't unbox with `*` + if criticalSet.isUniversal && expected.isValueType then + // We can't box/unbox the universal capability. Leave `actual` as it is + // so we get an error in checkConforms. This tends to give better error + // messages than disallowing the root capability in `criticalSet`. + if ctx.settings.YccDebug.value then + println(i"cannot box/unbox $actual vs $expected") + actual + else + // Disallow future addition of `*` to `criticalSet`. + criticalSet.disallowRootCapability { () => + report.error( + em"""$actual cannot be box-converted to $expected + |since one of their capture sets contains the root capability `*`""", + pos) + } + if !insertBox then // unboxing + markFree(criticalSet, pos) + recon(CapturingType(parent1, cs1, !actualIsBoxed)) + else + recon(CapturingType(parent1, cs1, actualIsBoxed)) + } + + var actualw = actual.widenDealias + actual match + case ref: CaptureRef if ref.isTracked => + actualw match + case CapturingType(p, refs) => + actualw = actualw.derivedCapturingType(p, ref.singletonCaptureSet) + // given `a: C T`, improve `C T` to `{a} T` + case _ => + case _ => + val adapted = adapt(actualw, expected, covariant = true) + if adapted ne actualw then + capt.println(i"adapt boxed $actual vs $expected ===> $adapted") + adapted + else actual + end adaptBoxed + + override def checkUnit(unit: CompilationUnit)(using Context): Unit = + Setup(preRecheckPhase, thisPhase, recheckDef) + .traverse(ctx.compilationUnit.tpdTree) + //println(i"SETUP:\n${Recheck.addRecheckedTypes.transform(ctx.compilationUnit.tpdTree)}") + 
withCaptureSetsExplained { + super.checkUnit(unit) + checkSelfTypes(unit.tpdTree) + postCheck(unit.tpdTree) + if ctx.settings.YccDebug.value then + show(unit.tpdTree) // this does not print tree, but makes its variables visible for dependency printing + } + + /** Check that self types of subclasses conform to self types of super classes. + * (See comment below how this is achieved). The check assumes that classes + * without an explicit self type have the universal capture set `{*}` on the + * self type. If a class without explicit self type is not `effectivelyFinal` + * it is checked that the inferred self type is universal, in order to assure + * that joint and separate compilation give the same result. + */ + def checkSelfTypes(unit: tpd.Tree)(using Context): Unit = + val parentTrees = mutable.HashMap[Symbol, List[Tree]]() + unit.foreachSubTree { + case cdef @ TypeDef(_, impl: Template) => parentTrees(cdef.symbol) = impl.parents + case _ => + } + // Perform self type checking. The problem here is that `checkParents` compares a + // self type of a subclass with the result of an asSeenFrom of the self type of the + // superclass. That's no good. We need to constrain the original superclass self type + // capture set, not the set mapped by asSeenFrom. + // + // Instead, we proceed from parent classes to child classes. For every class + // we first check its parents, and then interpolate the self type to an + // upper approximation that satisfies all constraints on its capture set. + // That means all capture sets of parent self types are constants, so mapping + // them with asSeenFrom is OK. 
+ while parentTrees.nonEmpty do + val roots = parentTrees.keysIterator.filter { + cls => !parentTrees(cls).exists(ptree => parentTrees.contains(ptree.tpe.classSymbol)) + } + assert(roots.nonEmpty) + for case root: ClassSymbol <- roots do + checkSelfAgainstParents(root, root.baseClasses) + val selfType = root.asClass.classInfo.selfType + interpolator(startingVariance = -1).traverse(selfType) + if !root.isEffectivelySealed then + def matchesExplicitRefsInBaseClass(refs: CaptureSet, cls: ClassSymbol): Boolean = + cls.baseClasses.tail.exists { psym => + val selfType = psym.asClass.givenSelfType + selfType.exists && selfType.captureSet.elems == refs.elems + } + selfType match + case CapturingType(_, refs: CaptureSet.Var) + if !refs.isUniversal && !matchesExplicitRefsInBaseClass(refs, root) => + // Forbid inferred self types unless they are already implied by an explicit + // self type in a parent. + report.error( + i"""$root needs an explicitly declared self type since its + |inferred self type $selfType + |is not visible in other compilation units that define subclasses.""", + root.srcPos) + case _ => + parentTrees -= root + capt.println(i"checked $root with $selfType") + end checkSelfTypes + + /** Perform the following kinds of checks + * - Check all explicitly written capturing types for well-formedness using `checkWellFormedPost`. + * - Check that externally visible `val`s or `def`s have empty capture sets. If not, + * suggest an explicit type. This is so that separate compilation (where external + * symbols have empty capture sets) gives the same results as joint compilation. 
+ */ + def postCheck(unit: tpd.Tree)(using Context): Unit = + unit.foreachSubTree { + case _: InferredTypeTree => + case tree: TypeTree if !tree.span.isZeroExtent => + tree.knownType.foreachPart { tp => + checkWellformedPost(tp, tree.srcPos) + tp match + case AnnotatedType(_, annot) if annot.symbol == defn.RetainsAnnot => + warnIfRedundantCaptureSet(annot.tree) + case _ => + } + case t: ValOrDefDef + if t.tpt.isInstanceOf[InferredTypeTree] && !Synthetics.isExcluded(t.symbol) => + val sym = t.symbol + val isLocal = + sym.owner.ownersIterator.exists(_.isTerm) + || sym.accessBoundary(defn.RootClass).isContainedIn(sym.topLevelClass) + def canUseInferred = // If canUseInferred is false, all capturing types in the type of `sym` need to be given explicitly + sym.is(Private) // private symbols can always have inferred types + || sym.name.is(DefaultGetterName) // default getters are exempted since otherwise it would be + // too annoying. This is a hole since a default getter's result type + // might leak into a type variable. + || // non-local symbols cannot have inferred types since external capture types are not inferred + isLocal // local symbols still need explicit types if + && !sym.owner.is(Trait) // they are defined in a trait, since we do OverridingPairs checking before capture inference + def isNotPureThis(ref: CaptureRef) = ref match { + case ref: ThisType => !ref.cls.isPureClass + case _ => true + } + if !canUseInferred then + val inferred = t.tpt.knownType + def checkPure(tp: Type) = tp match + case CapturingType(_, refs) + if !refs.elems.filter(isNotPureThis).isEmpty => + val resultStr = if t.isInstanceOf[DefDef] then " result" else "" + report.error( + em"""Non-local $sym cannot have an inferred$resultStr type + |$inferred + |with non-empty capture set $refs. 
+ |The type needs to be declared explicitly.""", t.srcPos) + case _ => + inferred.foreachPart(checkPure, StopAt.Static) + case t @ TypeApply(fun, args) => + fun.knownType.widen match + case tl: PolyType => + val normArgs = args.lazyZip(tl.paramInfos).map { (arg, bounds) => + arg.withType(arg.knownType.forceBoxStatus( + bounds.hi.isBoxedCapturing | bounds.lo.isBoxedCapturing)) + } + checkBounds(normArgs, tl) + case _ => + case _ => + } + if !ctx.reporter.errorsReported then + // We don't report errors here if previous errors were reported, because other + // errors often result in bad applied types, but flagging these bad types gives + // often worse error messages than the original errors. + val checkApplied = new TreeTraverser: + def traverse(t: Tree)(using Context) = t match + case tree: InferredTypeTree => + case tree: New => + case tree: TypeTree => checkAppliedTypesIn(tree.withKnownType) + case _ => traverseChildren(t) + checkApplied.traverse(unit) + end CaptureChecker +end CheckCaptures diff --git a/tests/pos-with-compiler-cc/dotc/cc/Setup.scala b/tests/pos-with-compiler-cc/dotc/cc/Setup.scala new file mode 100644 index 000000000000..95f2e71437a8 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/cc/Setup.scala @@ -0,0 +1,482 @@ +package dotty.tools +package dotc +package cc + +import core._ +import Phases.*, DenotTransformers.*, SymDenotations.* +import Contexts.*, Names.*, Flags.*, Symbols.*, Decorators.* +import Types.*, StdNames.* +import config.Printers.capt +import ast.tpd +import transform.Recheck.* +import CaptureSet.IdentityCaptRefMap +import Synthetics.isExcluded + +/** A tree traverser that prepares a compilation unit to be capture checked. + * It does the following: + * - For every inferred type, drop any retains annotations, + * add capture sets to all its parts, add refinements to class types and function types. + * (c.f. 
mapInferred) + * - For explicit capturing types, expand throws aliases to the underlying (pure) function, + * and add some implied capture sets to curried functions (c.f. expandThrowsAlias, expandAbbreviations). + * - Add capture sets to self types of classes and objects, unless the self type was written explicitly. + * - Box the types of mutable variables and type arguments to methods (type arguments of types + * are boxed on access). + * - Link the external types of val and def symbols with the inferred types based on their parameter symbols. + */ +class Setup( + preRecheckPhase: DenotTransformer, + thisPhase: DenotTransformer, + recheckDef: (tpd.ValOrDefDef, Symbol) => Context ?=> Unit) +extends tpd.TreeTraverser: + import tpd.* + + /** Create dependent function with underlying function class `tycon` and given + * arguments `argTypes` and result `resType`. + */ + private def depFun(tycon: Type, argTypes: List[Type], resType: Type)(using Context): Type = + MethodType.companion( + isContextual = defn.isContextFunctionClass(tycon.classSymbol), + isErased = defn.isErasedFunctionClass(tycon.classSymbol) + )(argTypes, resType) + .toFunctionType(isJava = false, alwaysDependent = true) + + /** If `tp` is an unboxed capturing type or a function returning an unboxed capturing type, + * convert it to be boxed. 
+ */ + private def box(tp: Type)(using Context): Type = + def recur(tp: Type): Type = tp.dealias match + case tp @ CapturingType(parent, refs) if !tp.isBoxed => + tp.boxed + case tp1 @ AppliedType(tycon, args) if defn.isNonRefinedFunction(tp1) => + val res = args.last + val boxedRes = recur(res) + if boxedRes eq res then tp + else tp1.derivedAppliedType(tycon, args.init :+ boxedRes) + case tp1 @ RefinedType(_, _, rinfo) if defn.isFunctionType(tp1) => + val boxedRinfo = recur(rinfo) + if boxedRinfo eq rinfo then tp + else boxedRinfo.toFunctionType(isJava = false, alwaysDependent = true) + case tp1: MethodOrPoly => + val res = tp1.resType + val boxedRes = recur(res) + if boxedRes eq res then tp + else tp1.derivedLambdaType(resType = boxedRes) + case _ => tp + tp match + case tp: MethodOrPoly => tp // don't box results of methods outside refinements + case _ => recur(tp) + + /** Perform the following transformation steps everywhere in a type: + * 1. Drop retains annotations + * 2. Turn plain function types into dependent function types, so that + * we can refer to their parameters in capture sets. Currently this is + * only done at the toplevel, i.e. for function types that are not + * themselves argument types of other function types. Without this restriction + * pos.../lists.scala and pos/...curried-shorthands.scala fail. + * Need to figure out why. + * 3. Refine other class types C by adding capture set variables to their parameter getters + * (see addCaptureRefinements) + * 4. Add capture set variables to all types that can be tracked + * + * Polytype bounds are only cleaned using step 1, but not otherwise transformed. 
+ */ + private def mapInferred(using Context) = new TypeMap: + + /** Drop @retains annotations everywhere */ + object cleanup extends TypeMap: + def apply(t: Type) = t match + case AnnotatedType(parent, annot) if annot.symbol == defn.RetainsAnnot => + apply(parent) + case _ => + mapOver(t) + + /** Refine a possibly applied class type C where the class has tracked parameters + * x_1: T_1, ..., x_n: T_n to C { val x_1: CV_1 T_1, ..., val x_n: CV_n T_n } + * where CV_1, ..., CV_n are fresh capture sets. + */ + def addCaptureRefinements(tp: Type): Type = tp match + case _: TypeRef | _: AppliedType if tp.typeParams.isEmpty => + tp.typeSymbol match + case cls: ClassSymbol + if !defn.isFunctionClass(cls) && !cls.is(JavaDefined) => + // We assume that Java classes can refer to capturing Scala types only indirectly, + // using type parameters. Hence, no need to refine them. + cls.paramGetters.foldLeft(tp) { (core, getter) => + if getter.termRef.isTracked then + val getterType = tp.memberInfo(getter).strippedDealias + RefinedType(core, getter.name, CapturingType(getterType, CaptureSet.Var())) + .showing(i"add capture refinement $tp --> $result", capt) + else + core + } + case _ => tp + case _ => tp + + private def superTypeIsImpure(tp: Type): Boolean = { + tp.dealias match + case CapturingType(_, refs) => + !refs.isAlwaysEmpty + case tp: (TypeRef | AppliedType) => + val sym = tp.typeSymbol + if sym.isClass then + sym == defn.AnyClass + // we assume Any is a shorthand of {*} Any, so if Any is an upper + // bound, the type is taken to be impure. + else superTypeIsImpure(tp.superType) + case tp: (RefinedOrRecType | MatchType) => + superTypeIsImpure(tp.underlying) + case tp: AndType => + superTypeIsImpure(tp.tp1) || needsVariable(tp.tp2) + case tp: OrType => + superTypeIsImpure(tp.tp1) && superTypeIsImpure(tp.tp2) + case _ => + false + }.showing(i"super type is impure $tp = $result", capt) + + /** Should a capture set variable be added on type `tp`? 
*/ + def needsVariable(tp: Type): Boolean = { + tp.typeParams.isEmpty && tp.match + case tp: (TypeRef | AppliedType) => + val tp1 = tp.dealias + if tp1 ne tp then needsVariable(tp1) + else + val sym = tp1.typeSymbol + if sym.isClass then + !sym.isPureClass && sym != defn.AnyClass + else superTypeIsImpure(tp1) + case tp: (RefinedOrRecType | MatchType) => + needsVariable(tp.underlying) + case tp: AndType => + needsVariable(tp.tp1) && needsVariable(tp.tp2) + case tp: OrType => + needsVariable(tp.tp1) || needsVariable(tp.tp2) + case CapturingType(parent, refs) => + needsVariable(parent) + && refs.isConst // if refs is a variable, no need to add another + && !refs.isUniversal // if refs is {*}, an added variable would not change anything + case _ => + false + }.showing(i"can have inferred capture $tp = $result", capt) + + /** Add a capture set variable to `tp` if necessary, or maybe pull out + * an embedded capture set variable from a part of `tp`. + */ + def addVar(tp: Type) = tp match + case tp @ RefinedType(parent @ CapturingType(parent1, refs), rname, rinfo) => + CapturingType(tp.derivedRefinedType(parent1, rname, rinfo), refs, parent.isBoxed) + case tp: RecType => + tp.parent match + case parent @ CapturingType(parent1, refs) => + CapturingType(tp.derivedRecType(parent1), refs, parent.isBoxed) + case _ => + tp // can return `tp` here since unlike RefinedTypes, RecTypes are never created + // by `mapInferred`. Hence if the underlying type admits capture variables + // a variable was already added, and the first case above would apply. 
+ case AndType(tp1 @ CapturingType(parent1, refs1), tp2 @ CapturingType(parent2, refs2)) => + assert(refs1.asVar.elems.isEmpty) + assert(refs2.asVar.elems.isEmpty) + assert(tp1.isBoxed == tp2.isBoxed) + CapturingType(AndType(parent1, parent2), refs1 ** refs2, tp1.isBoxed) + case tp @ OrType(tp1 @ CapturingType(parent1, refs1), tp2 @ CapturingType(parent2, refs2)) => + assert(refs1.asVar.elems.isEmpty) + assert(refs2.asVar.elems.isEmpty) + assert(tp1.isBoxed == tp2.isBoxed) + CapturingType(OrType(parent1, parent2, tp.isSoft), refs1 ++ refs2, tp1.isBoxed) + case tp @ OrType(tp1 @ CapturingType(parent1, refs1), tp2) => + CapturingType(OrType(parent1, tp2, tp.isSoft), refs1, tp1.isBoxed) + case tp @ OrType(tp1, tp2 @ CapturingType(parent2, refs2)) => + CapturingType(OrType(tp1, parent2, tp.isSoft), refs2, tp2.isBoxed) + case _ if needsVariable(tp) => + val cs = tp.dealias match + case CapturingType(_, refs) => CaptureSet.Var(refs.elems) + case _ => CaptureSet.Var() + CapturingType(tp, cs) + case _ => + tp + + private var isTopLevel = true + + private def mapNested(ts: List[Type]): List[Type] = + val saved = isTopLevel + isTopLevel = false + try ts.mapConserve(this) finally isTopLevel = saved + + def apply(t: Type) = + val tp = expandThrowsAlias(t) + val tp1 = tp match + case AnnotatedType(parent, annot) if annot.symbol == defn.RetainsAnnot => + // Drop explicit retains annotations + apply(parent) + case tp @ AppliedType(tycon, args) => + val tycon1 = this(tycon) + if defn.isNonRefinedFunction(tp) then + // Convert toplevel generic function types to dependent functions + val args0 = args.init + var res0 = args.last + val args1 = mapNested(args0) + val res1 = this(res0) + if isTopLevel then + depFun(tycon1, args1, res1) + .showing(i"add function refinement $tp --> $result", capt) + else if (tycon1 eq tycon) && (args1 eq args0) && (res1 eq res0) then + tp + else + tp.derivedAppliedType(tycon1, args1 :+ res1) + else + tp.derivedAppliedType(tycon1, args.mapConserve(arg => 
this(arg))) + case tp @ RefinedType(core, rname, rinfo) if defn.isFunctionType(tp) => + val rinfo1 = apply(rinfo) + if rinfo1 ne rinfo then rinfo1.toFunctionType(isJava = false, alwaysDependent = true) + else tp + case tp: MethodType => + tp.derivedLambdaType( + paramInfos = mapNested(tp.paramInfos), + resType = this(tp.resType)) + case tp: TypeLambda => + // Don't recurse into parameter bounds, just cleanup any stray retains annotations + tp.derivedLambdaType( + paramInfos = tp.paramInfos.mapConserve(cleanup(_).bounds), + resType = this(tp.resType)) + case _ => + mapOver(tp) + addVar(addCaptureRefinements(tp1)) + end apply + end mapInferred + + private def transformInferredType(tp: Type, boxed: Boolean)(using Context): Type = + val tp1 = mapInferred(tp) + if boxed then box(tp1) else tp1 + + /** Expand some aliases of function types to the underlying functions. + * Right now, these are only $throws aliases, but this could be generalized. + */ + private def expandThrowsAlias(tp: Type)(using Context) = tp match + case AppliedType(tycon, res :: exc :: Nil) if tycon.typeSymbol == defn.throwsAlias => + // hard-coded expansion since $throws aliases in stdlib are defined with `?=>` rather than `?->` + defn.FunctionOf(defn.CanThrowClass.typeRef.appliedTo(exc) :: Nil, res, isContextual = true, isErased = true) + case _ => tp + + private def expandThrowsAliases(using Context) = new TypeMap: + def apply(t: Type) = t match + case _: AppliedType => + val t1 = expandThrowsAlias(t) + if t1 ne t then apply(t1) else mapOver(t) + case _: LazyRef => + t + case t @ AnnotatedType(t1, ann) => + // Don't map capture sets, since that would implicitly normalize sets that + // are not well-formed. + t.derivedAnnotatedType(apply(t1), ann) + case _ => + mapOver(t) + + /** Fill in capture sets of curried function types from left to right, using + * a combination of the following two rules: + * + * 1. Expand `{c} (x: A) -> (y: B) -> C` + * to `{c} (x: A) -> {c} (y: B) -> C` + * 2. 
Expand `(x: A) -> (y: B) -> C` where `x` is tracked + * to `(x: A) -> {x} (y: B) -> C` + * + * TODO: Should we also propagate capture sets to the left? + */ + private def expandAbbreviations(using Context) = new TypeMap: + + /** Propagate `outerCs` as well as all tracked parameters as capture set to the result type + * of the dependent function type `tp`. + */ + def propagateDepFunctionResult(tp: Type, outerCs: CaptureSet): Type = tp match + case RefinedType(parent, nme.apply, rinfo: MethodType) => + val localCs = CaptureSet(rinfo.paramRefs.filter(_.isTracked)*) + val rinfo1 = rinfo.derivedLambdaType( + resType = propagateEnclosing(rinfo.resType, CaptureSet.empty, outerCs ++ localCs)) + if rinfo1 ne rinfo then rinfo1.toFunctionType(isJava = false, alwaysDependent = true) + else tp + + /** If `tp` is a function type: + * - add `outerCs` as its capture set, + * - propagate `currentCs`, `outerCs`, and all tracked parameters of `tp` to the right. + */ + def propagateEnclosing(tp: Type, currentCs: CaptureSet, outerCs: CaptureSet): Type = tp match + case tp @ AppliedType(tycon, args) if defn.isFunctionClass(tycon.typeSymbol) => + val tycon1 = this(tycon) + val args1 = args.init.mapConserve(this) + val tp1 = + if args1.exists(!_.captureSet.isAlwaysEmpty) then + val propagated = propagateDepFunctionResult( + depFun(tycon, args1, args.last), currentCs ++ outerCs) + propagated match + case RefinedType(_, _, mt: MethodType) => + if mt.isCaptureDependent then propagated + else + // No need to introduce dependent type, switch back to generic function type + tp.derivedAppliedType(tycon1, args1 :+ mt.resType) + else + val resType1 = propagateEnclosing( + args.last, CaptureSet.empty, currentCs ++ outerCs) + tp.derivedAppliedType(tycon1, args1 :+ resType1) + tp1.capturing(outerCs) + case tp @ RefinedType(parent, nme.apply, rinfo: MethodType) if defn.isFunctionType(tp) => + propagateDepFunctionResult(mapOver(tp), currentCs ++ outerCs) + .capturing(outerCs) + case _ => + mapOver(tp) 
+ + def apply(tp: Type): Type = tp match + case CapturingType(parent, cs) => + tp.derivedCapturingType(propagateEnclosing(parent, cs, CaptureSet.empty), cs) + case _ => + propagateEnclosing(tp, CaptureSet.empty, CaptureSet.empty) + end expandAbbreviations + + private def transformExplicitType(tp: Type, boxed: Boolean)(using Context): Type = + val tp1 = expandThrowsAliases(if boxed then box(tp) else tp) + if tp1 ne tp then capt.println(i"expanded: $tp --> $tp1") + if ctx.settings.YccNoAbbrev.value then tp1 + else expandAbbreviations(tp1) + + /** Transform type of type tree, and remember the transformed type as the type the tree */ + private def transformTT(tree: TypeTree, boxed: Boolean, exact: Boolean)(using Context): Unit = + if !tree.hasRememberedType then + tree.rememberType( + if tree.isInstanceOf[InferredTypeTree] && !exact + then transformInferredType(tree.tpe, boxed) + else transformExplicitType(tree.tpe, boxed)) + + /** Substitute parameter symbols in `from` to paramRefs in corresponding + * method or poly types `to`. We use a single BiTypeMap to do everything. 
+ * @param from a list of lists of type or term parameter symbols of a curried method + * @param to a list of method or poly types corresponding one-to-one to the parameter lists + */ + private class SubstParams(from: List[List[Symbol]], to: List[LambdaType])(using Context) + extends DeepTypeMap, BiTypeMap: + + def apply(t: Type): Type = t match + case t: NamedType => + val sym = t.symbol + def outer(froms: List[List[Symbol]], tos: List[LambdaType]): Type = + def inner(from: List[Symbol], to: List[ParamRef]): Type = + if from.isEmpty then outer(froms.tail, tos.tail) + else if sym eq from.head then to.head + else inner(from.tail, to.tail) + if tos.isEmpty then t + else inner(froms.head, tos.head.paramRefs) + outer(from, to) + case _ => + mapOver(t) + + def inverse(t: Type): Type = t match + case t: ParamRef => + def recur(from: List[LambdaType], to: List[List[Symbol]]): Type = + if from.isEmpty then t + else if t.binder eq from.head then to.head(t.paramNum).namedType + else recur(from.tail, to.tail) + recur(to, from) + case _ => + mapOver(t) + end SubstParams + + /** Update info of `sym` for CheckCaptures phase only */ + private def updateInfo(sym: Symbol, info: Type)(using Context) = + sym.updateInfoBetween(preRecheckPhase, thisPhase, info) + + def traverse(tree: Tree)(using Context): Unit = + tree match + case tree: DefDef => + if isExcluded(tree.symbol) then + return + tree.tpt match + case tpt: TypeTree if tree.symbol.allOverriddenSymbols.hasNext => + tree.paramss.foreach(traverse) + transformTT(tpt, boxed = false, exact = true) + traverse(tree.rhs) + //println(i"TYPE of ${tree.symbol.showLocated} = ${tpt.knownType}") + case _ => + traverseChildren(tree) + case tree @ ValDef(_, tpt: TypeTree, _) => + transformTT(tpt, + boxed = tree.symbol.is(Mutable), // types of mutable variables are boxed + exact = tree.symbol.allOverriddenSymbols.hasNext // types of symbols that override a parent don't get a capture set + ) + traverse(tree.rhs) + case tree @ TypeApply(fn, 
args) => + traverse(fn) + for case arg: TypeTree <- args do + transformTT(arg, boxed = true, exact = false) // type arguments in type applications are boxed + case _ => + traverseChildren(tree) + tree match + case tree: TypeTree => + transformTT(tree, boxed = false, exact = false) // other types are not boxed + case tree: ValOrDefDef => + val sym = tree.symbol + + // replace an existing symbol info with inferred types where capture sets of + // TypeParamRefs and TermParamRefs put in correspondence by BiTypeMaps with the + // capture sets of the types of the method's parameter symbols and result type. + def integrateRT( + info: Type, // symbol info to replace + psymss: List[List[Symbol]], // the local (type and term) parameter symbols corresponding to `info` + prevPsymss: List[List[Symbol]], // the local parameter symbols seen previously in reverse order + prevLambdas: List[LambdaType] // the outer method and polytypes generated previously in reverse order + ): Type = + info match + case mt: MethodOrPoly => + val psyms = psymss.head + mt.companion(mt.paramNames)( + mt1 => + if !psyms.exists(_.isUpdatedAfter(preRecheckPhase)) && !mt.isParamDependent && prevLambdas.isEmpty then + mt.paramInfos + else + val subst = SubstParams(psyms :: prevPsymss, mt1 :: prevLambdas) + psyms.map(psym => subst(psym.info).asInstanceOf[mt.PInfo]), + mt1 => + integrateRT(mt.resType, psymss.tail, psyms :: prevPsymss, mt1 :: prevLambdas) + ) + case info: ExprType => + info.derivedExprType(resType = + integrateRT(info.resType, psymss, prevPsymss, prevLambdas)) + case _ => + val restp = tree.tpt.knownType + if prevLambdas.isEmpty then restp + else SubstParams(prevPsymss, prevLambdas)(restp) + + if tree.tpt.hasRememberedType && !sym.isConstructor then + val newInfo = integrateRT(sym.info, sym.paramSymss, Nil, Nil) + .showing(i"update info $sym: ${sym.info} --> $result", capt) + if newInfo ne sym.info then + val completer = new LazyType: + def complete(denot: SymDenotation)(using Context) = + 
denot.info = newInfo + recheckDef(tree, sym) + updateInfo(sym, completer) + case tree: Bind => + val sym = tree.symbol + updateInfo(sym, transformInferredType(sym.info, boxed = false)) + case tree: TypeDef => + tree.symbol match + case cls: ClassSymbol => + val cinfo @ ClassInfo(prefix, _, ps, decls, selfInfo) = cls.classInfo + if (selfInfo eq NoType) || cls.is(ModuleClass) && !cls.isStatic then + // add capture set to self type of nested classes if no self type is given explicitly + val localRefs = CaptureSet.Var() + val newInfo = ClassInfo(prefix, cls, ps, decls, + CapturingType(cinfo.selfType, localRefs) + .showing(i"inferred self type for $cls: $result", capt)) + updateInfo(cls, newInfo) + cls.thisType.asInstanceOf[ThisType].invalidateCaches() + if cls.is(ModuleClass) then + // if it's a module, the capture set of the module reference is the capture set of the self type + val modul = cls.sourceModule + updateInfo(modul, CapturingType(modul.info, localRefs)) + modul.termRef.invalidateCaches() + case _ => + val info = atPhase(preRecheckPhase)(tree.symbol.info) + val newInfo = transformExplicitType(info, boxed = false) + if newInfo ne info then + updateInfo(tree.symbol, newInfo) + capt.println(i"update info of ${tree.symbol} from $info to $newInfo") + case _ => + end traverse +end Setup diff --git a/tests/pos-with-compiler-cc/dotc/cc/Synthetics.scala b/tests/pos-with-compiler-cc/dotc/cc/Synthetics.scala new file mode 100644 index 000000000000..dacbd27e0f35 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/cc/Synthetics.scala @@ -0,0 +1,189 @@ +package dotty.tools +package dotc +package cc + +import core.* +import Symbols.*, SymDenotations.*, Contexts.*, Flags.*, Types.*, Decorators.* +import StdNames.nme +import Names.Name +import NameKinds.DefaultGetterName +import Phases.checkCapturesPhase +import config.Printers.capt + +/** Classification and transformation methods for synthetic + * case class methods that need to be treated specially. 
+ * In particular, compute capturing types for some of these methods which + * have inferred (result-)types that need to be established under separate + * compilation. + */ +object Synthetics: + private def isSyntheticCopyMethod(sym: SymDenotation)(using Context) = + sym.name == nme.copy && sym.is(Synthetic) && sym.owner.isClass && sym.owner.is(Case) + + private def isSyntheticCompanionMethod(sym: SymDenotation, names: Name*)(using Context): Boolean = + names.contains(sym.name) && sym.is(Synthetic) && sym.owner.is(Module) && sym.owner.companionClass.is(Case) + + private def isSyntheticCopyDefaultGetterMethod(sym: SymDenotation)(using Context) = sym.name match + case DefaultGetterName(nme.copy, _) => sym.is(Synthetic) && sym.owner.isClass && sym.owner.is(Case) + case _ => false + + /** Is `sym` a synthetic apply, copy, or copy default getter method? + * The types of these symbols are transformed in a special way without + * looking at the definition's RHS + */ + def needsTransform(symd: SymDenotation)(using Context): Boolean = + isSyntheticCopyMethod(symd) + || isSyntheticCompanionMethod(symd, nme.apply, nme.unapply) + || isSyntheticCopyDefaultGetterMethod(symd) + || (symd.symbol eq defn.Object_eq) + || (symd.symbol eq defn.Object_ne) + + /** Method is excluded from regular capture checking. + * Excluded are synthetic class members + * - that override a synthesized case class symbol, or + * - the fromProduct method, or + * - members transformed specially as indicated by `needsTransform`. + */ + def isExcluded(sym: Symbol)(using Context): Boolean = + sym.is(Synthetic) + && sym.owner.isClass + && ( defn.caseClassSynthesized.exists( + ccsym => sym.overriddenSymbol(ccsym.owner.asClass) == ccsym) + || isSyntheticCompanionMethod(sym, nme.fromProduct) + || needsTransform(sym)) + + /** Add capture dependencies to the type of the `apply` or `copy` method of a case class. 
+ * An apply method in a case class like this: + * case class CC(a: {d} A, b: B, {*} c: C) + * would get type + * def apply(a': {d} A, b: B, {*} c': C): {a', c'} CC { val a = {a'} A, val c = {c'} C } + * where `'` is used to indicate the difference between parameter symbol and refinement name. + * Analogous for the copy method. + */ + private def addCaptureDeps(info: Type)(using Context): Type = info match + case info: MethodType => + val trackedParams = info.paramRefs.filter(atPhase(checkCapturesPhase)(_.isTracked)) + def augmentResult(tp: Type): Type = tp match + case tp: MethodOrPoly => + tp.derivedLambdaType(resType = augmentResult(tp.resType)) + case _ => + val refined = trackedParams.foldLeft(tp) { (parent, pref) => + RefinedType(parent, pref.paramName, + CapturingType( + atPhase(ctx.phase.next)(pref.underlying.stripCapturing), + CaptureSet(pref))) + } + CapturingType(refined, CaptureSet(trackedParams*)) + if trackedParams.isEmpty then info + else augmentResult(info).showing(i"augment apply/copy type $info to $result", capt) + case info: PolyType => + info.derivedLambdaType(resType = addCaptureDeps(info.resType)) + case _ => + info + + /** Drop capture dependencies from the type of `apply` or `copy` method of a case class */ + private def dropCaptureDeps(tp: Type)(using Context): Type = tp match + case tp: MethodOrPoly => + tp.derivedLambdaType(resType = dropCaptureDeps(tp.resType)) + case CapturingType(parent, _) => + dropCaptureDeps(parent) + case RefinedType(parent, _, _) => + dropCaptureDeps(parent) + case _ => + tp + + /** Add capture information to the type of the default getter of a case class copy method */ + private def addDefaultGetterCapture(info: Type, owner: Symbol, idx: Int)(using Context): Type = info match + case info: MethodOrPoly => + info.derivedLambdaType(resType = addDefaultGetterCapture(info.resType, owner, idx)) + case info: ExprType => + info.derivedExprType(addDefaultGetterCapture(info.resType, owner, idx)) + case 
EventuallyCapturingType(parent, _) => + addDefaultGetterCapture(parent, owner, idx) + case info @ AnnotatedType(parent, annot) => + info.derivedAnnotatedType(addDefaultGetterCapture(parent, owner, idx), annot) + case _ if idx < owner.asClass.paramGetters.length => + val param = owner.asClass.paramGetters(idx) + val pinfo = param.info + atPhase(ctx.phase.next) { + if pinfo.captureSet.isAlwaysEmpty then info + else CapturingType(pinfo.stripCapturing, CaptureSet(param.termRef)) + } + case _ => + info + + /** Drop capture information from the type of the default getter of a case class copy method */ + private def dropDefaultGetterCapture(info: Type)(using Context): Type = info match + case info: MethodOrPoly => + info.derivedLambdaType(resType = dropDefaultGetterCapture(info.resType)) + case CapturingType(parent, _) => + parent + case info @ AnnotatedType(parent, annot) => + info.derivedAnnotatedType(dropDefaultGetterCapture(parent), annot) + case _ => + info + + /** Augment an unapply of type `(x: C): D` to `(x: {*} C): {x} D` */ + private def addUnapplyCaptures(info: Type)(using Context): Type = info match + case info: MethodType => + val paramInfo :: Nil = info.paramInfos: @unchecked + val newParamInfo = + CapturingType(paramInfo, CaptureSet.universal) + val trackedParam = info.paramRefs.head + def newResult(tp: Type): Type = tp match + case tp: MethodOrPoly => + tp.derivedLambdaType(resType = newResult(tp.resType)) + case _ => + CapturingType(tp, CaptureSet(trackedParam)) + info.derivedLambdaType(paramInfos = newParamInfo :: Nil, resType = newResult(info.resType)) + .showing(i"augment unapply type $info to $result", capt) + case info: PolyType => + info.derivedLambdaType(resType = addUnapplyCaptures(info.resType)) + + /** Drop added capture information from the type of an `unapply` */ + private def dropUnapplyCaptures(info: Type)(using Context): Type = info match + case info: MethodType => + info.paramInfos match + case CapturingType(oldParamInfo, _) :: Nil => + 
def oldResult(tp: Type): Type = tp match + case tp: MethodOrPoly => + tp.derivedLambdaType(resType = oldResult(tp.resType)) + case CapturingType(tp, _) => + tp + info.derivedLambdaType(paramInfos = oldParamInfo :: Nil, resType = oldResult(info.resType)) + case _ => + info + case info: PolyType => + info.derivedLambdaType(resType = dropUnapplyCaptures(info.resType)) + + /** If `sym` refers to a synthetic apply, unapply, copy, or copy default getter method + * of a case class, transform it to account for capture information. + * The method is run in phase CheckCaptures.Pre + * @pre needsTransform(sym) + */ + def transformToCC(sym: SymDenotation)(using Context): SymDenotation = sym.name match + case DefaultGetterName(nme.copy, n) => + sym.copySymDenotation(info = addDefaultGetterCapture(sym.info, sym.owner, n)) + case nme.unapply => + sym.copySymDenotation(info = addUnapplyCaptures(sym.info)) + case nme.apply | nme.copy => + sym.copySymDenotation(info = addCaptureDeps(sym.info)) + case n if n == nme.eq || n == nme.ne => + sym.copySymDenotation(info = + MethodType(defn.ObjectType.capturing(CaptureSet.universal) :: Nil, defn.BooleanType)) + + /** If `sym` refers to a synthetic apply, unapply, copy, or copy default getter method + * of a case class, transform it back to what it was before the CC phase. 
+ * @pre needsTransform(sym) + */ + def transformFromCC(sym: SymDenotation)(using Context): SymDenotation = sym.name match + case DefaultGetterName(nme.copy, n) => + sym.copySymDenotation(info = dropDefaultGetterCapture(sym.info)) + case nme.unapply => + sym.copySymDenotation(info = dropUnapplyCaptures(sym.info)) + case nme.apply | nme.copy => + sym.copySymDenotation(info = dropCaptureDeps(sym.info)) + case n if n == nme.eq || n == nme.ne => + sym.copySymDenotation(info = defn.methOfAnyRef(defn.BooleanType)) + +end Synthetics \ No newline at end of file diff --git a/tests/pos-with-compiler-cc/dotc/classpath/AggregateClassPath.scala b/tests/pos-with-compiler-cc/dotc/classpath/AggregateClassPath.scala new file mode 100644 index 000000000000..51b261583feb --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/classpath/AggregateClassPath.scala @@ -0,0 +1,162 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package dotty.tools +package dotc.classpath + +import scala.language.unsafeNulls + +import java.net.URL +import scala.collection.mutable.ArrayBuffer +import scala.collection.immutable.ArraySeq +import dotc.util + +import dotty.tools.io.{ AbstractFile, ClassPath, ClassRepresentation, EfficientClassPath } + +/** + * A classpath unifying multiple class- and sourcepath entries. + * The Classpath can obtain entries for classes and sources independently + * so it tries to do operations quite optimally - iterating only these collections + * which are needed in the given moment and only as far as it's necessary. 
+ * + * @param aggregates classpath instances containing entries which this class processes + */ +case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { + override def findClassFile(className: String): Option[AbstractFile] = { + val (pkg, _) = PackageNameUtils.separatePkgAndClassNames(className) + aggregatesForPackage(PackageName(pkg)).iterator.map(_.findClassFile(className)).collectFirst { + case Some(x) => x + } + } + private val packageIndex: collection.mutable.Map[String, Seq[ClassPath]] = collection.mutable.Map() + private def aggregatesForPackage(pkg: PackageName): Seq[ClassPath] = packageIndex.synchronized { + packageIndex.getOrElseUpdate(pkg.dottedString, aggregates.filter(_.hasPackage(pkg))) + } + + override def findClass(className: String): Option[ClassRepresentation] = { + val (pkg, _) = PackageNameUtils.separatePkgAndClassNames(className) + + def findEntry(isSource: Boolean): Option[ClassRepresentation] = + aggregatesForPackage(PackageName(pkg)).iterator.map(_.findClass(className)).collectFirst { + case Some(s: SourceFileEntry) if isSource => s + case Some(s: ClassFileEntry) if !isSource => s + } + + val classEntry = findEntry(isSource = false) + val sourceEntry = findEntry(isSource = true) + + (classEntry, sourceEntry) match { + case (Some(c: ClassFileEntry), Some(s: SourceFileEntry)) => Some(ClassAndSourceFilesEntry(c.file, s.file)) + case (c @ Some(_), _) => c + case (_, s) => s + } + } + + override def asURLs: Seq[URL] = aggregates.flatMap(_.asURLs) + + override def asClassPathStrings: Seq[String] = aggregates.map(_.asClassPathString).distinct + + override def asSourcePathString: String = ClassPath.join(aggregates map (_.asSourcePathString): _*) + + override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = { + val aggregatedPackages = aggregates.flatMap(_.packages(inPackage)).distinct + aggregatedPackages + } + + override private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = + 
getDistinctEntries(_.classes(inPackage)) + + override private[dotty] def sources(inPackage: PackageName): Seq[SourceFileEntry] = + getDistinctEntries(_.sources(inPackage)) + + override private[dotty] def hasPackage(pkg: PackageName): Boolean = aggregates.exists(_.hasPackage(pkg)) + override private[dotty] def list(inPackage: PackageName): ClassPathEntries = { + val packages: java.util.HashSet[PackageEntry] = new java.util.HashSet[PackageEntry]() + val classesAndSourcesBuffer = collection.mutable.ArrayBuffer[ClassRepresentation]() + val onPackage: PackageEntry => Unit = packages.add(_) + val onClassesAndSources: ClassRepresentation => Unit = classesAndSourcesBuffer += _ + + aggregates.foreach { cp => + try { + cp match { + case ecp: EfficientClassPath => + ecp.list(inPackage, onPackage, onClassesAndSources) + case _ => + val entries = cp.list(inPackage) + entries._1.foreach(entry => packages.add(entry)) + classesAndSourcesBuffer ++= entries._2 + } + } catch { + case ex: java.io.IOException => + val e = FatalError(ex.getMessage) + e.initCause(ex) + throw e + } + } + + val distinctPackages: Seq[PackageEntry] = { + val arr = packages.toArray(new Array[PackageEntry](packages.size())) + ArraySeq.unsafeWrapArray(arr) + } + val distinctClassesAndSources = mergeClassesAndSources(classesAndSourcesBuffer) + ClassPathEntries(distinctPackages, distinctClassesAndSources) + } + + /** + * Returns only one entry for each name. If there's both a source and a class entry, it + * creates an entry containing both of them. If there would be more than one class or source + * entries for the same class it always would use the first entry of each type found on a classpath. 
+ */ + private def mergeClassesAndSources(entries: scala.collection.Seq[ClassRepresentation]): Seq[ClassRepresentation] = { + // based on the implementation from MergedClassPath + var count = 0 + val indices = util.HashMap[String, Int]() + val mergedEntries = new ArrayBuffer[ClassRepresentation](entries.size) + for { + entry <- entries + } { + val name = entry.name + if (indices.contains(name)) { + val index = indices(name) + val existing = mergedEntries(index) + + if (existing.binary.isEmpty && entry.binary.isDefined) + mergedEntries(index) = ClassAndSourceFilesEntry(entry.binary.get, existing.source.get) + if (existing.source.isEmpty && entry.source.isDefined) + mergedEntries(index) = ClassAndSourceFilesEntry(existing.binary.get, entry.source.get) + } + else { + indices(name) = count + mergedEntries += entry + count += 1 + } + } + if (mergedEntries.isEmpty) Nil else mergedEntries.toIndexedSeq + } + + private def getDistinctEntries[EntryType <: ClassRepresentation](getEntries: ClassPath => Seq[EntryType]): Seq[EntryType] = { + val seenNames = util.HashSet[String]() + val entriesBuffer = new ArrayBuffer[EntryType](1024) + for { + cp <- aggregates + entry <- getEntries(cp) if !seenNames.contains(entry.name) + } + { + entriesBuffer += entry + seenNames += entry.name + } + entriesBuffer.toIndexedSeq + } +} + +object AggregateClassPath { + def createAggregate(parts: ClassPath*): ClassPath = { + val elems = new ArrayBuffer[ClassPath]() + parts foreach { + case AggregateClassPath(ps) => elems ++= ps + case p => elems += p + } + if (elems.size == 1) elems.head + else AggregateClassPath(elems.toIndexedSeq) + } +} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/ClassPath.scala b/tests/pos-with-compiler-cc/dotc/classpath/ClassPath.scala new file mode 100644 index 000000000000..176b6acf9c6c --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/classpath/ClassPath.scala @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. 
+ */ +package dotty.tools.dotc.classpath + +import dotty.tools.io.AbstractFile +import dotty.tools.io.ClassRepresentation + +case class ClassPathEntries(packages: scala.collection.Seq[PackageEntry], classesAndSources: scala.collection.Seq[ClassRepresentation]) { + def toTuple: (scala.collection.Seq[PackageEntry], scala.collection.Seq[ClassRepresentation]) = (packages, classesAndSources) +} + +object ClassPathEntries { + val empty = ClassPathEntries(Seq.empty, Seq.empty) +} + +trait ClassFileEntry extends ClassRepresentation { + def file: AbstractFile +} + +trait SourceFileEntry extends ClassRepresentation { + def file: AbstractFile +} + +case class PackageName(dottedString: String) { + val dirPathTrailingSlashJar: String = FileUtils.dirPathInJar(dottedString) + "/" + + val dirPathTrailingSlash: String = + if (java.io.File.separatorChar == '/') + dirPathTrailingSlashJar + else + FileUtils.dirPath(dottedString) + java.io.File.separator + + def isRoot: Boolean = dottedString.isEmpty + + def entryName(entry: String): String = { + if (isRoot) entry else { + val builder = new java.lang.StringBuilder(dottedString.length + 1 + entry.length) + builder.append(dottedString) + builder.append('.') + builder.append(entry) + builder.toString + } + } +} + +trait PackageEntry { + def name: String +} + +private[dotty] case class ClassFileEntryImpl(file: AbstractFile) extends ClassFileEntry { + final def fileName: String = file.name + def name: String = FileUtils.stripClassExtension(file.name) // class name + + def binary: Option[AbstractFile] = Some(file) + def source: Option[AbstractFile] = None +} + +private[dotty] case class SourceFileEntryImpl(file: AbstractFile) extends SourceFileEntry { + final def fileName: String = file.name + def name: String = FileUtils.stripSourceExtension(file.name) + + def binary: Option[AbstractFile] = None + def source: Option[AbstractFile] = Some(file) +} + +private[dotty] case class ClassAndSourceFilesEntry(classFile: AbstractFile, srcFile: 
AbstractFile) extends ClassRepresentation { + final def fileName: String = classFile.name + def name: String = FileUtils.stripClassExtension(classFile.name) + + def binary: Option[AbstractFile] = Some(classFile) + def source: Option[AbstractFile] = Some(srcFile) +} + +private[dotty] case class PackageEntryImpl(name: String) extends PackageEntry + +private[dotty] trait NoSourcePaths { + def asSourcePathString: String = "" + private[dotty] def sources(inPackage: PackageName): Seq[SourceFileEntry] = Seq.empty +} + +private[dotty] trait NoClassPaths { + def findClassFile(className: String): Option[AbstractFile] = None + private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = Seq.empty +} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/ClassPathFactory.scala b/tests/pos-with-compiler-cc/dotc/classpath/ClassPathFactory.scala new file mode 100644 index 000000000000..ac8b69381938 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/classpath/ClassPathFactory.scala @@ -0,0 +1,84 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package dotty.tools.dotc.classpath + +import dotty.tools.io.{AbstractFile, VirtualDirectory} +import FileUtils._ +import dotty.tools.io.ClassPath +import dotty.tools.dotc.core.Contexts._ + +/** + * Provides factory methods for classpath. When creating classpath instances for a given path, + * it uses proper type of classpath depending on a types of particular files containing sources or classes. + */ +class ClassPathFactory { + /** + * Create a new classpath based on the abstract file. + */ + def newClassPath(file: AbstractFile)(using Context): ClassPath = ClassPathFactory.newClassPath(file) + + /** + * Creators for sub classpaths which preserve this context. 
+ */ + def sourcesInPath(path: String)(using Context): List[ClassPath] = + for { + file <- expandPath(path, expandStar = false) + dir <- Option(AbstractFile getDirectory file) + } + yield createSourcePath(dir) + + + def expandPath(path: String, expandStar: Boolean = true): List[String] = dotty.tools.io.ClassPath.expandPath(path, expandStar) + + def expandDir(extdir: String): List[String] = dotty.tools.io.ClassPath.expandDir(extdir) + + def contentsOfDirsInPath(path: String)(using Context): List[ClassPath] = + for { + dir <- expandPath(path, expandStar = false) + name <- expandDir(dir) + entry <- Option(AbstractFile.getDirectory(name)) + } + yield newClassPath(entry) + + def classesInExpandedPath(path: String)(using Context): IndexedSeq[ClassPath] = + classesInPathImpl(path, expand = true).toIndexedSeq + + def classesInPath(path: String)(using Context): List[ClassPath] = classesInPathImpl(path, expand = false) + + def classesInManifest(useManifestClassPath: Boolean)(using Context): List[ClassPath] = + if (useManifestClassPath) dotty.tools.io.ClassPath.manifests.map(url => newClassPath(AbstractFile getResources url)) + else Nil + + // Internal + protected def classesInPathImpl(path: String, expand: Boolean)(using Context): List[ClassPath] = + for { + file <- expandPath(path, expand) + dir <- { + def asImage = if (file.endsWith(".jimage")) Some(AbstractFile.getFile(file)) else None + Option(AbstractFile.getDirectory(file)).orElse(asImage) + } + } + yield newClassPath(dir) + + private def createSourcePath(file: AbstractFile)(using Context): ClassPath = + if (file.isJarOrZip) + ZipAndJarSourcePathFactory.create(file) + else if (file.isDirectory) + new DirectorySourcePath(file.file) + else + sys.error(s"Unsupported sourcepath element: $file") +} + +object ClassPathFactory { + def newClassPath(file: AbstractFile)(using Context): ClassPath = file match { + case vd: VirtualDirectory => VirtualDirectoryClassPath(vd) + case _ => + if (file.isJarOrZip) + 
ZipAndJarClassPathFactory.create(file) + else if (file.isDirectory) + new DirectoryClassPath(file.file) + else + sys.error(s"Unsupported classpath element: $file") + } +} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/DirectoryClassPath.scala b/tests/pos-with-compiler-cc/dotc/classpath/DirectoryClassPath.scala new file mode 100644 index 000000000000..a5678970411b --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/classpath/DirectoryClassPath.scala @@ -0,0 +1,313 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package dotty.tools.dotc.classpath + +import scala.language.unsafeNulls + +import java.io.{File => JFile} +import java.net.URL +import java.nio.file.{FileSystems, Files} + +import dotty.tools.dotc.classpath.PackageNameUtils.{packageContains, separatePkgAndClassNames} +import dotty.tools.io.{AbstractFile, PlainFile, ClassPath, ClassRepresentation, EfficientClassPath, JDK9Reflectors} +import FileUtils._ +import PlainFile.toPlainFile + +import scala.jdk.CollectionConverters._ +import scala.collection.immutable.ArraySeq +import scala.util.control.NonFatal +import language.experimental.pureFunctions + +/** + * A trait allowing to look for classpath entries in directories. It provides common logic for + * classes handling class and source files. + * It makes use of the fact that in the case of nested directories it's easy to find a file + * when we have a name of a package. + * It abstracts over the file representation to work with both JFile and AbstractFile. + */ +trait DirectoryLookup[FileEntryType <: ClassRepresentation] extends EfficientClassPath { + type F + + val dir: F + + protected def emptyFiles: Array[F] // avoids reifying ClassTag[F] + protected def getSubDir(dirName: String): Option[F] + protected def listChildren(dir: F, filter: Option[F -> Boolean] = (None: Option[F -> Boolean])): Array[F] // !cc! 
need explicit typing of default argument + protected def getName(f: F): String + protected def toAbstractFile(f: F): AbstractFile + protected def isPackage(f: F): Boolean + + protected def createFileEntry(file: AbstractFile): FileEntryType + protected def isMatchingFile(f: F): Boolean + + private def getDirectory(forPackage: PackageName): Option[F] = + if (forPackage.isRoot) + Some(dir) + else + getSubDir(forPackage.dirPathTrailingSlash) + + override private[dotty] def hasPackage(pkg: PackageName): Boolean = getDirectory(pkg).isDefined + + private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = { + val dirForPackage = getDirectory(inPackage) + val nestedDirs: Array[F] = dirForPackage match { + case None => emptyFiles + case Some(directory) => listChildren(directory, Some(isPackage)) + } + ArraySeq.unsafeWrapArray(nestedDirs).map(f => PackageEntryImpl(inPackage.entryName(getName(f)))) + } + + protected def files(inPackage: PackageName): Seq[FileEntryType] = { + val dirForPackage = getDirectory(inPackage) + val files: Array[F] = dirForPackage match { + case None => emptyFiles + case Some(directory) => listChildren(directory, Some(isMatchingFile)) + } + files.iterator.map(f => createFileEntry(toAbstractFile(f))).toSeq + } + + override def list(inPackage: PackageName, onPackageEntry: PackageEntry => Unit, onClassesAndSources: ClassRepresentation => Unit): Unit = { + val dirForPackage = getDirectory(inPackage) + dirForPackage match { + case None => + case Some(directory) => + for (file <- listChildren(directory)) { + if (isPackage(file)) + onPackageEntry(PackageEntryImpl(inPackage.entryName(getName(file)))) + else if (isMatchingFile(file)) + onClassesAndSources(createFileEntry(toAbstractFile(file))) + } + } + } +} + +trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends DirectoryLookup[FileEntryType] { + type F = JFile + + protected def emptyFiles: Array[JFile] = Array.empty + protected def getSubDir(packageDirName: String): 
Option[JFile] = {
+    val packageDir = new JFile(dir, packageDirName)
+    if (packageDir.exists && packageDir.isDirectory) Some(packageDir)
+    else None
+  }
+  protected def listChildren(dir: JFile, filter: Option[JFile -> Boolean]): Array[JFile] = {
+    val listing = filter match {
+      case Some(f) => dir.listFiles(mkFileFilter(f))
+      case None => dir.listFiles()
+    }
+
+    if (listing != null) {
+      // Sort by file name for stable order of directory .class entries in package scope.
+      // This gives stable results ordering of base type sequences for unrelated classes
+      // with the same base type depth.
+      //
+      // Notably, this will stably infer `Product with Serializable`
+      // as the type of `case class C(); case class D(); List(C(), D()).head`, rather than the opposite order.
+      // On Mac, the HFS performs this sorting transparently, but on Linux the order is unspecified.
+      //
+      // Note this behaviour can be enabled in javac with `javac -XDsortfiles`, but that's only
+      // intended to improve determinism of the compiler for compiler hackers.
+ java.util.Arrays.sort(listing, + new java.util.Comparator[JFile] { + def compare(o1: JFile, o2: JFile) = o1.getName.compareTo(o2.getName) + }) + listing + } + else Array() + } + protected def getName(f: JFile): String = f.getName + protected def toAbstractFile(f: JFile): AbstractFile = f.toPath.toPlainFile + protected def isPackage(f: JFile): Boolean = f.isPackage + + assert(dir != null, "Directory file in DirectoryFileLookup cannot be null") + + def asURLs: Seq[URL] = Seq(dir.toURI.toURL) + def asClassPathStrings: Seq[String] = Seq(dir.getPath) +} + +object JrtClassPath { + import java.nio.file._, java.net.URI + def apply(release: Option[String]): Option[ClassPath] = { + import scala.util.Properties._ + if (!isJavaAtLeast("9")) None + else { + // Longer term we'd like an official API for this in the JDK + // Discussion: http://mail.openjdk.java.net/pipermail/compiler-dev/2018-March/thread.html#11738 + + val currentMajorVersion: Int = JDK9Reflectors.runtimeVersionMajor(JDK9Reflectors.runtimeVersion()).intValue() + release match { + case Some(v) if v.toInt < currentMajorVersion => + try { + val ctSym = Paths.get(javaHome).resolve("lib").resolve("ct.sym") + if (Files.notExists(ctSym)) None + else Some(new CtSymClassPath(ctSym, v.toInt)) + } catch { + case NonFatal(_) => None + } + case _ => + try { + val fs = FileSystems.getFileSystem(URI.create("jrt:/")) + Some(new JrtClassPath(fs)) + } catch { + case _: ProviderNotFoundException | _: FileSystemNotFoundException => None + } + } + } + } +} + +/** + * Implementation `ClassPath` based on the JDK 9 encapsulated runtime modules (JEP-220) + * + * https://bugs.openjdk.java.net/browse/JDK-8066492 is the most up to date reference + * for the structure of the jrt:// filesystem. + * + * The implementation assumes that no classes exist in the empty package. 
+ */ +final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with NoSourcePaths { + import java.nio.file.Path, java.nio.file._ + type F = Path + private val dir: Path = fs.getPath("/packages") + + // e.g. "java.lang" -> Seq("/modules/java.base") + private val packageToModuleBases: Map[String, Seq[Path]] = { + val ps = Files.newDirectoryStream(dir).iterator().asScala + def lookup(pack: Path): Seq[Path] = + Files.list(pack).iterator().asScala.map(l => if (Files.isSymbolicLink(l)) Files.readSymbolicLink(l) else l).toList + ps.map(p => (p.toString.stripPrefix("/packages/"), lookup(p))).toMap + } + + /** Empty string represents root package */ + override private[dotty] def hasPackage(pkg: PackageName): Boolean = packageToModuleBases.contains(pkg.dottedString) + + override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = + packageToModuleBases.keysIterator.filter(pack => packageContains(inPackage.dottedString, pack)).map(PackageEntryImpl(_)).toVector + + private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = + if (inPackage.isRoot) Nil + else + packageToModuleBases.getOrElse(inPackage.dottedString, Nil).flatMap(x => + Files.list(x.resolve(inPackage.dirPathTrailingSlash)).iterator().asScala.filter(_.getFileName.toString.endsWith(".class"))).map(x => + ClassFileEntryImpl(x.toPlainFile)).toVector + + override private[dotty] def list(inPackage: PackageName): ClassPathEntries = + if (inPackage.isRoot) ClassPathEntries(packages(inPackage), Nil) + else ClassPathEntries(packages(inPackage), classes(inPackage)) + + def asURLs: Seq[URL] = Seq(new URL("jrt:/")) + // We don't yet have a scheme to represent the JDK modules in our `-classpath`. + // java models them as entries in the new "module path", we'll probably need to follow this. 
+ def asClassPathStrings: Seq[String] = Nil + + def findClassFile(className: String): Option[AbstractFile] = + if (!className.contains(".")) None + else { + val (inPackage, _) = separatePkgAndClassNames(className) + packageToModuleBases.getOrElse(inPackage, Nil).iterator.flatMap{ x => + val file = x.resolve(FileUtils.dirPath(className) + ".class") + if (Files.exists(file)) file.toPlainFile :: Nil else Nil + }.take(1).toList.headOption + } +} + +/** + * Implementation `ClassPath` based on the \$JAVA_HOME/lib/ct.sym backing http://openjdk.java.net/jeps/247 + */ +final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends ClassPath with NoSourcePaths { + import java.nio.file.Path, java.nio.file._ + + private val fileSystem: FileSystem = FileSystems.newFileSystem(ctSym, null: ClassLoader) + private val root: Path = fileSystem.getRootDirectories.iterator.next + private val roots = Files.newDirectoryStream(root).iterator.asScala.toList + + // http://mail.openjdk.java.net/pipermail/compiler-dev/2018-March/011737.html + private def codeFor(major: Int): String = if (major < 10) major.toString else ('A' + (major - 10)).toChar.toString + + private val releaseCode: String = codeFor(release) + private def fileNameMatchesRelease(fileName: String) = !fileName.contains("-") && fileName.contains(releaseCode) // exclude `9-modules` + private val rootsForRelease: List[Path] = roots.filter(root => fileNameMatchesRelease(root.getFileName.toString)) + + // e.g. 
"java.lang" -> Seq(/876/java/lang, /87/java/lang, /8/java/lang)) + private val packageIndex: scala.collection.Map[String, scala.collection.Seq[Path]] = { + val index = collection.mutable.AnyRefMap[String, collection.mutable.ListBuffer[Path]]() + val isJava12OrHigher = scala.util.Properties.isJavaAtLeast("12") + rootsForRelease.foreach(root => Files.walk(root).iterator().asScala.filter(Files.isDirectory(_)).foreach { p => + val moduleNamePathElementCount = if (isJava12OrHigher) 1 else 0 + if (p.getNameCount > root.getNameCount + moduleNamePathElementCount) { + val packageDotted = p.subpath(moduleNamePathElementCount + root.getNameCount, p.getNameCount).toString.replace('/', '.') + index.getOrElseUpdate(packageDotted, new collection.mutable.ListBuffer) += p + } + }) + index + } + + /** Empty string represents root package */ + override private[dotty] def hasPackage(pkg: PackageName) = packageIndex.contains(pkg.dottedString) + override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = { + packageIndex.keysIterator.filter(pack => packageContains(inPackage.dottedString, pack)).map(PackageEntryImpl(_)).toVector + } + private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = { + if (inPackage.isRoot) Nil + else { + val sigFiles = packageIndex.getOrElse(inPackage.dottedString, Nil).iterator.flatMap(p => + Files.list(p).iterator.asScala.filter(_.getFileName.toString.endsWith(".sig"))) + sigFiles.map(f => ClassFileEntryImpl(f.toPlainFile)).toVector + } + } + + override private[dotty] def list(inPackage: PackageName): ClassPathEntries = + if (inPackage.isRoot) ClassPathEntries(packages(inPackage), Nil) + else ClassPathEntries(packages(inPackage), classes(inPackage)) + + def asURLs: Seq[URL] = Nil + def asClassPathStrings: Seq[String] = Nil + def findClassFile(className: String): Option[AbstractFile] = { + if (!className.contains(".")) None + else { + val (inPackage, classSimpleName) = separatePkgAndClassNames(className) + 
packageIndex.getOrElse(inPackage, Nil).iterator.flatMap { p => + val path = p.resolve(classSimpleName + ".sig") + if (Files.exists(path)) path.toPlainFile :: Nil else Nil + }.take(1).toList.headOption + } + } +} + +case class DirectoryClassPath(dir: JFile) extends JFileDirectoryLookup[ClassFileEntryImpl] with NoSourcePaths { + override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl.apply + + def findClassFile(className: String): Option[AbstractFile] = { + val relativePath = FileUtils.dirPath(className) + val classFile = new JFile(dir, relativePath + ".class") + if (classFile.exists) { + Some(classFile.toPath.toPlainFile) + } + else None + } + + protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file) + protected def isMatchingFile(f: JFile): Boolean = f.isClass + + private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = files(inPackage) +} + +case class DirectorySourcePath(dir: JFile) extends JFileDirectoryLookup[SourceFileEntryImpl] with NoClassPaths { + def asSourcePathString: String = asClassPathString + + protected def createFileEntry(file: AbstractFile): SourceFileEntryImpl = SourceFileEntryImpl(file) + protected def isMatchingFile(f: JFile): Boolean = endsScalaOrJava(f.getName) + + override def findClass(className: String): Option[ClassRepresentation] = findSourceFile(className) map SourceFileEntryImpl.apply + + private def findSourceFile(className: String): Option[AbstractFile] = { + val relativePath = FileUtils.dirPath(className) + val sourceFile = LazyList("scala", "java") + .map(ext => new JFile(dir, relativePath + "." 
+ ext)) + .collectFirst { case file if file.exists() => file } + + sourceFile.map(_.toPath.toPlainFile) + } + + private[dotty] def sources(inPackage: PackageName): Seq[SourceFileEntry] = files(inPackage) +} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/FileUtils.scala b/tests/pos-with-compiler-cc/dotc/classpath/FileUtils.scala new file mode 100644 index 000000000000..0f5ac16b40bf --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/classpath/FileUtils.scala @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package dotty.tools +package dotc.classpath + +import scala.language.unsafeNulls + +import java.io.{File => JFile, FileFilter} +import java.net.URL +import dotty.tools.io.AbstractFile +import language.experimental.pureFunctions + +/** + * Common methods related to Java files and abstract files used in the context of classpath + */ +object FileUtils { + extension (file: AbstractFile) { + def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.name) + + def isClass: Boolean = !file.isDirectory && file.hasExtension("class") && !file.name.endsWith("$class.class") + // FIXME: drop last condition when we stop being compatible with Scala 2.11 + + def isScalaOrJavaSource: Boolean = !file.isDirectory && (file.hasExtension("scala") || file.hasExtension("java")) + + // TODO do we need to check also other files using ZipMagicNumber like in scala.tools.nsc.io.Jar.isJarOrZip? 
+ def isJarOrZip: Boolean = file.hasExtension("jar") || file.hasExtension("zip") + + /** + * Safe method returning a sequence containing one URL representing this file, when underlying file exists, + * and returning given default value in other case + */ + def toURLs(default: => Seq[URL] = Seq.empty): Seq[URL] = if (file.file == null) default else Seq(file.toURL) + } + + extension (file: JFile) { + def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.getName) + + def isClass: Boolean = file.isFile && file.getName.endsWith(".class") && !file.getName.endsWith("$class.class") + // FIXME: drop last condition when we stop being compatible with Scala 2.11 + } + + private val SUFFIX_CLASS = ".class" + private val SUFFIX_SCALA = ".scala" + private val SUFFIX_JAVA = ".java" + private val SUFFIX_SIG = ".sig" + + def stripSourceExtension(fileName: String): String = + if (endsScala(fileName)) stripClassExtension(fileName) + else if (endsJava(fileName)) stripJavaExtension(fileName) + else throw new FatalError("Unexpected source file ending: " + fileName) + + def dirPath(forPackage: String): String = forPackage.replace('.', JFile.separatorChar) + + def dirPathInJar(forPackage: String): String = forPackage.replace('.', '/') + + inline private def ends (filename:String, suffix:String) = filename.endsWith(suffix) && filename.length > suffix.length + + def endsClass(fileName: String): Boolean = + ends (fileName, SUFFIX_CLASS) || fileName.endsWith(SUFFIX_SIG) + + def endsScalaOrJava(fileName: String): Boolean = + endsScala(fileName) || endsJava(fileName) + + def endsJava(fileName: String): Boolean = + ends (fileName, SUFFIX_JAVA) + + def endsScala(fileName: String): Boolean = + ends (fileName, SUFFIX_SCALA) + + def stripClassExtension(fileName: String): String = + fileName.substring(0, fileName.lastIndexOf('.')) + + def stripJavaExtension(fileName: String): String = + fileName.substring(0, fileName.length - 5) // equivalent of fileName.length - SUFFIX_JAVA.length + + 
// probably it should match a pattern like [a-z_]{1}[a-z0-9_]* but it cannot be changed + // because then some tests in partest don't pass + def mayBeValidPackage(dirName: String): Boolean = + (dirName != "META-INF") && (dirName != "") && (dirName.charAt(0) != '.') + + def mkFileFilter(f: JFile -> Boolean): FileFilter = new FileFilter { + def accept(pathname: JFile): Boolean = f(pathname) + } +} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/PackageNameUtils.scala b/tests/pos-with-compiler-cc/dotc/classpath/PackageNameUtils.scala new file mode 100644 index 000000000000..ea7412f15d8a --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/classpath/PackageNameUtils.scala @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package dotty.tools.dotc.classpath + +import dotty.tools.io.ClassPath.RootPackage + +/** + * Common methods related to package names represented as String + */ +object PackageNameUtils { + + /** + * @param fullClassName full class name with package + * @return (package, simple class name) + */ + inline def separatePkgAndClassNames(fullClassName: String): (String, String) = { + val lastDotIndex = fullClassName.lastIndexOf('.') + if (lastDotIndex == -1) + (RootPackage, fullClassName) + else + (fullClassName.substring(0, lastDotIndex).nn, fullClassName.substring(lastDotIndex + 1).nn) + } + + def packagePrefix(inPackage: String): String = if (inPackage == RootPackage) "" else inPackage + "." 
+ + /** + * `true` if `packageDottedName` is a package directly nested in `inPackage`, for example: + * - `packageContains("scala", "scala.collection")` + * - `packageContains("", "scala")` + */ + def packageContains(inPackage: String, packageDottedName: String) = { + if (packageDottedName.contains(".")) + packageDottedName.startsWith(inPackage) && packageDottedName.lastIndexOf('.') == inPackage.length + else inPackage == "" + } +} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/VirtualDirectoryClassPath.scala b/tests/pos-with-compiler-cc/dotc/classpath/VirtualDirectoryClassPath.scala new file mode 100644 index 000000000000..ac80d543b539 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/classpath/VirtualDirectoryClassPath.scala @@ -0,0 +1,55 @@ +package dotty.tools.dotc.classpath + +import scala.language.unsafeNulls + +import dotty.tools.io.ClassRepresentation +import dotty.tools.io.{AbstractFile, VirtualDirectory} +import FileUtils._ +import java.net.URL + +import dotty.tools.io.ClassPath +import language.experimental.pureFunctions + +case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath with DirectoryLookup[ClassFileEntryImpl] with NoSourcePaths { + type F = AbstractFile + + // From AbstractFileClassLoader + private final def lookupPath(base: AbstractFile)(pathParts: Seq[String], directory: Boolean): AbstractFile = { + var file: AbstractFile = base + val dirParts = pathParts.init.iterator + while (dirParts.hasNext) { + val dirPart = dirParts.next + file = file.lookupName(dirPart, directory = true) + if (file == null) + return null + } + file.lookupName(pathParts.last, directory = directory) + } + + protected def emptyFiles: Array[AbstractFile] = Array.empty + protected def getSubDir(packageDirName: String): Option[AbstractFile] = + Option(lookupPath(dir)(packageDirName.split(java.io.File.separator).toIndexedSeq, directory = true)) + protected def listChildren(dir: AbstractFile, filter: Option[AbstractFile -> Boolean]): Array[F] = 
filter match { + case Some(f) => dir.iterator.filter(f).toArray + case _ => dir.toArray + } + def getName(f: AbstractFile): String = f.name + def toAbstractFile(f: AbstractFile): AbstractFile = f + def isPackage(f: AbstractFile): Boolean = f.isPackage + + // mimic the behavior of the old nsc.util.DirectoryClassPath + def asURLs: Seq[URL] = Seq(new URL(dir.name)) + def asClassPathStrings: Seq[String] = Seq(dir.path) + + override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl.apply + + def findClassFile(className: String): Option[AbstractFile] = { + val relativePath = FileUtils.dirPath(className) + ".class" + Option(lookupPath(dir)(relativePath.split(java.io.File.separator).toIndexedSeq, directory = false)) + } + + private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = files(inPackage) + + protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file) + protected def isMatchingFile(f: AbstractFile): Boolean = f.isClass +} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/ZipAndJarFileLookupFactory.scala b/tests/pos-with-compiler-cc/dotc/classpath/ZipAndJarFileLookupFactory.scala new file mode 100644 index 000000000000..865f95551a0b --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/classpath/ZipAndJarFileLookupFactory.scala @@ -0,0 +1,205 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package dotty.tools.dotc +package classpath + +import scala.language.unsafeNulls + +import java.io.File +import java.net.URL +import java.nio.file.Files +import java.nio.file.attribute.{BasicFileAttributes, FileTime} + +import scala.annotation.tailrec +import dotty.tools.io.{AbstractFile, ClassPath, ClassRepresentation, FileZipArchive, ManifestResources} +import dotty.tools.dotc.core.Contexts._ +import FileUtils._ + +/** + * A trait providing an optional cache for classpath entries obtained from zip and jar files. + * It allows us to e.g. 
reduce significantly memory used by PresentationCompilers in Scala IDE + * when there are a lot of projects having a lot of common dependencies. + */ +sealed trait ZipAndJarFileLookupFactory { + private val cache = new FileBasedCache[ClassPath] + + def create(zipFile: AbstractFile)(using Context): ClassPath = + val release = Option(ctx.settings.javaOutputVersion.value).filter(_.nonEmpty) + if (ctx.settings.YdisableFlatCpCaching.value || zipFile.file == null) createForZipFile(zipFile, release) + else createUsingCache(zipFile, release) + + protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath + + private def createUsingCache(zipFile: AbstractFile, release: Option[String]): ClassPath = + cache.getOrCreate(zipFile.file.toPath, () => createForZipFile(zipFile, release)) +} + +/** + * Manages creation of classpath for class files placed in zip and jar files. + * It should be the only way of creating them as it provides caching. + */ +object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { + private case class ZipArchiveClassPath(zipFile: File, override val release: Option[String]) + extends ZipArchiveFileLookup[ClassFileEntryImpl] + with NoSourcePaths { + + override def findClassFile(className: String): Option[AbstractFile] = { + val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) + file(PackageName(pkg), simpleClassName + ".class").map(_.file) + } + + // This method is performance sensitive as it is used by SBT's ExtractDependencies phase. 
+ override def findClass(className: String): Option[ClassRepresentation] = { + val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) + file(PackageName(pkg), simpleClassName + ".class") + } + + override private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = files(inPackage) + + override protected def createFileEntry(file: FileZipArchive#Entry): ClassFileEntryImpl = ClassFileEntryImpl(file) + override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isClass + } + + /** + * This type of classpath is closely related to the support for JSR-223. + * Its usage can be observed e.g. when running: + * jrunscript -classpath scala-compiler.jar;scala-reflect.jar;scala-library.jar -l scala + * with a particularly prepared scala-library.jar. It should have all classes listed in the manifest like e.g. this entry: + * Name: scala/Function2$mcFJD$sp.class + */ + private case class ManifestResourcesClassPath(file: ManifestResources) extends ClassPath with NoSourcePaths { + override def findClassFile(className: String): Option[AbstractFile] = { + val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) + classes(PackageName(pkg)).find(_.name == simpleClassName).map(_.file) + } + + override def asClassPathStrings: Seq[String] = Seq(file.path) + + override def asURLs: Seq[URL] = file.toURLs() + + import ManifestResourcesClassPath.PackageFileInfo + import ManifestResourcesClassPath.PackageInfo + + /** + * A cache mapping package name to abstract file for package directory and subpackages of given package. + * + * ManifestResources can iterate through the collections of entries from e.g. remote jar file. + * We can't just specify the path to the concrete directory etc. so we can't just 'jump' into + * given package, when it's needed. On the other hand we can iterate over entries to get + * AbstractFiles, iterate over entries of these files etc. 
+ * + * Instead of traversing a tree of AbstractFiles once and caching all entries or traversing each time, + * when we need subpackages of a given package or its classes, we traverse once and cache only packages. + * Classes for given package can be then easily loaded when they are needed. + */ + private lazy val cachedPackages: util.HashMap[String, PackageFileInfo] = { + val packages = util.HashMap[String, PackageFileInfo]() + + def getSubpackages(dir: AbstractFile): List[AbstractFile] = + (for (file <- dir if file.isPackage) yield file).toList + + @tailrec + def traverse(packagePrefix: String, + filesForPrefix: List[AbstractFile], + subpackagesQueue: collection.mutable.Queue[PackageInfo]): Unit = filesForPrefix match { + case pkgFile :: remainingFiles => + val subpackages = getSubpackages(pkgFile) + val fullPkgName = packagePrefix + pkgFile.name + packages(fullPkgName) = PackageFileInfo(pkgFile, subpackages) + val newPackagePrefix = fullPkgName + "." + subpackagesQueue.enqueue(PackageInfo(newPackagePrefix, subpackages)) + traverse(packagePrefix, remainingFiles, subpackagesQueue) + case Nil if subpackagesQueue.nonEmpty => + val PackageInfo(packagePrefix, filesForPrefix) = subpackagesQueue.dequeue() + traverse(packagePrefix, filesForPrefix, subpackagesQueue) + case _ => + } + + val subpackages = getSubpackages(file) + packages(ClassPath.RootPackage) = PackageFileInfo(file, subpackages) + traverse(ClassPath.RootPackage, subpackages, collection.mutable.Queue()) + packages + } + + override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = cachedPackages.get(inPackage.dottedString) match { + case None => Seq.empty + case Some(PackageFileInfo(_, subpackages)) => + subpackages.map(packageFile => PackageEntryImpl(inPackage.entryName(packageFile.name))) + } + + override private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = cachedPackages.get(inPackage.dottedString) match { + case None => Seq.empty + case Some(PackageFileInfo(pkg, 
_)) => + (for (file <- pkg if file.isClass) yield ClassFileEntryImpl(file)).toSeq + } + + override private[dotty] def hasPackage(pkg: PackageName) = cachedPackages.contains(pkg.dottedString) + override private[dotty] def list(inPackage: PackageName): ClassPathEntries = ClassPathEntries(packages(inPackage), classes(inPackage)) + } + + private object ManifestResourcesClassPath { + case class PackageFileInfo(packageFile: AbstractFile, subpackages: Seq[AbstractFile]) + case class PackageInfo(packageName: String, subpackages: List[AbstractFile]) + } + + override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath = + if (zipFile.file == null) createWithoutUnderlyingFile(zipFile) + else ZipArchiveClassPath(zipFile.file, release) + + private def createWithoutUnderlyingFile(zipFile: AbstractFile) = zipFile match { + case manifestRes: ManifestResources => + ManifestResourcesClassPath(manifestRes) + case _ => + val errorMsg = s"Abstract files which don't have an underlying file and are not ManifestResources are not supported. There was $zipFile" + throw new IllegalArgumentException(errorMsg) + } +} + +/** + * Manages creation of classpath for source files placed in zip and jar files. + * It should be the only way of creating them as it provides caching. 
+ */ +object ZipAndJarSourcePathFactory extends ZipAndJarFileLookupFactory { + private case class ZipArchiveSourcePath(zipFile: File) + extends ZipArchiveFileLookup[SourceFileEntryImpl] + with NoClassPaths { + + def release: Option[String] = None + + override def asSourcePathString: String = asClassPathString + + override private[dotty] def sources(inPackage: PackageName): Seq[SourceFileEntry] = files(inPackage) + + override protected def createFileEntry(file: FileZipArchive#Entry): SourceFileEntryImpl = SourceFileEntryImpl(file) + override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isScalaOrJavaSource + } + + override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath = ZipArchiveSourcePath(zipFile.file) +} + +final class FileBasedCache[T] { + private case class Stamp(lastModified: FileTime, fileKey: Object) + private val cache = collection.mutable.Map.empty[java.nio.file.Path, (Stamp, T)] + + def getOrCreate(path: java.nio.file.Path, create: () => T): T = cache.synchronized { + val attrs = Files.readAttributes(path, classOf[BasicFileAttributes]) + val lastModified = attrs.lastModifiedTime() + // only null on some platforms, but that's okay, we just use the last modified timestamp as our stamp + val fileKey = attrs.fileKey() + val stamp = Stamp(lastModified, fileKey) + cache.get(path) match { + case Some((cachedStamp, cached)) if cachedStamp == stamp => cached + case _ => + val value = create() + cache.put(path, (stamp, value)) + value + } + } + + def clear(): Unit = cache.synchronized { + // TODO support closing + // cache.valuesIterator.foreach(_.close()) + cache.clear() + } +} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/ZipArchiveFileLookup.scala b/tests/pos-with-compiler-cc/dotc/classpath/ZipArchiveFileLookup.scala new file mode 100644 index 000000000000..e241feee8244 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/classpath/ZipArchiveFileLookup.scala @@ -0,0 +1,72 @@ +/* + * 
Copyright (c) 2014 Contributor. All rights reserved. + */ +package dotty.tools.dotc.classpath + +import scala.language.unsafeNulls + +import java.io.File +import java.net.URL + +import dotty.tools.io.{ AbstractFile, FileZipArchive } +import FileUtils._ +import dotty.tools.io.{EfficientClassPath, ClassRepresentation} + +/** + * A trait allowing to look for classpath entries of given type in zip and jar files. + * It provides common logic for classes handling class and source files. + * It's aware of things like e.g. META-INF directory which is correctly skipped. + */ +trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends EfficientClassPath { + val zipFile: File + def release: Option[String] + + assert(zipFile != null, "Zip file in ZipArchiveFileLookup cannot be null") + + override def asURLs: Seq[URL] = Seq(zipFile.toURI.toURL) + override def asClassPathStrings: Seq[String] = Seq(zipFile.getPath) + + private val archive = new FileZipArchive(zipFile.toPath, release) + + override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = { + for { + dirEntry <- findDirEntry(inPackage).toSeq + entry <- dirEntry.iterator if entry.isPackage + } + yield PackageEntryImpl(inPackage.entryName(entry.name)) + } + + protected def files(inPackage: PackageName): Seq[FileEntryType] = + for { + dirEntry <- findDirEntry(inPackage).toSeq + entry <- dirEntry.iterator if isRequiredFileType(entry) + } + yield createFileEntry(entry) + + protected def file(inPackage: PackageName, name: String): Option[FileEntryType] = + for { + dirEntry <- findDirEntry(inPackage) + entry <- Option(dirEntry.lookupName(name, directory = false)) + if isRequiredFileType(entry) + } + yield createFileEntry(entry) + + override def hasPackage(pkg: PackageName) = findDirEntry(pkg).isDefined + def list(inPackage: PackageName, onPackageEntry: PackageEntry => Unit, onClassesAndSources: ClassRepresentation => Unit): Unit = + findDirEntry(inPackage) match { + case Some(dirEntry) => + 
for (entry <- dirEntry.iterator) { + if (entry.isPackage) + onPackageEntry(PackageEntryImpl(inPackage.entryName(entry.name))) + else if (isRequiredFileType(entry)) + onClassesAndSources(createFileEntry(entry)) + } + case None => + } + + private def findDirEntry(pkg: PackageName): Option[archive.DirEntry] = + archive.allDirs.get(pkg.dirPathTrailingSlashJar) + + protected def createFileEntry(file: FileZipArchive#Entry): FileEntryType + protected def isRequiredFileType(file: AbstractFile): Boolean +} diff --git a/tests/pos-with-compiler-cc/dotc/config/CliCommand.scala b/tests/pos-with-compiler-cc/dotc/config/CliCommand.scala new file mode 100644 index 000000000000..68c900e405da --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/CliCommand.scala @@ -0,0 +1,198 @@ +package dotty.tools.dotc +package config + +import scala.language.unsafeNulls + +import Settings._ +import core.Contexts._ +import printing.Highlighting + +import scala.util.chaining.given +import scala.PartialFunction.cond + +trait CliCommand: + + type ConcreteSettings <: CommonScalaSettings with Settings.SettingGroup + + def versionMsg: String + + def ifErrorsMsg: String + + /** The name of the command */ + def cmdName: String + + def isHelpFlag(using settings: ConcreteSettings)(using SettingsState): Boolean + + def helpMsg(using settings: ConcreteSettings)(using SettingsState, Context): String + + private def explainAdvanced = """ + |-- Notes on option parsing -- + |Boolean settings are always false unless set. + |Where multiple values are accepted, they should be comma-separated. + | example: -Xplugin:plugin1,plugin2 + | means one or a comma-separated list of: + | - (partial) phase names with an optional "+" suffix to include the next phase + | - the string "all" + | example: -Xprint:all prints all phases. + | example: -Xprint:typer,mixin prints the typer and mixin phases. + | example: -Ylog:erasure+ logs the erasure phase and the phase after the erasure phase. 
+ | This is useful because during the tree transform of phase X, we often + | already are in phase X + 1. + """ + + /** Distill arguments into summary detailing settings, errors and files to main */ + def distill(args: Array[String], sg: Settings.SettingGroup)(ss: SettingsState = sg.defaultState)(using Context): ArgsSummary = + + // expand out @filename to the contents of that filename + def expandedArguments = args.toList flatMap { + case x if x startsWith "@" => CommandLineParser.expandArg(x) + case x => List(x) + } + + sg.processArguments(expandedArguments, processAll = true, settingsState = ss) + end distill + + /** Creates a help message for a subset of options based on cond */ + protected def availableOptionsMsg(p: Setting[?] => Boolean)(using settings: ConcreteSettings)(using SettingsState): String = + // result is (Option Name, descrption\ndefault: value\nchoices: x, y, z + def help(s: Setting[?]): (String, String) = + // For now, skip the default values that do not make sense for the end user, such as 'false' for the version command. + def defaultValue = s.default match + case _: Int | _: String => s.default.toString + case _ => "" + val info = List(shortHelp(s), if defaultValue.nonEmpty then s"Default $defaultValue" else "", if s.legalChoices.nonEmpty then s"Choices ${s.legalChoices}" else "") + (s.name, info.filter(_.nonEmpty).mkString("\n")) + end help + + val ss = settings.allSettings.filter(p).toList.sortBy(_.name) + val formatter = Columnator("", "", maxField = 30) + val fresh = ContextBase().initialCtx.fresh.setSettings(summon[SettingsState]) + formatter(List(ss.map(help) :+ ("@", "A text file containing compiler arguments (options and source files).")))(using fresh) + end availableOptionsMsg + + protected def shortUsage: String = s"Usage: $cmdName " + + protected def createUsageMsg(label: String, shouldExplain: Boolean, cond: Setting[?] 
=> Boolean)(using settings: ConcreteSettings)(using SettingsState): String = + val prefix = List( + Some(shortUsage), + Some(explainAdvanced).filter(_ => shouldExplain), + Some(label + " options include:") + ).flatten.mkString("\n") + + prefix + "\n" + availableOptionsMsg(cond) + + protected def isStandard(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = + !isVerbose(s) && !isWarning(s) && !isAdvanced(s) && !isPrivate(s) || s.name == "-Werror" || s.name == "-Wconf" + protected def isVerbose(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = + s.name.startsWith("-V") && s.name != "-V" + protected def isWarning(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = + s.name.startsWith("-W") && s.name != "-W" || s.name == "-Xlint" + protected def isAdvanced(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = + s.name.startsWith("-X") && s.name != "-X" + protected def isPrivate(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = + s.name.startsWith("-Y") && s.name != "-Y" + protected def shortHelp(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): String = + s.description.linesIterator.next() + protected def isHelping(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = + cond(s.value) { + case ss: List[?] 
if s.isMultivalue => ss.contains("help") + case s: String => "help" == s + } + + /** Messages explaining usage and options */ + protected def usageMessage(using settings: ConcreteSettings)(using SettingsState) = + createUsageMsg("where possible standard", shouldExplain = false, isStandard) + protected def vusageMessage(using settings: ConcreteSettings)(using SettingsState) = + createUsageMsg("Possible verbose", shouldExplain = true, isVerbose) + protected def wusageMessage(using settings: ConcreteSettings)(using SettingsState) = + createUsageMsg("Possible warning", shouldExplain = true, isWarning) + protected def xusageMessage(using settings: ConcreteSettings)(using SettingsState) = + createUsageMsg("Possible advanced", shouldExplain = true, isAdvanced) + protected def yusageMessage(using settings: ConcreteSettings)(using SettingsState) = + createUsageMsg("Possible private", shouldExplain = true, isPrivate) + + /** Used for the formatted output of -Xshow-phases */ + protected def phasesMessage(using Context): String = + val phases = new Compiler().phases + val formatter = Columnator("phase name", "description", maxField = 25) + formatter(phases.map(mega => mega.map(p => (p.phaseName, p.description)))) + + /** Provide usage feedback on argument summary, assuming that all settings + * are already applied in context. + * @return Either Some list of files passed as arguments or None if further processing should be interrupted. 
+ */ + def checkUsage(summary: ArgsSummary, sourcesRequired: Boolean)(using settings: ConcreteSettings)(using SettingsState, Context): Option[List[String]] = + // Print all warnings encountered during arguments parsing + summary.warnings.foreach(report.warning(_)) + + if summary.errors.nonEmpty then + summary.errors foreach (report.error(_)) + report.echo(ifErrorsMsg) + None + else if settings.version.value then + report.echo(versionMsg) + None + else if isHelpFlag then + report.echo(helpMsg) + None + else if (sourcesRequired && summary.arguments.isEmpty) + report.echo(usageMessage) + None + else + Some(summary.arguments) + + extension [T](setting: Setting[T]) + protected def value(using ss: SettingsState): T = setting.valueIn(ss) + + extension (s: String) + def padLeft(width: Int): String = String.format(s"%${width}s", s) + + // Formatting for -help and -Vphases in two columns, handling long field1 and wrapping long field2 + class Columnator(heading1: String, heading2: String, maxField: Int, separation: Int = 2): + def apply(texts: List[List[(String, String)]])(using Context): String = StringBuilder().tap(columnate(_, texts)).toString + + private def columnate(sb: StringBuilder, texts: List[List[(String, String)]])(using Context): Unit = + import Highlighting.* + val colors = Seq(Green(_), Yellow(_), Magenta(_), Cyan(_), Red(_)) + val nocolor = texts.length == 1 + def color(index: Int): String => Highlight = if nocolor then NoColor(_) else colors(index % colors.length) + val maxCol = ctx.settings.pageWidth.value + val field1 = maxField.min(texts.flatten.map(_._1.length).filter(_ < maxField).max) // widest field under maxField + val field2 = if field1 + separation + maxField < maxCol then maxCol - field1 - separation else 0 // skinny window -> terminal wrap + val separator = " " * separation + val EOL = "\n" + def formatField1(text: String): String = if text.length <= field1 then text.padLeft(field1) else text + EOL + "".padLeft(field1) + def formatField2(text: 
String): String = + def loopOverField2(fld: String): List[String] = + if field2 == 0 || fld.length <= field2 then List(fld) + else + fld.lastIndexOf(" ", field2) match + case -1 => List(fld) + case i => val (prefix, rest) = fld.splitAt(i) ; prefix :: loopOverField2(rest.trim) + text.split("\n").toList.flatMap(loopOverField2).filter(_.nonEmpty).mkString(EOL + "".padLeft(field1) + separator) + end formatField2 + def format(first: String, second: String, index: Int, colorPicker: Int => String => Highlight) = + sb.append(colorPicker(index)(formatField1(first)).show) + .append(separator) + .append(formatField2(second)) + .append(EOL): Unit + def fancy(first: String, second: String, index: Int) = format(first, second, index, color) + def plain(first: String, second: String) = format(first, second, 0, _ => NoColor(_)) + + if heading1.nonEmpty then + plain(heading1, heading2) + plain("-" * heading1.length, "-" * heading2.length) + + def emit(index: Int)(textPair: (String, String)): Unit = fancy(textPair._1, textPair._2, index) + def group(index: Int)(body: Int => Unit): Unit = + if !ctx.useColors then plain(s"{", "") + body(index) + if !ctx.useColors then plain(s"}", "") + + texts.zipWithIndex.foreach { (text, index) => + text match + case List(single) => emit(index)(single) + case Nil => + case mega => group(index)(i => mega.foreach(emit(i))) + } + end Columnator diff --git a/tests/pos-with-compiler-cc/dotc/config/CommandLineParser.scala b/tests/pos-with-compiler-cc/dotc/config/CommandLineParser.scala new file mode 100644 index 000000000000..2e76561c9913 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/CommandLineParser.scala @@ -0,0 +1,125 @@ +package dotty.tools.dotc.config + +import java.lang.Character.isWhitespace +import java.nio.file.{Files, Paths} +import scala.annotation.tailrec +import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters.* + +/** Split a line of text using shell conventions. 
+ */ +object CommandLineParser: + inline private val DQ = '"' + inline private val SQ = '\'' + inline private val EOF = -1 + + /** Split the line into tokens separated by whitespace. + * + * Single or double quotes can be embedded to preserve internal whitespace: + * + * `""" echo "hello, world!" """` => "echo" :: "hello, world!" :: Nil + * `""" echo hello,' 'world! """` => "echo" :: "hello, world!" :: Nil + * `""" echo \"hello, world!\" """` => "echo" :: "\"hello," :: "world!\"" :: Nil + * + * The embedded quotes are stripped. Escaping backslash is not stripped. + * + * Invoke `errorFn` with a descriptive message if an end quote is missing. + */ + def tokenize(line: String, errorFn: String => Unit): List[String] = + + var accum: List[String] = Nil + + var pos = 0 + var start = 0 + val qpos = new ArrayBuffer[Int](16) // positions of paired quotes in current token + + inline def cur = if done then EOF else line.charAt(pos): Int + inline def bump() = pos += 1 + inline def done = pos >= line.length + + // Skip to the given unescaped end quote; false on no more input. + def skipToEndQuote(q: Int): Boolean = + var escaped = false + def terminal = cur match + case _ if escaped => escaped = false ; false + case '\\' => escaped = true ; false + case `q` | EOF => true + case _ => false + while !terminal do bump() + !done + + // Skip to the next whitespace word boundary; record unescaped embedded quotes; false on missing quote. 
+ def skipToDelim(): Boolean = + var escaped = false + inline def quote() = { qpos += pos ; bump() } + @tailrec def advance(): Boolean = cur match + case _ if escaped => escaped = false ; bump() ; advance() + case '\\' => escaped = true ; bump() ; advance() + case q @ (DQ | SQ) => { quote() ; skipToEndQuote(q) } && { quote() ; advance() } + case EOF => true + case c if isWhitespace(c) => true + case _ => bump(); advance() + advance() + + def copyText(): String = + val buf = new java.lang.StringBuilder + var p = start + var i = 0 + while p < pos do + if i >= qpos.size then + buf.append(line, p, pos) + p = pos + else if p == qpos(i) then + buf.append(line, qpos(i)+1, qpos(i+1)) + p = qpos(i+1)+1 + i += 2 + else + buf.append(line, p, qpos(i)) + p = qpos(i) + buf.toString + + // the current token, stripped of any embedded quotes. + def text(): String = + val res = + if qpos.isEmpty then line.substring(start, pos) + else if qpos(0) == start && qpos(1) == pos then line.substring(start+1, pos-1) + else copyText() + qpos.clear() + res.nn + + inline def badquote() = errorFn(s"Unmatched quote [${qpos.last}](${line.charAt(qpos.last)})") + + inline def skipWhitespace() = while isWhitespace(cur) do bump() + + @tailrec def loop(): List[String] = + skipWhitespace() + start = pos + if done then + accum.reverse + else if !skipToDelim() then + badquote() + Nil + else + accum ::= text() + loop() + end loop + + loop() + end tokenize + + def tokenize(line: String): List[String] = tokenize(line, x => throw new ParseException(x)) + + /** Expands all arguments starting with @ to the contents of the file named like each argument. 
+ */ + def expandArg(arg: String): List[String] = + val path = Paths.get(arg.stripPrefix("@")) + if !Files.exists(path) then + System.err.nn.println(s"Argument file ${path.nn.getFileName} could not be found") + Nil + else + def stripComment(s: String) = s.indexOf('#') match { case -1 => s case i => s.substring(0, i) } + val lines = Files.readAllLines(path).nn + val params = lines.asScala.map(stripComment).filter(!_.nn.isEmpty).mkString(" ") + tokenize(params) + + class ParseException(msg: String) extends RuntimeException(msg) diff --git a/tests/pos-with-compiler-cc/dotc/config/CompilerCommand.scala b/tests/pos-with-compiler-cc/dotc/config/CompilerCommand.scala new file mode 100644 index 000000000000..41e123472a75 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/CompilerCommand.scala @@ -0,0 +1,26 @@ +package dotty.tools.dotc +package config + +import Settings._ +import core.Contexts._ + +abstract class CompilerCommand extends CliCommand: + type ConcreteSettings = ScalaSettings + + final def helpMsg(using settings: ScalaSettings)(using SettingsState, Context): String = + settings.allSettings.find(isHelping) match + case Some(s) => s.description + case _ => + if (settings.help.value) usageMessage + else if (settings.Vhelp.value) vusageMessage + else if (settings.Whelp.value) wusageMessage + else if (settings.Xhelp.value) xusageMessage + else if (settings.Yhelp.value) yusageMessage + else if (settings.showPlugins.value) ctx.base.pluginDescriptions + else if (settings.XshowPhases.value) phasesMessage + else "" + + final def isHelpFlag(using settings: ScalaSettings)(using SettingsState): Boolean = + import settings._ + val flags = Set(help, Vhelp, Whelp, Xhelp, Yhelp, showPlugins, XshowPhases) + flags.exists(_.value) || allSettings.exists(isHelping) diff --git a/tests/pos-with-compiler-cc/dotc/config/Config.scala b/tests/pos-with-compiler-cc/dotc/config/Config.scala new file mode 100644 index 000000000000..17e3ec352e7c --- /dev/null +++ 
b/tests/pos-with-compiler-cc/dotc/config/Config.scala @@ -0,0 +1,248 @@ +package dotty.tools.dotc.config + +object Config { + + inline val cacheMembersNamed = true + inline val cacheAsSeenFrom = true + inline val cacheMemberNames = true + inline val cacheImplicitScopes = true + inline val cacheMatchReduced = true + + /** If true, the `runWithOwner` operation uses a re-usable context, + * similar to explore. This requires that the context does not escape + * the call. If false, `runWithOwner` runs its operation argument + * in a fresh context. + */ + inline val reuseOwnerContexts = true + + inline val checkCacheMembersNamed = false + + /** When updating a constraint bound, check that the constrained parameter + * does not appear at the top-level of either of its bounds. + */ + inline val checkConstraintsNonCyclic = false + + /** Check that each constraint resulting from a subtype test + * is satisfiable. Also check that a type variable instantiation + * satisfies its constraints. + * Note that this can fail when bad bounds are in scope, like in + * tests/neg/i4721a.scala. + */ + inline val checkConstraintsSatisfiable = false + + /** Check that each constraint is fully propagated. i.e. + * If P <: Q then the upper bound of P is a subtype of the upper bound of Q + * and the lower bound of Q is a subtype of the lower bound of P. + */ + inline val checkConstraintsPropagated = false + + /** Check that constraint bounds do not contain wildcard types */ + inline val checkNoWildcardsInConstraint = false + + /** If a constraint is over a type lambda `tl` and `tvar` is one of + * the type variables associated with `tl` in the constraint, check + * that the origin of `tvar` is a parameter of `tl`. + */ + inline val checkConsistentVars = false + + /** Check that constraints of globally committable typer states are closed. + * NOTE: When enabled, the check can cause CyclicReference errors because + * it traverses all elements of a type. 
Such failures were observed when + * compiling all of dotty together (source seems to be in GenBCode which + * accesses javac's settings.) + * + * It is recommended to turn this option on only when chasing down + * a TypeParamRef instantiation error. See comment in Types.TypeVar.instantiate. + */ + inline val debugCheckConstraintsClosed = false + + /** Check that no type appearing as the info of a SymDenotation contains + * skolem types. + */ + inline val checkNoSkolemsInInfo = false + + /** Check that Name#toString is not called directly from backend by analyzing + * the stack trace of each toString call on names. This is very expensive, + * so not suitable for continuous testing. But it can be used to find a problem + * when running a specific test. + */ + inline val checkBackendNames = false + + /** Check that re-used type comparers are in their initialization state */ + inline val checkTypeComparerReset = false + + /** Type comparer will fail with an assert if the upper bound + * of a constrained parameter becomes Nothing. This should be turned + * on only for specific debugging as normally instantiation to Nothing + * is not an error condition. + */ + inline val failOnInstantiationToNothing = false + + /** Enable noDoubleDef checking if option "-YnoDoubleDefs" is set. + * The reason to have an option as well as the present global switch is + * that the noDoubleDef checking is done in a hotspot, and we do not + * want to incur the overhead of checking an option each time. + */ + inline val checkNoDoubleBindings = true + + /** Check positions for consistency after parsing */ + inline val checkPositions = true + + /** Check that typed trees don't point to untyped ones */ + inline val checkTreesConsistent = false + + /** Show subtype traces for all deep subtype recursions */ + inline val traceDeepSubTypeRecursions = false + + /** When explaining subtypes and this flag is set, also show the classes of the compared types. 
*/ + inline val verboseExplainSubtype = false + + /** If this flag is set, take the fast path when comparing same-named type-aliases and types */ + inline val fastPathForRefinedSubtype = true + + /** If this flag is set, and we compute `T1[X1]` & `T2[X2]` as a new + * upper bound of a constrained parameter, try to align the arguments by computing + * `S1 =:= S2` (which might instantiate type parameters). + * This rule is contentious because it cuts the constraint set. + * + * For more info, see the comment in `TypeComparer#glbArgs`. + */ + inline val alignArgsInAnd = true + + /** If this flag is set, higher-kinded applications are checked for validity + */ + inline val checkHKApplications = false + + /** If this flag is set, method types are checked for valid parameter references + */ + inline val checkMethodTypes = false + + /** If this flag is set, it is checked that TypeRefs don't refer directly + * to themselves. + */ + inline val checkTypeRefCycles = false + + /** If this flag is set, we check that types assigned to trees are error types only + * if some error was already reported. There are complicicated scenarios where this + * is not true. An example is TestNonCyclic in posTwice. If we remove the + * first (unused) import `import dotty.tools.dotc.core.Types.Type` in `CompilationUnit`, + * we end up assigning a CyclicReference error type to an import expression `annotation` + * before the cyclic reference is reported. What happens is that the error was reported + * as a result of a completion in a not-yet committed typerstate. So we cannot enforce + * this in all circumstances. But since it is almost always true it is useful to + * keep the Config option for debugging. + */ + inline val checkUnreportedErrors = false + + /** If this flag is set, it is checked that class type parameters are + * only references with NoPrefix or ThisTypes as prefixes. 
This option + * is usually disabled, because there are still some legitimate cases where + * this can arise (e.g. for pos/Map.scala, in LambdaType.integrate). + */ + inline val checkTypeParamRefs = false + + /** The recursion depth for showing a summarized string */ + inline val summarizeDepth = 2 + + /** Check that variances of lambda arguments match the + * variance of the underlying lambda class. + */ + inline val checkLambdaVariance = false + + /** Check that certain types cannot be created in erasedTypes phases. + * Note: Turning this option on will get some false negatives, since it is + * possible that And/Or types are still created during erasure as the result + * of some operation on an existing type. + */ + inline val checkUnerased = false + + /** Check that atoms-based comparisons match regular comparisons that do not + * take atoms into account. The two have to give the same results, since + * atoms comparison is intended to be just an optimization. + */ + inline val checkAtomsComparisons = false + + /** In `derivedSelect`, rewrite + * + * (S & T)#A --> S#A & T#A + * (S | T)#A --> S#A | T#A + * + * Not sure whether this is useful. Preliminary measurements show a slowdown of about + * 7% for the build when this option is enabled. + */ + inline val splitProjections = false + + /** If this flag is on, always rewrite an application `S[Ts]` where `S` is an alias for + * `[Xs] -> U` to `[Xs := Ts]U`. + * Turning this flag on was observed to give a ~6% speedup on the JUnit test suite. + */ + inline val simplifyApplications = true + + /** Assume -indent by default */ + inline val defaultIndent = true + + /** If set, prints a trace of all symbol completions */ + inline val showCompletions = false + + /** If set, method results that are context functions are flattened by adding + * the parameters of the context function results to the methods themselves. + * This is an optimization that reduces closure allocations. 
+ */ + inline val flattenContextFunctionResults = true + + /** If set, enables tracing */ + inline val tracingEnabled = false + + /** Initial capacity of the uniques HashMap. + * Note: This should be a power of two to work with util.HashSet + */ + inline val initialUniquesCapacity = 0x8000 + + /** How many recursive calls to NamedType#underlying are performed before logging starts. */ + inline val LogPendingUnderlyingThreshold = 50 + + /** How many recursive calls to isSubType are performed before logging starts. */ + inline val LogPendingSubTypesThreshold = 50 + + /** How many recursive calls to findMember are performed before logging names starts + * Note: this threshold has to be chosen carefully. Too large, and programs + * like tests/pos/IterableSelfRec go into polynomial (or even exponential?) + * compile time slowdown. Too small and normal programs will cause the compiler to + * do inefficient operations on findMember. The current value is determined + * so that (1) IterableSelfRec still compiles in reasonable time (< 10sec) (2) Compiling + * dotty itself only causes small pending names lists to be generated (we measured + * at max 6 elements) and these lists are never searched with contains. + */ + inline val LogPendingFindMemberThreshold = 9 + + /** When in IDE, turn StaleSymbol errors into warnings instead of crashing */ + inline val ignoreStaleInIDE = true + + /** If true, `Denotation#asSeenFrom` is allowed to return an existing + * `SymDenotation` instead of allocating a new `SingleDenotation` if + * the two would only differ in their `prefix` (SymDenotation always + * have `NoPrefix` as their prefix). + * This is done for performance reasons: when compiling Dotty itself this + * reduces the number of allocated denotations by ~50%. + */ + inline val reuseSymDenotations = true + + /** If `checkLevelsOnConstraints` is true, check levels of type variables + * and create fresh ones as needed when bounds are first entered intot he constraint. 
+ * If `checkLevelsOnInstantiation` is true, allow level-incorrect constraints but + * fix levels on type variable instantiation. + */ + inline val checkLevelsOnConstraints = false + inline val checkLevelsOnInstantiation = true + + /** If true, print capturing types in the form `{c} T`. + * If false, print them in the form `T @retains(c)`. + */ + inline val printCaptureSetsAsPrefix = true + + /** If true, allow mappping capture set variables under captureChecking with maps that are neither + * bijective nor idempotent. We currently do now know how to do this correctly in all + * cases, though. + */ + inline val ccAllowUnsoundMaps = false +} diff --git a/tests/pos-with-compiler-cc/dotc/config/Feature.scala b/tests/pos-with-compiler-cc/dotc/config/Feature.scala new file mode 100644 index 000000000000..c482bbe0911f --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/Feature.scala @@ -0,0 +1,168 @@ +package dotty.tools +package dotc +package config + +import core._ +import Contexts._, Symbols._, Names._ +import StdNames.nme +import Decorators.* +import util.{SrcPos, NoSourcePosition} +import SourceVersion._ +import reporting.Message +import NameKinds.QualifiedName +import language.experimental.pureFunctions + +object Feature: + + def experimental(str: PreName): TermName = + QualifiedName(nme.experimental, str.toTermName) + + private def deprecated(str: PreName): TermName = + QualifiedName(nme.deprecated, str.toTermName) + + private val namedTypeArguments = experimental("namedTypeArguments") + private val genericNumberLiterals = experimental("genericNumberLiterals") + val scala2macros = experimental("macros") + + val dependent = experimental("dependent") + val erasedDefinitions = experimental("erasedDefinitions") + val symbolLiterals = deprecated("symbolLiterals") + val fewerBraces = experimental("fewerBraces") + val saferExceptions = experimental("saferExceptions") + val pureFunctions = experimental("pureFunctions") + val captureChecking = 
experimental("captureChecking") + + val globalOnlyImports: Set[TermName] = Set(pureFunctions, captureChecking) + + /** Is `feature` enabled by by a command-line setting? The enabling setting is + * + * -language:feature + * + * where is the fully qualified name of `owner`, followed by a ".", + * but subtracting the prefix `scala.language.` at the front. + */ + def enabledBySetting(feature: TermName)(using Context): Boolean = + ctx.base.settings.language.value.contains(feature.toString) + + /** Is `feature` enabled by by an import? This is the case if the feature + * is imported by a named import + * + * import owner.feature + * + * and there is no visible nested import that excludes the feature, as in + * + * import owner.{ feature => _ } + */ + def enabledByImport(feature: TermName)(using Context): Boolean = + //atPhase(typerPhase) { + val info = ctx.importInfo + info != null && info.featureImported(feature) + //} + + /** Is `feature` enabled by either a command line setting or an import? + * @param feature The name of the feature + * @param owner The prefix symbol (nested in `scala.language`) where the + * feature is defined. + */ + def enabled(feature: TermName)(using Context): Boolean = + enabledBySetting(feature) || enabledByImport(feature) + + /** Is auto-tupling enabled? */ + def autoTuplingEnabled(using Context): Boolean = !enabled(nme.noAutoTupling) + + def dynamicsEnabled(using Context): Boolean = enabled(nme.dynamics) + + def dependentEnabled(using Context) = enabled(dependent) + + def namedTypeArgsEnabled(using Context) = enabled(namedTypeArguments) + + def genericNumberLiteralsEnabled(using Context) = enabled(genericNumberLiterals) + + def scala2ExperimentalMacroEnabled(using Context) = enabled(scala2macros) + + /** Is pureFunctions enabled for this compilation unit? 
*/ + def pureFunsEnabled(using Context) = + enabledBySetting(pureFunctions) + || ctx.compilationUnit.knowsPureFuns + || ccEnabled + + /** Is captureChecking enabled for this compilation unit? */ + def ccEnabled(using Context) = + enabledBySetting(captureChecking) + || ctx.compilationUnit.needsCaptureChecking + + /** Is pureFunctions enabled for any of the currently compiled compilation units? */ + def pureFunsEnabledSomewhere(using Context) = + enabledBySetting(pureFunctions) + || ctx.run != null && ctx.run.nn.pureFunsImportEncountered + || ccEnabledSomewhere + + /** Is captureChecking enabled for any of the currently compiled compilation units? */ + def ccEnabledSomewhere(using Context) = + enabledBySetting(captureChecking) + || ctx.run != null && ctx.run.nn.ccImportEncountered + + def sourceVersionSetting(using Context): SourceVersion = + SourceVersion.valueOf(ctx.settings.source.value) + + def sourceVersion(using Context): SourceVersion = + ctx.compilationUnit.sourceVersion match + case Some(v) => v + case none => sourceVersionSetting + + def migrateTo3(using Context): Boolean = sourceVersion == `3.0-migration` + + /** If current source migrates to `version`, issue given warning message + * and return `true`, otherwise return `false`. 
+ */ + def warnOnMigration(msg: Message, pos: SrcPos, version: SourceVersion)(using Context): Boolean = + if sourceVersion.isMigrating && sourceVersion.stable == version + || (version == `3.0` || version == `3.1`) && migrateTo3 + then + report.migrationWarning(msg, pos) + true + else + false + + def checkExperimentalFeature(which: String, srcPos: SrcPos, note: -> String = "")(using Context) = + if !isExperimentalEnabled then + report.error(i"Experimental $which may only be used with a nightly or snapshot version of the compiler$note", srcPos) + + def checkExperimentalDef(sym: Symbol, srcPos: SrcPos)(using Context) = + if !isExperimentalEnabled then + val symMsg = + if sym.hasAnnotation(defn.ExperimentalAnnot) then + i"$sym is marked @experimental" + else if sym.owner.hasAnnotation(defn.ExperimentalAnnot) then + i"${sym.owner} is marked @experimental" + else + i"$sym inherits @experimental" + report.error(s"$symMsg and therefore may only be used in an experimental scope.", srcPos) + + /** Check that experimental compiler options are only set for snapshot or nightly compiler versions. */ + def checkExperimentalSettings(using Context): Unit = + for setting <- ctx.settings.language.value + if setting.startsWith("experimental.") && setting != "experimental.macros" + do checkExperimentalFeature(s"feature $setting", NoSourcePosition) + + def isExperimentalEnabled(using Context): Boolean = + Properties.experimental && !ctx.settings.YnoExperimental.value + + /** Handle language import `import language..` if it is one + * of the global imports `pureFunctions` or `captureChecking`. In this case + * make the compilation unit's and current run's fields accordingly. 
+ * @return true iff import that was handled + */ + def handleGlobalLanguageImport(prefix: TermName, imported: Name)(using Context): Boolean = + val fullFeatureName = QualifiedName(prefix, imported.asTermName) + if fullFeatureName == pureFunctions then + ctx.compilationUnit.knowsPureFuns = true + if ctx.run != null then ctx.run.nn.pureFunsImportEncountered = true + true + else if fullFeatureName == captureChecking then + ctx.compilationUnit.needsCaptureChecking = true + if ctx.run != null then ctx.run.nn.ccImportEncountered = true + true + else + false +end Feature diff --git a/tests/pos-with-compiler-cc/dotc/config/JavaPlatform.scala b/tests/pos-with-compiler-cc/dotc/config/JavaPlatform.scala new file mode 100644 index 000000000000..2b2f35e49451 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/JavaPlatform.scala @@ -0,0 +1,69 @@ +package dotty.tools +package dotc +package config + +import io._ +import classpath.AggregateClassPath +import core._ +import Symbols._, Types._, Contexts._, StdNames._ +import Flags._ +import transform.ExplicitOuter, transform.SymUtils._ + +class JavaPlatform extends Platform { + + private var currentClassPath: Option[ClassPath] = None + + def classPath(using Context): ClassPath = { + if (currentClassPath.isEmpty) + currentClassPath = Some(new PathResolver().result) + val cp = currentClassPath.get + cp + } + + // The given symbol is a method with the right name and signature to be a runnable java program. 
+ def isMainMethod(sym: Symbol)(using Context): Boolean = + (sym.name == nme.main) && (sym.info match { + case MethodTpe(_, defn.ArrayOf(el) :: Nil, restpe) => el =:= defn.StringType && (restpe isRef defn.UnitClass) + case _ => false + }) + + /** Update classpath with a substituted subentry */ + def updateClassPath(subst: Map[ClassPath, ClassPath]): Unit = currentClassPath.get match { + case AggregateClassPath(entries) => + currentClassPath = Some(AggregateClassPath(entries map (e => subst.getOrElse(e, e)))) + case cp: ClassPath => + currentClassPath = Some(subst.getOrElse(cp, cp)) + } + + def rootLoader(root: TermSymbol)(using Context): SymbolLoader = new SymbolLoaders.PackageLoader(root, classPath) + + /** Is the SAMType `cls` also a SAM under the rules of the JVM? */ + def isSam(cls: ClassSymbol)(using Context): Boolean = + cls.isAllOf(NoInitsTrait) && + cls.superClass == defn.ObjectClass && + cls.directlyInheritedTraits.forall(_.is(NoInits)) && + !ExplicitOuter.needsOuterIfReferenced(cls) && + cls.typeRef.fields.isEmpty // Superaccessors already show up as abstract methods here, so no test necessary + + /** We could get away with excluding BoxedBooleanClass for the + * purpose of equality testing since it need not compare equal + * to anything but other booleans, but it should be present in + * case this is put to other uses. 
+ */ + def isMaybeBoxed(sym: ClassSymbol)(using Context): Boolean = { + val d = defn + import d._ + (sym == ObjectClass) || + (sym == JavaSerializableClass) || + (sym == ComparableClass) || + (sym derivesFrom BoxedNumberClass) || + (sym derivesFrom BoxedCharClass) || + (sym derivesFrom BoxedBooleanClass) + } + + def shouldReceiveJavaSerializationMethods(sym: ClassSymbol)(using Context): Boolean = + true + + def newClassLoader(bin: AbstractFile)(using Context): SymbolLoader = + new ClassfileLoader(bin) +} diff --git a/tests/pos-with-compiler-cc/dotc/config/OutputDirs.scala b/tests/pos-with-compiler-cc/dotc/config/OutputDirs.scala new file mode 100644 index 000000000000..0411c5604768 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/OutputDirs.scala @@ -0,0 +1,117 @@ +package dotty.tools +package dotc +package config + +import scala.language.unsafeNulls + +import io._ + +/** A class for holding mappings from source directories to + * their output location. This functionality can be accessed + * only programmatically. The command line compiler uses a + * single output location, but tools may use this functionality + * to set output location per source directory. + */ +class OutputDirs { + /** Pairs of source directory - destination directory. */ + private var outputDirs: List[(AbstractFile, AbstractFile)] = Nil + + /** If this is not None, the output location where all + * classes should go. + */ + private var singleOutDir: Option[AbstractFile] = None + + /** Add a destination directory for sources found under srcdir. + * Both directories should exits. + */ + def add(srcDir: String, outDir: String): Unit = + add(checkDir(AbstractFile.getDirectory(srcDir), srcDir), + checkDir(AbstractFile.getDirectory(outDir), outDir)) + + /** Check that dir is exists and is a directory. 
*/ + private def checkDir(dir: AbstractFile, name: String, allowJar: Boolean = false): AbstractFile = ( + if (dir != null && dir.isDirectory) + dir + // was: else if (allowJar && dir == null && Path.isJarOrZip(name, false)) + else if (allowJar && dir == null && Jar.isJarOrZip(File(name), false)) + new PlainFile(Path(name)) + else + throw new FatalError(name + " does not exist or is not a directory")) + + /** Set the single output directory. From now on, all files will + * be dumped in there, regardless of previous calls to 'add'. + */ + def setSingleOutput(outDir: String): Unit = { + val dst = AbstractFile.getDirectory(outDir) + setSingleOutput(checkDir(dst, outDir, true)) + } + + def getSingleOutput: Option[AbstractFile] = singleOutDir + + /** Set the single output directory. From now on, all files will + * be dumped in there, regardless of previous calls to 'add'. + */ + def setSingleOutput(dir: AbstractFile): Unit = + singleOutDir = Some(dir) + + def add(src: AbstractFile, dst: AbstractFile): Unit = { + singleOutDir = None + outputDirs ::= ((src, dst)) + } + + /** Return the list of source-destination directory pairs. */ + def outputs: List[(AbstractFile, AbstractFile)] = outputDirs + + /** Return the output directory for the given file. + */ + def outputDirFor(src: AbstractFile): AbstractFile = { + def isBelow(srcDir: AbstractFile, outDir: AbstractFile) = + src.path.startsWith(srcDir.path) + + singleOutDir match { + case Some(d) => d + case None => + (outputs find (isBelow _).tupled) match { + case Some((_, d)) => d + case _ => + throw new FatalError("Could not find an output directory for " + + src.path + " in " + outputs) + } + } + } + + /** Return the source file path(s) which correspond to the given + * classfile path and SourceFile attribute value, subject to the + * condition that source files are arranged in the filesystem + * according to Java package layout conventions. 
+ * + * The given classfile path must be contained in at least one of + * the specified output directories. If it does not then this + * method returns Nil. + * + * Note that the source file is not required to exist, so assuming + * a valid classfile path this method will always return a list + * containing at least one element. + * + * Also that if two or more source path elements target the same + * output directory there will be two or more candidate source file + * paths. + */ + def srcFilesFor(classFile: AbstractFile, srcPath: String): List[AbstractFile] = { + def isBelow(srcDir: AbstractFile, outDir: AbstractFile) = + classFile.path.startsWith(outDir.path) + + singleOutDir match { + case Some(d) => + d match { + case _: VirtualDirectory | _: io.ZipArchive => Nil + case _ => List(d.lookupPathUnchecked(srcPath, false)) + } + case None => + (outputs filter (isBelow _).tupled) match { + case Nil => Nil + case matches => matches.map(_._1.lookupPathUnchecked(srcPath, false)) + } + } + } +} diff --git a/tests/pos-with-compiler-cc/dotc/config/PathResolver.scala b/tests/pos-with-compiler-cc/dotc/config/PathResolver.scala new file mode 100644 index 000000000000..afa30e38dc2a --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/PathResolver.scala @@ -0,0 +1,268 @@ +package dotty.tools +package dotc +package config + +import scala.language.unsafeNulls + +import WrappedProperties.AccessControl +import io.{ClassPath, Directory, Path} +import classpath.{AggregateClassPath, ClassPathFactory, JrtClassPath} +import ClassPath.split +import PartialFunction.condOpt +import core.Contexts._ +import Settings._ +import dotty.tools.io.File + +object PathResolver { + + // Imports property/environment functions which suppress + // security exceptions. + import AccessControl._ + + def firstNonEmpty(xs: String*): String = xs find (_ != "") getOrElse "" + + /** Map all classpath elements to absolute paths and reconstruct the classpath. 
+ */ + def makeAbsolute(cp: String): String = ClassPath.map(cp, x => Path(x).toAbsolute.path) + + /** pretty print class path + */ + def ppcp(s: String): String = split(s) match { + case Nil => "" + case Seq(x) => x + case xs => xs.map("\n" + _).mkString + } + + /** Values found solely by inspecting environment or property variables. + */ + object Environment { + private def searchForBootClasspath = ( + systemProperties find (_._1 endsWith ".boot.class.path") map (_._2) getOrElse "" + ) + + /** Environment variables which java pays attention to so it + * seems we do as well. + */ + def classPathEnv: String = envOrElse("CLASSPATH", "") + def sourcePathEnv: String = envOrElse("SOURCEPATH", "") + + def javaBootClassPath: String = propOrElse("sun.boot.class.path", searchForBootClasspath) + + def javaExtDirs: String = propOrEmpty("java.ext.dirs") + def scalaHome: String = propOrEmpty("scala.home") + def scalaExtDirs: String = propOrEmpty("scala.ext.dirs") + + /** The java classpath and whether to use it. + */ + def javaUserClassPath: String = propOrElse("java.class.path", "") + def useJavaClassPath: Boolean = propOrFalse("scala.usejavacp") + + override def toString: String = s""" + |object Environment { + | scalaHome = $scalaHome (useJavaClassPath = $useJavaClassPath) + | javaBootClassPath = <${javaBootClassPath.length} chars> + | javaExtDirs = ${ppcp(javaExtDirs)} + | javaUserClassPath = ${ppcp(javaUserClassPath)} + | scalaExtDirs = ${ppcp(scalaExtDirs)} + |}""".trim.stripMargin + } + + /** Default values based on those in Environment as interpreted according + * to the path resolution specification. 
+ */ + object Defaults { + def scalaSourcePath: String = Environment.sourcePathEnv + def javaBootClassPath: String = Environment.javaBootClassPath + def javaUserClassPath: String = Environment.javaUserClassPath + def javaExtDirs: String = Environment.javaExtDirs + def useJavaClassPath: Boolean = Environment.useJavaClassPath + + def scalaHome: String = Environment.scalaHome + def scalaHomeDir: Directory = Directory(scalaHome) + def scalaHomeExists: Boolean = scalaHomeDir.isDirectory + def scalaLibDir: Directory = (scalaHomeDir / "lib").toDirectory + def scalaClassesDir: Directory = (scalaHomeDir / "classes").toDirectory + + def scalaLibAsJar: File = (scalaLibDir / "scala-library.jar").toFile + def scalaLibAsDir: Directory = (scalaClassesDir / "library").toDirectory + + def scalaLibDirFound: Option[Directory] = + if (scalaLibAsJar.isFile) Some(scalaLibDir) + else if (scalaLibAsDir.isDirectory) Some(scalaClassesDir) + else None + + def scalaLibFound: String = + if (scalaLibAsJar.isFile) scalaLibAsJar.path + else if (scalaLibAsDir.isDirectory) scalaLibAsDir.path + else "" + + // XXX It must be time for someone to figure out what all these things + // are intended to do. This is disabled here because it was causing all + // the scala jars to end up on the classpath twice: one on the boot + // classpath as set up by the runner (or regular classpath under -nobootcp) + // and then again here. 
+ def scalaBootClassPath: String = "" + // scalaLibDirFound match { + // case Some(dir) if scalaHomeExists => + // val paths = ClassPath expandDir dir.path + // join(paths: _*) + // case _ => "" + // } + + def scalaExtDirs: String = Environment.scalaExtDirs + + def scalaPluginPath: String = (scalaHomeDir / "misc" / "scala-devel" / "plugins").path + + override def toString: String = """ + |object Defaults { + | scalaHome = %s + | javaBootClassPath = %s + | scalaLibDirFound = %s + | scalaLibFound = %s + | scalaBootClassPath = %s + | scalaPluginPath = %s + |}""".trim.stripMargin.format( + scalaHome, + ppcp(javaBootClassPath), + scalaLibDirFound, scalaLibFound, + ppcp(scalaBootClassPath), ppcp(scalaPluginPath) + ) + } + + def fromPathString(path: String)(using Context): ClassPath = { + val settings = ctx.settings.classpath.update(path) + inContext(ctx.fresh.setSettings(settings)) { + new PathResolver().result + } + } + + /** Show values in Environment and Defaults when no argument is provided. + * Otherwise, show values in Calculated as if those options had been given + * to a scala runner. 
+ */ + def main(args: Array[String]): Unit = + if (args.isEmpty) { + println(Environment) + println(Defaults) + } + else inContext(ContextBase().initialCtx) { + val ArgsSummary(sstate, rest, errors, warnings) = + ctx.settings.processArguments(args.toList, true, ctx.settingsState) + errors.foreach(println) + val pr = inContext(ctx.fresh.setSettings(sstate)) { + new PathResolver() + } + println(" COMMAND: 'scala %s'".format(args.mkString(" "))) + println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" "))) + + pr.result match { + case cp: AggregateClassPath => + println(s"ClassPath has ${cp.aggregates.size} entries and results in:\n${cp.asClassPathStrings}") + } + } +} + +import PathResolver.{Defaults, ppcp} + +class PathResolver(using c: Context) { + import c.base.settings + + private val classPathFactory = new ClassPathFactory + + private def cmdLineOrElse(name: String, alt: String) = + commandLineFor(name) match { + case Some("") | None => alt + case Some(x) => x + } + + private def commandLineFor(s: String): Option[String] = condOpt(s) { + case "javabootclasspath" => settings.javabootclasspath.value + case "javaextdirs" => settings.javaextdirs.value + case "bootclasspath" => settings.bootclasspath.value + case "extdirs" => settings.extdirs.value + case "classpath" | "cp" => settings.classpath.value + case "sourcepath" => settings.sourcepath.value + } + + /** Calculated values based on any given command line options, falling back on + * those in Defaults. 
+ */ + object Calculated { + def scalaHome: String = Defaults.scalaHome + def useJavaClassPath: Boolean = settings.usejavacp.value || Defaults.useJavaClassPath + def javaBootClassPath: String = cmdLineOrElse("javabootclasspath", Defaults.javaBootClassPath) + def javaExtDirs: String = cmdLineOrElse("javaextdirs", Defaults.javaExtDirs) + def javaUserClassPath: String = if (useJavaClassPath) Defaults.javaUserClassPath else "" + def scalaBootClassPath: String = cmdLineOrElse("bootclasspath", Defaults.scalaBootClassPath) + def scalaExtDirs: String = cmdLineOrElse("extdirs", Defaults.scalaExtDirs) + /** Scaladoc doesn't need any bootstrapping, otherwise will create errors such as: + * [scaladoc] ../scala-trunk/src/reflect/scala/reflect/macros/Reifiers.scala:89: error: object api is not a member of package reflect + * [scaladoc] case class ReificationException(val pos: reflect.api.PositionApi, val msg: String) extends Throwable(msg) + * [scaladoc] ^ + * Because bootstrapping looks at the sourcepath and creates the package "reflect" in "" it will cause the + * typedIdentifier to pick .reflect instead of the .scala.reflect package. Thus, no bootstrapping for scaladoc! + */ + def sourcePath: String = cmdLineOrElse("sourcepath", Defaults.scalaSourcePath) + + def userClassPath: String = + if (!settings.classpath.isDefault) settings.classpath.value + else sys.env.getOrElse("CLASSPATH", ".") + + import classPathFactory._ + + // Assemble the elements! + def basis: List[Traversable[ClassPath]] = + val release = Option(ctx.settings.javaOutputVersion.value).filter(_.nonEmpty) + + List( + JrtClassPath(release), // 1. The Java 9+ classpath (backed by the jrt:/ virtual system, if available) + classesInPath(javaBootClassPath), // 2. The Java bootstrap class path. + contentsOfDirsInPath(javaExtDirs), // 3. The Java extension class path. + classesInExpandedPath(javaUserClassPath), // 4. The Java application class path. + classesInPath(scalaBootClassPath), // 5. The Scala boot class path. 
+ contentsOfDirsInPath(scalaExtDirs), // 6. The Scala extension class path. + classesInExpandedPath(userClassPath), // 7. The Scala application class path. + sourcesInPath(sourcePath) // 8. The Scala source path. + ) + + lazy val containers: List[ClassPath] = basis.flatten.distinct + + override def toString: String = """ + |object Calculated { + | scalaHome = %s + | javaBootClassPath = %s + | javaExtDirs = %s + | javaUserClassPath = %s + | useJavaClassPath = %s + | scalaBootClassPath = %s + | scalaExtDirs = %s + | userClassPath = %s + | sourcePath = %s + |}""".trim.stripMargin.format( + scalaHome, + ppcp(javaBootClassPath), ppcp(javaExtDirs), ppcp(javaUserClassPath), + useJavaClassPath, + ppcp(scalaBootClassPath), ppcp(scalaExtDirs), ppcp(userClassPath), + ppcp(sourcePath) + ) + } + + def containers: List[ClassPath] = Calculated.containers + + lazy val result: ClassPath = { + val cp = AggregateClassPath(containers.toIndexedSeq) + + if (settings.YlogClasspath.value) { + Console.println("Classpath built from " + settings.toConciseString(ctx.settingsState)) + Console.println("Defaults: " + PathResolver.Defaults) + Console.println("Calculated: " + Calculated) + + val xs = (Calculated.basis drop 2).flatten.distinct + println("After java boot/extdirs classpath has %d entries:" format xs.size) + xs foreach (x => println(" " + x)) + } + cp + } + + def asURLs: Seq[java.net.URL] = result.asURLs +} diff --git a/tests/pos-with-compiler-cc/dotc/config/Platform.scala b/tests/pos-with-compiler-cc/dotc/config/Platform.scala new file mode 100644 index 000000000000..0faacf1bcebb --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/Platform.scala @@ -0,0 +1,46 @@ +package dotty.tools +package dotc +package config + +import io.{ClassPath, AbstractFile} +import core.Contexts._, core.Symbols._ +import core.SymbolLoader +import core.StdNames.nme +import core.Flags.Module + +/** The platform dependent pieces of Global. + */ +abstract class Platform { + + /** The root symbol loader. 
/** The platform dependent pieces of Global.
 *  Concrete platforms (e.g. the JVM platform, or `SJSPlatform` which extends it)
 *  supply classpath handling, symbol loading, and back-end specific predicates.
 */
abstract class Platform {

  /** The root symbol loader. */
  def rootLoader(root: TermSymbol)(using Context): SymbolLoader

  /** The compiler classpath. */
  def classPath(using Context): ClassPath

  /** Update classpath with a substitution that maps entries to entries */
  def updateClassPath(subst: Map[ClassPath, ClassPath]): Unit

  /** Any platform-specific phases. */
  //def platformPhases: List[SubComponent]

  /** Is the SAMType `cls` also a SAM under the rules of the platform? */
  def isSam(cls: ClassSymbol)(using Context): Boolean

  /** The various ways a boxed primitive might materialize at runtime. */
  def isMaybeBoxed(sym: ClassSymbol)(using Context): Boolean

  /** Is the given class symbol eligible for Java serialization-specific methods? */
  def shouldReceiveJavaSerializationMethods(sym: ClassSymbol)(using Context): Boolean

  /** Create a new class loader to load class file `bin` */
  def newClassLoader(bin: AbstractFile)(using Context): SymbolLoader

  /** The given symbol is a method with the right name and signature to be a runnable program. */
  def isMainMethod(sym: Symbol)(using Context): Boolean

  /** The given class has a main method.
   *  True iff some alternative of member `main` satisfies `isMainMethod` and is
   *  either static or owned by a module (object) — i.e. callable without an instance.
   */
  final def hasMainMethod(sym: Symbol)(using Context): Boolean =
    sym.info.member(nme.main).hasAltWith(d =>
      isMainMethod(d.symbol) && (sym.is(Module) || d.symbol.isStatic))
}
package dotty.tools.dotc.config

/** Registry of per-subsystem debug printers.
 *
 *  Each subsystem gets a `val` below. By default every one of them is the
 *  silent `noPrinter`; flip an individual `val` to `default` (or a custom
 *  `Printer`) while debugging to enable tracing for just that subsystem.
 */
object Printers {

  /** A printer that writes its message, followed by a newline, to standard output. */
  class Printer {
    def println(msg: => String): Unit = {
      val out = System.out.nn
      out.println(msg)
    }
  }

  /** The silent printer. `println` is an `inline` no-op, so at call sites the
   *  by-name `msg` argument is never evaluated — disabled tracing costs nothing.
   */
  object noPrinter extends Printer {
    inline override def println(msg: => String): Unit = ()
  }

  /** A printer that actually prints; assign it to one of the vals below to enable it. */
  val default = new Printer

  // One switch per compiler subsystem — all silent unless changed for debugging.
  val capt = noPrinter
  val constr = noPrinter
  val core = noPrinter
  val checks = noPrinter
  val config = noPrinter
  val cyclicErrors = noPrinter
  val debug = noPrinter
  val derive = noPrinter
  val desugar = noPrinter
  val scaladoc = noPrinter
  val exhaustivity = noPrinter
  val gadts = noPrinter
  val gadtsConstr = noPrinter
  val hk = noPrinter
  val implicits = noPrinter
  val implicitsDetailed = noPrinter
  val lexical = noPrinter
  val init = noPrinter
  val inlining = noPrinter
  val interactiv = noPrinter
  val matchTypes = noPrinter
  val nullables = noPrinter
  val overload = noPrinter
  val patmatch = noPrinter
  val pickling = noPrinter
  val quotePickling = noPrinter
  val plugins = noPrinter
  val recheckr = noPrinter
  val refcheck = noPrinter
  val simplify = noPrinter
  val staging = noPrinter
  val subtyping = noPrinter
  val tailrec = noPrinter
  val transforms = noPrinter
  val typr = noPrinter
  val unapp = noPrinter
  val variances = noPrinter
}
package dotty.tools
package dotc
package config

import scala.language.unsafeNulls

import scala.annotation.internal.sharable

import java.io.IOException
import java.util.jar.Attributes.{ Name => AttributeName }
import java.nio.charset.StandardCharsets

/** Loads `library.properties` from the jar. */
object Properties extends PropertiesTrait {
  protected def propCategory: String = "compiler"
  protected def pickJarBasedOn: Class[PropertiesTrait] = classOf[PropertiesTrait]

  /** Scala manifest attributes.
   */
  @sharable val ScalaCompilerVersion: AttributeName = new AttributeName("Scala-Compiler-Version")
}

/** Access to version/build metadata (from the properties file packaged in the
 *  jar that contains `pickJarBasedOn`) plus convenience wrappers around system
 *  properties and environment variables.
 */
trait PropertiesTrait {
  protected def propCategory: String      // specializes the remainder of the values
  protected def pickJarBasedOn: Class[?]  // props file comes from jar containing this

  /** The name of the properties file */
  protected val propFilename: String = "/" + propCategory + ".properties"

  /** The loaded properties. Missing resource yields an empty `Properties`
   *  (the `stream ne null` guard), never a failure.
   */
  @sharable protected lazy val scalaProps: java.util.Properties = {
    val props = new java.util.Properties
    val stream = pickJarBasedOn getResourceAsStream propFilename
    if (stream ne null)
      quietlyDispose(props load stream, stream.close)

    props
  }

  /** Run `action`, always attempting `disposal` afterwards; an `IOException`
   *  raised by `disposal` is swallowed, but any exception from `action` propagates.
   */
  private def quietlyDispose(action: => Unit, disposal: => Unit) =
    try { action }
    finally
      try { disposal }
      catch { case _: IOException => }

  // Wrappers around JVM system properties.
  def propIsSet(name: String): Boolean = System.getProperty(name) != null
  def propIsSetTo(name: String, value: String): Boolean = propOrNull(name) == value
  def propOrElse(name: String, alt: String): String = System.getProperty(name, alt)
  def propOrEmpty(name: String): String = propOrElse(name, "")
  // NOTE: deliberately returns null for a missing property (Java-interop style);
  // prefer propOrNone in new code.
  def propOrNull(name: String): String = propOrElse(name, null)
  def propOrNone(name: String): Option[String] = Option(propOrNull(name))
  def propOrFalse(name: String): Boolean = propOrNone(name) exists (x => List("yes", "on", "true") contains x.toLowerCase)
  def setProp(name: String, value: String): String = System.setProperty(name, value)
  def clearProp(name: String): String = System.clearProperty(name)

  // Wrappers around environment variables.
  def envOrElse(name: String, alt: String): String = Option(System getenv name) getOrElse alt
  def envOrNone(name: String): Option[String] = Option(System getenv name)

  // for values based on propFilename
  def scalaPropOrElse(name: String, alt: String): String = scalaProps.getProperty(name, alt)
  def scalaPropOrEmpty(name: String): String = scalaPropOrElse(name, "")
  def scalaPropOrNone(name: String): Option[String] = Option(scalaProps.getProperty(name))

  /** Either the development or release version if known, otherwise
   *  the empty string.
   */
  def versionNumberString: String = scalaPropOrEmpty("version.number")

  /** The version number of the jar this was loaded from,
   *  or `"(unknown)"` if it cannot be determined.
   *  SNAPSHOT/NIGHTLY builds get a "-git-<hash>" suffix for traceability.
   */
  val simpleVersionString: String = {
    val v = scalaPropOrElse("version.number", "(unknown)")
    v + (
      if (v.contains("SNAPSHOT") || v.contains("NIGHTLY"))
        "-git-" + scalaPropOrElse("git.hash", "(unknown)")
      else
        ""
    )
  }

  /** The version number of the jar this was loaded from plus `"version "` prefix,
   *  or `"version (unknown)"` if it cannot be determined.
   */
  val versionString: String = "version " + simpleVersionString

  /** Whether the current version of compiler is experimental
   *
   *  1. Snapshot, nightly releases and non-bootstrapped compiler are experimental.
   *  2. Features supported by experimental versions of the compiler:
   *    - research plugins
   */
  val experimental: Boolean = versionString.contains("SNAPSHOT") || versionString.contains("NIGHTLY") || versionString.contains("nonbootstrapped")

  val copyrightString: String = scalaPropOrElse("copyright.string", "(c) 2002-2017 LAMP/EPFL")

  /** This is the encoding to use reading in source files, overridden with -encoding
   *  Note that it uses "prop" i.e. looks in the scala jar, not the system properties.
   */
  def sourceEncoding: String = scalaPropOrElse("file.encoding", StandardCharsets.UTF_8.name)
  def sourceReader: String = scalaPropOrElse("source.reader", "scala.tools.nsc.io.SourceReader")

  /** This is the default text encoding, overridden (unreliably) with
   *  `JAVA_OPTS="-Dfile.encoding=Foo"`
   */
  def encodingString: String = propOrElse("file.encoding", StandardCharsets.UTF_8.name)

  /** The default end of line character.
   */
  def lineSeparator: String = propOrElse("line.separator", "\n")

  /** Various well-known properties. All default to "" when unset.
   */
  def javaClassPath: String = propOrEmpty("java.class.path")
  def javaHome: String = propOrEmpty("java.home")
  def javaVendor: String = propOrEmpty("java.vendor")
  def javaVersion: String = propOrEmpty("java.version")
  def javaVmInfo: String = propOrEmpty("java.vm.info")
  def javaVmName: String = propOrEmpty("java.vm.name")
  def javaVmVendor: String = propOrEmpty("java.vm.vendor")
  def javaVmVersion: String = propOrEmpty("java.vm.version")
  def osName: String = propOrEmpty("os.name")
  def scalaHome: String = propOrEmpty("scala.home")
  def tmpDir: String = propOrEmpty("java.io.tmpdir")
  def userDir: String = propOrEmpty("user.dir")
  def userHome: String = propOrEmpty("user.home")
  def userName: String = propOrEmpty("user.name")

  /** Some derived values.
   */
  def isWin: Boolean = osName startsWith "Windows"
  // NOTE(review): detects Apple-vendored JVMs only; modern macOS JVMs may report
  // other vendors — confirm whether callers rely on this as an "is macOS" check.
  def isMac: Boolean = javaVendor startsWith "Apple"

  // This is looking for javac, tools.jar, etc.
  // Tries JDK_HOME first, then the more common but likely jre JAVA_HOME,
  // and finally the system property based javaHome.
  def jdkHome: String = envOrElse("JDK_HOME", envOrElse("JAVA_HOME", javaHome))

  def versionMsg: String = "Scala %s %s -- %s".format(propCategory, versionString, copyrightString)
  def scalaCmd: String = if (isWin) "scala.bat" else "scala"
  def scalacCmd: String = if (isWin) "scalac.bat" else "scalac"
}
+ */ + override def shouldReceiveJavaSerializationMethods(sym: ClassSymbol)(using Context): Boolean = + !sym.isSubClass(jsDefinitions.JSAnyClass) +} diff --git a/tests/pos-with-compiler-cc/dotc/config/ScalaRelease.scala b/tests/pos-with-compiler-cc/dotc/config/ScalaRelease.scala new file mode 100644 index 000000000000..407171f1a0dd --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/ScalaRelease.scala @@ -0,0 +1,21 @@ +package dotty.tools.dotc.config + +enum ScalaRelease(val majorVersion: Int, val minorVersion: Int) extends Ordered[ScalaRelease]: + case Release3_0 extends ScalaRelease(3, 0) + case Release3_1 extends ScalaRelease(3, 1) + case Release3_2 extends ScalaRelease(3, 2) + + def show = s"$majorVersion.$minorVersion" + + def compare(that: ScalaRelease) = + val ord = summon[Ordering[(Int, Int)]] + ord.compare((majorVersion, minorVersion), (that.majorVersion, that.minorVersion)) + +object ScalaRelease: + def latest = Release3_1 + + def parse(name: String) = name match + case "3.0" => Some(Release3_0) + case "3.1" => Some(Release3_1) + case "3.2" => Some(Release3_2) + case _ => None diff --git a/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala b/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala new file mode 100644 index 000000000000..a2dba94ad9fc --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala @@ -0,0 +1,346 @@ +package dotty.tools.dotc +package config + +import scala.language.unsafeNulls + +import dotty.tools.dotc.config.PathResolver.Defaults +import dotty.tools.dotc.config.Settings.{Setting, SettingGroup} +import dotty.tools.dotc.config.SourceVersion +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.rewrites.Rewrites +import dotty.tools.io.{AbstractFile, Directory, JDK9Reflectors, PlainDirectory} + +import scala.util.chaining._ + +class ScalaSettings extends SettingGroup with AllScalaSettings + +object ScalaSettings: + // Keep synchronized with `classfileVersion` in `BCodeIdiomatic` + 
object ScalaSettings:
  // Keep synchronized with `classfileVersion` in `BCodeIdiomatic`
  private val minTargetVersion = 8
  private val maxTargetVersion = 19

  /** JVM target versions accepted by -Xunchecked-java-output-version ("8".."19"). */
  def supportedTargetVersions: List[String] =
    (minTargetVersion to maxTargetVersion).toList.map(_.toString)

  /** Versions accepted by -java-output-version: capped by the running JDK's major version. */
  def supportedReleaseVersions: List[String] =
    if scala.util.Properties.isJavaAtLeast("9") then
      val jdkVersion = JDK9Reflectors.runtimeVersionMajor(JDK9Reflectors.runtimeVersion()).intValue()
      val maxVersion = Math.min(jdkVersion, maxTargetVersion)
      (minTargetVersion to maxVersion).toList.map(_.toString)
    else List(minTargetVersion).map(_.toString)

  def supportedScalaReleaseVersions: List[String] =
    ScalaRelease.values.toList.map(_.show)

  def supportedSourceVersions: List[String] =
    SourceVersion.values.toList.map(_.toString)

  /** Default -classpath: the CLASSPATH environment variable, else ".". */
  def defaultClasspath: String = sys.env.getOrElse("CLASSPATH", ".")

  /** Default -pagewidth: the COLUMNS environment variable if numeric, else the
   *  width advertised by ANSICON on Windows, else 80.
   *  Fix: previously called bare `.toInt`, so a malformed COLUMNS or ANSICON
   *  value threw NumberFormatException during settings initialization; now
   *  malformed values fall back to the default width.
   */
  def defaultPageWidth: Int = {
    val defaultWidth = 80
    val columnsVar = System.getenv("COLUMNS")
    if columnsVar != null then columnsVar.toIntOption.getOrElse(defaultWidth)
    else if Properties.isWin then
      val ansiconVar = System.getenv("ANSICON") // eg. "142x32766 (142x26)"
      if ansiconVar != null && ansiconVar.matches("[0-9]+x.*") then
        ansiconVar.substring(0, ansiconVar.indexOf("x")).toIntOption.getOrElse(defaultWidth)
      else defaultWidth
    else defaultWidth
  }
/** The full compiler-facing settings surface: everything in the shared traits
 *  plus compiler-only options. Mixed into `ScalaSettings` (a `SettingGroup`).
 */
trait AllScalaSettings extends CommonScalaSettings, PluginSettings, VerboseSettings, WarningSettings, XSettings, YSettings:
  self: SettingGroup =>

  /* Path related settings */
  val semanticdbTarget: Setting[String] = PathSetting("-semanticdb-target", "Specify an alternative output directory for SemanticDB files.", "")

  val source: Setting[String] = ChoiceSetting("-source", "source version", "source version", ScalaSettings.supportedSourceVersions, SourceVersion.defaultSourceVersion.toString, aliases = List("--source"))
  val uniqid: Setting[Boolean] = BooleanSetting("-uniqid", "Uniquely tag all identifiers in debugging output.", aliases = List("--unique-id"))
  val rewrite: Setting[Option[Rewrites]] = OptionSetting[Rewrites]("-rewrite", "When used in conjunction with a `...-migration` source version, rewrites sources to migrate to new version.", aliases = List("--rewrite"))
  val fromTasty: Setting[Boolean] = BooleanSetting("-from-tasty", "Compile classes from tasty files. The arguments are .tasty or .jar files.", aliases = List("--from-tasty"))

  /* Syntax-variant settings (see also -rewrite above) */
  val newSyntax: Setting[Boolean] = BooleanSetting("-new-syntax", "Require `then` and `do` in control expressions.")
  val oldSyntax: Setting[Boolean] = BooleanSetting("-old-syntax", "Require `(...)` around conditions.")
  val indent: Setting[Boolean] = BooleanSetting("-indent", "Together with -rewrite, remove {...} syntax when possible due to significant indentation.")
  val noindent: Setting[Boolean] = BooleanSetting("-no-indent", "Require classical {...} syntax, indentation is not significant.", aliases = List("-noindent"))
  val YindentColons: Setting[Boolean] = BooleanSetting("-Yindent-colons", "(disabled: use -language:experimental.fewerBraces instead)")

  /* Decompiler settings */
  val printTasty: Setting[Boolean] = BooleanSetting("-print-tasty", "Prints the raw tasty.", aliases = List("--print-tasty"))
  val printLines: Setting[Boolean] = BooleanSetting("-print-lines", "Show source code line numbers.", aliases = List("--print-lines"))

  /* Scala.js-related settings */
  val scalajsGenStaticForwardersForNonTopLevelObjects: Setting[Boolean] = BooleanSetting("-scalajs-genStaticForwardersForNonTopLevelObjects", "Generate static forwarders even for non-top-level objects (Scala.js only)")
  val scalajsMapSourceURI: Setting[List[String]] = MultiStringSetting("-scalajs-mapSourceURI", "uri1[->uri2]", "rebases source URIs from uri1 to uri2 (or to a relative URI) for source maps (Scala.js only)")

  val projectUrl: Setting[String] = StringSetting (
    "-project-url",
    "project repository homepage",
    "The source repository of your project.",
    ""
  )

  val wikiSyntax: Setting[Boolean] = BooleanSetting("-Xwiki-syntax", "Retains the Scala2 behavior of using Wiki Syntax in Scaladoc.")

  /* Prefix settings: the remainder of the argument is passed through verbatim */
  val jvmargs = PrefixSetting("-J", "-J", "Pass directly to the runtime system.")
  val defines = PrefixSetting("-Dproperty=value", "-D", "Pass -Dproperty=value directly to the runtime system.")
end AllScalaSettings
/** Settings shared by compiler and scaladoc */
trait CommonScalaSettings:
  self: SettingGroup =>

  /* Path related settings */
  val bootclasspath: Setting[String] = PathSetting("-bootclasspath", "Override location of bootstrap class files.", Defaults.scalaBootClassPath, aliases = List("--boot-class-path"))
  val extdirs: Setting[String] = PathSetting("-extdirs", "Override location of installed extensions.", Defaults.scalaExtDirs, aliases = List("--extension-directories"))
  val javabootclasspath: Setting[String] = PathSetting("-javabootclasspath", "Override java boot classpath.", Defaults.javaBootClassPath, aliases = List("--java-boot-class-path"))
  val javaextdirs: Setting[String] = PathSetting("-javaextdirs", "Override java extdirs classpath.", Defaults.javaExtDirs, aliases = List("--java-extension-directories"))
  val sourcepath: Setting[String] = PathSetting("-sourcepath", "Specify location(s) of source files.", Defaults.scalaSourcePath, aliases = List("--source-path"))
  val sourceroot: Setting[String] = PathSetting("-sourceroot", "Specify workspace root directory.", ".")

  val classpath: Setting[String] = PathSetting("-classpath", "Specify where to find user class files.", ScalaSettings.defaultClasspath, aliases = List("-cp", "--class-path"))
  val outputDir: Setting[AbstractFile] = OutputSetting("-d", "directory|jar", "Destination for generated classfiles.",
    new PlainDirectory(Directory(".")))
  // "auto" choice is commented out but kept visible for a planned future default.
  val color: Setting[String] = ChoiceSetting("-color", "mode", "Colored output", List("always", "never"/*, "auto"*/), "always"/* "auto"*/, aliases = List("--color"))
  val verbose: Setting[Boolean] = BooleanSetting("-verbose", "Output messages about what the compiler is doing.", aliases = List("--verbose"))
  val version: Setting[Boolean] = BooleanSetting("-version", "Print product version and exit.", aliases = List("--version"))
  val help: Setting[Boolean] = BooleanSetting("-help", "Print a synopsis of standard options.", aliases = List("--help", "-h"))
  val pageWidth: Setting[Int] = IntSetting("-pagewidth", "Set page width", ScalaSettings.defaultPageWidth, aliases = List("--page-width"))
  val silentWarnings: Setting[Boolean] = BooleanSetting("-nowarn", "Silence all warnings.", aliases = List("--no-warnings"))

  val javaOutputVersion: Setting[String] = ChoiceSetting("-java-output-version", "version", "Compile code with classes specific to the given version of the Java platform available on the classpath and emit bytecode for this version. Corresponds to -release flag in javac.", ScalaSettings.supportedReleaseVersions, "", aliases = List("-release", "--release"))

  val deprecation: Setting[Boolean] = BooleanSetting("-deprecation", "Emit warning and location for usages of deprecated APIs.", aliases = List("--deprecation"))
  val feature: Setting[Boolean] = BooleanSetting("-feature", "Emit warning and location for usages of features that should be imported explicitly.", aliases = List("--feature"))
  val explain: Setting[Boolean] = BooleanSetting("-explain", "Explain errors in more detail.", aliases = List("--explain"))
  // -explain-types setting is necessary for cross compilation, since it is mentioned in sbt-tpolecat, for instance
  // it is otherwise subsumed by -explain, and should be dropped as soon as we can.
  val explainTypes: Setting[Boolean] = BooleanSetting("-explain-types", "Explain type errors in more detail (deprecated, use -explain instead).", aliases = List("--explain-types", "-explaintypes"))
  val unchecked: Setting[Boolean] = BooleanSetting("-unchecked", "Enable additional warnings where generated code depends on assumptions.", initialValue = true, aliases = List("--unchecked"))
  val language: Setting[List[String]] = MultiStringSetting("-language", "feature", "Enable one or more language features.", aliases = List("--language"))

  /* Coverage settings */
  val coverageOutputDir = PathSetting("-coverage-out", "Destination for coverage classfiles and instrumentation data.", "", aliases = List("--coverage-out"))

  /* Other settings */
  val encoding: Setting[String] = StringSetting("-encoding", "encoding", "Specify character encoding used by source files.", Properties.sourceEncoding, aliases = List("--encoding"))
  val usejavacp: Setting[Boolean] = BooleanSetting("-usejavacp", "Utilize the java.class.path in classpath resolution.", aliases = List("--use-java-class-path"))
  val scalajs: Setting[Boolean] = BooleanSetting("-scalajs", "Compile in Scala.js mode (requires scalajs-library.jar on the classpath).", aliases = List("--scalajs"))
end CommonScalaSettings
/** -P "plugin" settings. Various tools might support plugins. */
private sealed trait PluginSettings:
  self: SettingGroup =>
  val plugin: Setting[List[String]] = MultiStringSetting ("-Xplugin", "paths", "Load a plugin from each classpath.")
  val disable: Setting[List[String]] = MultiStringSetting ("-Xplugin-disable", "plugin", "Disable plugins by name.")
  val require: Setting[List[String]] = MultiStringSetting ("-Xplugin-require", "plugin", "Abort if a named plugin is not loaded.")
  val showPlugins: Setting[Boolean] = BooleanSetting ("-Xplugin-list", "Print a synopsis of loaded plugins.")
  val pluginsDir: Setting[String] = StringSetting ("-Xpluginsdir", "path", "Path to search for plugin archives.", Defaults.scalaPluginPath)
  // NOTE(review): help text ends with a bare "-P::" — upstream presumably reads
  // "-P:<plugin>:<opt>" and the angle-bracketed placeholders were lost in transit;
  // confirm against the canonical source before shipping.
  val pluginOptions: Setting[List[String]] = MultiStringSetting ("-P", "plugin:opt", "Pass an option to a plugin, e.g. -P::")
/** -V "Verbose" settings */
private sealed trait VerboseSettings:
  self: SettingGroup =>
  val Vhelp: Setting[Boolean] = BooleanSetting("-V", "Print a synopsis of verbose options.")
  // Kept under the legacy X-prefixed val names for source compatibility; the
  // user-facing flags are -Vprint/-Vphases with -Xprint/-Xshow-phases as aliases.
  val Xprint: Setting[List[String]] = PhasesSetting("-Vprint", "Print out program after", aliases = List("-Xprint"))
  val XshowPhases: Setting[Boolean] = BooleanSetting("-Vphases", "List compiler phases.", aliases = List("-Xshow-phases"))

  /* Compile-time profiling and REPL output limits */
  val Vprofile: Setting[Boolean] = BooleanSetting("-Vprofile", "Show metrics about sources and internal representations to estimate compile-time complexity.")
  val VprofileSortedBy = ChoiceSetting("-Vprofile-sorted-by", "key", "Show metrics about sources and internal representations sorted by given column name", List("name", "path", "lines", "tokens", "tasty", "complexity"), "")
  val VprofileDetails = IntSetting("-Vprofile-details", "Show metrics about sources and internal representations of the most complex methods", 0)
  val VreplMaxPrintElements: Setting[Int] = IntSetting("-Vrepl-max-print-elements", "Number of elements to be printed before output is truncated.", 1000)
  val VreplMaxPrintCharacters: Setting[Int] = IntSetting("-Vrepl-max-print-characters", "Number of characters to be printed before output is truncated.", 50000)
/** -W "Warnings" settings
 */
private sealed trait WarningSettings:
  self: SettingGroup =>
  val Whelp: Setting[Boolean] = BooleanSetting("-W", "Print a synopsis of warning options.")
  val XfatalWarnings: Setting[Boolean] = BooleanSetting("-Werror", "Fail the compilation if there are any warnings.", aliases = List("-Xfatal-warnings"))

  val Wunused: Setting[List[String]] = MultiChoiceSetting(
    name = "-Wunused",
    helpArg = "warning",
    descr = "Enable or disable specific `unused` warnings",
    choices = List("nowarn", "all"),
    default = Nil
  )
  /** Query helpers over the -Wunused selections: a category is enabled when it
   *  or "all" was selected on the command line.
   */
  object WunusedHas:
    def allOr(s: String)(using Context) = Wunused.value.pipe(us => us.contains("all") || us.contains(s))
    def nowarn(using Context) = allOr("nowarn")

  // NOTE(review): the Syntax/"combined with &" lines below appear to have lost
  // angle-bracketed placeholders (e.g. "<filters>:<action>") in transit; the text
  // is preserved verbatim here — confirm against the canonical source.
  val Wconf: Setting[List[String]] = MultiStringSetting(
    "-Wconf",
    "patterns",
    default = List(),
    descr =
      s"""Configure compiler warnings.
         |Syntax: -Wconf::,:,...
         |multiple are combined with &, i.e., &...&
         |
         |
         | - Any message: any
         |
         | - Message categories: cat=deprecation, cat=feature, cat=unchecked
         |
         | - Message content: msg=regex
         |   The regex need only match some part of the message, not all of it.
         |
         | - Message id: id=E129
         |   The message id is printed with the warning.
         |
         | - Message name: name=PureExpressionInStatementPosition
         |   The message name is printed with the warning in verbose warning mode.
         |
         |In verbose warning mode the compiler prints matching filters for warnings.
         |Verbose mode can be enabled globally using `-Wconf:any:verbose`, or locally
         |using the @nowarn annotation (example: `@nowarn("v") def test = try 1`).
         |
         |
         | - error / e
         | - warning / w
         | - verbose / v (emit warning, show additional help for writing `-Wconf` filters)
         | - info / i (infos are not counted as warnings and not affected by `-Werror`)
         | - silent / s
         |
         |The default configuration is empty.
         |
         |User-defined configurations are added to the left. The leftmost rule matching
         |a warning message defines the action.
         |
         |Examples:
         | - change every warning into an error: -Wconf:any:error
         | - silence deprecations: -Wconf:cat=deprecation:s
         |
         |Note: on the command-line you might need to quote configurations containing `*` or `&`
         |to prevent the shell from expanding patterns.""".stripMargin,
  )
/** -X "Extended" or "Advanced" settings */
private sealed trait XSettings:
  self: SettingGroup =>

  val Xhelp: Setting[Boolean] = BooleanSetting("-X", "Print a synopsis of advanced options.")
  val XnoForwarders: Setting[Boolean] = BooleanSetting("-Xno-forwarders", "Do not generate static forwarders in mirror classes.")
  val XmaxInlines: Setting[Int] = IntSetting("-Xmax-inlines", "Maximal number of successive inlines.", 32)
  val XmaxInlinedTrees: Setting[Int] = IntSetting("-Xmax-inlined-trees", "Maximal number of inlined trees.", 2_000_000)
  val Xmigration: Setting[ScalaVersion] = VersionSetting("-Xmigration", "Warn about constructs whose behavior may have changed since version.")
  /* Tree-printing debug aids */
  val XprintTypes: Setting[Boolean] = BooleanSetting("-Xprint-types", "Print tree types (debugging option).")
  val XprintDiff: Setting[Boolean] = BooleanSetting("-Xprint-diff", "Print changed parts of the tree since last print.")
  val XprintDiffDel: Setting[Boolean] = BooleanSetting("-Xprint-diff-del", "Print changed parts of the tree since last print including deleted parts.")
  val XprintInline: Setting[Boolean] = BooleanSetting("-Xprint-inline", "Show where inlined code comes from.")
  val XprintSuspension: Setting[Boolean] = BooleanSetting("-Xprint-suspension", "Show when code is suspended until macros are compiled.")
  val Xprompt: Setting[Boolean] = BooleanSetting("-Xprompt", "Display a prompt after each error (debugging option).")
  val XreplDisableDisplay: Setting[Boolean] = BooleanSetting("-Xrepl-disable-display", "Do not display definitions in REPL.")
  val XverifySignatures: Setting[Boolean] = BooleanSetting("-Xverify-signatures", "Verify generic signatures in generated bytecode.")
  val XignoreScala2Macros: Setting[Boolean] = BooleanSetting("-Xignore-scala2-macros", "Ignore errors when compiling code that calls Scala2 macros, these will fail at runtime.")
  val XimportSuggestionTimeout: Setting[Int] = IntSetting("-Ximport-suggestion-timeout", "Timeout (in ms) for searching for import suggestions when errors are reported.", 8000)
  val Xsemanticdb: Setting[Boolean] = BooleanSetting("-Xsemanticdb", "Store information in SemanticDB.", aliases = List("-Ysemanticdb"))
  val XuncheckedJavaOutputVersion: Setting[String] = ChoiceSetting("-Xunchecked-java-output-version", "target", "Emit bytecode for the specified version of the Java platform. This might produce bytecode that will break at runtime. Corresponds to -target flag in javac. When on JDK 9+, consider -java-output-version as a safer alternative.", ScalaSettings.supportedTargetVersions, "", aliases = List("-Xtarget", "--Xtarget"))
  val XcheckMacros: Setting[Boolean] = BooleanSetting("-Xcheck-macros", "Check some invariants of macro generated code while expanding macros", aliases = List("--Xcheck-macros"))
  // NOTE(review): help text says "-d )" — a "<jar>" placeholder was likely lost in
  // transit; text preserved verbatim, confirm against the canonical source.
  val XmainClass: Setting[String] = StringSetting("-Xmain-class", "path", "Class for manifest's Main-Class entry (only useful with -d )", "")
  val XimplicitSearchLimit: Setting[Int] = IntSetting("-Ximplicit-search-limit", "Maximal number of expressions to be generated in an implicit search", 50000)

  // NOTE(review): "inhering" in the descr is a typo for "inheriting"; it is a
  // runtime string, so it is preserved verbatim here and flagged for a separate fix.
  val XmixinForceForwarders = ChoiceSetting(
    name = "-Xmixin-force-forwarders",
    helpArg = "mode",
    descr = "Generate forwarder methods in classes inhering concrete methods from traits.",
    choices = List("true", "junit", "false"),
    default = "true")

  /** Interpretation of the three -Xmixin-force-forwarders modes. */
  object mixinForwarderChoices {
    def isTruthy(using Context) = XmixinForceForwarders.value == "true"
    def isAtLeastJunit(using Context) = isTruthy || XmixinForceForwarders.value == "junit"
  }

  val XmacroSettings: Setting[List[String]] = MultiStringSetting("-Xmacro-settings", "setting1,setting2,..settingN", "List of settings which exposed to the macros")
end XSettings
"Print all flags of definitions.") + val YdebugMissingRefs: Setting[Boolean] = BooleanSetting("-Ydebug-missing-refs", "Print a stacktrace when a required symbol is missing.") + val YdebugNames: Setting[Boolean] = BooleanSetting("-Ydebug-names", "Show internal representation of names.") + val YdebugPos: Setting[Boolean] = BooleanSetting("-Ydebug-pos", "Show full source positions including spans.") + val YdebugTreeWithId: Setting[Int] = IntSetting("-Ydebug-tree-with-id", "Print the stack trace when the tree with the given id is created.", Int.MinValue) + val YdebugTypeError: Setting[Boolean] = BooleanSetting("-Ydebug-type-error", "Print the stack trace when a TypeError is caught", false) + val YdebugError: Setting[Boolean] = BooleanSetting("-Ydebug-error", "Print the stack trace when any error is caught.", false) + val YdebugUnpickling: Setting[Boolean] = BooleanSetting("-Ydebug-unpickling", "Print the stack trace when an error occurs when reading Tasty.", false) + val YtermConflict: Setting[String] = ChoiceSetting("-Yresolve-term-conflict", "strategy", "Resolve term conflicts", List("package", "object", "error"), "error") + val Ylog: Setting[List[String]] = PhasesSetting("-Ylog", "Log operations during") + val YlogClasspath: Setting[Boolean] = BooleanSetting("-Ylog-classpath", "Output information about what classpath is being applied.") + val YdisableFlatCpCaching: Setting[Boolean] = BooleanSetting("-YdisableFlatCpCaching", "Do not cache flat classpath representation of classpath elements from jars across compiler instances.") + + val Yscala2Unpickler: Setting[String] = StringSetting("-Yscala2-unpickler", "", "Control where we may get Scala 2 symbols from. 
This is either \"always\", \"never\", or a classpath.", "always") + + val YnoImports: Setting[Boolean] = BooleanSetting("-Yno-imports", "Compile without importing scala.*, java.lang.*, or Predef.") + val YnoGenericSig: Setting[Boolean] = BooleanSetting("-Yno-generic-signatures", "Suppress generation of generic signatures for Java.") + val YnoPredef: Setting[Boolean] = BooleanSetting("-Yno-predef", "Compile without importing Predef.") + val Yskip: Setting[List[String]] = PhasesSetting("-Yskip", "Skip") + val Ydumpclasses: Setting[String] = StringSetting("-Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "") + val YstopAfter: Setting[List[String]] = PhasesSetting("-Ystop-after", "Stop after", aliases = List("-stop")) // backward compat + val YstopBefore: Setting[List[String]] = PhasesSetting("-Ystop-before", "Stop before") // stop before erasure as long as we have not debugged it fully + val YshowSuppressedErrors: Setting[Boolean] = BooleanSetting("-Yshow-suppressed-errors", "Also show follow-on errors and warnings that are normally suppressed.") + val YdetailedStats: Setting[Boolean] = BooleanSetting("-Ydetailed-stats", "Show detailed internal compiler stats (needs Stats.enabled to be set to true).") + val YkindProjector: Setting[String] = ChoiceSetting("-Ykind-projector", "[underscores, disable]", "Allow `*` as type lambda placeholder to be compatible with kind projector. 
When invoked as -Ykind-projector:underscores will repurpose `_` to be a type parameter placeholder, this will disable usage of underscore as a wildcard.", List("disable", "", "underscores"), "disable") + val YprintPos: Setting[Boolean] = BooleanSetting("-Yprint-pos", "Show tree positions.") + val YprintPosSyms: Setting[Boolean] = BooleanSetting("-Yprint-pos-syms", "Show symbol definitions positions.") + val YnoDeepSubtypes: Setting[Boolean] = BooleanSetting("-Yno-deep-subtypes", "Throw an exception on deep subtyping call stacks.") + val YnoPatmatOpt: Setting[Boolean] = BooleanSetting("-Yno-patmat-opt", "Disable all pattern matching optimizations.") + val YplainPrinter: Setting[Boolean] = BooleanSetting("-Yplain-printer", "Pretty-print using a plain printer.") + val YprintSyms: Setting[Boolean] = BooleanSetting("-Yprint-syms", "When printing trees print info in symbols instead of corresponding info in trees.") + val YprintDebug: Setting[Boolean] = BooleanSetting("-Yprint-debug", "When printing trees, print some extra information useful for debugging.") + val YprintDebugOwners: Setting[Boolean] = BooleanSetting("-Yprint-debug-owners", "When printing trees, print owners of definitions.") + val YprintLevel: Setting[Boolean] = BooleanSetting("-Yprint-level", "print nesting levels of symbols and type variables.") + val YshowPrintErrors: Setting[Boolean] = BooleanSetting("-Yshow-print-errors", "Don't suppress exceptions thrown during tree printing.") + val YtestPickler: Setting[Boolean] = BooleanSetting("-Ytest-pickler", "Self-test for pickling functionality; should be used with -Ystop-after:pickler.") + val YcheckReentrant: Setting[Boolean] = BooleanSetting("-Ycheck-reentrant", "Check that compiled program does not contain vars that can be accessed from a global root.") + val YdropComments: Setting[Boolean] = BooleanSetting("-Ydrop-docs", "Drop documentation when scanning source files.", aliases = List("-Ydrop-comments")) + val YcookComments: Setting[Boolean] = 
BooleanSetting("-Ycook-docs", "Cook the documentation (type check `@usecase`, etc.)", aliases = List("-Ycook-comments")) + val YreadComments: Setting[Boolean] = BooleanSetting("-Yread-docs", "Read documentation from tasty.") + val YforceSbtPhases: Setting[Boolean] = BooleanSetting("-Yforce-sbt-phases", "Run the phases used by sbt for incremental compilation (ExtractDependencies and ExtractAPI) even if the compiler is ran outside of sbt, for debugging.") + val YdumpSbtInc: Setting[Boolean] = BooleanSetting("-Ydump-sbt-inc", "For every compiled foo.scala, output the API representation and dependencies used for sbt incremental compilation in foo.inc, implies -Yforce-sbt-phases.") + val YcheckAllPatmat: Setting[Boolean] = BooleanSetting("-Ycheck-all-patmat", "Check exhaustivity and redundancy of all pattern matching (used for testing the algorithm).") + val YretainTrees: Setting[Boolean] = BooleanSetting("-Yretain-trees", "Retain trees for top-level classes, accessible from ClassSymbol#tree") + val YshowTreeIds: Setting[Boolean] = BooleanSetting("-Yshow-tree-ids", "Uniquely tag all tree nodes in debugging output.") + val YfromTastyIgnoreList: Setting[List[String]] = MultiStringSetting("-Yfrom-tasty-ignore-list", "file", "List of `tasty` files in jar files that will not be loaded when using -from-tasty") + val YnoExperimental: Setting[Boolean] = BooleanSetting("-Yno-experimental", "Disable experimental language features") + + val YprofileEnabled: Setting[Boolean] = BooleanSetting("-Yprofile-enabled", "Enable profiling.") + val YprofileDestination: Setting[String] = StringSetting("-Yprofile-destination", "file", "Where to send profiling output - specify a file, default is to the console.", "") + //.withPostSetHook( _ => YprofileEnabled.value = true ) + val YprofileExternalTool: Setting[List[String]] = PhasesSetting("-Yprofile-external-tool", "Enable profiling for a phase using an external tool hook. 
Generally only useful for a single phase.", "typer") + //.withPostSetHook( _ => YprofileEnabled.value = true ) + val YprofileRunGcBetweenPhases: Setting[List[String]] = PhasesSetting("-Yprofile-run-gc", "Run a GC between phases - this allows heap size to be accurate at the expense of more time. Specify a list of phases, or *", "_") + //.withPostSetHook( _ => YprofileEnabled.value = true ) + + // Experimental language features + val YnoKindPolymorphism: Setting[Boolean] = BooleanSetting("-Yno-kind-polymorphism", "Disable kind polymorphism.") + val YexplicitNulls: Setting[Boolean] = BooleanSetting("-Yexplicit-nulls", "Make reference types non-nullable. Nullable types can be expressed with unions: e.g. String|Null.") + val YcheckInit: Setting[Boolean] = BooleanSetting("-Ysafe-init", "Ensure safe initialization of objects") + val YrequireTargetName: Setting[Boolean] = BooleanSetting("-Yrequire-targetName", "Warn if an operator is defined without a @targetName annotation") + val YrecheckTest: Setting[Boolean] = BooleanSetting("-Yrecheck-test", "Run basic rechecking (internal test only)") + val YccDebug: Setting[Boolean] = BooleanSetting("-Ycc-debug", "Used in conjunction with captureChecking language import, debug info for captured references") + val YccNoAbbrev: Setting[Boolean] = BooleanSetting("-Ycc-no-abbrev", "Used in conjunction with captureChecking language import, suppress type abbreviations") + + /** Area-specific debug output */ + val YexplainLowlevel: Setting[Boolean] = BooleanSetting("-Yexplain-lowlevel", "When explaining type errors, show types at a lower level.") + val YnoDoubleBindings: Setting[Boolean] = BooleanSetting("-Yno-double-bindings", "Assert no namedtype is bound twice (should be enabled only if program is error-free).") + val YshowVarBounds: Setting[Boolean] = BooleanSetting("-Yshow-var-bounds", "Print type variables with their bounds.") + + val YnoDecodeStacktraces: Setting[Boolean] = BooleanSetting("-Yno-decode-stacktraces", "Show raw 
StackOverflow stacktraces, instead of decoding them into triggering operations.") + + val Yinstrument: Setting[Boolean] = BooleanSetting("-Yinstrument", "Add instrumentation code that counts allocations and closure creations.") + val YinstrumentDefs: Setting[Boolean] = BooleanSetting("-Yinstrument-defs", "Add instrumentation code that counts method calls; needs -Yinstrument to be set, too.") + + val YforceInlineWhileTyping: Setting[Boolean] = BooleanSetting("-Yforce-inline-while-typing", "Make non-transparent inline methods inline when typing. Emulates the old inlining behavior of 3.0.0-M3.") +end YSettings + diff --git a/tests/pos-with-compiler-cc/dotc/config/ScalaVersion.scala b/tests/pos-with-compiler-cc/dotc/config/ScalaVersion.scala new file mode 100644 index 000000000000..7fdf57478f1a --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/ScalaVersion.scala @@ -0,0 +1,188 @@ +/* @author James Iry + */ +package dotty.tools +package dotc.config + +import scala.language.unsafeNulls + +import scala.annotation.internal.sharable +import scala.util.{Try, Success, Failure} + +/** + * Represents a single Scala version in a manner that + * supports easy comparison and sorting. + */ +sealed abstract class ScalaVersion extends Ordered[ScalaVersion] { + def unparse: String +} + +/** + * A scala version that sorts higher than all actual versions + */ +@sharable case object NoScalaVersion extends ScalaVersion { + def unparse: String = "none" + + def compare(that: ScalaVersion): Int = that match { + case NoScalaVersion => 0 + case _ => 1 + } +} + +/** + * A specific Scala version, not one of the magic min/max versions. An SpecificScalaVersion + * may or may not be a released version - i.e. this same class is used to represent + * final, release candidate, milestone, and development builds. 
The build argument is used + * to segregate builds + */ +case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBuild) extends ScalaVersion { + def unparse: String = s"${major}.${minor}.${rev}.${build.unparse}" + + def compare(that: ScalaVersion): Int = that match { + case SpecificScalaVersion(thatMajor, thatMinor, thatRev, thatBuild) => + // this could be done more cleanly by importing scala.math.Ordering.Implicits, but we have to do these + // comparisons a lot so I'm using brute force direct style code + if (major < thatMajor) -1 + else if (major > thatMajor) 1 + else if (minor < thatMinor) -1 + else if (minor > thatMinor) 1 + else if (rev < thatRev) -1 + else if (rev > thatRev) 1 + else build compare thatBuild + case AnyScalaVersion => 1 + case NoScalaVersion => -1 + } +} + +/** + * A Scala version that sorts lower than all actual versions + */ +@sharable case object AnyScalaVersion extends ScalaVersion { + def unparse: String = "any" + + def compare(that: ScalaVersion): Int = that match { + case AnyScalaVersion => 0 + case _ => -1 + } +} + +/** + * Methods for parsing ScalaVersions + */ +@sharable object ScalaVersion { + private val dot = "\\." + private val dash = "\\-" + private def not(s:String) = s"[^${s}]" + private val R = s"((${not(dot)}*)(${dot}(${not(dot)}*)(${dot}(${not(dash)}*)(${dash}(.*))?)?)?)".r + + def parse(versionString : String): Try[ScalaVersion] = { + def failure = Failure(new NumberFormatException( + s"There was a problem parsing ${versionString}. " + + "Versions should be in the form major[.minor[.revision]] " + + "where each part is a positive number, as in 2.10.1. " + + "The minor and revision parts are optional." 
+ )) + + def toInt(s: String) = s match { + case null | "" => 0 + case _ => s.toInt + } + + def isInt(s: String) = Try(toInt(s)).isSuccess + + import ScalaBuild._ + + def toBuild(s: String) = s match { + case null | "FINAL" => Final + case s if (s.toUpperCase.startsWith("RC") && isInt(s.substring(2))) => RC(toInt(s.substring(2))) + case s if (s.toUpperCase.startsWith("M") && isInt(s.substring(1))) => Milestone(toInt(s.substring(1))) + case _ => Development(s) + } + + try versionString match { + case "" | "any" => Success(AnyScalaVersion) + case "none" => Success(NoScalaVersion) + case R(_, majorS, _, minorS, _, revS, _, buildS) => + Success(SpecificScalaVersion(toInt(majorS), toInt(minorS), toInt(revS), toBuild(buildS))) + case _ => failure + } + catch { + case e: NumberFormatException => failure + } + } + + /** + * The version of the compiler running now + */ + val current: ScalaVersion = parse(util.Properties.versionNumberString).get +} + +/** + * Represents the data after the dash in major.minor.rev-build + */ +abstract class ScalaBuild extends Ordered[ScalaBuild] { + /** + * Return a version of this build information that can be parsed back into the + * same ScalaBuild + */ + def unparse: String +} + +object ScalaBuild { + + /** A development, test, nightly, snapshot or other "unofficial" build + */ + case class Development(id: String) extends ScalaBuild { + def unparse: String = s"-${id}" + + def compare(that: ScalaBuild): Int = that match { + // sorting two development builds based on id is reasonably valid for two versions created with the same schema + // otherwise it's not correct, but since it's impossible to put a total ordering on development build versions + // this is a pragmatic compromise + case Development(thatId) => id compare thatId + // assume a development build is newer than anything else, that's not really true, but good luck + // mapping development build versions to other build types + case _ => 1 + } + } + + /** A final build + */ + case 
object Final extends ScalaBuild { + def unparse: String = "" + + def compare(that: ScalaBuild): Int = that match { + case Final => 0 + // a final is newer than anything other than a development build or another final + case Development(_) => -1 + case _ => 1 + } + } + + /** A candidate for final release + */ + case class RC(n: Int) extends ScalaBuild { + def unparse: String = s"-RC${n}" + + def compare(that: ScalaBuild): Int = that match { + // compare two rcs based on their RC numbers + case RC(thatN) => n - thatN + // an rc is older than anything other than a milestone or another rc + case Milestone(_) => 1 + case _ => -1 + } + } + + /** An intermediate release + */ + case class Milestone(n: Int) extends ScalaBuild { + def unparse: String = s"-M${n}" + + def compare(that: ScalaBuild): Int = that match { + // compare two milestones based on their milestone numbers + case Milestone(thatN) => n - thatN + // a milestone is older than anything other than another milestone + case _ => -1 + } + } +} + diff --git a/tests/pos-with-compiler-cc/dotc/config/Settings.scala b/tests/pos-with-compiler-cc/dotc/config/Settings.scala new file mode 100644 index 000000000000..277833afbd5d --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/Settings.scala @@ -0,0 +1,295 @@ +package dotty.tools.dotc +package config + +import scala.language.unsafeNulls + +import core.Contexts._ + +import dotty.tools.io.{AbstractFile, Directory, JarArchive, PlainDirectory} + +import annotation.tailrec +import collection.mutable.ArrayBuffer +import reflect.ClassTag +import scala.util.{Success, Failure} + +object Settings: + + val BooleanTag: ClassTag[Boolean] = ClassTag.Boolean + val IntTag: ClassTag[Int] = ClassTag.Int + val StringTag: ClassTag[String] = ClassTag(classOf[String]) + val ListTag: ClassTag[List[?]] = ClassTag(classOf[List[?]]) + val VersionTag: ClassTag[ScalaVersion] = ClassTag(classOf[ScalaVersion]) + val OptionTag: ClassTag[Option[?]] = ClassTag(classOf[Option[?]]) + val OutputTag: 
ClassTag[AbstractFile] = ClassTag(classOf[AbstractFile]) + + class SettingsState(initialValues: Seq[Any]): + private val values = ArrayBuffer(initialValues: _*) + private var _wasRead: Boolean = false + + override def toString: String = s"SettingsState(values: ${values.toList})" + + def value(idx: Int): Any = + _wasRead = true + values(idx) + + def update(idx: Int, x: Any): SettingsState = + if (_wasRead) then SettingsState(values.toSeq).update(idx, x) + else + values(idx) = x + this + end SettingsState + + case class ArgsSummary( + sstate: SettingsState, + arguments: List[String], + errors: List[String], + warnings: List[String]) { + + def fail(msg: String): Settings.ArgsSummary = + ArgsSummary(sstate, arguments.tail, errors :+ msg, warnings) + + def warn(msg: String): Settings.ArgsSummary = + ArgsSummary(sstate, arguments.tail, errors, warnings :+ msg) + } + + case class Setting[T: ClassTag] private[Settings] ( + name: String, + description: String, + default: T, + helpArg: String = "", + choices: Option[Seq[?]] = None, + prefix: String = "", + aliases: List[String] = Nil, + depends: List[(Setting[?], Any)] = Nil, + propertyClass: Option[Class[?]] = None)(private[Settings] val idx: Int) { + + private var changed: Boolean = false + + def valueIn(state: SettingsState): T = state.value(idx).asInstanceOf[T] + + def updateIn(state: SettingsState, x: Any): SettingsState = x match + case _: T => state.update(idx, x) + case _ => throw IllegalArgumentException(s"found: $x of type ${x.getClass.getName}, required: ${implicitly[ClassTag[T]]}") + + def isDefaultIn(state: SettingsState): Boolean = valueIn(state) == default + + def isMultivalue: Boolean = implicitly[ClassTag[T]] == ListTag + + def legalChoices: String = + choices match { + case Some(xs) if xs.isEmpty => "" + case Some(r: Range) => s"${r.head}..${r.last}" + case Some(xs) => xs.mkString(", ") + case None => "" + } + + def tryToSet(state: ArgsSummary): ArgsSummary = { + val ArgsSummary(sstate, arg :: args, errors, 
warnings) = state: @unchecked + def update(value: Any, args: List[String]): ArgsSummary = + var dangers = warnings + val value1 = + if changed && isMultivalue then + val value0 = value.asInstanceOf[List[String]] + val current = valueIn(sstate).asInstanceOf[List[String]] + value0.filter(current.contains).foreach(s => dangers :+= s"Setting $name set to $s redundantly") + current ++ value0 + else + if changed then dangers :+= s"Flag $name set repeatedly" + value + changed = true + ArgsSummary(updateIn(sstate, value1), args, errors, dangers) + end update + + def fail(msg: String, args: List[String]) = + ArgsSummary(sstate, args, errors :+ msg, warnings) + + def missingArg = + fail(s"missing argument for option $name", args) + + def setString(argValue: String, args: List[String]) = + choices match + case Some(xs) if !xs.contains(argValue) => + fail(s"$argValue is not a valid choice for $name", args) + case _ => + update(argValue, args) + + def setInt(argValue: String, args: List[String]) = + try + val x = argValue.toInt + choices match + case Some(r: Range) if x < r.head || r.last < x => + fail(s"$argValue is out of legal range ${r.head}..${r.last} for $name", args) + case Some(xs) if !xs.contains(x) => + fail(s"$argValue is not a valid choice for $name", args) + case _ => + update(x, args) + catch case _: NumberFormatException => + fail(s"$argValue is not an integer argument for $name", args) + + def doSet(argRest: String) = ((implicitly[ClassTag[T]], args): @unchecked) match { + case (BooleanTag, _) => + update(true, args) + case (OptionTag, _) => + update(Some(propertyClass.get.getConstructor().newInstance()), args) + case (ListTag, _) => + if (argRest.isEmpty) missingArg + else + val strings = argRest.split(",").toList + choices match + case Some(valid) => strings.filterNot(valid.contains) match + case Nil => update(strings, args) + case invalid => fail(s"invalid choice(s) for $name: ${invalid.mkString(",")}", args) + case _ => update(strings, args) + case 
(StringTag, _) if argRest.nonEmpty || choices.exists(_.contains("")) => + setString(argRest, args) + case (StringTag, arg2 :: args2) => + if (arg2 startsWith "-") missingArg + else setString(arg2, args2) + case (OutputTag, arg :: args) => + val path = Directory(arg) + val isJar = path.extension == "jar" + if (!isJar && !path.isDirectory) + fail(s"'$arg' does not exist or is not a directory or .jar file", args) + else { + val output = if (isJar) JarArchive.create(path) else new PlainDirectory(path) + update(output, args) + } + case (IntTag, args) if argRest.nonEmpty => + setInt(argRest, args) + case (IntTag, arg2 :: args2) => + setInt(arg2, args2) + case (VersionTag, _) => + ScalaVersion.parse(argRest) match { + case Success(v) => update(v, args) + case Failure(ex) => fail(ex.getMessage, args) + } + case (_, Nil) => + missingArg + } + + def matches(argName: String) = (name :: aliases).exists(_ == argName) + + if (prefix != "" && arg.startsWith(prefix)) + doSet(arg drop prefix.length) + else if (prefix == "" && matches(arg.takeWhile(_ != ':'))) + doSet(arg.dropWhile(_ != ':').drop(1)) + else + state + } + } + + object Setting: + extension [T](setting: Setting[T]) + def value(using Context): T = setting.valueIn(ctx.settingsState) + def update(x: T)(using Context): SettingsState = setting.updateIn(ctx.settingsState, x) + def isDefault(using Context): Boolean = setting.isDefaultIn(ctx.settingsState) + + class SettingGroup { + + private val _allSettings = new ArrayBuffer[Setting[?]] + def allSettings: Seq[Setting[?]] = _allSettings.toSeq + + def defaultState: SettingsState = new SettingsState(allSettings map (_.default)) + + def userSetSettings(state: SettingsState): Seq[Setting[?]] = + allSettings filterNot (_.isDefaultIn(state)) + + def toConciseString(state: SettingsState): String = + userSetSettings(state).mkString("(", " ", ")") + + private def checkDependencies(state: ArgsSummary): ArgsSummary = + 
userSetSettings(state.sstate).foldLeft(state)(checkDependenciesOfSetting) + + private def checkDependenciesOfSetting(state: ArgsSummary, setting: Setting[?]) = + setting.depends.foldLeft(state) { (s, dep) => + val (depSetting, reqValue) = dep + if (depSetting.valueIn(state.sstate) == reqValue) s + else s.fail(s"incomplete option ${setting.name} (requires ${depSetting.name})") + } + + /** Iterates over the arguments applying them to settings where applicable. + * Then verifies setting dependencies are met. + * + * This takes a boolean indicating whether to keep + * processing if an argument is seen which is not a command line option. + * This is an expedience for the moment so that you can say + * + * scalac -d /tmp foo.scala -optimise + * + * while also allowing + * + * scala Program opt opt + * + * to get their arguments. + */ + @tailrec + final def processArguments(state: ArgsSummary, processAll: Boolean, skipped: List[String]): ArgsSummary = + def stateWithArgs(args: List[String]) = ArgsSummary(state.sstate, args, state.errors, state.warnings) + state.arguments match + case Nil => + checkDependencies(stateWithArgs(skipped)) + case "--" :: args => + checkDependencies(stateWithArgs(skipped ++ args)) + case x :: _ if x startsWith "-" => + @tailrec def loop(settings: List[Setting[?]]): ArgsSummary = settings match + case setting :: settings1 => + val state1 = setting.tryToSet(state) + if state1 ne state then state1 + else loop(settings1) + case Nil => + state.warn(s"bad option '$x' was ignored") + processArguments(loop(allSettings.toList), processAll, skipped) + case arg :: args => + if processAll then processArguments(stateWithArgs(args), processAll, skipped :+ arg) + else state + end processArguments + + def processArguments(arguments: List[String], processAll: Boolean, settingsState: SettingsState = defaultState): ArgsSummary = + processArguments(ArgsSummary(settingsState, arguments, Nil, Nil), processAll, Nil) + + def publish[T](settingf: Int => Setting[T]): 
Setting[T] = { + val setting = settingf(_allSettings.length) + _allSettings += setting + setting + } + + def BooleanSetting(name: String, descr: String, initialValue: Boolean = false, aliases: List[String] = Nil): Setting[Boolean] = + publish(Setting(name, descr, initialValue, aliases = aliases)) + + def StringSetting(name: String, helpArg: String, descr: String, default: String, aliases: List[String] = Nil): Setting[String] = + publish(Setting(name, descr, default, helpArg, aliases = aliases)) + + def ChoiceSetting(name: String, helpArg: String, descr: String, choices: List[String], default: String, aliases: List[String] = Nil): Setting[String] = + publish(Setting(name, descr, default, helpArg, Some(choices), aliases = aliases)) + + def MultiChoiceSetting(name: String, helpArg: String, descr: String, choices: List[String], default: List[String], aliases: List[String] = Nil): Setting[List[String]] = + publish(Setting(name, descr, default, helpArg, Some(choices), aliases = aliases)) + + def IntSetting(name: String, descr: String, default: Int, aliases: List[String] = Nil): Setting[Int] = + publish(Setting(name, descr, default, aliases = aliases)) + + def IntChoiceSetting(name: String, descr: String, choices: Seq[Int], default: Int): Setting[Int] = + publish(Setting(name, descr, default, choices = Some(choices))) + + def MultiStringSetting(name: String, helpArg: String, descr: String, default: List[String] = Nil, aliases: List[String] = Nil): Setting[List[String]] = + publish(Setting(name, descr, default, helpArg, aliases = aliases)) + + def OutputSetting(name: String, helpArg: String, descr: String, default: AbstractFile): Setting[AbstractFile] = + publish(Setting(name, descr, default, helpArg)) + + def PathSetting(name: String, descr: String, default: String, aliases: List[String] = Nil): Setting[String] = + publish(Setting(name, descr, default, aliases = aliases)) + + def PhasesSetting(name: String, descr: String, default: String = "", aliases: List[String] = 
Nil): Setting[List[String]] = + publish(Setting(name, descr, if (default.isEmpty) Nil else List(default), aliases = aliases)) + + def PrefixSetting(name: String, pre: String, descr: String): Setting[List[String]] = + publish(Setting(name, descr, Nil, prefix = pre)) + + def VersionSetting(name: String, descr: String, default: ScalaVersion = NoScalaVersion): Setting[ScalaVersion] = + publish(Setting(name, descr, default)) + + def OptionSetting[T: ClassTag](name: String, descr: String, aliases: List[String] = Nil): Setting[Option[T]] = + publish(Setting(name, descr, None, propertyClass = Some(implicitly[ClassTag[T]].runtimeClass), aliases = aliases)) + } +end Settings diff --git a/tests/pos-with-compiler-cc/dotc/config/SourceVersion.scala b/tests/pos-with-compiler-cc/dotc/config/SourceVersion.scala new file mode 100644 index 000000000000..545e2f2d9b42 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/SourceVersion.scala @@ -0,0 +1,31 @@ +package dotty.tools +package dotc +package config + +import core.Decorators.* +import util.Property + +enum SourceVersion: + case `3.0-migration`, `3.0`, `3.1` // Note: do not add `3.1-migration` here, 3.1 is the same language as 3.0. + case `3.2-migration`, `3.2` + case `future-migration`, `future` + + val isMigrating: Boolean = toString.endsWith("-migration") + + def stable: SourceVersion = + if isMigrating then SourceVersion.values(ordinal + 1) else this + + def isAtLeast(v: SourceVersion) = stable.ordinal >= v.ordinal + +object SourceVersion extends Property.Key[SourceVersion]: + def defaultSourceVersion = `3.2` + + /** language versions that may appear in a language import, are deprecated, but not removed from the standard library. */ + val illegalSourceVersionNames = List("3.1-migration").map(_.toTermName) + + /** language versions that the compiler recognises. */ + val validSourceVersionNames = values.toList.map(_.toString.toTermName) + + /** All source versions that can be recognised from a language import. e.g. 
`import language.3.1` */ + val allSourceVersionNames = validSourceVersionNames ::: illegalSourceVersionNames +end SourceVersion diff --git a/tests/pos-with-compiler-cc/dotc/config/WrappedProperties.scala b/tests/pos-with-compiler-cc/dotc/config/WrappedProperties.scala new file mode 100644 index 000000000000..5b79432a97e7 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/WrappedProperties.scala @@ -0,0 +1,42 @@ +package dotty.tools +package dotc +package config + +import scala.language.unsafeNulls + +/** For placing a wrapper function around property functions. + * Motivated by places like google app engine throwing exceptions + * on property lookups. + */ +trait WrappedProperties extends PropertiesTrait { + def wrap[T](body: => T): Option[T] + + protected def propCategory: String = "wrapped" + protected def pickJarBasedOn: Class[?] = this.getClass + + override def propIsSet(name: String): Boolean = wrap(super.propIsSet(name)) exists (x => x) + override def propOrElse(name: String, alt: String): String = wrap(super.propOrElse(name, alt)) getOrElse alt + override def setProp(name: String, value: String): String = wrap(super.setProp(name, value)).orNull + override def clearProp(name: String): String = wrap(super.clearProp(name)).orNull + override def envOrElse(name: String, alt: String): String = wrap(super.envOrElse(name, alt)) getOrElse alt + override def envOrNone(name: String): Option[String] = wrap(super.envOrNone(name)).flatten + + def systemProperties: Iterator[(String, String)] = { + import scala.jdk.CollectionConverters._ + wrap(System.getProperties.asScala.iterator) getOrElse Iterator.empty + } +} + +object WrappedProperties { + object AccessControl extends WrappedProperties { + def wrap[T](body: => T): Option[T] = + try Some(body) + catch { + // the actual exception we are concerned with is AccessControlException, + // but that's deprecated on JDK 17, so catching its superclass is a convenient + // way to avoid a deprecation warning + case _: 
SecurityException => + None + } + } +} diff --git a/tests/pos-with-compiler-cc/dotc/core/Annotations.scala b/tests/pos-with-compiler-cc/dotc/core/Annotations.scala new file mode 100644 index 000000000000..d33b1d39942e --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Annotations.scala @@ -0,0 +1,273 @@ +package dotty.tools +package dotc +package core + +import Symbols._, Types._, Contexts._, Constants._ +import dotty.tools.dotc.ast.tpd, tpd.* +import util.Spans.Span +import printing.{Showable, Printer} +import printing.Texts.Text +import annotation.internal.sharable +import language.experimental.pureFunctions + +object Annotations { + + def annotClass(tree: Tree)(using Context) = + if (tree.symbol.isConstructor) tree.symbol.owner + else tree.tpe.typeSymbol + + abstract class Annotation extends Showable, caps.Pure { + + def tree(using Context): Tree + + def symbol(using Context): Symbol = annotClass(tree) + + def hasSymbol(sym: Symbol)(using Context) = symbol == sym + + def matches(cls: Symbol)(using Context): Boolean = symbol.derivesFrom(cls) + + def appliesToModule: Boolean = true // for now; see remark in SymDenotations + + def derivedAnnotation(tree: Tree)(using Context): Annotation = + if (tree eq this.tree) this else Annotation(tree) + + /** All arguments to this annotation in a single flat list */ + def arguments(using Context): List[Tree] = tpd.allArguments(tree) + + def argument(i: Int)(using Context): Option[Tree] = { + val args = arguments + if (i < args.length) Some(args(i)) else None + } + def argumentConstant(i: Int)(using Context): Option[Constant] = + for (case ConstantType(c) <- argument(i) map (_.tpe.widenTermRefExpr.normalized)) yield c + + def argumentConstantString(i: Int)(using Context): Option[String] = + for (case Constant(s: String) <- argumentConstant(i)) yield s + + /** The tree evaluaton is in progress. */ + def isEvaluating: Boolean = false + + /** The tree evaluation has finished. 
*/ + def isEvaluated: Boolean = true + + /** Normally, applies a type map to all tree nodes of this annotation, but can + * be overridden. Returns EmptyAnnotation if type type map produces a range + * type, since ranges cannot be types of trees. + */ + def mapWith(tm: TypeMap)(using Context) = + val args = arguments + if args.isEmpty then this + else + val findDiff = new TreeAccumulator[Type]: + def apply(x: Type, tree: Tree)(using Context): Type = + if tm.isRange(x) then x + else + val tp1 = tm(tree.tpe) + foldOver(if tp1 frozen_=:= tree.tpe then x else tp1, tree) + val diff = findDiff(NoType, args) + if tm.isRange(diff) then EmptyAnnotation + else if diff.exists then derivedAnnotation(tm.mapOver(tree)) + else this + + /** Does this annotation refer to a parameter of `tl`? */ + def refersToParamOf(tl: TermLambda)(using Context): Boolean = + val args = arguments + if args.isEmpty then false + else tree.existsSubTree { + case id: Ident => id.tpe.stripped match + case TermParamRef(tl1, _) => tl eq tl1 + case _ => false + case _ => false + } + + /** A string representation of the annotation. Overridden in BodyAnnotation. 
+ */ + def toText(printer: Printer): Text = printer.annotText(this) + + def ensureCompleted(using Context): Unit = tree + + def sameAnnotation(that: Annotation)(using Context): Boolean = + symbol == that.symbol && tree.sameTree(that.tree) + + /** Operations for hash-consing, can be overridden */ + def hash: Int = System.identityHashCode(this) + def eql(that: Annotation) = this eq that + } + + case class ConcreteAnnotation(t: Tree) extends Annotation: + def tree(using Context): Tree = t + + abstract class LazyAnnotation extends Annotation { + protected var mySym: Symbol | (Context ?-> Symbol) | Null + override def symbol(using parentCtx: Context): Symbol = + assert(mySym != null) + mySym match { + case symFn: (Context ?-> Symbol) @unchecked => + mySym = null + mySym = atPhaseBeforeTransforms(symFn) + // We should always produce the same annotation tree, no matter when the + // annotation is evaluated. Setting the phase to a pre-transformation phase + // seems to be enough to ensure this (note that after erasure, `ctx.typer` + // will be the Erasure typer, but that doesn't seem to affect the annotation + // trees we create, so we leave it as is) + case sym: Symbol if sym.defRunId != parentCtx.runId => + mySym = sym.denot.current.symbol + case _ => + } + mySym.asInstanceOf[Symbol] + + protected var myTree: Tree | (Context ?-> Tree) | Null + def tree(using Context): Tree = + assert(myTree != null) + myTree match { + case treeFn: (Context ?-> Tree) @unchecked => + myTree = null + myTree = atPhaseBeforeTransforms(treeFn) + case _ => + } + myTree.asInstanceOf[Tree] + + override def isEvaluating: Boolean = myTree == null + override def isEvaluated: Boolean = myTree.isInstanceOf[Tree @unchecked] + } + + class DeferredSymAndTree(symFn: Context ?-> Symbol, treeFn: Context ?-> Tree) + extends LazyAnnotation: + protected var mySym: Symbol | (Context ?-> Symbol) | Null = ctx ?=> symFn(using ctx) + protected var myTree: Tree | (Context ?-> Tree) | Null = ctx ?=> treeFn(using ctx) 
+ + /** An annotation indicating the body of a right-hand side, + * typically of an inline method. Treated specially in + * pickling/unpickling and TypeTreeMaps + */ + abstract class BodyAnnotation extends Annotation { + override def symbol(using Context): ClassSymbol = defn.BodyAnnot + override def derivedAnnotation(tree: Tree)(using Context): Annotation = + if (tree eq this.tree) this else ConcreteBodyAnnotation(tree) + override def arguments(using Context): List[Tree] = Nil + override def ensureCompleted(using Context): Unit = () + override def toText(printer: Printer): Text = "@Body" + } + + class ConcreteBodyAnnotation(body: Tree) extends BodyAnnotation { + def tree(using Context): Tree = body + } + + abstract class LazyBodyAnnotation extends BodyAnnotation { + // Copy-pasted from LazyAnnotation to avoid having to turn it into a trait + protected var myTree: Tree | (Context ?-> Tree) | Null + def tree(using Context): Tree = + assert(myTree != null) + myTree match { + case treeFn: (Context ?-> Tree) @unchecked => + myTree = null + myTree = atPhaseBeforeTransforms(treeFn) + case _ => + } + myTree.asInstanceOf[Tree] + + override def isEvaluating: Boolean = myTree == null + override def isEvaluated: Boolean = myTree.isInstanceOf[Tree @unchecked] + } + + object LazyBodyAnnotation { + def apply(bodyFn: Context ?-> Tree): LazyBodyAnnotation = + new LazyBodyAnnotation: + protected var myTree: Tree | (Context ?-> Tree) | Null = ctx ?=> bodyFn(using ctx) + } + + object Annotation { + + def apply(tree: Tree): ConcreteAnnotation = ConcreteAnnotation(tree) + + def apply(cls: ClassSymbol)(using Context): Annotation = + apply(cls, Nil) + + def apply(cls: ClassSymbol, arg: Tree)(using Context): Annotation = + apply(cls, arg :: Nil) + + def apply(cls: ClassSymbol, arg1: Tree, arg2: Tree)(using Context): Annotation = + apply(cls, arg1 :: arg2 :: Nil) + + def apply(cls: ClassSymbol, args: List[Tree])(using Context): Annotation = + apply(cls.typeRef, args) + + def apply(atp: 
Type, arg: Tree)(using Context): Annotation = + apply(atp, arg :: Nil) + + def apply(atp: Type, arg1: Tree, arg2: Tree)(using Context): Annotation = + apply(atp, arg1 :: arg2 :: Nil) + + def apply(atp: Type, args: List[Tree])(using Context): Annotation = + apply(New(atp, args)) + + /** Create an annotation where the tree is computed lazily. */ + def deferred(sym: Symbol)(treeFn: Context ?-> Tree): Annotation = + new LazyAnnotation { + protected var myTree: Tree | (Context ?-> Tree) | Null = ctx ?=> treeFn(using ctx) + protected var mySym: Symbol | (Context ?-> Symbol) | Null = sym + } + + /** Create an annotation where the symbol and the tree are computed lazily. */ + def deferredSymAndTree(symFn: Context ?-> Symbol)(treeFn: Context ?-> Tree): Annotation = + DeferredSymAndTree(symFn, treeFn) + + /** Extractor for child annotations */ + object Child { + + /** A deferred annotation to the result of a given child computation */ + def later(delayedSym: Context ?-> Symbol, span: Span)(using Context): Annotation = { + def makeChildLater(using Context) = { + val sym = delayedSym + New(defn.ChildAnnot.typeRef.appliedTo(sym.owner.thisType.select(sym.name, sym)), Nil) + .withSpan(span) + } + deferred(defn.ChildAnnot)(makeChildLater) + } + + /** A regular, non-deferred Child annotation */ + def apply(sym: Symbol, span: Span)(using Context): Annotation = later(sym, span) + + def unapply(ann: Annotation)(using Context): Option[Symbol] = + if (ann.symbol == defn.ChildAnnot) { + val AppliedType(_, (arg: NamedType) :: Nil) = ann.tree.tpe: @unchecked + Some(arg.symbol) + } + else None + } + + def makeSourceFile(path: String)(using Context): Annotation = + apply(defn.SourceFileAnnot, Literal(Constant(path))) + } + + @sharable val EmptyAnnotation = Annotation(EmptyTree) + + def ThrowsAnnotation(cls: ClassSymbol)(using Context): Annotation = { + val tref = cls.typeRef + Annotation(defn.ThrowsAnnot.typeRef.appliedTo(tref), Ident(tref)) + } + + /** Extracts the type of the thrown 
exception from an annotation. + * + * Supports both "old-style" `@throws(classOf[Exception])` + * as well as "new-style" `@throws[Exception]("cause")` annotations. + */ + object ThrownException { + def unapply(a: Annotation)(using Context): Option[Type] = + if (a.symbol ne defn.ThrowsAnnot) + None + else a.argumentConstant(0) match { + // old-style: @throws(classOf[Exception]) (which is throws[T](classOf[Exception])) + case Some(Constant(tpe: Type)) => + Some(tpe) + // new-style: @throws[Exception], @throws[Exception]("cause") + case _ => + stripApply(a.tree) match { + case TypeApply(_, List(tpt)) => + Some(tpt.tpe) + case _ => + None + } + } + } +} diff --git a/tests/pos-with-compiler-cc/dotc/core/Atoms.scala b/tests/pos-with-compiler-cc/dotc/core/Atoms.scala new file mode 100644 index 000000000000..bcaaf6794107 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Atoms.scala @@ -0,0 +1,36 @@ +package dotty.tools +package dotc +package core + +import Types._ + +/** Indicates the singleton types that a type must or may consist of. + * @param lo The lower bound: singleton types in this set are guaranteed + * to be in the carrier type. + * @param hi The upper bound: all singleton types in the carrier type are + * guaranteed to be in this set + * If the underlying type of a singleton type is another singleton type, + * only the latter type ends up in the sets. 
+ */ +enum Atoms: + case Range(lo: Set[Type], hi: Set[Type]) + case Unknown + + def & (that: Atoms): Atoms = this match + case Range(lo1, hi1) => + that match + case Range(lo2, hi2) => Range(lo1 & lo2, hi1 & hi2) + case Unknown => Range(Set.empty, hi1) + case Unknown => + that match + case Range(lo2, hi2) => Range(Set.empty, hi2) + case Unknown => Unknown + + def | (that: Atoms): Atoms = this match + case Range(lo1, hi1) => + that match + case Range(lo2, hi2) => Range(lo1 | lo2, hi1 | hi2) + case Unknown => Unknown + case Unknown => Unknown + +end Atoms diff --git a/tests/pos-with-compiler-cc/dotc/core/CheckRealizable.scala b/tests/pos-with-compiler-cc/dotc/core/CheckRealizable.scala new file mode 100644 index 000000000000..a61701eee2d7 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/CheckRealizable.scala @@ -0,0 +1,215 @@ +package dotty.tools +package dotc +package core + +import Contexts._, Types._, Symbols._, Names._, Flags._ +import Denotations.SingleDenotation +import Decorators._ +import collection.mutable +import config.SourceVersion.future +import config.Feature.sourceVersion + +/** Realizability status */ +object CheckRealizable { + + sealed abstract class Realizability(val msg: String) { + def andAlso(other: => Realizability): Realizability = + if (this == Realizable) other else this + def mapError(f: Realizability => Realizability): Realizability = + if (this == Realizable) this else f(this) + } + + object Realizable extends Realizability("") + + object NotConcrete extends Realizability(" is not a concrete type") + + class NotFinal(sym: Symbol)(using Context) + extends Realizability(i" refers to nonfinal $sym") + + class HasProblemBounds(name: Name, info: Type)(using Context) + extends Realizability(i" has a member $name with possibly conflicting bounds ${info.bounds.lo} <: ... 
<: ${info.bounds.hi}") + + class HasProblemBaseArg(typ: Type, argBounds: TypeBounds)(using Context) + extends Realizability(i" has a base type $typ with possibly conflicting parameter bounds ${argBounds.lo} <: ... <: ${argBounds.hi}") + + class HasProblemBase(base1: Type, base2: Type)(using Context) + extends Realizability(i" has conflicting base types $base1 and $base2") + + class HasProblemField(fld: SingleDenotation, problem: Realizability)(using Context) + extends Realizability(i" has a member $fld which is not a legal path\nsince ${fld.symbol.name}: ${fld.info}${problem.msg}") + + class ProblemInUnderlying(tp: Type, problem: Realizability)(using Context) + extends Realizability(i"s underlying type ${tp}${problem.msg}") { + assert(problem != Realizable) + } + + def realizability(tp: Type)(using Context): Realizability = + new CheckRealizable().realizability(tp) + + def boundsRealizability(tp: Type)(using Context): Realizability = + new CheckRealizable().boundsRealizability(tp) + + private val LateInitializedFlags = Lazy | Erased +} + +/** Compute realizability status. + * + * A type T is realizable iff it is inhabited by non-null values. This ensures that its type members have good bounds + * (in the sense from DOT papers). A type projection T#L is legal if T is realizable, and can be understood as + * Scala 2's `v.L forSome { val v: T }`. + * + * In general, a realizable type can have multiple inhabitants, hence it need not be stable (in the sense of + * Type.isStable). + */ +class CheckRealizable(using Context) { + import CheckRealizable._ + + /** A set of all fields that have already been checked. Used + * to avoid infinite recursions when analyzing recursive types. + */ + private val checkedFields: mutable.Set[Symbol] = mutable.LinkedHashSet[Symbol]() + + /** Is symbol's definitition a lazy or erased val? 
+ * (note we exclude modules here, because their realizability is ensured separately) + */ + private def isLateInitialized(sym: Symbol) = sym.isOneOf(LateInitializedFlags, butNot = Module) + + /** The realizability status of given type `tp`*/ + def realizability(tp: Type): Realizability = tp.dealias match { + /* + * A `TermRef` for a path `p` is realizable if + * - `p`'s type is stable and realizable, or + * - its underlying path is idempotent (that is, *stable*), total, and not null. + * We don't check yet the "not null" clause: that will require null-safety checking. + * + * We assume that stability of tp.prefix is checked elsewhere, since that's necessary for the path to be legal in + * the first place. + */ + case tp: TermRef => + val sym = tp.symbol + lazy val tpInfoRealizable = realizability(tp.info) + if (sym.is(StableRealizable)) realizability(tp.prefix) + else { + val r = + if (sym.isStableMember && !isLateInitialized(sym)) + // it's realizable because we know that a value of type `tp` has been created at run-time + Realizable + else if (!sym.isEffectivelyFinal) + // it's potentially not realizable since it might be overridden with a member of nonrealizable type + new NotFinal(sym) + else + // otherwise we need to look at the info to determine realizability + // roughly: it's realizable if the info does not have bad bounds + tpInfoRealizable.mapError(r => new ProblemInUnderlying(tp, r)) + r andAlso { + if (sym.isStableMember) sym.setFlag(StableRealizable) // it's known to be stable and realizable + realizability(tp.prefix) + } mapError { r => + // A mutable path is in fact stable and realizable if it has a realizable singleton type. 
+ if (tp.info.isStable && tpInfoRealizable == Realizable) { + sym.setFlag(StableRealizable) + Realizable + } + else r + } + } + case _: SingletonType | NoPrefix => + Realizable + case tp => + def isConcrete(tp: Type): Boolean = tp.dealias match { + case tp: TypeRef => tp.symbol.isClass + case tp: TypeParamRef => false + case tp: TypeProxy => isConcrete(tp.underlying) + case tp: AndType => isConcrete(tp.tp1) && isConcrete(tp.tp2) + case tp: OrType => isConcrete(tp.tp1) && isConcrete(tp.tp2) + case _ => false + } + if (!isConcrete(tp)) NotConcrete + else boundsRealizability(tp).andAlso(memberRealizability(tp)) + } + + private def refinedNames(tp: Type): Set[Name] = tp.dealias match { + case tp: RefinedType => refinedNames(tp.parent) + tp.refinedName + case tp: AndType => refinedNames(tp.tp1) ++ refinedNames(tp.tp2) + case tp: OrType => refinedNames(tp.tp1) ++ refinedNames(tp.tp2) + case tp: TypeProxy => refinedNames(tp.superType) + case _ => Set.empty + } + + /** `Realizable` if `tp` has good bounds, a `HasProblem...` instance + * pointing to a bad bounds member otherwise. "Has good bounds" means: + * + * - all type members have good bounds (except for opaque helpers) + * - all refinements of the underlying type have good bounds (except for opaque companions) + * - all base types are class types, and if their arguments are wildcards + * they have good bounds. + * - base types do not appear in multiple instances with different arguments. + * (depending on the simplification scheme for AndTypes employed, this could + * also lead to base types with bad bounds). 
+ */ + private def boundsRealizability(tp: Type) = { + + val memberProblems = withMode(Mode.CheckBoundsOrSelfType) { + for { + mbr <- tp.nonClassTypeMembers + if !(mbr.info.loBound <:< mbr.info.hiBound) + } + yield new HasProblemBounds(mbr.name, mbr.info) + } + + val refinementProblems = withMode(Mode.CheckBoundsOrSelfType) { + for { + name <- refinedNames(tp) + if (name.isTypeName) + mbr <- tp.member(name).alternatives + if !(mbr.info.loBound <:< mbr.info.hiBound) + } + yield + new HasProblemBounds(name, mbr.info) + } + + def baseTypeProblems(base: Type) = base match { + case AndType(base1, base2) => + new HasProblemBase(base1, base2) :: Nil + case base => + base.argInfos.collect { + case bounds @ TypeBounds(lo, hi) if !(lo <:< hi) => + new HasProblemBaseArg(base, bounds) + } + } + val baseProblems = + tp.baseClasses.map(_.baseTypeOf(tp)).flatMap(baseTypeProblems) + + baseProblems.foldLeft( + refinementProblems.foldLeft( + memberProblems.foldLeft( + Realizable: Realizability)(_ andAlso _))(_ andAlso _))(_ andAlso _) + } + + /** `Realizable` if all of `tp`'s non-strict fields have realizable types, + * a `HasProblemField` instance pointing to a bad field otherwise. + */ + private def memberRealizability(tp: Type) = { + def checkField(sofar: Realizability, fld: SingleDenotation): Realizability = + sofar andAlso { + if (checkedFields.contains(fld.symbol) || fld.symbol.isOneOf(Private | Mutable | LateInitializedFlags)) + // if field is private it cannot be part of a visible path + // if field is mutable it cannot be part of a path + // if field is lazy or erased it does not need to be initialized when the owning object is + // so in all cases the field does not influence realizability of the enclosing object. + Realizable + else { + checkedFields += fld.symbol + realizability(fld.info).mapError(r => new HasProblemField(fld, r)) + } + } + if sourceVersion.isAtLeast(future) then + // check fields only from version 3.x. 
+ // Reason: An embedded field could well be nullable, which means it + // should not be part of a path and need not be checked; but we cannot recognize + // this situation until we have a typesystem that tracks nullability. + tp.fields.foldLeft(Realizable: Realizability)(checkField) + else + Realizable + } +} diff --git a/tests/pos-with-compiler-cc/dotc/core/Comments.scala b/tests/pos-with-compiler-cc/dotc/core/Comments.scala new file mode 100644 index 000000000000..1b20b75ad8ac --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Comments.scala @@ -0,0 +1,462 @@ +package dotty.tools +package dotc +package core + +import scala.language.unsafeNulls + +import ast.{ untpd, tpd } +import Symbols._, Contexts._ +import util.{SourceFile, ReadOnlyMap} +import util.Spans._ +import util.CommentParsing._ +import util.Property.Key +import parsing.Parsers.Parser +import reporting.ProperDefinitionNotFound + +object Comments { + val ContextDoc: Key[ContextDocstrings] = new Key[ContextDocstrings] + + /** Decorator for getting docbase out of context */ + given CommentsContext: AnyRef with + extension (c: Context) def docCtx: Option[ContextDocstrings] = c.property(ContextDoc) + + /** Context for Docstrings, contains basic functionality for getting + * docstrings via `Symbol` and expanding templates + */ + class ContextDocstrings { + + private val _docstrings: MutableSymbolMap[Comment] = MutableSymbolMap[Comment](512) // FIXME: 2nd [Comment] needed or "not a class type" + + val templateExpander: CommentExpander = new CommentExpander + + def docstrings: ReadOnlyMap[Symbol, Comment] = _docstrings + + def docstring(sym: Symbol): Option[Comment] = _docstrings.get(sym) + + def addDocstring(sym: Symbol, doc: Option[Comment]): Unit = + doc.foreach(d => _docstrings.update(sym, d)) + } + + /** + * A `Comment` contains the unformatted docstring, it's position and potentially more + * information that is populated when the comment is "cooked". 
+ * + * @param span The position span of this `Comment`. + * @param raw The raw comment, as seen in the source code, without any expansion. + * @param expanded If this comment has been expanded, it's expansion, otherwise `None`. + * @param usecases The usecases for this comment. + */ + final case class Comment( + span: Span, + raw: String, + expanded: Option[String], + usecases: List[UseCase], + variables: Map[String, String], + ) { + + /** Has this comment been cooked or expanded? */ + def isExpanded: Boolean = expanded.isDefined + + /** The body of this comment, without the `@usecase` and `@define` sections, after expansion. */ + lazy val expandedBody: Option[String] = + expanded.map(removeSections(_, "@usecase", "@define")) + + val isDocComment: Boolean = Comment.isDocComment(raw) + + /** + * Expands this comment by giving its content to `f`, and then parsing the `@usecase` sections. + * Typically, `f` will take care of expanding the variables. + * + * @param f The expansion function. + * @return The expanded comment, with the `usecases` populated. + */ + def expand(f: String => String)(using Context): Comment = { + val expandedComment = f(raw) + val useCases = Comment.parseUsecases(expandedComment, span) + Comment(span, raw, Some(expandedComment), useCases, Map.empty) + } + } + + object Comment { + + def isDocComment(comment: String): Boolean = comment.startsWith("/**") + + def apply(span: Span, raw: String): Comment = + Comment(span, raw, None, Nil, Map.empty) + + private def parseUsecases(expandedComment: String, span: Span)(using Context): List[UseCase] = + if (!isDocComment(expandedComment)) + Nil + else + tagIndex(expandedComment) + .filter { startsWithTag(expandedComment, _, "@usecase") } + .map { case (start, end) => decomposeUseCase(expandedComment, span, start, end) } + + /** Turns a usecase section into a UseCase, with code changed to: + * {{{ + * // From: + * def foo: A + * // To: + * def foo: A = ??? 
+ * }}} + */ + private def decomposeUseCase(body: String, span: Span, start: Int, end: Int)(using Context): UseCase = { + def subPos(start: Int, end: Int) = + if (span == NoSpan) NoSpan + else { + val start1 = span.start + start + val end1 = span.end + end + span withStart start1 withPoint start1 withEnd end1 + } + + val codeStart = skipWhitespace(body, start + "@usecase".length) + val codeEnd = skipToEol(body, codeStart) + val code = body.substring(codeStart, codeEnd) + " = ???" + val codePos = subPos(codeStart, codeEnd) + + UseCase(code, codePos) + } + } + + final case class UseCase(code: String, codePos: Span, untpdCode: untpd.Tree, tpdCode: Option[tpd.DefDef]) { + def typed(tpdCode: tpd.DefDef): UseCase = copy(tpdCode = Some(tpdCode)) + } + + object UseCase { + def apply(code: String, codePos: Span)(using Context): UseCase = { + val tree = { + val tree = new Parser(SourceFile.virtual("", code)).localDef(codePos.start) + tree match { + case tree: untpd.DefDef => + val newName = ctx.compilationUnit.freshNames.newName(tree.name, NameKinds.DocArtifactName) + untpd.cpy.DefDef(tree)(name = newName) + case _ => + report.error(ProperDefinitionNotFound(), ctx.source.atSpan(codePos)) + tree + } + } + UseCase(code, codePos, tree, None) + } + } + + /** + * Port of DocComment.scala from nsc + * @author Martin Odersky + * @author Felix Mulder + */ + class CommentExpander { + import dotc.config.Printers.scaladoc + import scala.collection.mutable + + def expand(sym: Symbol, site: Symbol)(using Context): String = { + val parent = if (site != NoSymbol) site else sym + defineVariables(parent) + expandedDocComment(sym, parent) + } + + /** The cooked doc comment of symbol `sym` after variable expansion, or "" if missing. 
+ * + * @param sym The symbol for which doc comment is returned + * @param site The class for which doc comments are generated + * @throws ExpansionLimitExceeded when more than 10 successive expansions + * of the same string are done, which is + * interpreted as a recursive variable definition. + */ + def expandedDocComment(sym: Symbol, site: Symbol, docStr: String = "")(using Context): String = { + // when parsing a top level class or module, use the (module-)class itself to look up variable definitions + val parent = if ((sym.is(Flags.Module) || sym.isClass) && site.is(Flags.Package)) sym + else site + expandVariables(cookedDocComment(sym, docStr), sym, parent) + } + + private def template(raw: String): String = + removeSections(raw, "@define") + + private def defines(raw: String): List[String] = { + val sections = tagIndex(raw) + val defines = sections filter { startsWithTag(raw, _, "@define") } + val usecases = sections filter { startsWithTag(raw, _, "@usecase") } + val end = startTag(raw, (defines ::: usecases).sortBy(_._1)) + + defines map { case (start, end) => raw.substring(start, end) } + } + + private def replaceInheritDocToInheritdoc(docStr: String): String = + docStr.replaceAll("""\{@inheritDoc\p{Zs}*\}""", "@inheritdoc") + + /** The cooked doc comment of an overridden symbol */ + protected def superComment(sym: Symbol)(using Context): Option[String] = + allInheritedOverriddenSymbols(sym).iterator map (x => cookedDocComment(x)) find (_ != "") + + private val cookedDocComments = MutableSymbolMap[String]() + + /** The raw doc comment of symbol `sym`, minus usecase and define sections, augmented by + * missing sections of an inherited doc comment. + * If a symbol does not have a doc comment but some overridden version of it does, + * the doc comment of the overridden version is copied instead. 
+ */ + def cookedDocComment(sym: Symbol, docStr: String = "")(using Context): String = cookedDocComments.getOrElseUpdate(sym, { + var ownComment = + if (docStr.length == 0) ctx.docCtx.flatMap(_.docstring(sym).map(c => template(c.raw))).getOrElse("") + else template(docStr) + ownComment = replaceInheritDocToInheritdoc(ownComment) + + superComment(sym) match { + case None => + // SI-8210 - The warning would be false negative when this symbol is a setter + if (ownComment.indexOf("@inheritdoc") != -1 && ! sym.isSetter) + scaladoc.println(s"${sym.span}: the comment for ${sym} contains @inheritdoc, but no parent comment is available to inherit from.") + ownComment.replace("@inheritdoc", "") + case Some(sc) => + if (ownComment == "") sc + else expandInheritdoc(sc, merge(sc, ownComment, sym), sym) + } + }) + + private def isMovable(str: String, sec: (Int, Int)): Boolean = + startsWithTag(str, sec, "@param") || + startsWithTag(str, sec, "@tparam") || + startsWithTag(str, sec, "@return") + + def merge(src: String, dst: String, sym: Symbol, copyFirstPara: Boolean = false): String = { + val srcSections = tagIndex(src) + val dstSections = tagIndex(dst) + val srcParams = paramDocs(src, "@param", srcSections) + val dstParams = paramDocs(dst, "@param", dstSections) + val srcTParams = paramDocs(src, "@tparam", srcSections) + val dstTParams = paramDocs(dst, "@tparam", dstSections) + val out = new StringBuilder + var copied = 0 + var tocopy = startTag(dst, dstSections dropWhile (!isMovable(dst, _))) + + if (copyFirstPara) { + val eop = // end of comment body (first para), which is delimited by blank line, or tag, or end of comment + (findNext(src, 0)(src.charAt(_) == '\n')) min startTag(src, srcSections) + out append src.substring(0, eop).trim + copied = 3 + tocopy = 3 + } + + def mergeSection(srcSec: Option[(Int, Int)], dstSec: Option[(Int, Int)]) = dstSec match { + case Some((start, end)) => + if (end > tocopy) tocopy = end + case None => + srcSec match { + case Some((start1, 
end1)) => + out append dst.substring(copied, tocopy).trim + out append "\n" + copied = tocopy + out append src.substring(start1, end1).trim + case None => + } + } + + //TODO: enable this once you know how to get `sym.paramss` + /* + for (params <- sym.paramss; param <- params) + mergeSection(srcParams get param.name.toString, dstParams get param.name.toString) + for (tparam <- sym.typeParams) + mergeSection(srcTParams get tparam.name.toString, dstTParams get tparam.name.toString) + + mergeSection(returnDoc(src, srcSections), returnDoc(dst, dstSections)) + mergeSection(groupDoc(src, srcSections), groupDoc(dst, dstSections)) + */ + + if (out.length == 0) dst + else { + out append dst.substring(copied) + out.toString + } + } + + /** + * Expand inheritdoc tags + * - for the main comment we transform the inheritdoc into the super variable, + * and the variable expansion can expand it further + * - for the param, tparam and throws sections we must replace comments on the spot + * + * This is done separately, for two reasons: + * 1. It takes longer to run compared to merge + * 2. 
The inheritdoc annotation should not be used very often, as building the comment from pieces severely + * impacts performance + * + * @param parent The source (or parent) comment + * @param child The child (overriding member or usecase) comment + * @param sym The child symbol + * @return The child comment with the inheritdoc sections expanded + */ + def expandInheritdoc(parent: String, child: String, sym: Symbol): String = + if (child.indexOf("@inheritdoc") == -1) + child + else { + val parentSections = tagIndex(parent) + val childSections = tagIndex(child) + val parentTagMap = sectionTagMap(parent, parentSections) + val parentNamedParams = Map() + + ("@param" -> paramDocs(parent, "@param", parentSections)) + + ("@tparam" -> paramDocs(parent, "@tparam", parentSections)) + + ("@throws" -> paramDocs(parent, "@throws", parentSections)) + + val out = new StringBuilder + + def replaceInheritdoc(childSection: String, parentSection: => String) = + if (childSection.indexOf("@inheritdoc") == -1) + childSection + else + childSection.replace("@inheritdoc", parentSection) + + def getParentSection(section: (Int, Int)): String = { + + def getSectionHeader = extractSectionTag(child, section) match { + case param@("@param"|"@tparam"|"@throws") => param + " " + extractSectionParam(child, section) + case other => other + } + + def sectionString(param: String, paramMap: Map[String, (Int, Int)]): String = + paramMap.get(param) match { + case Some(section) => + // Cleanup the section tag and parameter + val sectionTextBounds = extractSectionText(parent, section) + cleanupSectionText(parent.substring(sectionTextBounds._1, sectionTextBounds._2)) + case None => + scaladoc.println(s"""${sym.span}: the """" + getSectionHeader + "\" annotation of the " + sym + + " comment contains @inheritdoc, but the corresponding section in the parent is not defined.") + "" + } + + child.substring(section._1, section._1 + 7) match { + case param@("@param "|"@tparam"|"@throws") => + 
sectionString(extractSectionParam(child, section), parentNamedParams(param.trim)) + case _ => + sectionString(extractSectionTag(child, section), parentTagMap) + } + } + + def mainComment(str: String, sections: List[(Int, Int)]): String = + if (str.trim.length > 3) + str.trim.substring(3, startTag(str, sections)) + else + "" + + // Append main comment + out.append("/**") + out.append(replaceInheritdoc(mainComment(child, childSections), mainComment(parent, parentSections))) + + // Append sections + for (section <- childSections) + out.append(replaceInheritdoc(child.substring(section._1, section._2), getParentSection(section))) + + out.append("*/") + out.toString + } + + protected def expandVariables(initialStr: String, sym: Symbol, site: Symbol)(using Context): String = { + val expandLimit = 10 + + def expandInternal(str: String, depth: Int): String = { + if (depth >= expandLimit) + throw new ExpansionLimitExceeded(str) + + val out = new StringBuilder + var copied, idx = 0 + // excluding variables written as \$foo so we can use them when + // necessary to document things like Symbol#decode + def isEscaped = idx > 0 && str.charAt(idx - 1) == '\\' + while (idx < str.length) + if ((str charAt idx) != '$' || isEscaped) + idx += 1 + else { + val vstart = idx + idx = skipVariable(str, idx + 1) + def replaceWith(repl: String) = { + out append str.substring(copied, vstart) + out append repl + copied = idx + } + variableName(str.substring(vstart + 1, idx)) match { + case "super" => + superComment(sym) foreach { sc => + val superSections = tagIndex(sc) + replaceWith(sc.substring(3, startTag(sc, superSections))) + for (sec @ (start, end) <- superSections) + if (!isMovable(sc, sec)) out append sc.substring(start, end) + } + case "" => idx += 1 + case vname => + lookupVariable(vname, site) match { + case Some(replacement) => replaceWith(replacement) + case None => + scaladoc.println(s"Variable $vname undefined in comment for $sym in $site") + } + } + } + if (out.length == 0) str 
+ else { + out append str.substring(copied) + expandInternal(out.toString, depth + 1) + } + } + + // We suppressed expanding \$ throughout the recursion, and now we + // need to replace \$ with $ so it looks as intended. + expandInternal(initialStr, 0).replace("""\$""", "$") + } + + def defineVariables(sym: Symbol)(using Context): Unit = { + val Trim = "(?s)^[\\s&&[^\n\r]]*(.*?)\\s*$".r + + val raw = ctx.docCtx.flatMap(_.docstring(sym).map(_.raw)).getOrElse("") + defs(sym) ++= defines(raw).map { + str => { + val start = skipWhitespace(str, "@define".length) + val (key, value) = str.splitAt(skipVariable(str, start)) + key.drop(start) -> value + } + } map { + case (key, Trim(value)) => + variableName(key) -> value.replaceAll("\\s+\\*+$", "") + } + } + + /** Maps symbols to the variable -> replacement maps that are defined + * in their doc comments + */ + private val defs = mutable.HashMap[Symbol, Map[String, String]]() withDefaultValue Map() + + /** Lookup definition of variable. + * + * @param vble The variable for which a definition is searched + * @param site The class for which doc comments are generated + */ + def lookupVariable(vble: String, site: Symbol)(using Context): Option[String] = site match { + case NoSymbol => None + case _ => + val searchList = + if (site.flags.is(Flags.Module)) site :: site.info.baseClasses + else site.info.baseClasses + + searchList collectFirst { case x if defs(x) contains vble => defs(x)(vble) } match { + case Some(str) if str startsWith "$" => lookupVariable(str.tail, site) + case res => res orElse lookupVariable(vble, site.owner) + } + } + + /** The position of the raw doc comment of symbol `sym`, or NoPosition if missing + * If a symbol does not have a doc comment but some overridden version of it does, + * the position of the doc comment of the overridden version is returned instead. 
+ */ + def docCommentPos(sym: Symbol)(using Context): Span = + ctx.docCtx.flatMap(_.docstring(sym).map(_.span)).getOrElse(NoSpan) + + /** A version which doesn't consider self types, as a temporary measure: + * an infinite loop has broken out between superComment and cookedDocComment + * since r23926. + */ + private def allInheritedOverriddenSymbols(sym: Symbol)(using Context): List[Symbol] = + if (!sym.owner.isClass) Nil + else sym.allOverriddenSymbols.toList.filter(_ != NoSymbol) //TODO: could also be `sym.owner.allOverrid..` + //else sym.owner.ancestors map (sym overriddenSymbol _) filter (_ != NoSymbol) + + class ExpansionLimitExceeded(str: String) extends Exception + } +} diff --git a/tests/pos-with-compiler-cc/dotc/core/Constants.scala b/tests/pos-with-compiler-cc/dotc/core/Constants.scala new file mode 100644 index 000000000000..f45e9e5217de --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Constants.scala @@ -0,0 +1,261 @@ +package dotty.tools +package dotc +package core + +import Types._, Symbols._, Contexts._ +import printing.Printer +import printing.Texts.Text + +object Constants { + + inline val NoTag = 0 + inline val UnitTag = 1 + inline val BooleanTag = 2 + inline val ByteTag = 3 + inline val ShortTag = 4 + inline val CharTag = 5 + inline val IntTag = 6 + inline val LongTag = 7 + inline val FloatTag = 8 + inline val DoubleTag = 9 + inline val StringTag = 10 + inline val NullTag = 11 + inline val ClazzTag = 12 + + class Constant(val value: Any, val tag: Int) extends printing.Showable with Product1[Any] { + import java.lang.Double.doubleToRawLongBits + import java.lang.Float.floatToRawIntBits + + def isByteRange: Boolean = isIntRange && Byte.MinValue <= intValue && intValue <= Byte.MaxValue + def isShortRange: Boolean = isIntRange && Short.MinValue <= intValue && intValue <= Short.MaxValue + def isCharRange: Boolean = isIntRange && Char.MinValue <= intValue && intValue <= Char.MaxValue + def isIntRange: Boolean = ByteTag <= tag && tag <= IntTag + 
def isLongRange: Boolean = ByteTag <= tag && tag <= LongTag + def isFloatRange: Boolean = ByteTag <= tag && tag <= FloatTag + def isNumeric: Boolean = ByteTag <= tag && tag <= DoubleTag + def isNonUnitAnyVal: Boolean = BooleanTag <= tag && tag <= DoubleTag + def isAnyVal: Boolean = UnitTag <= tag && tag <= DoubleTag + + def tpe(using Context): Type = tag match { + case UnitTag => defn.UnitType + case BooleanTag => defn.BooleanType + case ByteTag => defn.ByteType + case ShortTag => defn.ShortType + case CharTag => defn.CharType + case IntTag => defn.IntType + case LongTag => defn.LongType + case FloatTag => defn.FloatType + case DoubleTag => defn.DoubleType + case StringTag => defn.StringType + case NullTag => defn.NullType + case ClazzTag => defn.ClassType(typeValue) + } + + /** We need the equals method to take account of tags as well as values. + */ + override def equals(other: Any): Boolean = other match { + case that: Constant => + this.tag == that.tag && equalHashValue == that.equalHashValue + case _ => false + } + + def isNaN: Boolean = value match { + case f: Float => f.isNaN + case d: Double => d.isNaN + case _ => false + } + + def booleanValue: Boolean = + if (tag == BooleanTag) value.asInstanceOf[Boolean] + else throw new Error("value " + value + " is not a boolean") + + def byteValue: Byte = tag match { + case ByteTag => value.asInstanceOf[Byte] + case ShortTag => value.asInstanceOf[Short].toByte + case CharTag => value.asInstanceOf[Char].toByte + case IntTag => value.asInstanceOf[Int].toByte + case LongTag => value.asInstanceOf[Long].toByte + case FloatTag => value.asInstanceOf[Float].toByte + case DoubleTag => value.asInstanceOf[Double].toByte + case _ => throw new Error("value " + value + " is not a Byte") + } + + def shortValue: Short = tag match { + case ByteTag => value.asInstanceOf[Byte].toShort + case ShortTag => value.asInstanceOf[Short] + case CharTag => value.asInstanceOf[Char].toShort + case IntTag => value.asInstanceOf[Int].toShort + case 
LongTag => value.asInstanceOf[Long].toShort + case FloatTag => value.asInstanceOf[Float].toShort + case DoubleTag => value.asInstanceOf[Double].toShort + case _ => throw new Error("value " + value + " is not a Short") + } + + def charValue: Char = tag match { + case ByteTag => value.asInstanceOf[Byte].toChar + case ShortTag => value.asInstanceOf[Short].toChar + case CharTag => value.asInstanceOf[Char] + case IntTag => value.asInstanceOf[Int].toChar + case LongTag => value.asInstanceOf[Long].toChar + case FloatTag => value.asInstanceOf[Float].toChar + case DoubleTag => value.asInstanceOf[Double].toChar + case _ => throw new Error("value " + value + " is not a Char") + } + + def intValue: Int = tag match { + case ByteTag => value.asInstanceOf[Byte].toInt + case ShortTag => value.asInstanceOf[Short].toInt + case CharTag => value.asInstanceOf[Char].toInt + case IntTag => value.asInstanceOf[Int] + case LongTag => value.asInstanceOf[Long].toInt + case FloatTag => value.asInstanceOf[Float].toInt + case DoubleTag => value.asInstanceOf[Double].toInt + case _ => throw new Error("value " + value + " is not an Int") + } + + def longValue: Long = tag match { + case ByteTag => value.asInstanceOf[Byte].toLong + case ShortTag => value.asInstanceOf[Short].toLong + case CharTag => value.asInstanceOf[Char].toLong + case IntTag => value.asInstanceOf[Int].toLong + case LongTag => value.asInstanceOf[Long] + case FloatTag => value.asInstanceOf[Float].toLong + case DoubleTag => value.asInstanceOf[Double].toLong + case _ => throw new Error("value " + value + " is not a Long") + } + + def floatValue: Float = tag match { + case ByteTag => value.asInstanceOf[Byte].toFloat + case ShortTag => value.asInstanceOf[Short].toFloat + case CharTag => value.asInstanceOf[Char].toFloat + case IntTag => value.asInstanceOf[Int].toFloat + case LongTag => value.asInstanceOf[Long].toFloat + case FloatTag => value.asInstanceOf[Float] + case DoubleTag => value.asInstanceOf[Double].toFloat + case _ => throw new 
Error("value " + value + " is not a Float") + } + + def doubleValue: Double = tag match { + case ByteTag => value.asInstanceOf[Byte].toDouble + case ShortTag => value.asInstanceOf[Short].toDouble + case CharTag => value.asInstanceOf[Char].toDouble + case IntTag => value.asInstanceOf[Int].toDouble + case LongTag => value.asInstanceOf[Long].toDouble + case FloatTag => value.asInstanceOf[Float].toDouble + case DoubleTag => value.asInstanceOf[Double] + case _ => throw new Error("value " + value + " is not a Double") + } + + /** Convert constant value to conform to given type. + */ + def convertTo(pt: Type)(using Context): Constant | Null = { + def classBound(pt: Type): Type = pt.dealias.stripTypeVar match { + case tref: TypeRef if !tref.symbol.isClass && tref.info.exists => + classBound(tref.info.bounds.lo) + case param: TypeParamRef => + ctx.typerState.constraint.entry(param) match { + case TypeBounds(lo, hi) => + if (hi.classSymbol.isPrimitiveValueClass) hi //constrain further with high bound + else classBound(lo) + case NoType => classBound(param.binder.paramInfos(param.paramNum).lo) + case inst => classBound(inst) + } + case pt => pt + } + pt match + case ConstantType(value) if value == this => this + case _: SingletonType => null + case _ => + val target = classBound(pt).typeSymbol + if (target == tpe.typeSymbol) + this + else if ((target == defn.ByteClass) && isByteRange) + Constant(byteValue) + else if (target == defn.ShortClass && isShortRange) + Constant(shortValue) + else if (target == defn.CharClass && isCharRange) + Constant(charValue) + else if (target == defn.IntClass && isIntRange) + Constant(intValue) + else if (target == defn.LongClass && isLongRange) + Constant(longValue) + else if (target == defn.FloatClass && isFloatRange) + Constant(floatValue) + else if (target == defn.DoubleClass && isNumeric) + Constant(doubleValue) + else + null + } + + def stringValue: String = value.toString + + def toText(printer: Printer): Text = printer.toText(this) + + 
def typeValue: Type = value.asInstanceOf[Type] + + /** + * Consider two `NaN`s to be identical, despite non-equality + * Consider -0d to be distinct from 0d, despite equality + * + * We use the raw versions (i.e. `floatToRawIntBits` rather than `floatToIntBits`) + * to avoid treating different encodings of `NaN` as the same constant. + * You probably can't express different `NaN` varieties as compile time + * constants in regular Scala code, but it is conceivable that you could + * conjure them with a macro. + */ + private def equalHashValue: Any = value match { + case f: Float => floatToRawIntBits(f) + case d: Double => doubleToRawLongBits(d) + case v => v + } + + override def hashCode: Int = { + import scala.util.hashing.MurmurHash3._ + val seed = 17 + var h = seed + h = mix(h, tag.##) // include tag in the hash, otherwise 0, 0d, 0L, 0f collide. + h = mix(h, equalHashValue.##) + finalizeHash(h, length = 2) + } + + override def toString: String = s"Constant($value)" + def canEqual(x: Any): Boolean = true + def get: Any = value + def isEmpty: Boolean = false + def _1: Any = value + } + + object Constant { + def apply(x: Null): Constant = new Constant(x, NullTag) + def apply(x: Unit): Constant = new Constant(x, UnitTag) + def apply(x: Boolean): Constant = new Constant(x, BooleanTag) + def apply(x: Byte): Constant = new Constant(x, ByteTag) + def apply(x: Short): Constant = new Constant(x, ShortTag) + def apply(x: Int): Constant = new Constant(x, IntTag) + def apply(x: Long): Constant = new Constant(x, LongTag) + def apply(x: Float): Constant = new Constant(x, FloatTag) + def apply(x: Double): Constant = new Constant(x, DoubleTag) + def apply(x: String): Constant = new Constant(x, StringTag) + def apply(x: Char): Constant = new Constant(x, CharTag) + def apply(x: Type): Constant = new Constant(x, ClazzTag) + def apply(value: Any): Constant = + new Constant(value, + value match { + case null => NullTag + case x: Unit => UnitTag + case x: Boolean => BooleanTag + case 
x: Byte => ByteTag + case x: Short => ShortTag + case x: Int => IntTag + case x: Long => LongTag + case x: Float => FloatTag + case x: Double => DoubleTag + case x: String => StringTag + case x: Char => CharTag + case x: Type => ClazzTag + } + ) + + def unapply(c: Constant): Constant = c + } +} diff --git a/tests/pos-with-compiler-cc/dotc/core/Constraint.scala b/tests/pos-with-compiler-cc/dotc/core/Constraint.scala new file mode 100644 index 000000000000..07b6e71cdcc9 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Constraint.scala @@ -0,0 +1,196 @@ +package dotty.tools +package dotc +package core + +import Types._, Contexts._ +import printing.Showable + +/** Constraint over undetermined type parameters. Constraints are built + * over values of the following types: + * + * - TypeLambda A constraint constrains the type parameters of a set of TypeLambdas + * - TypeParamRef The parameters of the constrained type lambdas + * - TypeVar Every constrained parameter might be associated with a TypeVar + * that has the TypeParamRef as origin. + */ +abstract class Constraint extends Showable { + + type This <: Constraint + + /** Does the constraint's domain contain the type parameters of `tl`? */ + def contains(tl: TypeLambda): Boolean + + /** Does the constraint's domain contain the type parameter `param`? */ + def contains(param: TypeParamRef): Boolean + + /** Does this constraint contain the type variable `tvar` and is it uninstantiated? */ + def contains(tvar: TypeVar): Boolean + + /** The constraint entry for given type parameter `param`, or NoType if `param` is not part of + * the constraint domain. Note: Low level, implementation dependent. + */ + def entry(param: TypeParamRef): Type + + /** The type variable corresponding to parameter `param`, or + * NoType, if `param` is not in constrained or is not paired with a type variable. + */ + def typeVarOfParam(param: TypeParamRef): Type + + /** Is it known that `param1 <:< param2`? 
*/ + def isLess(param1: TypeParamRef, param2: TypeParamRef): Boolean + + /** The parameters that are known to be smaller wrt <: than `param` */ + def lower(param: TypeParamRef): List[TypeParamRef] + + /** The parameters that are known to be greater wrt <: than `param` */ + def upper(param: TypeParamRef): List[TypeParamRef] + + /** The lower dominator set. + * + * This is like `lower`, except that each parameter returned is no smaller than every other returned parameter. + */ + def minLower(param: TypeParamRef): List[TypeParamRef] + + /** The upper dominator set. + * + * This is like `upper`, except that each parameter returned is no greater than every other returned parameter. + */ + def minUpper(param: TypeParamRef): List[TypeParamRef] + + /** lower(param) \ lower(butNot) */ + def exclusiveLower(param: TypeParamRef, butNot: TypeParamRef): List[TypeParamRef] + + /** upper(param) \ upper(butNot) */ + def exclusiveUpper(param: TypeParamRef, butNot: TypeParamRef): List[TypeParamRef] + + /** The constraint bounds for given type parameter `param`. + * Poly params that are known to be smaller or greater than `param` + * are not contained in the return bounds. + * @pre `param` is not part of the constraint domain. + */ + def nonParamBounds(param: TypeParamRef)(using Context): TypeBounds + + /** A new constraint which is derived from this constraint by adding + * entries for all type parameters of `poly`. + * @param tvars A list of type variables associated with the params, + * or Nil if the constraint will just be checked for + * satisfiability but will solved to give instances of + * type variables. + */ + def add(poly: TypeLambda, tvars: List[TypeVar])(using Context): This + + /** A new constraint which is derived from this constraint by updating + * the entry for parameter `param` to `tp`. 
+ * `tp` can be one of the following: + * + * - A TypeBounds value, indicating new constraint bounds + * - Another type, indicating a solution for the parameter + * + * @pre `this contains param`. + */ + def updateEntry(param: TypeParamRef, tp: Type)(using Context): This + + /** A constraint that includes the relationship `p1 <: p2`. + * `<:` relationships between parameters ("edges") are propagated, but + * non-parameter bounds are left alone. + * + * @param direction Must be set to `KeepParam1` or `KeepParam2` when + * `p2 <: p1` is already true depending on which parameter + * the caller intends to keep. This will avoid propagating + * bounds that will be redundant after `p1` and `p2` are + * unified. + */ + def addLess(p1: TypeParamRef, p2: TypeParamRef, + direction: UnificationDirection = UnificationDirection.NoUnification)(using Context): This + + /** A new constraint which is derived from this constraint by removing + * the type parameter `param` from the domain and replacing all top-level occurrences + * of the parameter elsewhere in the constraint by type `tp`, or a conservative + * approximation of it if that is needed to avoid cycles. + * Occurrences nested inside a refinement or prefix are not affected. + */ + def replace(param: TypeParamRef, tp: Type)(using Context): This + + /** Is entry associated with `tl` removable? This is the case if + * all type parameters of the entry are associated with type variables + * which have their `inst` fields set. + */ + def isRemovable(tl: TypeLambda): Boolean + + /** A new constraint with all entries coming from `tl` removed. */ + def remove(tl: TypeLambda)(using Context): This + + /** A new constraint with entry `from` replaced with `to` + * Rerences to `from` from within other constraint bounds are updated to `to`. + * Type variables are left alone. + */ + def subst(from: TypeLambda, to: TypeLambda)(using Context): This + + /** Is `tv` marked as hard in the constraint? 
*/ + def isHard(tv: TypeVar): Boolean + + /** The same as this constraint, but with `tv` marked as hard. */ + def withHard(tv: TypeVar)(using Context): This + + /** Gives for each instantiated type var that does not yet have its `inst` field + * set, the instance value stored in the constraint. Storing instances in constraints + * is done only in a temporary way for contexts that may be retracted + * without also retracting the type var as a whole. + */ + def instType(tvar: TypeVar): Type + + /** The given `tl` in case it is not contained in this constraint, + * a fresh copy of `tl` otherwise. + */ + def ensureFresh(tl: TypeLambda)(using Context): TypeLambda + + /** The type lambdas constrained by this constraint */ + def domainLambdas: List[TypeLambda] + + /** The type lambda parameters constrained by this constraint */ + def domainParams: List[TypeParamRef] + + /** Check whether predicate holds for all parameters in constraint */ + def forallParams(p: TypeParamRef => Boolean): Boolean + + /** Perform operation `op` on all typevars that do not have their `inst` field set. */ + def foreachTypeVar(op: TypeVar => Unit): Unit + + /** The uninstantiated typevars of this constraint, which still have a bounds constraint + */ + def uninstVars: collection.Seq[TypeVar] + + /** Whether `tl` is present in both `this` and `that` but is associated with + * different TypeVars there, meaning that the constraints cannot be merged. + */ + def hasConflictingTypeVarsFor(tl: TypeLambda, that: Constraint): Boolean + + /** Check that no constrained parameter contains itself as a bound */ + def checkNonCyclic()(using Context): this.type + + /** Does `param` occur at the toplevel in `tp` ? + * Toplevel means: the type itself or a factor in some + * combination of `&` or `|` types. 
+ */ + def occursAtToplevel(param: TypeParamRef, tp: Type)(using Context): Boolean + + /** Check that constraint only refers to TypeParamRefs bound by itself */ + def checkClosed()(using Context): Unit + + /** Check that every typevar om this constraint has as origin a type parameter + * of athe type lambda that is associated with the typevar itself. + */ + def checkConsistentVars()(using Context): Unit +} + +/** When calling `Constraint#addLess(p1, p2, ...)`, the caller might end up + * unifying one parameter with the other, this enum lets `addLess` know which + * direction the unification will take. + */ +enum UnificationDirection: + /** Neither p1 nor p2 will be instantiated. */ + case NoUnification + /** `p2 := p1`, p1 left uninstantiated. */ + case KeepParam1 + /** `p1 := p2`, p2 left uninstantiated. */ + case KeepParam2 diff --git a/tests/pos-with-compiler-cc/dotc/core/ConstraintHandling.scala b/tests/pos-with-compiler-cc/dotc/core/ConstraintHandling.scala new file mode 100644 index 000000000000..a3d8cabba971 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/ConstraintHandling.scala @@ -0,0 +1,879 @@ +package dotty.tools +package dotc +package core + +import Types._ +import Contexts._ +import Symbols._ +import Decorators._ +import Flags._ +import config.Config +import config.Printers.typr +import typer.ProtoTypes.{newTypeVar, representedParamRef} +import UnificationDirection.* +import NameKinds.AvoidNameKind +import util.SimpleIdentitySet +import NullOpsDecorator.stripNull + +/** Methods for adding constraints and solving them. + * + * What goes into a Constraint as opposed to a ConstrainHandler? + * + * Constraint code is purely functional: Operations get constraints and produce new ones. + * Constraint code does not have access to a type-comparer. Anything regarding lubs and glbs has to be done + * elsewhere. + * + * By comparison: Constraint handlers are parts of type comparers and can use their functionality. 
+ * Constraint handlers update the current constraint as a side effect. + */ +trait ConstraintHandling { + + def constr: config.Printers.Printer = config.Printers.constr + + protected def isSub(tp1: Type, tp2: Type)(using Context): Boolean + protected def isSame(tp1: Type, tp2: Type)(using Context): Boolean + + protected def constraint: Constraint + protected def constraint_=(c: Constraint): Unit + + private var addConstraintInvocations = 0 + + /** If the constraint is frozen we cannot add new bounds to the constraint. */ + protected var frozenConstraint: Boolean = false + + /** Potentially a type lambda that is still instantiatable, even though the constraint + * is generally frozen. + */ + protected var caseLambda: Type = NoType + + /** If set, align arguments `S1`, `S2`when taking the glb + * `T1 { X = S1 } & T2 { X = S2 }` of a constraint upper bound for some type parameter. + * Aligning means computing `S1 =:= S2` which may change the current constraint. + * See note in TypeComparer#distributeAnd. + */ + protected var homogenizeArgs: Boolean = false + + /** We are currently comparing type lambdas. Used as a flag for + * optimization: when `false`, no need to do an expensive `pruneLambdaParams` + */ + protected var comparedTypeLambdas: Set[TypeLambda] = Set.empty + + /** Used for match type reduction: If false, we don't recognize an abstract type + * to be a subtype type of any of its base classes. This is in place only at the + * toplevel; it is turned on again when we add parts of the scrutinee to the constraint. + */ + protected var canWidenAbstract: Boolean = true + + protected var myNecessaryConstraintsOnly = false + /** When collecting the constraints needed for a particular subtyping + * judgment to be true, we sometimes need to approximate the constraint + * set (see `TypeComparer#either` for example). 
+ * + * Normally, this means adding extra constraints which may not be necessary + * for the subtyping judgment to be true, but if this variable is set to true + * we will instead under-approximate and keep only the constraints that must + * always be present for the subtyping judgment to hold. + * + * This is needed for GADT bounds inference to be sound, but it is also used + * when constraining a method call based on its expected type to avoid adding + * constraints that would later prevent us from typechecking method + * arguments, see or-inf.scala and and-inf.scala for examples. + */ + protected def necessaryConstraintsOnly(using Context): Boolean = + ctx.mode.is(Mode.GadtConstraintInference) || myNecessaryConstraintsOnly + + /** If `trustBounds = false` we perform comparisons in a pessimistic way as follows: + * Given an abstract type `A >: L <: H`, a subtype comparison of any type + * with `A` will compare against both `L` and `H`. E.g. + * + * T <:< A if T <:< L and T <:< H + * A <:< T if L <:< T and H <:< T + * + * This restricted form makes sure we don't "forget" types when forming + * unions and intersections with abstract types that have bad bounds. E.g. + * the following example from neg/i8900.scala that @smarter came up with: + * We have a type variable X with constraints + * + * X >: 1, X >: x.M + * + * where `x` is a locally nested variable and `x.M` has bad bounds + * + * x.M >: Int | String <: Int & String + * + * If we trust bounds, then the lower bound of `X` is `x.M` since `x.M >: 1`. + * Then even if we correct levels on instantiation to eliminate the local `x`, + * it is alreay too late, we'd get `Int & String` as instance, which does not + * satisfy the original constraint `X >: 1`. + * + * But if `trustBounds` is false, we do not conclude the `x.M >: 1` since + * we compare both bounds and the upper bound `Int & String` is not a supertype + * of `1`. 
So the lower bound is `1 | x.M` and when we level-avoid that we + * get `1 | Int & String`, which simplifies to `Int`. + */ + private var myTrustBounds = true + + inline def withUntrustedBounds(op: => Type): Type = + val saved = myTrustBounds + myTrustBounds = false + try op finally myTrustBounds = saved + + def trustBounds: Boolean = + !Config.checkLevelsOnInstantiation || myTrustBounds + + def checkReset() = + assert(addConstraintInvocations == 0) + assert(frozenConstraint == false) + assert(caseLambda == NoType) + assert(homogenizeArgs == false) + assert(comparedTypeLambdas == Set.empty) + + def nestingLevel(param: TypeParamRef)(using Context) = constraint.typeVarOfParam(param) match + case tv: TypeVar => tv.nestingLevel + case _ => + // This should only happen when reducing match types (in + // TrackingTypeComparer#matchCases) or in uncommitable TyperStates (as + // asserted in ProtoTypes.constrained) and is special-cased in `levelOK` + // below. + Int.MaxValue + + /** Is `level` <= `maxLevel` or legal in the current context? */ + def levelOK(level: Int, maxLevel: Int)(using Context): Boolean = + level <= maxLevel + || ctx.isAfterTyper || !ctx.typerState.isCommittable // Leaks in these cases shouldn't break soundness + || level == Int.MaxValue // See `nestingLevel` above. + || !Config.checkLevelsOnConstraints + + /** If `param` is nested deeper than `maxLevel`, try to instantiate it to a + * fresh type variable of level `maxLevel` and return the new variable. + * If this isn't possible, throw a TypeError. 
+ */ + def atLevel(maxLevel: Int, param: TypeParamRef)(using Context): TypeParamRef = + if levelOK(nestingLevel(param), maxLevel) then + return param + LevelAvoidMap(0, maxLevel)(param) match + case freshVar: TypeVar => freshVar.origin + case _ => throw new TypeError( + i"Could not decrease the nesting level of ${param} from ${nestingLevel(param)} to $maxLevel in $constraint") + + def nonParamBounds(param: TypeParamRef)(using Context): TypeBounds = constraint.nonParamBounds(param) + + /** The full lower bound of `param` includes both the `nonParamBounds` and the + * params in the constraint known to be `<: param`, except that + * params with a `nestingLevel` higher than `param` will be instantiated + * to a fresh param at a legal level. See the documentation of `TypeVar` + * for details. + */ + def fullLowerBound(param: TypeParamRef)(using Context): Type = + val maxLevel = nestingLevel(param) + var loParams = constraint.minLower(param) + if maxLevel != Int.MaxValue then + loParams = loParams.mapConserve(atLevel(maxLevel, _)) + loParams.foldLeft(nonParamBounds(param).lo)(_ | _) + + /** The full upper bound of `param`, see the documentation of `fullLowerBounds` above. */ + def fullUpperBound(param: TypeParamRef)(using Context): Type = + val maxLevel = nestingLevel(param) + var hiParams = constraint.minUpper(param) + if maxLevel != Int.MaxValue then + hiParams = hiParams.mapConserve(atLevel(maxLevel, _)) + hiParams.foldLeft(nonParamBounds(param).hi)(_ & _) + + /** Full bounds of `param`, including other lower/upper params. + * + * Note that underlying operations perform subtype checks - for this reason, recursing on `fullBounds` + * of some param when comparing types might lead to infinite recursion. Consider `bounds` instead. 
+ */ + def fullBounds(param: TypeParamRef)(using Context): TypeBounds = + nonParamBounds(param).derivedTypeBounds(fullLowerBound(param), fullUpperBound(param)) + + /** An approximating map that prevents types nested deeper than maxLevel as + * well as WildcardTypes from leaking into the constraint. + */ + class LevelAvoidMap(topLevelVariance: Int, maxLevel: Int)(using Context) extends TypeOps.AvoidMap: + variance = topLevelVariance + + def toAvoid(tp: NamedType): Boolean = + tp.prefix == NoPrefix && !tp.symbol.isStatic && !levelOK(tp.symbol.nestingLevel, maxLevel) + + /** Return a (possibly fresh) type variable of a level no greater than `maxLevel` which is: + * - lower-bounded by `tp` if variance >= 0 + * - upper-bounded by `tp` if variance <= 0 + * If this isn't possible, return the empty range. + */ + def legalVar(tp: TypeVar): Type = + val oldParam = tp.origin + val nameKind = + if variance > 0 then AvoidNameKind.UpperBound + else if variance < 0 then AvoidNameKind.LowerBound + else AvoidNameKind.BothBounds + + /** If it exists, return the first param in the list created in a previous call to `legalVar(tp)` + * with the appropriate level and variance. + */ + def findParam(params: List[TypeParamRef]): Option[TypeParamRef] = + params.find(p => + nestingLevel(p) <= maxLevel && representedParamRef(p) == oldParam && + (p.paramName.is(AvoidNameKind.BothBounds) || + variance != 0 && p.paramName.is(nameKind))) + + // First, check if we can reuse an existing parameter, this is more than an optimization + // since it avoids an infinite loop in tests/pos/i8900-cycle.scala + findParam(constraint.lower(oldParam)).orElse(findParam(constraint.upper(oldParam))) match + case Some(param) => + constraint.typeVarOfParam(param) + case _ => + // Otherwise, try to return a fresh type variable at `maxLevel` with + // the appropriate constraints. 
+ val name = nameKind(oldParam.paramName.toTermName).toTypeName + val freshVar = newTypeVar(TypeBounds.upper(tp.topType), name, + nestingLevel = maxLevel, represents = oldParam) + val ok = + if variance < 0 then + addLess(freshVar.origin, oldParam) + else if variance > 0 then + addLess(oldParam, freshVar.origin) + else + unify(freshVar.origin, oldParam) + if ok then freshVar else emptyRange + end legalVar + + override def apply(tp: Type): Type = tp match + case tp: TypeVar if !tp.isInstantiated && !levelOK(tp.nestingLevel, maxLevel) => + legalVar(tp) + // TypeParamRef can occur in tl bounds + case tp: TypeParamRef => + constraint.typeVarOfParam(tp) match + case tvar: TypeVar => + apply(tvar) + case _ => super.apply(tp) + case _ => + super.apply(tp) + + override def mapWild(t: WildcardType) = + if ctx.mode.is(Mode.TypevarsMissContext) then super.mapWild(t) + else + val tvar = newTypeVar(apply(t.effectiveBounds).toBounds, nestingLevel = maxLevel) + tvar + end LevelAvoidMap + + /** Approximate `rawBound` if needed to make it a legal bound of `param` by + * avoiding wildcards and types with a level strictly greater than its + * `nestingLevel`. + * + * Note that level-checking must be performed here and cannot be delayed + * until instantiation because if we allow level-incorrect bounds, then we + * might end up reasoning with bad bounds outside of the scope where they are + * defined. This can lead to level-correct but unsound instantiations as + * demonstrated by tests/neg/i8900.scala. + */ + protected def legalBound(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Type = + // Over-approximate for soundness. + var variance = if isUpper then -1 else 1 + // ...unless we can only infer necessary constraints, in which case we + // flip the variance to under-approximate. 
+ if necessaryConstraintsOnly then variance = -variance + + val approx = new LevelAvoidMap(variance, nestingLevel(param)): + override def legalVar(tp: TypeVar): Type = + // `legalVar` will create a type variable whose bounds depend on + // `variance`, but whether the variance is positive or negative, + // we can still infer necessary constraints since just creating a + // type variable doesn't reduce the set of possible solutions. + // Therefore, we can safely "unflip" the variance flipped above. + // This is necessary for i8900-unflip.scala to typecheck. + val v = if necessaryConstraintsOnly then -this.variance else this.variance + atVariance(v)(super.legalVar(tp)) + approx(rawBound) + end legalBound + + protected def addOneBound(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Boolean = + if !constraint.contains(param) then true + else if !isUpper && param.occursIn(rawBound) then + // We don't allow recursive lower bounds when defining a type, + // so we shouldn't allow them as constraints either. + false + else + val bound = legalBound(param, rawBound, isUpper) + val oldBounds @ TypeBounds(lo, hi) = constraint.nonParamBounds(param) + val equalBounds = (if isUpper then lo else hi) eq bound + if equalBounds && !bound.existsPart(_ eq param, StopAt.Static) then + // The narrowed bounds are equal and not recursive, + // so we can remove `param` from the constraint. + constraint = constraint.replace(param, bound) + true + else + // Narrow one of the bounds of type parameter `param` + // If `isUpper` is true, ensure that `param <: `bound`, otherwise ensure + // that `param >: bound`. 
+ val narrowedBounds = + val saved = homogenizeArgs + homogenizeArgs = Config.alignArgsInAnd + try + withUntrustedBounds( + if isUpper then oldBounds.derivedTypeBounds(lo, hi & bound) + else oldBounds.derivedTypeBounds(lo | bound, hi)) + finally + homogenizeArgs = saved + //println(i"narrow bounds for $param from $oldBounds to $narrowedBounds") + val c1 = constraint.updateEntry(param, narrowedBounds) + (c1 eq constraint) + || { + constraint = c1 + val TypeBounds(lo, hi) = constraint.entry(param): @unchecked + isSub(lo, hi) + } + end addOneBound + + protected def addBoundTransitively(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Boolean = + + /** Adjust the bound `tp` in the following ways: + * + * 1. Toplevel occurrences of TypeRefs that are instantiated in the current + * constraint are also dereferenced. + * 2. Toplevel occurrences of ExprTypes lead to a `NoType` return, which + * causes the addOneBound operation to fail. + * + * An occurrence is toplevel if it is the bound itself, or a term in some + * combination of `&` or `|` types. + */ + def adjust(tp: Type): Type = tp match + case tp: AndOrType => + val p1 = adjust(tp.tp1) + val p2 = adjust(tp.tp2) + if p1.exists && p2.exists then tp.derivedAndOrType(p1, p2) else NoType + case tp: TypeVar if constraint.contains(tp.origin) => + adjust(tp.underlying) + case tp: ExprType => + // ExprTypes are not value types, so type parameters should not + // be instantiated to ExprTypes. A scenario where such an attempted + // instantiation can happen is if we unify (=> T) => () with A => () + // where A is a TypeParamRef. See the comment on EtaExpansion.etaExpand + // why types such as (=> T) => () can be constructed and i7969.scala + // as a test where this happens. + // Note that scalac by contrast allows such instantiations. But letting + // type variables be ExprTypes has its own problems (e.g. you can't write + // the resulting types down) and is largely unknown terrain. 
+ NoType + case _ => + tp + + def description = i"constraint $param ${if isUpper then "<:" else ":>"} $rawBound to\n$constraint" + constr.println(i"adding $description$location") + if isUpper && rawBound.isRef(defn.NothingClass) && ctx.typerState.isGlobalCommittable then + def msg = i"!!! instantiated to Nothing: $param, constraint = $constraint" + if Config.failOnInstantiationToNothing + then assert(false, msg) + else report.log(msg) + def others = if isUpper then constraint.lower(param) else constraint.upper(param) + val bound = adjust(rawBound) + bound.exists + && addOneBound(param, bound, isUpper) && others.forall(addOneBound(_, bound, isUpper)) + .showing(i"added $description = $result$location", constr) + end addBoundTransitively + + protected def addLess(p1: TypeParamRef, p2: TypeParamRef)(using Context): Boolean = { + def description = i"ordering $p1 <: $p2 to\n$constraint" + val res = + if (constraint.isLess(p2, p1)) unify(p2, p1) + else { + val down1 = p1 :: constraint.exclusiveLower(p1, p2) + val up2 = p2 :: constraint.exclusiveUpper(p2, p1) + val lo1 = constraint.nonParamBounds(p1).lo + val hi2 = constraint.nonParamBounds(p2).hi + constr.println(i"adding $description down1 = $down1, up2 = $up2$location") + constraint = constraint.addLess(p1, p2) + down1.forall(addOneBound(_, hi2, isUpper = true)) && + up2.forall(addOneBound(_, lo1, isUpper = false)) + } + constr.println(i"added $description = $res$location") + res + } + + def location(using Context) = "" // i"in ${ctx.typerState.stateChainStr}" // use for debugging + + /** Unify p1 with p2: one parameter will be kept in the constraint, the + * other will be removed and its bounds transferred to the remaining one. + * + * If p1 and p2 have different `nestingLevel`, the parameter with the lowest + * level will be kept and the transferred bounds from the other parameter + * will be adjusted for level-correctness. 
+ */ + private def unify(p1: TypeParamRef, p2: TypeParamRef)(using Context): Boolean = { + constr.println(s"unifying $p1 $p2") + if !constraint.isLess(p1, p2) then + constraint = constraint.addLess(p1, p2) + + val level1 = nestingLevel(p1) + val level2 = nestingLevel(p2) + val pKept = if level1 <= level2 then p1 else p2 + val pRemoved = if level1 <= level2 then p2 else p1 + + val down = constraint.exclusiveLower(p2, p1) + val up = constraint.exclusiveUpper(p1, p2) + + constraint = constraint.addLess(p2, p1, direction = if pKept eq p1 then KeepParam2 else KeepParam1) + + val boundKept = constraint.nonParamBounds(pKept).substParam(pRemoved, pKept) + var boundRemoved = constraint.nonParamBounds(pRemoved).substParam(pRemoved, pKept) + + if level1 != level2 then + boundRemoved = LevelAvoidMap(-1, math.min(level1, level2))(boundRemoved) + val TypeBounds(lo, hi) = boundRemoved: @unchecked + // After avoidance, the interval might be empty, e.g. in + // tests/pos/i8900-promote.scala: + // >: x.type <: Singleton + // becomes: + // >: Int <: Singleton + // In that case, we can still get a legal constraint + // by replacing the lower-bound to get: + // >: Int & Singleton <: Singleton + if !isSub(lo, hi) then + boundRemoved = TypeBounds(lo & hi, hi) + + val newBounds = (boundKept & boundRemoved).bounds + constraint = constraint.updateEntry(pKept, newBounds).replace(pRemoved, pKept) + + val lo = newBounds.lo + val hi = newBounds.hi + isSub(lo, hi) && + down.forall(addOneBound(_, hi, isUpper = true)) && + up.forall(addOneBound(_, lo, isUpper = false)) + } + + protected def isSubType(tp1: Type, tp2: Type, whenFrozen: Boolean)(using Context): Boolean = + if (whenFrozen) + isSubTypeWhenFrozen(tp1, tp2) + else + isSub(tp1, tp2) + + inline final def inFrozenConstraint[T](op: => T): T = { + val savedFrozen = frozenConstraint + val savedLambda = caseLambda + frozenConstraint = true + caseLambda = NoType + try op + finally { + frozenConstraint = savedFrozen + caseLambda = savedLambda + } 
+ } + + final def isSubTypeWhenFrozen(tp1: Type, tp2: Type)(using Context): Boolean = inFrozenConstraint(isSub(tp1, tp2)) + final def isSameTypeWhenFrozen(tp1: Type, tp2: Type)(using Context): Boolean = inFrozenConstraint(isSame(tp1, tp2)) + + /** Test whether the lower bounds of all parameters in this + * constraint are a solution to the constraint. + */ + protected final def isSatisfiable(using Context): Boolean = + constraint.forallParams { param => + val TypeBounds(lo, hi) = constraint.entry(param): @unchecked + isSub(lo, hi) || { + report.log(i"sub fail $lo <:< $hi") + false + } + } + + /** Fix instance type `tp` by avoidance so that it does not contain references + * to types at level > `maxLevel`. + * @param tp the type to be fixed + * @param fromBelow whether type was obtained from lower bound + * @param maxLevel the maximum level of references allowed + * @param param the parameter that was instantiated + */ + private def fixLevels(tp: Type, fromBelow: Boolean, maxLevel: Int, param: TypeParamRef)(using Context) = + + def needsFix(tp: NamedType) = + (tp.prefix eq NoPrefix) && tp.symbol.nestingLevel > maxLevel + + /** An accumulator that determines whether levels need to be fixed + * and computes on the side sets of nested type variables that need + * to be instantiated. + */ + def needsLeveling = new TypeAccumulator[Boolean]: + if !fromBelow then variance = -1 + + def apply(need: Boolean, tp: Type) = + need || tp.match + case tp: NamedType => + needsFix(tp) + || !stopBecauseStaticOrLocal(tp) && apply(need, tp.prefix) + case tp: TypeVar => + val inst = tp.instanceOpt + if inst.exists then apply(need, inst) + else if tp.nestingLevel > maxLevel then + // Change the nesting level of inner type variable to `maxLevel`. + // This means that the type variable will be instantiated later to a + // less nested type. 
If there are other references to the same type variable + // that do not come from the type undergoing `fixLevels`, this could lead + // to coarser types than intended. An alternative is to instantiate the + // type variable right away, but this also loses information. See + // i15934.scala for a test where the current strategy works but an early instantiation + // of `tp` would fail. + constr.println(i"widening nesting level of type variable $tp from ${tp.nestingLevel} to $maxLevel") + ctx.typerState.setNestingLevel(tp, maxLevel) + true + else false + case _ => + foldOver(need, tp) + end needsLeveling + + def levelAvoid = new TypeOps.AvoidMap: + if !fromBelow then variance = -1 + def toAvoid(tp: NamedType) = needsFix(tp) + + if Config.checkLevelsOnInstantiation && !ctx.isAfterTyper && needsLeveling(false, tp) then + typr.println(i"instance $tp for $param needs leveling to $maxLevel") + levelAvoid(tp) + else tp + end fixLevels + + /** Solve constraint set for given type parameter `param`. + * If `fromBelow` is true the parameter is approximated by its lower bound, + * otherwise it is approximated by its upper bound, unless the upper bound + * contains a reference to the parameter itself (such occurrences can arise + * for F-bounded types, `addOneBound` ensures that they never occur in the + * lower bound). + * The solved type is not allowed to contain references to types nested deeper + * than `maxLevel`. + * Wildcard types in bounds are approximated by their upper or lower bounds. + * The constraint is left unchanged. + * @return the instantiating type + * @pre `param` is in the constraint's domain.
+ */ + final def approximation(param: TypeParamRef, fromBelow: Boolean, maxLevel: Int)(using Context): Type = + constraint.entry(param) match + case entry: TypeBounds => + val useLowerBound = fromBelow || param.occursIn(entry.hi) + val rawInst = withUntrustedBounds( + if useLowerBound then fullLowerBound(param) else fullUpperBound(param)) + val levelInst = fixLevels(rawInst, fromBelow, maxLevel, param) + if levelInst ne rawInst then + typr.println(i"level avoid for $maxLevel: $rawInst --> $levelInst") + typr.println(i"approx $param, from below = $fromBelow, inst = $levelInst") + levelInst + case inst => + assert(inst.exists, i"param = $param\nconstraint = $constraint") + inst + end approximation + + /** If `tp` is an intersection such that some operands are transparent trait instances + * and others are not, replace as many transparent trait instances as possible with Any + * as long as the result is still a subtype of `bound`. But fall back to the + * original type if the resulting widened type is a supertype of all dropped + * types (since in this case the type was not a true intersection of transparent traits + * and other types to start with). 
+ */ + def dropTransparentTraits(tp: Type, bound: Type)(using Context): Type = + var kept: Set[Type] = Set() // types to keep since otherwise bound would not fit + var dropped: List[Type] = List() // the types dropped so far, last one on top + + def dropOneTransparentTrait(tp: Type): Type = + val tpd = tp.dealias + if tpd.typeSymbol.isTransparentTrait && !tpd.isLambdaSub && !kept.contains(tpd) then + dropped = tpd :: dropped + defn.AnyType + else tpd match + case AndType(tp1, tp2) => + val tp1w = dropOneTransparentTrait(tp1) + if tp1w ne tp1 then tp1w & tp2 + else + val tp2w = dropOneTransparentTrait(tp2) + if tp2w ne tp2 then tp1 & tp2w + else tpd + case _ => + tp + + def recur(tp: Type): Type = + val tpw = dropOneTransparentTrait(tp) + if tpw eq tp then tp + else if tpw <:< bound then recur(tpw) + else + kept += dropped.head + dropped = dropped.tail + recur(tp) + + val saved = ctx.typerState.snapshot() + val tpw = recur(tp) + if (tpw eq tp) || dropped.forall(_ frozen_<:< tpw) then + // Rollback any constraint change that would lead to `tp` no longer + // being a valid solution. + ctx.typerState.resetTo(saved) + tp + else + tpw + end dropTransparentTraits + + /** If `tp` is an applied match type alias which is also an unreducible application + * of a higher-kinded type to a wildcard argument, widen to the match type's bound, + * in order to avoid an unreducible application of higher-kinded type ... in inferred type" + * error in PostTyper. Fixes #11246. + */ + def widenIrreducible(tp: Type)(using Context): Type = tp match + case tp @ AppliedType(tycon, _) if tycon.isLambdaSub && tp.hasWildcardArg => + tp.superType match + case MatchType(bound, _, _) => bound + case _ => tp + case _ => + tp + + /** Widen inferred type `inst` with upper `bound`, according to the following rules: + * 1. If `inst` is a singleton type, or a union containing some singleton types, + * widen (all) the singleton type(s), provided the result is a subtype of `bound`. + * (i.e. 
`inst.widenSingletons <:< bound` succeeds with satisfiable constraint) + * 2a. If `inst` is a union type and `widenUnions` is true, approximate the union type + * from above by an intersection of all common base types, provided the result + * is a subtype of `bound`. + * 2b. If `inst` is a union type and `widenUnions` is false, turn it into a hard + * union type (except for unions | Null, which are kept in the state they were). + * 3. Widen some irreducible applications of higher-kinded types to wildcard arguments + * (see @widenIrreducible). + * 4. Drop transparent traits from intersections (see @dropTransparentTraits). + * + * Don't do these widenings if `bound` is a subtype of `scala.Singleton`. + * Also, if the result of these widenings is a TypeRef to a module class, + * and this type ref is different from `inst`, replace by a TermRef to + * its source module instead. + * + * At this point we also drop the @Repeated annotation to avoid inferring type arguments with it, + * as those could leak the annotation to users (see run/inferred-repeated-result). 
+ */ + def widenInferred(inst: Type, bound: Type, widenUnions: Boolean)(using Context): Type = + def widenOr(tp: Type) = + if widenUnions then + val tpw = tp.widenUnion + if (tpw ne tp) && (tpw <:< bound) then tpw else tp + else tp.hardenUnions + + def widenSingle(tp: Type) = + val tpw = tp.widenSingletons + if (tpw ne tp) && (tpw <:< bound) then tpw else tp + + def isSingleton(tp: Type): Boolean = tp match + case WildcardType(optBounds) => optBounds.exists && isSingleton(optBounds.bounds.hi) + case _ => isSubTypeWhenFrozen(tp, defn.SingletonType) + + val wideInst = + if isSingleton(bound) then inst + else dropTransparentTraits(widenIrreducible(widenOr(widenSingle(inst))), bound) + wideInst match + case wideInst: TypeRef if wideInst.symbol.is(Module) => + TermRef(wideInst.prefix, wideInst.symbol.sourceModule) + case _ => + wideInst.dropRepeatedAnnot + end widenInferred + + /** Convert all toplevel union types in `tp` to hard unions */ + extension (tp: Type) private def hardenUnions(using Context): Type = tp.widen match + case tp: AndType => + tp.derivedAndType(tp.tp1.hardenUnions, tp.tp2.hardenUnions) + case tp: RefinedType => + tp.derivedRefinedType(tp.parent.hardenUnions, tp.refinedName, tp.refinedInfo) + case tp: RecType => + tp.rebind(tp.parent.hardenUnions) + case tp: HKTypeLambda => + tp.derivedLambdaType(resType = tp.resType.hardenUnions) + case tp: OrType => + val tp1 = tp.stripNull + if tp1 ne tp then tp.derivedOrType(tp1.hardenUnions, defn.NullType) + else tp.derivedOrType(tp.tp1.hardenUnions, tp.tp2.hardenUnions, soft = false) + case _ => + tp + + /** The instance type of `param` in the current constraint (which contains `param`). + * If `fromBelow` is true, the instance type is the lub of the parameter's + * lower bounds; otherwise it is the glb of its upper bounds. However, + * a lower bound instantiation can be a singleton type only if the upper bound + * is also a singleton type. 
+ * The instance type is not allowed to contain references to types nested deeper + * than `maxLevel`. + */ + def instanceType(param: TypeParamRef, fromBelow: Boolean, widenUnions: Boolean, maxLevel: Int)(using Context): Type = { + val approx = approximation(param, fromBelow, maxLevel).simplified + if fromBelow then + val widened = widenInferred(approx, param, widenUnions) + // Widening can add extra constraints, in particular the widened type might + // be a type variable which is now instantiated to `param`, and therefore + // cannot be used as an instantiation of `param` without creating a loop. + // If that happens, we run `instanceType` again to find a new instantiation. + // (we do not check for non-toplevel occurrences: those should never occur + // since `addOneBound` disallows recursive lower bounds). + if constraint.occursAtToplevel(param, widened) then + instanceType(param, fromBelow, widenUnions, maxLevel) + else + widened + else + approx + } + + /** Constraint `c1` subsumes constraint `c2`, if under `c2` as constraint we have + * for all poly params `p` defined in `c2` as `p >: L2 <: U2`: + * + * c1 defines p with bounds p >: L1 <: U1, and + * L2 <: L1, and + * U1 <: U2 + * + * Both `c1` and `c2` are required to derive from constraint `pre`, without adding + * any new type variables but possibly narrowing already registered ones with further bounds. + */ + protected final def subsumes(c1: Constraint, c2: Constraint, pre: Constraint)(using Context): Boolean = + if (c2 eq pre) true + else if (c1 eq pre) false + else { + val saved = constraint + try + // We iterate over params of `pre`, instead of `c2` as the documentation may suggest. + // As neither `c1` nor `c2` can have more params than `pre`, this only matters in one edge case. + // Constraint#forallParams only iterates over params that can be directly constrained.
+ // If `c2` has, compared to `pre`, instantiated a param and we iterated over params of `c2`, + // we could miss that param being instantiated to an incompatible type in `c1`. + pre.forallParams(p => + c1.entry(p).exists + && c2.upper(p).forall(c1.isLess(p, _)) + && isSubTypeWhenFrozen(c1.nonParamBounds(p), c2.nonParamBounds(p)) + ) + finally constraint = saved + } + + /** The current bounds of type parameter `param` */ + def bounds(param: TypeParamRef)(using Context): TypeBounds = { + val e = constraint.entry(param) + if (e.exists) e.bounds + else { + // TODO: should we change the type of paramInfos to nullable? + val pinfos: List[param.binder.PInfo] | Null = param.binder.paramInfos + if (pinfos != null) pinfos(param.paramNum) // pinfos == null happens in pos/i536.scala + else TypeBounds.empty + } + } + + /** Add type lambda `tl`, possibly with type variables `tvars`, to current constraint + * and propagate all bounds. + * @param tvars See Constraint#add + */ + def addToConstraint(tl: TypeLambda, tvars: List[TypeVar])(using Context): Boolean = + checkPropagated(i"initialized $tl") { + constraint = constraint.add(tl, tvars) + tl.paramRefs.forall { param => + val lower = constraint.lower(param) + val upper = constraint.upper(param) + constraint.entry(param) match { + case bounds: TypeBounds => + if lower.nonEmpty && !bounds.lo.isRef(defn.NothingClass) + || upper.nonEmpty && !bounds.hi.isAny + then constr.println(i"INIT*** $tl") + lower.forall(addOneBound(_, bounds.hi, isUpper = true)) && + upper.forall(addOneBound(_, bounds.lo, isUpper = false)) + case x => + // Happens if param was already solved while processing earlier params of the same TypeLambda. + // See #4720. + true + } + } + } + + /** Can `param` be constrained with new bounds? */ + final def canConstrain(param: TypeParamRef): Boolean = + (!frozenConstraint || (caseLambda `eq` param.binder)) && constraint.contains(param) + + /** Is `param` assumed to be a sub- and super-type of any other type? 
+ * This holds if `TypeVarsMissContext` is set unless `param` is a part + * of a MatchType that is currently normalized. + */ + final def assumedTrue(param: TypeParamRef)(using Context): Boolean = + ctx.mode.is(Mode.TypevarsMissContext) && (caseLambda `ne` param.binder) + + /** Add constraint `param <: bound` if `fromBelow` is false, `param >: bound` otherwise. + * `bound` is assumed to be in normalized form, as specified in `firstTry` and + * `secondTry` of `TypeComparer`. In particular, it should not be an alias type, + * lazy ref, typevar, wildcard type, error type. In addition, upper bounds may + * not be AndTypes and lower bounds may not be OrTypes. This is assured by the + * way isSubType is organized. + */ + protected def addConstraint(param: TypeParamRef, bound: Type, fromBelow: Boolean)(using Context): Boolean = + if !bound.isValueTypeOrLambda then return false + + /** When comparing lambdas we might get constraints such as + * `A <: X0` or `A = List[X0]` where `A` is a constrained parameter + * and `X0` is a lambda parameter. The constraint for `A` is not allowed + * to refer to such a lambda parameter because the lambda parameter is + * not visible where `A` is defined. Consequently, we need to + * approximate the bound so that the lambda parameter does not appear in it. + * If `tp` is an upper bound, we need to approximate with something smaller, + * otherwise something larger. + * Test case in pos/i94-nada.scala. This test crashes with an illegal instance + * error in Test2 when the rest of the SI-2712 fix is applied but `pruneLambdaParams` is + * missing. 
+ */ + def avoidLambdaParams(tp: Type) = + if comparedTypeLambdas.nonEmpty then + val approx = new ApproximatingTypeMap { + if (!fromBelow) variance = -1 + def apply(t: Type): Type = t match { + case t @ TypeParamRef(tl: TypeLambda, n) if comparedTypeLambdas contains tl => + val bounds = tl.paramInfos(n) + range(bounds.lo, bounds.hi) + case tl: TypeLambda => + val saved = comparedTypeLambdas + comparedTypeLambdas -= tl + try mapOver(tl) + finally comparedTypeLambdas = saved + case _ => + mapOver(t) + } + } + approx(tp) + else tp + + def addParamBound(bound: TypeParamRef) = + constraint.entry(param) match { + case _: TypeBounds => + if (fromBelow) addLess(bound, param) else addLess(param, bound) + case tp => + if (fromBelow) isSub(bound, tp) else isSub(tp, bound) + } + + def kindCompatible(tp1: Type, tp2: Type): Boolean = + val tparams1 = tp1.typeParams + val tparams2 = tp2.typeParams + tparams1.corresponds(tparams2)((p1, p2) => kindCompatible(p1.paramInfo, p2.paramInfo)) + && (tparams1.isEmpty || kindCompatible(tp1.hkResult, tp2.hkResult)) + || tp1.hasAnyKind + || tp2.hasAnyKind + + def description = i"constr $param ${if (fromBelow) ">:" else "<:"} $bound:\n$constraint" + + //checkPropagated(s"adding $description")(true) // DEBUG in case following fails + checkPropagated(s"added $description") { + addConstraintInvocations += 1 + val saved = canWidenAbstract + canWidenAbstract = true + try bound match + case bound: TypeParamRef if constraint contains bound => + addParamBound(bound) + case _ => + val pbound = avoidLambdaParams(bound) + kindCompatible(param, pbound) && addBoundTransitively(param, pbound, !fromBelow) + finally + canWidenAbstract = saved + addConstraintInvocations -= 1 + } + end addConstraint + + /** Check that constraint is fully propagated. 
See comment in Config.checkConstraintsPropagated */ + def checkPropagated(msg: => String)(result: Boolean)(using Context): Boolean = { + if (Config.checkConstraintsPropagated && result && addConstraintInvocations == 0) + inFrozenConstraint { + for (p <- constraint.domainParams) { + def check(cond: => Boolean, q: TypeParamRef, ordering: String, explanation: String): Unit = + assert(cond, i"propagation failure for $p $ordering $q: $explanation\n$msg") + for (u <- constraint.upper(p)) + check(bounds(p).hi <:< bounds(u).hi, u, "<:", "upper bound not propagated") + for (l <- constraint.lower(p)) { + check(bounds(l).lo <:< bounds(p).hi, l, ">:", "lower bound not propagated") + check(constraint.isLess(l, p), l, ">:", "reverse ordering (<:) missing") + } + } + } + result + } +} diff --git a/tests/pos-with-compiler-cc/dotc/core/ConstraintRunInfo.scala b/tests/pos-with-compiler-cc/dotc/core/ConstraintRunInfo.scala new file mode 100644 index 000000000000..d2b1246a8149 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/ConstraintRunInfo.scala @@ -0,0 +1,23 @@ +package dotty.tools.dotc +package core + +import Contexts._ +import config.Printers.{default, typr} + +trait ConstraintRunInfo { self: Run => + private var maxSize = 0 + private var maxConstraint: Constraint | Null = _ + def recordConstraintSize(c: Constraint, size: Int): Unit = + if (size > maxSize) { + maxSize = size + maxConstraint = c + } + def printMaxConstraint()(using Context): Unit = + if maxSize > 0 then + val printer = if ctx.settings.YdetailedStats.value then default else typr + printer.println(s"max constraint size: $maxSize") + try printer.println(s"max constraint = ${maxConstraint.nn.show}") + catch case ex: StackOverflowError => printer.println("max constraint cannot be printed due to stack overflow") + + protected def reset(): Unit = maxConstraint = null +} diff --git a/tests/pos-with-compiler-cc/dotc/core/ContextOps.scala b/tests/pos-with-compiler-cc/dotc/core/ContextOps.scala new file mode 100644 
index 000000000000..aa85f714a8e5 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/ContextOps.scala @@ -0,0 +1,113 @@ +package dotty.tools.dotc +package core + +import Contexts._, Symbols._, Types._, Flags._ +import Denotations._, SymDenotations._ +import Names.Name, StdNames.nme +import ast.untpd + +/** Extension methods for contexts where we want to keep the ctx. syntax */ +object ContextOps: + + extension (ctx: Context) + + /** Enter symbol into current class, if current class is owner of current context, + * or into current scope, if not. Should always be called instead of scope.enter + * in order to make sure that updates to class members are reflected in + * finger prints. + */ + def enter(sym: Symbol): Symbol = inContext(ctx) { + ctx.owner match + case cls: ClassSymbol => cls.classDenot.enter(sym) + case _ => ctx.scope.openForMutations.enter(sym) + sym + } + + /** The denotation with the given `name` and all `required` flags in current context + */ + def denotNamed(name: Name, required: FlagSet = EmptyFlags, excluded: FlagSet = EmptyFlags): Denotation = + inContext(ctx) { + if (ctx.owner.isClass) + if (ctx.outer.owner == ctx.owner) { // inner class scope; check whether we are referring to self + if (ctx.scope.size == 1) { + val elem = ctx.scope.lastEntry.nn + if (elem.name == name) return elem.sym.denot // return self + } + val pre = ctx.owner.thisType + if ctx.isJava then javaFindMember(name, pre, required, excluded) + else pre.findMember(name, pre, required, excluded) + } + else // we are in the outermost context belonging to a class; self is invisible here. See inClassContext. + ctx.owner.findMember(name, ctx.owner.thisType, required, excluded) + else + ctx.scope.denotsNamed(name).filterWithFlags(required, excluded).toDenot(NoPrefix) + } + + final def javaFindMember(name: Name, pre: Type, required: FlagSet = EmptyFlags, excluded: FlagSet = EmptyFlags): Denotation = + assert(ctx.isJava) + inContext(ctx) { + + val preSym = pre.typeSymbol + + // 1. 
Try to search in current type and parents. + val directSearch = pre.findMember(name, pre, required, excluded) + + // 2. Try to search in companion class if current is an object. + def searchCompanionClass = if preSym.is(Flags.Module) then + preSym.companionClass.thisType.findMember(name, pre, required, excluded) + else NoDenotation + + // 3. Try to search in companion objects of super classes. + // In Java code, static inner classes, which we model as members of the companion object, + // can be referenced from an ident in a subclass or by a selection prefixed by the subclass. + def searchSuperCompanionObjects = + val toSearch = if preSym.is(Flags.Module) then + if preSym.companionClass.exists then + preSym.companionClass.asClass.baseClasses + else Nil + else + preSym.asClass.baseClasses + + toSearch.iterator.map { bc => + val pre1 = bc.companionModule.namedType + pre1.findMember(name, pre1, required, excluded) + }.find(_.exists).getOrElse(NoDenotation) + + if preSym.isClass then + directSearch orElse searchCompanionClass orElse searchSuperCompanionObjects + else + directSearch + } + + /** A fresh local context with given tree and owner. + * Owner might not exist (can happen for self valdefs), in which case + * no owner is set in result context + */ + def localContext(tree: untpd.Tree, owner: Symbol): FreshContext = inContext(ctx) { + val freshCtx = ctx.fresh.setTree(tree) + if owner.exists then freshCtx.setOwner(owner) else freshCtx + } + + /** Context where `sym` is defined, assuming we are in a nested context. 
*/ + def defContext(sym: Symbol): Context = inContext(ctx) { + ctx.outersIterator + .dropWhile(_.owner != sym) + .dropWhile(_.owner == sym) + .next() + } + + /** A new context for the interior of a class */ + def inClassContext(selfInfo: TypeOrSymbol): Context = inContext(ctx) { + val localCtx: Context = ctx.fresh.setNewScope + selfInfo match { + case sym: Symbol if sym.exists && sym.name != nme.WILDCARD => localCtx.scope.openForMutations.enter(sym) + case _ => + } + localCtx + } + + def packageContext(tree: untpd.PackageDef, pkg: Symbol): Context = inContext(ctx) { + if (pkg.is(Package)) ctx.fresh.setOwner(pkg.moduleClass).setTree(tree) + else ctx + } +end ContextOps diff --git a/tests/pos-with-compiler-cc/dotc/core/Contexts.scala b/tests/pos-with-compiler-cc/dotc/core/Contexts.scala new file mode 100644 index 000000000000..68620c6d3fe7 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Contexts.scala @@ -0,0 +1,1000 @@ +package dotty.tools +package dotc +package core + +import interfaces.CompilerCallback +import Decorators._ +import Periods._ +import Names._ +import Phases._ +import Types._ +import Symbols._ +import Scopes._ +import Uniques._ +import ast.Trees._ +import ast.untpd +import util.{NoSource, SimpleIdentityMap, SourceFile, HashSet, ReusableInstance} +import typer.{Implicits, ImportInfo, SearchHistory, SearchRoot, TypeAssigner, Typer, Nullables} +import inlines.Inliner +import Nullables._ +import Implicits.ContextualImplicits +import config.Settings._ +import config.Config +import reporting._ +import io.{AbstractFile, NoAbstractFile, PlainFile, Path} +import scala.io.Codec +import collection.mutable +import printing._ +import config.{JavaPlatform, SJSPlatform, Platform, ScalaSettings} +import classfile.ReusableDataReader +import StdNames.nme + +import scala.annotation.internal.sharable + +import DenotTransformers.DenotTransformer +import dotty.tools.dotc.profile.Profiler +import util.Property.Key +import util.Store +import xsbti.AnalysisCallback 
+import plugins._ +import java.util.concurrent.atomic.AtomicInteger +import java.nio.file.InvalidPathException +import language.experimental.pureFunctions + +object Contexts { + + private val (compilerCallbackLoc, store1) = Store.empty.newLocation[CompilerCallback]() + private val (sbtCallbackLoc, store2) = store1.newLocation[AnalysisCallback]() + private val (printerFnLoc, store3) = store2.newLocation[Context -> Printer](new RefinedPrinter(_)) + private val (settingsStateLoc, store4) = store3.newLocation[SettingsState]() + private val (compilationUnitLoc, store5) = store4.newLocation[CompilationUnit]() + private val (runLoc, store6) = store5.newLocation[Run | Null]() + private val (profilerLoc, store7) = store6.newLocation[Profiler]() + private val (notNullInfosLoc, store8) = store7.newLocation[List[NotNullInfo]]() + private val (importInfoLoc, store9) = store8.newLocation[ImportInfo | Null]() + private val (typeAssignerLoc, store10) = store9.newLocation[TypeAssigner](TypeAssigner) + + private val initialStore = store10 + + /** The current context */ + inline def ctx(using ctx: Context): Context = ctx + + /** Run `op` with given context */ + inline def inContext[T](c: Context)(inline op: Context ?=> T): T = + op(using c) + + /** Execute `op` at given period */ + inline def atPeriod[T](pd: Period)(inline op: Context ?=> T)(using Context): T = + op(using ctx.fresh.setPeriod(pd)) + + /** Execute `op` at given phase id */ + inline def atPhase[T](pid: PhaseId)(inline op: Context ?=> T)(using Context): T = + op(using ctx.withPhase(pid)) + + /** Execute `op` at given phase */ + inline def atPhase[T](phase: Phase)(inline op: Context ?=> T)(using Context): T = + op(using ctx.withPhase(phase)) + + inline def atNextPhase[T](inline op: Context ?=> T)(using Context): T = + atPhase(ctx.phase.next)(op) + + /** Execute `op` at the current phase if it's before the first transform phase, + * otherwise at the last phase before the first transform phase. 
+ * + * Note: this should be used instead of `atPhaseNoLater(ctx.picklerPhase)` + * because the latter won't work if the `Pickler` phase is not present (for example, + * when using `QuoteCompiler`). + */ + inline def atPhaseBeforeTransforms[T](inline op: Context ?=> T)(using Context): T = + atPhaseNoLater(firstTransformPhase.prev)(op) + + inline def atPhaseNoLater[T](limit: Phase)(inline op: Context ?=> T)(using Context): T = + op(using if !limit.exists || ctx.phase <= limit then ctx else ctx.withPhase(limit)) + + inline def atPhaseNoEarlier[T](limit: Phase)(inline op: Context ?=> T)(using Context): T = + op(using if !limit.exists || limit <= ctx.phase then ctx else ctx.withPhase(limit)) + + inline def inMode[T](mode: Mode)(inline op: Context ?=> T)(using ctx: Context): T = + op(using if mode != ctx.mode then ctx.fresh.setMode(mode) else ctx) + + inline def withMode[T](mode: Mode)(inline op: Context ?=> T)(using ctx: Context): T = + inMode(ctx.mode | mode)(op) + + inline def withoutMode[T](mode: Mode)(inline op: Context ?=> T)(using ctx: Context): T = + inMode(ctx.mode &~ mode)(op) + + /** A context is passed basically everywhere in dotc. + * This is convenient but carries the risk of captured contexts in + * objects that turn into space leaks. To combat this risk, here are some + * conventions to follow: + * + * - Never let an implicit context be an argument of a class whose instances + * live longer than the context. + * - Classes that need contexts for their initialization take an explicit parameter + * named `initctx`. They pass initctx to all positions where it is needed + * (and these positions should all be part of the initialization sequence of the class). + * - Classes that need contexts that survive initialization are instead passed + * a "condensed context", typically named `cctx` (or they create one). Condensed contexts + * just add some basic information to the context base without the + * risk of capturing complete trees.
+ * - To make sure these rules are kept, it would be good to do a sanity + * check using bytecode inspection with javap or scalap: Keep track + * of all class fields of type context; allow them only in whitelisted + * classes (which should be short-lived). + */ + abstract class Context(val base: ContextBase) { thiscontext => + + given Context = this + + /** All outer contexts, ending in `base.initialCtx` and then `NoContext` */ + def outersIterator: Iterator[Context] = new Iterator[Context] { + var current = thiscontext + def hasNext = current != NoContext + def next = { val c = current; current = current.outer; c } + } + + /** The outer context */ + private var _outer: Context = _ + protected def outer_=(outer: Context): Unit = _outer = outer + final def outer: Context = _outer + + /** The current context */ + private var _period: Period = _ + protected def period_=(period: Period): Unit = { + assert(period.firstPhaseId == period.lastPhaseId, period) + _period = period + } + final def period: Period = _period + + /** The scope nesting level */ + private var _mode: Mode = _ + protected def mode_=(mode: Mode): Unit = _mode = mode + final def mode: Mode = _mode + + /** The current owner symbol */ + private var _owner: Symbol = _ + protected def owner_=(owner: Symbol): Unit = _owner = owner + final def owner: Symbol = _owner + + /** The current tree */ + private var _tree: Tree[? >: Untyped]= _ + protected def tree_=(tree: Tree[? >: Untyped]): Unit = _tree = tree + final def tree: Tree[? 
>: Untyped] = _tree + + /** The current scope */ + private var _scope: Scope = _ + protected def scope_=(scope: Scope): Unit = _scope = scope + final def scope: Scope = _scope + + /** The current typerstate */ + private var _typerState: TyperState = _ + protected def typerState_=(typerState: TyperState): Unit = _typerState = typerState + final def typerState: TyperState = _typerState + + /** The current bounds in force for type parameters appearing in a GADT */ + private var _gadt: GadtConstraint = _ + protected def gadt_=(gadt: GadtConstraint): Unit = _gadt = gadt + final def gadt: GadtConstraint = _gadt + + /** The history of implicit searches that are currently active */ + private var _searchHistory: SearchHistory = _ + protected def searchHistory_= (searchHistory: SearchHistory): Unit = _searchHistory = searchHistory + final def searchHistory: SearchHistory = _searchHistory + + /** The current source file */ + private var _source: SourceFile = _ + protected def source_=(source: SourceFile): Unit = _source = source + final def source: SourceFile = _source + + /** A map in which more contextual properties can be stored + * Typically used for attributes that are read and written only in special situations. + */ + private var _moreProperties: Map[Key[Any], Any] = _ + protected def moreProperties_=(moreProperties: Map[Key[Any], Any]): Unit = _moreProperties = moreProperties + final def moreProperties: Map[Key[Any], Any] = _moreProperties + + def property[T](key: Key[T]): Option[T] = + moreProperties.get(key).asInstanceOf[Option[T]] + + /** A store that can be used by sub-components. + * Typically used for attributes that are defined only once per compilation unit. + * Access to store entries is much faster than access to properties, and only + * slightly slower than a normal field access would be. 
+ */ + private var _store: Store = _ + protected def store_=(store: Store): Unit = _store = store + final def store: Store = _store + + /** The compiler callback implementation, or null if no callback will be called. */ + def compilerCallback: CompilerCallback = store(compilerCallbackLoc) + + /** The sbt callback implementation if we are run from sbt, null otherwise */ + def sbtCallback: AnalysisCallback = store(sbtCallbackLoc) + + /** The current plain printer */ + def printerFn: Context -> Printer = store(printerFnLoc) + + /** A function creating a printer */ + def printer: Printer = + val pr = printerFn(this) + if this.settings.YplainPrinter.value then pr.plain else pr + + /** The current settings values */ + def settingsState: SettingsState = store(settingsStateLoc) + + /** The current compilation unit */ + def compilationUnit: CompilationUnit = store(compilationUnitLoc) + + /** The current compiler-run */ + def run: Run | Null = store(runLoc) + + /** The current compiler-run profiler */ + def profiler: Profiler = store(profilerLoc) + + /** The paths currently known to be not null */ + def notNullInfos: List[NotNullInfo] = store(notNullInfosLoc) + + /** The currently active import info */ + def importInfo: ImportInfo | Null = store(importInfoLoc) + + /** The current type assigner or typer */ + def typeAssigner: TypeAssigner = store(typeAssignerLoc) + + /** The new implicit references that are introduced by this scope */ + protected var implicitsCache: ContextualImplicits | Null = null + def implicits: ContextualImplicits = { + if (implicitsCache == null) + implicitsCache = { + val implicitRefs: List[ImplicitRef] = + if (isClassDefContext) + try owner.thisType.implicitMembers + catch { + case ex: CyclicReference => Nil + } + else if (isImportContext) importInfo.nn.importedImplicits + else if (isNonEmptyScopeContext) scope.implicitDecls + else Nil + val outerImplicits = + if (isImportContext && importInfo.nn.unimported.exists) + outer.implicits exclude 
importInfo.nn.unimported + else + outer.implicits + if (implicitRefs.isEmpty) outerImplicits + else new ContextualImplicits(implicitRefs, outerImplicits, isImportContext)(this) + } + implicitsCache.nn + } + + /** Either the current scope, or, if the current context owner is a class, + * the declarations of the current class. + */ + def effectiveScope(using Context): Scope = + val myOwner: Symbol | Null = owner + if myOwner != null && myOwner.isClass then myOwner.asClass.unforcedDecls + else scope + + def nestingLevel: Int = effectiveScope.nestingLevel + + /** Sourcefile corresponding to given abstract file, memoized */ + def getSource(file: AbstractFile, codec: -> Codec = Codec(settings.encoding.value)) = { + util.Stats.record("Context.getSource") + base.sources.getOrElseUpdate(file, SourceFile(file, codec)) + } + + /** SourceFile with given path name, memoized */ + def getSource(path: TermName): SourceFile = getFile(path) match + case NoAbstractFile => NoSource + case file => getSource(file) + + /** SourceFile with given path, memoized */ + def getSource(path: String): SourceFile = getSource(path.toTermName) + + /** AbstractFile with given path name, memoized */ + def getFile(name: TermName): AbstractFile = base.files.get(name) match + case Some(file) => + file + case None => + try + val file = new PlainFile(Path(name.toString)) + base.files(name) = file + file + catch + case ex: InvalidPathException => + report.error(s"invalid file path: ${ex.getMessage}") + NoAbstractFile + + /** AbstractFile with given path, memoized */ + def getFile(name: String): AbstractFile = getFile(name.toTermName) + + + private var related: SimpleIdentityMap[Phase | SourceFile, Context] | Null = null + + private def lookup(key: Phase | SourceFile): Context | Null = + util.Stats.record("Context.related.lookup") + if related == null then + related = SimpleIdentityMap.empty + null + else + related.nn(key) + + private def withPhase(phase: Phase, pid: PhaseId): Context = + 
util.Stats.record("Context.withPhase") + val curId = phaseId + if curId == pid then + this + else + var ctx1 = lookup(phase) + if ctx1 == null then + util.Stats.record("Context.withPhase.new") + ctx1 = fresh.setPhase(pid) + related = related.nn.updated(phase, ctx1) + ctx1 + + final def withPhase(phase: Phase): Context = withPhase(phase, phase.id) + final def withPhase(pid: PhaseId): Context = withPhase(base.phases(pid), pid) + + final def withSource(source: SourceFile): Context = + util.Stats.record("Context.withSource") + if this.source eq source then + this + else + var ctx1 = lookup(source) + if ctx1 == null then + util.Stats.record("Context.withSource.new") + val ctx2 = fresh.setSource(source) + if ctx2.compilationUnit eq NoCompilationUnit then + // `source` might correspond to a file not necessarily + // in the current project (e.g. when inlining library code), + // so set `mustExist` to false. + ctx2.setCompilationUnit(CompilationUnit(source, mustExist = false)) + ctx1 = ctx2 + related = related.nn.updated(source, ctx2) + ctx1 + + // `creationTrace`-related code. To enable, uncomment the code below and the + // call to `setCreationTrace()` in this file. + /* + /** If -Ydebug is on, the top of the stack trace where this context + * was created, otherwise `null`. 
+ */ + private var creationTrace: Array[StackTraceElement] = _ + + private def setCreationTrace() = + creationTrace = (new Throwable).getStackTrace().take(20) + + /** Print all enclosing context's creation stacktraces */ + def printCreationTraces() = { + println("=== context creation trace =======") + for (ctx <- outersIterator) { + println(s">>>>>>>>> $ctx") + if (ctx.creationTrace != null) println(ctx.creationTrace.mkString("\n")) + } + println("=== end context creation trace ===") + } + */ + + /** The current reporter */ + def reporter: Reporter = typerState.reporter + + final def phase: Phase = base.phases(period.firstPhaseId) + final def runId = period.runId + final def phaseId = period.phaseId + + final def lastPhaseId = base.phases.length - 1 + + /** Does current phase use an erased types interpretation? */ + final def erasedTypes = phase.erasedTypes + + /** Are we in a Java compilation unit? */ + final def isJava: Boolean = compilationUnit.isJava + + /** Is current phase after TyperPhase? */ + final def isAfterTyper = base.isAfterTyper(phase) + final def isTyper = base.isTyper(phase) + + /** Is this a context for the members of a class definition? */ + def isClassDefContext: Boolean = + owner.isClass && (owner ne outer.owner) + + /** Is this a context that introduces an import clause? */ + def isImportContext: Boolean = + (this ne NoContext) + && (outer ne NoContext) + && (this.importInfo ne outer.importInfo) + + /** Is this a context that introduces a non-empty scope? */ + def isNonEmptyScopeContext: Boolean = + (this.scope ne outer.scope) && !this.scope.isEmpty + + /** Is this a context for typechecking an inlined body? 
*/ + def isInlineContext: Boolean = + typer.isInstanceOf[Inliner#InlineTyper] + + /** The next outer context whose tree is a template or package definition + * Note: Currently unused + def enclTemplate: Context = { + var c = this + while (c != NoContext && !c.tree.isInstanceOf[Template[?]] && !c.tree.isInstanceOf[PackageDef[?]]) + c = c.outer + c + }*/ + + /** The context for a supercall. This context is used for elaborating + * the parents of a class and their arguments. + * The context is computed from the current class context. It has + * + * - as owner: The primary constructor of the class + * - as outer context: The context enclosing the class context + * - as scope: The parameter accessors in the class context + * + * The reasons for this peculiar choice of attributes are as follows: + * + * - The constructor must be the owner, because that's where any local methods or closures + * should go. + * - The context may not see any class members (inherited or defined), and should + * instead see definitions defined in the outer context which might be shadowed by + * such class members. That's why the outer context must be the outer context of the class. + * - At the same time the context should see the parameter accessors of the current class, + * that's why they get added to the local scope. An alternative would have been to have the + * context see the constructor parameters instead, but then we'd need a final substitution step + * from constructor parameters to class parameter accessors. + */ + def superCallContext: Context = { + val locals = newScopeWith(owner.typeParams ++ owner.asClass.paramAccessors: _*) + superOrThisCallContext(owner.primaryConstructor, locals) + } + + /** The context for the arguments of a this(...) constructor call. + * The context is computed from the local auxiliary constructor context. 
+ * It has + * + * - as owner: The auxiliary constructor + * - as outer context: The context enclosing the enclosing class context + * - as scope: The parameters of the auxiliary constructor. + */ + def thisCallArgContext: Context = { + val constrCtx = outersIterator.dropWhile(_.outer.owner == owner).next() + superOrThisCallContext(owner, constrCtx.scope) + .setTyperState(typerState) + .setGadt(gadt) + .fresh + .setScope(this.scope) + } + + /** The super- or this-call context with given owner and locals. */ + private def superOrThisCallContext(owner: Symbol, locals: Scope): FreshContext = { + var classCtx = outersIterator.dropWhile(!_.isClassDefContext).next() + classCtx.outer.fresh.setOwner(owner) + .setScope(locals) + .setMode(classCtx.mode) + } + + /** The context of expression `expr` seen as a member of a statement sequence */ + def exprContext(stat: Tree[? >: Untyped], exprOwner: Symbol): Context = + if (exprOwner == this.owner) this + else if (untpd.isSuperConstrCall(stat) && this.owner.isClass) superCallContext + else fresh.setOwner(exprOwner) + + /** A new context that summarizes an import statement */ + def importContext(imp: Import[?], sym: Symbol): FreshContext = + fresh.setImportInfo(ImportInfo(sym, imp.selectors, imp.expr)) + + /** Is the debug option set? */ + def debug: Boolean = base.settings.Ydebug.value + + /** Is the verbose option set? */ + def verbose: Boolean = base.settings.verbose.value + + /** Should use colors when printing? */ + def useColors: Boolean = + base.settings.color.value == "always" + + /** Is the explicit nulls option set? 
*/ + def explicitNulls: Boolean = base.settings.YexplicitNulls.value + + /** Initialize all context fields, except typerState, which has to be set separately + * @param outer The outer context + * @param origin The context from which fields are copied + */ + private[Contexts] def init(outer: Context, origin: Context): this.type = { + _outer = outer + _period = origin.period + _mode = origin.mode + _owner = origin.owner + _tree = origin.tree + _scope = origin.scope + _gadt = origin.gadt + _searchHistory = origin.searchHistory + _source = origin.source + _moreProperties = origin.moreProperties + _store = origin.store + this + } + + def reuseIn(outer: Context): this.type = + implicitsCache = null + related = null + init(outer, outer) + + /** A fresh clone of this context embedded in this context. */ + def fresh: FreshContext = freshOver(this) + + /** A fresh clone of this context embedded in the specified `outer` context. */ + def freshOver(outer: Context): FreshContext = + util.Stats.record("Context.fresh") + FreshContext(base).init(outer, this).setTyperState(this.typerState) + + final def withOwner(owner: Symbol): Context = + if (owner ne this.owner) fresh.setOwner(owner) else this + + final def withTyperState(typerState: TyperState): Context = + if typerState ne this.typerState then fresh.setTyperState(typerState) else this + + final def withUncommittedTyperState: Context = + withTyperState(typerState.uncommittedAncestor) + + final def withProperty[T](key: Key[T], value: Option[T]): Context = + if (property(key) == value) this + else value match { + case Some(v) => fresh.setProperty(key, v) + case None => fresh.dropProperty(key) + } + + def typer: Typer = this.typeAssigner match { + case typer: Typer => typer + case _ => new Typer + } + + override def toString: String = + def iinfo(using Context) = + val info = ctx.importInfo + if (info == null) "" else i"${info.selectors}%, %" + def cinfo(using Context) = + val core = s" owner = ${ctx.owner}, scope = ${ctx.scope}, 
import = $iinfo" + if (ctx ne NoContext) && (ctx.implicits ne ctx.outer.implicits) then + s"$core, implicits = ${ctx.implicits}" + else + core + s"""Context( + |${outersIterator.map(ctx => cinfo(using ctx)).mkString("\n\n")})""".stripMargin + + def settings: ScalaSettings = base.settings + def definitions: Definitions = base.definitions + def platform: Platform = base.platform + def pendingUnderlying: util.HashSet[Type] = base.pendingUnderlying + def uniqueNamedTypes: Uniques.NamedTypeUniques = base.uniqueNamedTypes + def uniques: util.WeakHashSet[Type] = base.uniques + + def initialize()(using Context): Unit = base.initialize() + } + + /** A condensed context provides only a small memory footprint over + * a Context base, and therefore can be stored without problems in + * long-lived objects. + abstract class CondensedContext extends Context { + override def condensed = this + } + */ + + /** A fresh context allows selective modification + * of its attributes using the with... methods. + */ + class FreshContext(base: ContextBase) extends Context(base) { + def setPeriod(period: Period): this.type = + util.Stats.record("Context.setPeriod") + this.period = period + this + def setMode(mode: Mode): this.type = + util.Stats.record("Context.setMode") + this.mode = mode + this + def setOwner(owner: Symbol): this.type = + util.Stats.record("Context.setOwner") + assert(owner != NoSymbol) + this.owner = owner + this + def setTree(tree: Tree[? 
>: Untyped]): this.type = + util.Stats.record("Context.setTree") + this.tree = tree + this + def setScope(scope: Scope): this.type = { this.scope = scope; this } + def setNewScope: this.type = + util.Stats.record("Context.setScope") + this.scope = newScope + this + def setTyperState(typerState: TyperState): this.type = { this.typerState = typerState; this } + def setNewTyperState(): this.type = setTyperState(typerState.fresh(committable = true)) + def setExploreTyperState(): this.type = setTyperState(typerState.fresh(committable = false)) + def setReporter(reporter: Reporter): this.type = setTyperState(typerState.fresh().setReporter(reporter)) + def setTyper(typer: Typer): this.type = { this.scope = typer.scope; setTypeAssigner(typer) } + def setGadt(gadt: GadtConstraint): this.type = + util.Stats.record("Context.setGadt") + this.gadt = gadt + this + def setFreshGADTBounds: this.type = setGadt(gadt.fresh) + def setSearchHistory(searchHistory: SearchHistory): this.type = + util.Stats.record("Context.setSearchHistory") + this.searchHistory = searchHistory + this + def setSource(source: SourceFile): this.type = + util.Stats.record("Context.setSource") + this.source = source + this + private def setMoreProperties(moreProperties: Map[Key[Any], Any]): this.type = + util.Stats.record("Context.setMoreProperties") + this.moreProperties = moreProperties + this + private def setStore(store: Store): this.type = + util.Stats.record("Context.setStore") + this.store = store + this + def setImplicits(implicits: ContextualImplicits): this.type = { this.implicitsCache = implicits; this } + + def setCompilationUnit(compilationUnit: CompilationUnit): this.type = { + setSource(compilationUnit.source) + updateStore(compilationUnitLoc, compilationUnit) + } + + def setCompilerCallback(callback: CompilerCallback): this.type = updateStore(compilerCallbackLoc, callback) + def setSbtCallback(callback: AnalysisCallback): this.type = updateStore(sbtCallbackLoc, callback) + def 
setPrinterFn(printer: Context -> Printer): this.type = updateStore(printerFnLoc, printer) + def setSettings(settingsState: SettingsState): this.type = updateStore(settingsStateLoc, settingsState) + def setRun(run: Run | Null): this.type = updateStore(runLoc, run) + def setProfiler(profiler: Profiler): this.type = updateStore(profilerLoc, profiler) + def setNotNullInfos(notNullInfos: List[NotNullInfo]): this.type = updateStore(notNullInfosLoc, notNullInfos) + def setImportInfo(importInfo: ImportInfo): this.type = + importInfo.mentionsFeature(nme.unsafeNulls) match + case Some(true) => + setMode(this.mode &~ Mode.SafeNulls) + case Some(false) if ctx.settings.YexplicitNulls.value => + setMode(this.mode | Mode.SafeNulls) + case _ => + updateStore(importInfoLoc, importInfo) + def setTypeAssigner(typeAssigner: TypeAssigner): this.type = updateStore(typeAssignerLoc, typeAssigner) + + def setProperty[T](key: Key[T], value: T): this.type = + setMoreProperties(moreProperties.updated(key, value)) + + def dropProperty(key: Key[?]): this.type = + setMoreProperties(moreProperties - key) + + def addLocation[T](initial: T): Store.Location[T] = { + val (loc, store1) = store.newLocation(initial) + setStore(store1) + loc + } + + def addLocation[T](): Store.Location[T] = { + val (loc, store1) = store.newLocation[T]() + setStore(store1) + loc + } + + def updateStore[T](loc: Store.Location[T], value: T): this.type = + setStore(store.updated(loc, value)) + + def setPhase(pid: PhaseId): this.type = setPeriod(Period(runId, pid)) + def setPhase(phase: Phase): this.type = setPeriod(Period(runId, phase.start, phase.end)) + + def setSetting[T](setting: Setting[T], value: T): this.type = + setSettings(setting.updateIn(settingsState, value)) + + def setDebug: this.type = setSetting(base.settings.Ydebug, true) + } + + given ops: AnyRef with + extension (c: Context) + def addNotNullInfo(info: NotNullInfo) = + c.withNotNullInfos(c.notNullInfos.extendWith(info)) + + def addNotNullRefs(refs: 
Set[TermRef]) = + c.addNotNullInfo(NotNullInfo(refs, Set())) + + def withNotNullInfos(infos: List[NotNullInfo]): Context = + if c.notNullInfos eq infos then c else c.fresh.setNotNullInfos(infos) + + def relaxedOverrideContext: Context = + c.withModeBits(c.mode &~ Mode.SafeNulls | Mode.RelaxedOverriding) + end ops + + // TODO: Fix issue when converting ModeChanges and FreshModeChanges to extension givens + extension (c: Context) { + final def withModeBits(mode: Mode): Context = + if (mode != c.mode) c.fresh.setMode(mode) else c + + final def addMode(mode: Mode): Context = withModeBits(c.mode | mode) + final def retractMode(mode: Mode): Context = withModeBits(c.mode &~ mode) + } + + extension (c: FreshContext) { + final def addMode(mode: Mode): c.type = c.setMode(c.mode | mode) + final def retractMode(mode: Mode): c.type = c.setMode(c.mode &~ mode) + } + + private def exploreCtx(using Context): FreshContext = + util.Stats.record("explore") + val base = ctx.base + import base._ + val nestedCtx = + if exploresInUse < exploreContexts.size then + exploreContexts(exploresInUse).reuseIn(ctx) + else + val ts = TyperState() + .setReporter(ExploringReporter()) + .setCommittable(false) + val c = FreshContext(ctx.base).init(ctx, ctx).setTyperState(ts) + exploreContexts += c + c + exploresInUse += 1 + val nestedTS = nestedCtx.typerState + nestedTS.init(ctx.typerState, ctx.typerState.constraint) + nestedCtx + + private def wrapUpExplore(ectx: Context) = + ectx.reporter.asInstanceOf[ExploringReporter].reset() + ectx.base.exploresInUse -= 1 + + inline def explore[T](inline op: Context ?=> T)(using Context): T = + val ectx = exploreCtx + try op(using ectx) finally wrapUpExplore(ectx) + + inline def exploreInFreshCtx[T](inline op: FreshContext ?=> T)(using Context): T = + val ectx = exploreCtx + try op(using ectx) finally wrapUpExplore(ectx) + + private def changeOwnerCtx(owner: Symbol)(using Context): Context = + val base = ctx.base + import base._ + val nestedCtx = + if 
changeOwnersInUse < changeOwnerContexts.size then + changeOwnerContexts(changeOwnersInUse).reuseIn(ctx) + else + val c = FreshContext(ctx.base).init(ctx, ctx) + changeOwnerContexts += c + c + changeOwnersInUse += 1 + nestedCtx.setOwner(owner).setTyperState(ctx.typerState) + + /** Run `op` in current context, with a mode is temporarily set as specified. + */ + inline def runWithOwner[T](owner: Symbol)(inline op: Context ?=> T)(using Context): T = + if Config.reuseOwnerContexts then + try op(using changeOwnerCtx(owner)) + finally ctx.base.changeOwnersInUse -= 1 + else + op(using ctx.fresh.setOwner(owner)) + + /** The type comparer of the kind created by `maker` to be used. + * This is the currently active type comparer CMP if + * - CMP is associated with the current context, and + * - CMP is of the kind created by maker or maker creates a plain type comparer. + * Note: plain TypeComparers always take on the kind of the outer comparer if they are in the same context. + * In other words: tracking or explaining is a sticky property in the same context. + */ + private def comparer(using Context): TypeComparer = + util.Stats.record("comparing") + val base = ctx.base + if base.comparersInUse > 0 + && (base.comparers(base.comparersInUse - 1).comparerContext eq ctx) + then + base.comparers(base.comparersInUse - 1).currentInstance + else + val result = + if base.comparersInUse < base.comparers.size then + base.comparers(base.comparersInUse) + else + val result = TypeComparer(ctx) + base.comparers += result + result + base.comparersInUse += 1 + result.init(ctx) + result + + inline def comparing[T](inline op: TypeComparer => T)(using Context): T = + util.Stats.record("comparing") + val saved = ctx.base.comparersInUse + try op(comparer) + finally ctx.base.comparersInUse = saved + end comparing + + /** A class defining the initial context with given context base + * and set of possible settings. 
+ */ + private class InitialContext(base: ContextBase, settingsGroup: SettingGroup) extends FreshContext(base) { + outer = NoContext + period = InitialPeriod + mode = Mode.None + typerState = TyperState.initialState() + owner = NoSymbol + tree = untpd.EmptyTree + moreProperties = Map(MessageLimiter -> DefaultMessageLimiter()) + scope = EmptyScope + source = NoSource + store = initialStore + .updated(settingsStateLoc, settingsGroup.defaultState) + .updated(notNullInfosLoc, Nil) + .updated(compilationUnitLoc, NoCompilationUnit) + searchHistory = new SearchRoot + gadt = GadtConstraint.empty + } + + @sharable object NoContext extends Context((null: ContextBase | Null).uncheckedNN) { + source = NoSource + override val implicits: ContextualImplicits = new ContextualImplicits(Nil, null, false)(this: @unchecked) + } + + /** A context base defines state and associated methods that exist once per + * compiler run. + */ + class ContextBase extends ContextState + with Phases.PhasesBase + with Plugins { + + /** The applicable settings */ + val settings: ScalaSettings = new ScalaSettings + + /** The initial context */ + val initialCtx: Context = new InitialContext(this, settings) + + /** The platform, initialized by `initPlatform()`. */ + private var _platform: Platform | Null = _ + + /** The platform */ + def platform: Platform = { + val p = _platform + if p == null then + throw new IllegalStateException( + "initialize() must be called before accessing platform") + p + } + + protected def newPlatform(using Context): Platform = + if (settings.scalajs.value) new SJSPlatform + else new JavaPlatform + + /** The loader that loads the members of _root_ */ + def rootLoader(root: TermSymbol)(using Context): SymbolLoader = platform.rootLoader(root) + + /** The standard definitions */ + val definitions: Definitions = new Definitions + + // Set up some phases to get started */ + usePhases(List(SomePhase)) + + /** Initializes the `ContextBase` with a starting context. 
+ * This initializes the `platform` and the `definitions`. + */ + def initialize()(using Context): Unit = { + _platform = newPlatform + definitions.init() + } + + def fusedContaining(p: Phase): Phase = + allPhases.find(_.period.containsPhaseId(p.id)).getOrElse(NoPhase) + } + + /** The essential mutable state of a context base, collected into a common class */ + class ContextState { + // Symbols state + + /** Counter for unique symbol ids */ + private var _nextSymId: Int = 0 + def nextSymId: Int = { _nextSymId += 1; _nextSymId } + + /** Sources and Files that were loaded */ + val sources: util.HashMap[AbstractFile, SourceFile] = util.HashMap[AbstractFile, SourceFile]() + val files: util.HashMap[TermName, AbstractFile] = util.HashMap() + + // Types state + /** A table for hash consing unique types */ + private[core] val uniques: Uniques = Uniques() + + /** A table for hash consing unique applied types */ + private[dotc] val uniqueAppliedTypes: AppliedUniques = AppliedUniques() + + /** A table for hash consing unique named types */ + private[core] val uniqueNamedTypes: NamedTypeUniques = NamedTypeUniques() + + var emptyTypeBounds: TypeBounds | Null = null + var emptyWildcardBounds: WildcardType | Null = null + + /** Number of findMember calls on stack */ + private[core] var findMemberCount: Int = 0 + + /** List of names which have a findMemberCall on stack, + * after Config.LogPendingFindMemberThreshold is reached. + */ + private[core] var pendingMemberSearches: List[Name] = Nil + + /** The number of recursive invocation of underlying on a NamedType + * during a controlled operation. + */ + private[core] var underlyingRecursions: Int = 0 + + /** The set of named types on which a currently active invocation + * of underlying during a controlled operation exists. */ + private[core] val pendingUnderlying: util.HashSet[Type] = util.HashSet[Type]() + + /** A map from ErrorType to associated message. 
We use this map + * instead of storing messages directly in ErrorTypes in order + * to avoid space leaks - the message usually captures a context. + */ + private[core] val errorTypeMsg: mutable.Map[Types.ErrorType, Message] = mutable.Map() + + // Phases state + + private[core] var phasesPlan: List[List[Phase]] = _ + + /** Phases by id */ + private[dotc] var phases: Array[Phase] = _ + + /** Phases with consecutive Transforms grouped into a single phase, Empty array if fusion is disabled */ + private[core] var fusedPhases: Array[Phase] = Array.empty[Phase] + + /** Next denotation transformer id */ + private[core] var nextDenotTransformerId: Array[Int] = _ + + private[core] var denotTransformers: Array[DenotTransformer] = _ + + /** Flag to suppress inlining, set after overflow */ + private[dotc] var stopInlining: Boolean = false + + /** A variable that records that some error was reported in a globally committable context. + * The error will not necessarlily be emitted, since it could still be that + * the enclosing context will be aborted. The variable is used as a smoke test + * to turn off assertions that might be wrong if the program is erroneous. To + * just test for `ctx.reporter.errorsReported` is not always enough, since it + * could be that the context in which the assertion is tested is a completer context + * that's different from the context where the error was reported. See i13218.scala + * for a test. 
+ */ + private[dotc] var errorsToBeReported = false + + // Reporters state + private[dotc] var indent: Int = 0 + + protected[dotc] val indentTab: String = " " + + private[Contexts] val exploreContexts = new mutable.ArrayBuffer[FreshContext] + private[Contexts] var exploresInUse: Int = 0 + + private[Contexts] val changeOwnerContexts = new mutable.ArrayBuffer[FreshContext] + private[Contexts] var changeOwnersInUse: Int = 0 + + private[Contexts] val comparers = new mutable.ArrayBuffer[TypeComparer] + private[Contexts] var comparersInUse: Int = 0 + + private var charArray = new Array[Char](256) + + private[core] val reusableDataReader = ReusableInstance(new ReusableDataReader()) + + private[dotc] var wConfCache: (List[String], WConf) = _ + + def sharedCharArray(len: Int): Array[Char] = + while len > charArray.length do + charArray = new Array[Char](charArray.length * 2) + charArray + + def reset(): Unit = + uniques.clear() + uniqueAppliedTypes.clear() + uniqueNamedTypes.clear() + emptyTypeBounds = null + emptyWildcardBounds = null + errorsToBeReported = false + errorTypeMsg.clear() + sources.clear() + files.clear() + comparers.clear() // forces re-evaluation of top and bottom classes in TypeComparer + + // Test that access is single threaded + + /** The thread on which `checkSingleThreaded was invoked last */ + @sharable private var thread: Thread | Null = null + + /** Check that we are on the same thread as before */ + def checkSingleThreaded(): Unit = + if (thread == null) thread = Thread.currentThread() + else assert(thread == Thread.currentThread(), "illegal multithreaded access to ContextBase") + } +} diff --git a/tests/pos-with-compiler-cc/dotc/core/Decorators.scala b/tests/pos-with-compiler-cc/dotc/core/Decorators.scala new file mode 100644 index 000000000000..679e22b48c9e --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Decorators.scala @@ -0,0 +1,312 @@ +package dotty.tools +package dotc +package core + +import scala.annotation.tailrec +import 
scala.collection.mutable.ListBuffer +import scala.util.control.NonFatal + +import Contexts._, Names._, Phases._, Symbols._ +import printing.{ Printer, Showable }, printing.Formatting._, printing.Texts._ +import transform.MegaPhase +import reporting.{Message, NoExplanation} +import language.experimental.pureFunctions + +/** This object provides useful implicit decorators for types defined elsewhere */ +object Decorators { + + /** Extension methods for toType/TermName methods on PreNames. + */ + extension (pn: PreName) + def toTermName: TermName = pn match + case s: String => termName(s) + case n: Name => n.toTermName + def toTypeName: TypeName = pn match + case s: String => typeName(s) + case n: Name => n.toTypeName + + extension (s: String) + def splitWhere(f: Char => Boolean, doDropIndex: Boolean): Option[(String, String)] = + def splitAt(idx: Int, doDropIndex: Boolean): Option[(String, String)] = + if (idx == -1) None + else Some((s.take(idx), s.drop(if (doDropIndex) idx + 1 else idx))) + splitAt(s.indexWhere(f), doDropIndex) + + /** Create a term name from a string slice, using a common buffer. 
+ * This avoids some allocation relative to `termName(s)` + */ + def sliceToTermName(start: Int, end: Int)(using Context): SimpleName = + val len = end - start + val chars = ctx.base.sharedCharArray(len) + s.getChars(start, end, chars, 0) + termName(chars, 0, len) + + def sliceToTypeName(start: Int, end: Int)(using Context): TypeName = + sliceToTermName(start, end).toTypeName + + def concat(name: Name)(using Context): SimpleName = name match + case name: SimpleName => + val len = s.length + name.length + var chars = ctx.base.sharedCharArray(len) + s.getChars(0, s.length, chars, 0) + if name.length != 0 then name.getChars(0, name.length, chars, s.length) + termName(chars, 0, len) + case name: TypeName => s.concat(name.toTermName) + case _ => termName(s.concat(name.toString).nn) + + def indented(width: Int): String = + val padding = " " * width + padding + s.replace("\n", "\n" + padding) + end extension + + extension (str: -> String) + def toMessage: Message = reporting.NoExplanation(str) + + /** Implements a findSymbol method on iterators of Symbols that + * works like find but avoids Option, replacing None with NoSymbol. + */ + extension (it: Iterator[Symbol]) + final def findSymbol(p: Symbol => Boolean): Symbol = { + while (it.hasNext) { + val sym = it.next() + if (p(sym)) return sym + } + NoSymbol + } + + inline val MaxFilterRecursions = 10 + + /** Implements filterConserve, zipWithConserve methods + * on lists that avoid duplication of list nodes where feasible. 
+ */ + implicit class ListDecorator[T](val xs: List[T]) extends AnyVal { + + final def mapconserve[U](f: T => U): List[U] = { + @tailrec + def loop(mapped: ListBuffer[U] | Null, unchanged: List[U], pending: List[T]): List[U] = + if (pending.isEmpty) + if (mapped == null) unchanged + else mapped.prependToList(unchanged) + else { + val head0 = pending.head + val head1 = f(head0) + + if (head1.asInstanceOf[AnyRef] eq head0.asInstanceOf[AnyRef]) + loop(mapped, unchanged, pending.tail) + else { + val b = if (mapped == null) new ListBuffer[U] else mapped + var xc = unchanged + while (xc ne pending) { + b += xc.head + xc = xc.tail + } + b += head1 + val tail0 = pending.tail + loop(b, tail0.asInstanceOf[List[U]], tail0) + } + } + loop(null, xs.asInstanceOf[List[U]], xs) + } + + /** Like `xs filter p` but returns list `xs` itself - instead of a copy - + * if `p` is true for all elements. + */ + def filterConserve(p: T => Boolean): List[T] = + + def addAll(buf: ListBuffer[T], from: List[T], until: List[T]): ListBuffer[T] = + if from eq until then buf else addAll(buf += from.head, from.tail, until) + + def loopWithBuffer(buf: ListBuffer[T], xs: List[T]): List[T] = xs match + case x :: xs1 => + if p(x) then buf += x + loopWithBuffer(buf, xs1) + case nil => buf.toList + + def loop(keep: List[T], explore: List[T], keepCount: Int, recCount: Int): List[T] = + explore match + case x :: rest => + if p(x) then + loop(keep, rest, keepCount + 1, recCount) + else if keepCount <= 3 && recCount <= MaxFilterRecursions then + val rest1 = loop(rest, rest, 0, recCount + 1) + keepCount match + case 0 => rest1 + case 1 => keep.head :: rest1 + case 2 => keep.head :: keep.tail.head :: rest1 + case 3 => val tl = keep.tail; keep.head :: tl.head :: tl.tail.head :: rest1 + else + loopWithBuffer(addAll(new ListBuffer[T], keep, explore), rest) + case nil => + keep + + loop(xs, xs, 0, 0) + end filterConserve + + /** Like `xs.lazyZip(ys).map(f)`, but returns list `xs` itself + * - instead of a copy - if 
function `f` maps all elements of + * `xs` to themselves. Also, it is required that `ys` is at least + * as long as `xs`. + */ + def zipWithConserve[U, V <: T](ys: List[U])(f: (T, U) => V): List[V] = + if (xs.isEmpty || ys.isEmpty) Nil + else { + val x1 = f(xs.head, ys.head) + val xs1 = xs.tail.zipWithConserve(ys.tail)(f) + if (x1.asInstanceOf[AnyRef] eq xs.head.asInstanceOf[AnyRef]) && (xs1 eq xs.tail) + then xs.asInstanceOf[List[V]] + else x1 :: xs1 + } + + /** Like `xs.lazyZip(xs.indices).map(f)`, but returns list `xs` itself + * - instead of a copy - if function `f` maps all elements of + * `xs` to themselves. + */ + def mapWithIndexConserve[U <: T](f: (T, Int) => U): List[U] = + + @tailrec + def addAll(buf: ListBuffer[T], from: List[T], until: List[T]): ListBuffer[T] = + if from eq until then buf else addAll(buf += from.head, from.tail, until) + + @tailrec + def loopWithBuffer(buf: ListBuffer[U], explore: List[T], idx: Int): List[U] = explore match + case Nil => buf.toList + case t :: rest => loopWithBuffer(buf += f(t, idx), rest, idx + 1) + + @tailrec + def loop(keep: List[T], explore: List[T], idx: Int): List[U] = explore match + case Nil => keep.asInstanceOf[List[U]] + case t :: rest => + val u = f(t, idx) + if u.asInstanceOf[AnyRef] eq t.asInstanceOf[AnyRef] then + loop(keep, rest, idx + 1) + else + val buf = addAll(new ListBuffer[T], keep, explore).asInstanceOf[ListBuffer[U]] + loopWithBuffer(buf += u, rest, idx + 1) + + loop(xs, xs, 0) + end mapWithIndexConserve + + /** True if two lists have the same length. Since calling length on linear sequences + * is Θ(n), it is an inadvisable way to test length equality. This method is Θ(n min m). 
+ */ + final def hasSameLengthAs[U](ys: List[U]): Boolean = { + @tailrec def loop(xs: List[T], ys: List[U]): Boolean = + if (xs.isEmpty) ys.isEmpty + else ys.nonEmpty && loop(xs.tail, ys.tail) + loop(xs, ys) + } + + @tailrec final def eqElements(ys: List[AnyRef]): Boolean = xs match { + case x :: _ => + ys match { + case y :: _ => + x.asInstanceOf[AnyRef].eq(y) && + xs.tail.eqElements(ys.tail) + case _ => false + } + case nil => ys.isEmpty + } + + /** Union on lists seen as sets */ + def | (ys: List[T]): List[T] = xs ::: (ys filterNot (xs contains _)) + + /** Intersection on lists seen as sets */ + def & (ys: List[T]): List[T] = xs filter (ys contains _) + } + + extension [T, U](xss: List[List[T]]) + def nestedMap(f: T => U): List[List[U]] = xss match + case xs :: xss1 => xs.map(f) :: xss1.nestedMap(f) + case nil => Nil + def nestedMapConserve(f: T => U): List[List[U]] = + xss.mapconserve(_.mapconserve(f)) + def nestedZipWithConserve(yss: List[List[U]])(f: (T, U) => T): List[List[T]] = + xss.zipWithConserve(yss)((xs, ys) => xs.zipWithConserve(ys)(f)) + def nestedExists(p: T => Boolean): Boolean = xss match + case xs :: xss1 => xs.exists(p) || xss1.nestedExists(p) + case nil => false + end extension + + extension (text: Text) + def show(using Context): String = text.mkString(ctx.settings.pageWidth.value, ctx.settings.printLines.value) + + /** Test whether a list of strings representing phases contains + * a given phase. See [[config.CompilerCommand#explainAdvanced]] for the + * exact meaning of "contains" here. 
+ */ + extension (names: List[String]) + def containsPhase(phase: Phase): Boolean = + names.nonEmpty && { + phase match { + case phase: MegaPhase => phase.miniPhases.exists(x => names.containsPhase(x)) + case _ => + names exists { name => + name == "all" || { + val strippedName = name.stripSuffix("+") + val logNextPhase = name != strippedName + phase.phaseName.startsWith(strippedName) || + (logNextPhase && phase.prev.phaseName.startsWith(strippedName)) + } + } + } + } + + extension [T](x: T) + def showing[U]( + op: WrappedResult[U] ?=> String, + printer: config.Printers.Printer = config.Printers.default)(using c: Conversion[T, U] | Null = null): T = { + // either the use of `$result` was driven by the expected type of `Shown` + // which led to the summoning of `Conversion[T, Shown]` (which we'll invoke) + // or no such conversion was found so we'll consume the result as it is instead + val obj = if c == null then x.asInstanceOf[U] else c(x) + printer.println(op(using WrappedResult(obj))) + x + } + + /** Instead of `toString` call `show` on `Showable` values, falling back to `toString` if an exception is raised. */ + def tryToShow(using Context): String = x match + case x: Showable => + try x.show + catch + case ex: CyclicReference => "... (caught cyclic reference) ..." 
+ case NonFatal(ex) + if !ctx.mode.is(Mode.PrintShowExceptions) && !ctx.settings.YshowPrintErrors.value => + val msg = ex match { case te: TypeError => te.toMessage case _ => ex.getMessage } + s"[cannot display due to $msg, raw string = $x]" + case _ => String.valueOf(x).nn + + extension [T](x: T) + def assertingErrorsReported(using Context): T = { + assert(ctx.reporter.errorsReported) + x + } + def assertingErrorsReported(msg: => String)(using Context): T = { + assert(ctx.reporter.errorsReported, msg) + x + } + + extension [T <: AnyRef](xs: ::[T]) + def derivedCons(x1: T, xs1: List[T]) = + if (xs.head eq x1) && (xs.tail eq xs1) then xs else x1 :: xs1 + + extension (sc: StringContext) + /** General purpose string formatting */ + def i(args: Shown*)(using Context): String = + new StringFormatter(sc).assemble(args) + + /** Formatting for error messages: Like `i` but suppress follow-on + * error messages after the first one if some of their arguments are "non-sensical". + */ + def em(args: Shown*)(using Context): String = + forErrorMessages(new StringFormatter(sc).assemble(args)) + + /** Formatting with added explanations: Like `em`, but add explanations to + * give more info about type variables and to disambiguate where needed. 
+ */ + def ex(args: Shown*)(using Context): String = + explained(new StringFormatter(sc).assemble(args)) + + extension [T <: AnyRef](arr: Array[T]) + def binarySearch(x: T | Null): Int = java.util.Arrays.binarySearch(arr.asInstanceOf[Array[Object | Null]], x) + +} diff --git a/tests/pos-with-compiler-cc/dotc/core/Definitions.scala b/tests/pos-with-compiler-cc/dotc/core/Definitions.scala new file mode 100644 index 000000000000..e68dc9102961 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Definitions.scala @@ -0,0 +1,2379 @@ +package dotty.tools +package dotc +package core + +import scala.annotation.{threadUnsafe => tu} +import Types._, Contexts._, Symbols._, SymDenotations._, StdNames._, Names._, Phases._ +import Flags._, Scopes._, Decorators._, NameOps._, Periods._, NullOpsDecorator._ +import unpickleScala2.Scala2Unpickler.ensureConstructor +import scala.collection.mutable +import collection.mutable +import Denotations.{SingleDenotation, staticRef} +import util.{SimpleIdentityMap, SourceFile, NoSource} +import typer.ImportInfo.RootRef +import Comments.CommentsContext +import Comments.Comment +import util.Spans.NoSpan +import config.Feature +import Symbols.requiredModuleRef +import cc.{CapturingType, CaptureSet, EventuallyCapturingType} + +import scala.annotation.tailrec +import language.experimental.pureFunctions + +object Definitions { + + /** The maximum number of elements in a tuple or product. + * This should be removed once we go to hlists. + */ + val MaxTupleArity: Int = 22 + + /** The maximum arity N of a function type that's implemented + * as a trait `scala.FunctionN`. Functions of higher arity are possible, + * but are mapped in erasure to functions taking a single parameter of type + * Object[]. + * The limit 22 is chosen for Scala2x interop. It could be something + * else without affecting the set of programs that can be compiled. 
+ */ + val MaxImplementedFunctionArity: Int = MaxTupleArity +} + +/** A class defining symbols and types of standard definitions + * + */ +class Definitions { + import Definitions._ + + private var initCtx: Context = _ + private given currentContext[Dummy_so_its_a_def]: Context = initCtx + + private def newPermanentSymbol[N <: Name](owner: Symbol, name: N, flags: FlagSet, info: Type) = + newSymbol(owner, name, flags | Permanent, info) + + private def newPermanentClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, infoFn: ClassSymbol => Type) = + newClassSymbol(owner, name, flags | Permanent | NoInits | Open, infoFn) + + private def enterCompleteClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, parents: List[TypeRef]): ClassSymbol = + enterCompleteClassSymbol(owner, name, flags, parents, newScope(owner.nestingLevel + 1)) + + private def enterCompleteClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, parents: List[TypeRef], decls: Scope) = + newCompleteClassSymbol(owner, name, flags | Permanent | NoInits | Open, parents, decls).entered + + private def enterTypeField(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) = + scope.enter(newPermanentSymbol(cls, name, flags, TypeBounds.empty)) + + private def enterTypeParam(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) = + enterTypeField(cls, name, flags | ClassTypeParamCreationFlags, scope) + + private def enterSyntheticTypeParam(cls: ClassSymbol, paramFlags: FlagSet, scope: MutableScope, suffix: String = "T0") = + enterTypeParam(cls, suffix.toTypeName, paramFlags, scope) + + // NOTE: Ideally we would write `parentConstrs: => Type*` but SIP-24 is only + // implemented in Dotty and not in Scala 2. + // See <http://docs.scala-lang.org/sips/pending/repeated-byname.html>. 
+ private def enterSpecialPolyClass(name: TypeName, paramFlags: FlagSet, parentConstrs: -> Seq[Type]): ClassSymbol = { + val completer = new LazyType { + def complete(denot: SymDenotation)(using Context): Unit = { + val cls = denot.asClass.classSymbol + val paramDecls = newScope + val typeParam = enterSyntheticTypeParam(cls, paramFlags, paramDecls) + def instantiate(tpe: Type) = + if (tpe.typeParams.nonEmpty) tpe.appliedTo(typeParam.typeRef) + else tpe + val parents = parentConstrs.toList map instantiate + denot.info = ClassInfo(ScalaPackageClass.thisType, cls, parents, paramDecls) + } + } + newPermanentClassSymbol(ScalaPackageClass, name, Artifact, completer).entered + } + + /** The trait FunctionN, ContextFunctionN, ErasedFunctionN or ErasedContextFunction, for some N + * @param name The name of the trait to be created + * + * FunctionN traits follow this template: + * + * trait FunctionN[-T0,...-T{N-1}, +R] extends Object { + * def apply($x0: T0, ..., $x{N_1}: T{N-1}): R + * } + * + * That is, they follow the template given for Function2..Function22 in the + * standard library, but without `tupled` and `curried` methods and without + * a `toString`. + * + * ContextFunctionN traits follow this template: + * + * trait ContextFunctionN[-T0,...,-T{N-1}, +R] extends Object { + * def apply(using $x0: T0, ..., $x{N_1}: T{N-1}): R + * } + * + * ErasedFunctionN traits follow this template: + * + * trait ErasedFunctionN[-T0,...,-T{N-1}, +R] extends Object { + * def apply(erased $x0: T0, ..., $x{N_1}: T{N-1}): R + * } + * + * ErasedContextFunctionN traits follow this template: + * + * trait ErasedContextFunctionN[-T0,...,-T{N-1}, +R] extends Object { + * def apply(using erased $x0: T0, ..., $x{N_1}: T{N-1}): R + * } + * + * ErasedFunctionN and ErasedContextFunctionN erase to Function0. 
+ * + * ImpureXYZFunctionN follow this template: + * + * type ImpureXYZFunctionN[-T0,...,-T{N-1}, +R] = {*} XYZFunctionN[T0,...,T{N-1}, R] + */ + private def newFunctionNType(name: TypeName): Symbol = { + val impure = name.startsWith("Impure") + val completer = new LazyType { + def complete(denot: SymDenotation)(using Context): Unit = { + val arity = name.functionArity + if impure then + val argParamNames = List.tabulate(arity)(tpnme.syntheticTypeParamName) + val argVariances = List.fill(arity)(Contravariant) + val underlyingName = name.asSimpleName.drop(6) + val underlyingClass = ScalaPackageVal.requiredClass(underlyingName) + denot.info = TypeAlias( + HKTypeLambda(argParamNames :+ "R".toTypeName, argVariances :+ Covariant)( + tl => List.fill(arity + 1)(TypeBounds.empty), + tl => CapturingType(underlyingClass.typeRef.appliedTo(tl.paramRefs), + CaptureSet.universal) + )) + else + val cls = denot.asClass.classSymbol + val decls = newScope + val paramNamePrefix = tpnme.scala ++ str.NAME_JOIN ++ name ++ str.EXPAND_SEPARATOR + val argParamRefs = List.tabulate(arity) { i => + enterTypeParam(cls, paramNamePrefix ++ "T" ++ (i + 1).toString, Contravariant, decls).typeRef + } + val resParamRef = enterTypeParam(cls, paramNamePrefix ++ "R", Covariant, decls).typeRef + val methodType = MethodType.companion( + isContextual = name.isContextFunction, + isImplicit = false, + isErased = name.isErasedFunction) + decls.enter(newMethod(cls, nme.apply, methodType(argParamRefs, resParamRef), Deferred)) + denot.info = + ClassInfo(ScalaPackageClass.thisType, cls, ObjectType :: Nil, decls) + } + } + if impure then + newPermanentSymbol(ScalaPackageClass, name, EmptyFlags, completer) + else + newPermanentClassSymbol(ScalaPackageClass, name, Trait | NoInits, completer) + } + + private def newMethod(cls: ClassSymbol, name: TermName, info: Type, flags: FlagSet = EmptyFlags): TermSymbol = + newPermanentSymbol(cls, name, flags | Method, info).asTerm + + private def enterMethod(cls: ClassSymbol, 
name: TermName, info: Type, flags: FlagSet = EmptyFlags): TermSymbol = + newMethod(cls, name, info, flags).entered + + private def enterPermanentSymbol(name: Name, info: Type, flags: FlagSet = EmptyFlags): Symbol = + val sym = newPermanentSymbol(ScalaPackageClass, name, flags, info) + ScalaPackageClass.currentPackageDecls.enter(sym) + sym + + private def enterAliasType(name: TypeName, tpe: Type, flags: FlagSet = EmptyFlags): TypeSymbol = + enterPermanentSymbol(name, TypeAlias(tpe), flags).asType + + private def enterBinaryAlias(name: TypeName, op: (Type, Type) => Type): TypeSymbol = + enterAliasType(name, + HKTypeLambda(TypeBounds.empty :: TypeBounds.empty :: Nil)( + tl => op(tl.paramRefs(0), tl.paramRefs(1)))) + + private def enterPolyMethod(cls: ClassSymbol, name: TermName, typeParamCount: Int, + resultTypeFn: PolyType -> Type, + flags: FlagSet = EmptyFlags, + bounds: TypeBounds = TypeBounds.empty, + useCompleter: Boolean = false) = { + val tparamNames = PolyType.syntheticParamNames(typeParamCount) + val tparamInfos = tparamNames map (_ => bounds) + def ptype = PolyType(tparamNames)(_ => tparamInfos, resultTypeFn) + val info = + if (useCompleter) + new LazyType { + def complete(denot: SymDenotation)(using Context): Unit = + denot.info = ptype + } + else ptype + enterMethod(cls, name, info, flags) + } + + private def enterT1ParameterlessMethod(cls: ClassSymbol, name: TermName, resultTypeFn: PolyType -> Type, flags: FlagSet) = + enterPolyMethod(cls, name, 1, resultTypeFn, flags) + + private def mkArityArray(name: String, arity: Int, countFrom: Int): Array[TypeRef | Null] = { + val arr = new Array[TypeRef | Null](arity + 1) + for (i <- countFrom to arity) arr(i) = requiredClassRef(name + i) + arr + } + + private def completeClass(cls: ClassSymbol, ensureCtor: Boolean = true): ClassSymbol = { + if (ensureCtor) ensureConstructor(cls, cls.denot.asClass, EmptyScope) + if (cls.linkedClass.exists) cls.linkedClass.markAbsent() + cls + } + + @tu lazy val RootClass: 
ClassSymbol = newPackageSymbol( + NoSymbol, nme.ROOT, (root, rootcls) => ctx.base.rootLoader(root)).moduleClass.asClass + @tu lazy val RootPackage: TermSymbol = newSymbol( + NoSymbol, nme.ROOTPKG, PackageCreationFlags, TypeRef(NoPrefix, RootClass)) + + @tu lazy val EmptyPackageVal: TermSymbol = newPackageSymbol( + RootClass, nme.EMPTY_PACKAGE, (emptypkg, emptycls) => ctx.base.rootLoader(emptypkg)).entered + @tu lazy val EmptyPackageClass: ClassSymbol = EmptyPackageVal.moduleClass.asClass + + /** A package in which we can place all methods and types that are interpreted specially by the compiler */ + @tu lazy val OpsPackageVal: TermSymbol = newCompletePackageSymbol(RootClass, nme.OPS_PACKAGE).entered + @tu lazy val OpsPackageClass: ClassSymbol = OpsPackageVal.moduleClass.asClass + + @tu lazy val ScalaPackageVal: TermSymbol = requiredPackage(nme.scala) + @tu lazy val ScalaMathPackageVal: TermSymbol = requiredPackage("scala.math") + @tu lazy val ScalaPackageClass: ClassSymbol = { + val cls = ScalaPackageVal.moduleClass.asClass + cls.info.decls.openForMutations.useSynthesizer( + name => + if (name.isTypeName && name.isSyntheticFunction) newFunctionNType(name.asTypeName) + else NoSymbol) + cls + } + @tu lazy val ScalaPackageObject: Symbol = requiredModule("scala.package") + @tu lazy val ScalaRuntimePackageVal: TermSymbol = requiredPackage("scala.runtime") + @tu lazy val ScalaRuntimePackageClass: ClassSymbol = ScalaRuntimePackageVal.moduleClass.asClass + @tu lazy val JavaPackageVal: TermSymbol = requiredPackage(nme.java) + @tu lazy val JavaPackageClass: ClassSymbol = JavaPackageVal.moduleClass.asClass + @tu lazy val JavaLangPackageVal: TermSymbol = requiredPackage(jnme.JavaLang) + @tu lazy val JavaLangPackageClass: ClassSymbol = JavaLangPackageVal.moduleClass.asClass + + // fundamental modules + @tu lazy val SysPackage : Symbol = requiredModule("scala.sys.package") + @tu lazy val Sys_error: Symbol = SysPackage.moduleClass.requiredMethod(nme.error) + + @tu lazy val 
ScalaXmlPackageClass: Symbol = getPackageClassIfDefined("scala.xml") + + @tu lazy val CompiletimePackageClass: Symbol = requiredPackage("scala.compiletime").moduleClass + @tu lazy val Compiletime_codeOf: Symbol = CompiletimePackageClass.requiredMethod("codeOf") + @tu lazy val Compiletime_erasedValue : Symbol = CompiletimePackageClass.requiredMethod("erasedValue") + @tu lazy val Compiletime_uninitialized: Symbol = CompiletimePackageClass.requiredMethod("uninitialized") + @tu lazy val Compiletime_error : Symbol = CompiletimePackageClass.requiredMethod(nme.error) + @tu lazy val Compiletime_requireConst : Symbol = CompiletimePackageClass.requiredMethod("requireConst") + @tu lazy val Compiletime_constValue : Symbol = CompiletimePackageClass.requiredMethod("constValue") + @tu lazy val Compiletime_constValueOpt: Symbol = CompiletimePackageClass.requiredMethod("constValueOpt") + @tu lazy val Compiletime_summonFrom : Symbol = CompiletimePackageClass.requiredMethod("summonFrom") + @tu lazy val Compiletime_summonInline : Symbol = CompiletimePackageClass.requiredMethod("summonInline") + @tu lazy val CompiletimeTestingPackage: Symbol = requiredPackage("scala.compiletime.testing") + @tu lazy val CompiletimeTesting_typeChecks: Symbol = CompiletimeTestingPackage.requiredMethod("typeChecks") + @tu lazy val CompiletimeTesting_typeCheckErrors: Symbol = CompiletimeTestingPackage.requiredMethod("typeCheckErrors") + @tu lazy val CompiletimeTesting_ErrorClass: ClassSymbol = requiredClass("scala.compiletime.testing.Error") + @tu lazy val CompiletimeTesting_Error: Symbol = requiredModule("scala.compiletime.testing.Error") + @tu lazy val CompiletimeTesting_Error_apply = CompiletimeTesting_Error.requiredMethod(nme.apply) + @tu lazy val CompiletimeTesting_ErrorKind: Symbol = requiredModule("scala.compiletime.testing.ErrorKind") + @tu lazy val CompiletimeTesting_ErrorKind_Parser: Symbol = CompiletimeTesting_ErrorKind.requiredMethod("Parser") + @tu lazy val CompiletimeTesting_ErrorKind_Typer: 
Symbol = CompiletimeTesting_ErrorKind.requiredMethod("Typer") + @tu lazy val CompiletimeOpsPackage: Symbol = requiredPackage("scala.compiletime.ops") + @tu lazy val CompiletimeOpsAnyModuleClass: Symbol = requiredModule("scala.compiletime.ops.any").moduleClass + @tu lazy val CompiletimeOpsIntModuleClass: Symbol = requiredModule("scala.compiletime.ops.int").moduleClass + @tu lazy val CompiletimeOpsLongModuleClass: Symbol = requiredModule("scala.compiletime.ops.long").moduleClass + @tu lazy val CompiletimeOpsFloatModuleClass: Symbol = requiredModule("scala.compiletime.ops.float").moduleClass + @tu lazy val CompiletimeOpsDoubleModuleClass: Symbol = requiredModule("scala.compiletime.ops.double").moduleClass + @tu lazy val CompiletimeOpsStringModuleClass: Symbol = requiredModule("scala.compiletime.ops.string").moduleClass + @tu lazy val CompiletimeOpsBooleanModuleClass: Symbol = requiredModule("scala.compiletime.ops.boolean").moduleClass + + /** Note: We cannot have same named methods defined in Object and Any (and AnyVal, for that matter) + * because after erasure the Any and AnyVal references get remapped to the Object methods + * which would result in a double binding assertion failure. + * Instead we do the following: + * + * - Have some methods exist only in Any, and remap them with the Erasure denotation + * transformer to be owned by Object. + * - Have other methods exist only in Object. + * To achieve this, we synthesize all Any and Object methods; Object methods no longer get + * loaded from a classfile. 
+ */ + @tu lazy val AnyClass: ClassSymbol = completeClass(enterCompleteClassSymbol(ScalaPackageClass, tpnme.Any, Abstract, Nil), ensureCtor = false) + def AnyType: TypeRef = AnyClass.typeRef + @tu lazy val MatchableClass: ClassSymbol = completeClass(enterCompleteClassSymbol(ScalaPackageClass, tpnme.Matchable, Trait, AnyType :: Nil), ensureCtor = false) + def MatchableType: TypeRef = MatchableClass.typeRef + @tu lazy val AnyValClass: ClassSymbol = + val res = completeClass(enterCompleteClassSymbol(ScalaPackageClass, tpnme.AnyVal, Abstract, List(AnyType, MatchableType))) + // Mark companion as absent, so that class does not get re-completed + val companion = ScalaPackageVal.info.decl(nme.AnyVal).symbol + companion.moduleClass.markAbsent() + companion.markAbsent() + res + + def AnyValType: TypeRef = AnyValClass.typeRef + + @tu lazy val Any_== : TermSymbol = enterMethod(AnyClass, nme.EQ, methOfAny(BooleanType), Final) + @tu lazy val Any_!= : TermSymbol = enterMethod(AnyClass, nme.NE, methOfAny(BooleanType), Final) + @tu lazy val Any_equals: TermSymbol = enterMethod(AnyClass, nme.equals_, methOfAny(BooleanType)) + @tu lazy val Any_hashCode: TermSymbol = enterMethod(AnyClass, nme.hashCode_, MethodType(Nil, IntType)) + @tu lazy val Any_toString: TermSymbol = enterMethod(AnyClass, nme.toString_, MethodType(Nil, StringType)) + @tu lazy val Any_## : TermSymbol = enterMethod(AnyClass, nme.HASHHASH, ExprType(IntType), Final) + @tu lazy val Any_isInstanceOf: TermSymbol = enterT1ParameterlessMethod(AnyClass, nme.isInstanceOf_, _ => BooleanType, Final) + @tu lazy val Any_asInstanceOf: TermSymbol = enterT1ParameterlessMethod(AnyClass, nme.asInstanceOf_, _.paramRefs(0), Final) + @tu lazy val Any_typeTest: TermSymbol = enterT1ParameterlessMethod(AnyClass, nme.isInstanceOfPM, _ => BooleanType, Final | SyntheticArtifact) + @tu lazy val Any_typeCast: TermSymbol = enterT1ParameterlessMethod(AnyClass, nme.asInstanceOfPM, _.paramRefs(0), Final | SyntheticArtifact | StableRealizable) + // 
 generated by pattern matcher and explicit nulls, eliminated by erasure + + /** def getClass[A >: this.type](): Class[? <: A] */ + @tu lazy val Any_getClass: TermSymbol = + enterPolyMethod( + AnyClass, nme.getClass_, 1, + pt => MethodType(Nil, ClassClass.typeRef.appliedTo(TypeBounds.upper(pt.paramRefs(0)))), + Final, + bounds = TypeBounds.lower(AnyClass.thisType)) + + def AnyMethods: List[TermSymbol] = List(Any_==, Any_!=, Any_equals, Any_hashCode, + Any_toString, Any_##, Any_getClass, Any_isInstanceOf, Any_asInstanceOf, Any_typeTest, Any_typeCast) + + @tu lazy val ObjectClass: ClassSymbol = { + val cls = requiredClass("java.lang.Object") + assert(!cls.isCompleted, "race for completing java.lang.Object") + cls.info = ClassInfo(cls.owner.thisType, cls, List(AnyType, MatchableType), newScope) + cls.setFlag(NoInits | JavaDefined) + + ensureConstructor(cls, cls.denot.asClass, EmptyScope) + val companion = JavaLangPackageVal.info.decl(nme.Object).symbol.asTerm + NamerOps.makeConstructorCompanion(companion, cls) + cls + } + def ObjectType: TypeRef = ObjectClass.typeRef + + /** A type alias of Object used to represent any reference to Object in a Java + * signature, the secret sauce is that subtype checking treats it specially: + * + * tp <:< FromJavaObject + * + * is equivalent to: + * + * tp <:< Any + * + * This is useful to avoid usability problems when interacting with Java + * code where Object is the top type. This is safe because this type will + * only appear in signatures of Java definitions in positions where `Object` + * might appear, let's enumerate all possible cases this gives us: + * + * 1. At the top level: + * + * // A.java + * void meth1(Object arg) {} + * <T> void meth2(T arg) {} // T implicitly extends Object + * + * // B.scala + * meth1(1) // OK + * meth2(1) // OK + * + * This is safe even though Int is not a subtype of Object, because Erasure + * will detect the mismatch and box the value type. + * + * 2. 
 In a class type parameter: + * + * // A.java + * void meth3(scala.List<Object> arg) {} + * <T> void meth4(scala.List<T> arg) {} + * + * // B.scala + * meth3(List[Int](1)) // OK + * meth4(List[Int](1)) // OK + * + * At erasure, type parameters are removed and value types are boxed. + * + * 3. As the type parameter of an array: + * + * // A.java + * void meth5(Object[] arg) {} + * <T> void meth6(T[] arg) {} + * + * // B.scala + * meth5(Array[Int](1)) // error: Array[Int] is not a subtype of Array[Object] + * meth6(Array[Int](1)) // error: Array[Int] is not a subtype of Array[T & Object] + * + * + * This is a bit more subtle: at erasure, Arrays keep their type parameter, + * and primitive Arrays are not subtypes of reference Arrays on the JVM, + * so we can't pass an Array of Int where a reference Array is expected. + * Array is invariant in Scala, so `meth5` is safe even if we use `FromJavaObject`, + * but generic Arrays are treated specially: we always add `& Object` (and here + * we mean the normal java.lang.Object type) to these types when they come from + * Java signatures (see `translateJavaArrayElementType`), this ensure that `meth6` + * is safe to use. + * + * 4. As the repeated argument of a varargs method: + * + * // A.java + * void meth7(Object... args) {} + * <T> void meth8(T... args) {} + * + * // B.scala + * meth7(1) // OK (creates a reference array) + * meth8(1) // OK (creates a primitive array and copies it into a reference array at Erasure) + * val ai = Array[Int](1) + * meth7(ai: _*) // OK (will copy the array at Erasure) + * meth8(ai: _*) // OK (will copy the array at Erasure) + * + * Java repeated arguments are erased to arrays, so it would be safe to treat + * them in the same way: add an `& Object` to the parameter type to disallow + * passing primitives, but that would be very inconvenient as it is common to + * want to pass a primitive to an Object repeated argument (e.g. + * `String.format("foo: %d", 1)`). 
So instead we type them _without_ adding the + * `& Object` and let `ElimRepeated` and `Erasure` take care of doing any necessary adaptation + * (note that adapting a primitive array to a reference array requires + * copying the whole array, so this transformation only preserves semantics + * if the callee does not try to mutate the varargs array which is a reasonable + * assumption to make). + * + * + * This mechanism is similar to `ObjectTpeJavaRef` in Scala 2, except that we + * create a new symbol with its own name, this is needed because this type + * can show up in inferred types and therefore needs to be preserved when + * pickling so that unpickled trees pass `-Ycheck`. + * + * Note that by default we pretty-print `FromJavaObject` as `Object` or simply omit it + * if it's the sole upper-bound of a type parameter, use `-Yprint-debug` to explicitly + * display it. + */ + @tu lazy val FromJavaObjectSymbol: TypeSymbol = + newPermanentSymbol(OpsPackageClass, tpnme.FromJavaObject, JavaDefined, TypeAlias(ObjectType)).entered + def FromJavaObjectType: TypeRef = FromJavaObjectSymbol.typeRef + + @tu lazy val AnyRefAlias: TypeSymbol = enterAliasType(tpnme.AnyRef, ObjectType) + def AnyRefType: TypeRef = AnyRefAlias.typeRef + + @tu lazy val Object_eq: TermSymbol = enterMethod(ObjectClass, nme.eq, methOfAnyRef(BooleanType), Final) + @tu lazy val Object_ne: TermSymbol = enterMethod(ObjectClass, nme.ne, methOfAnyRef(BooleanType), Final) + @tu lazy val Object_synchronized: TermSymbol = enterPolyMethod(ObjectClass, nme.synchronized_, 1, + pt => MethodType(List(pt.paramRefs(0)), pt.paramRefs(0)), Final) + @tu lazy val Object_clone: TermSymbol = enterMethod(ObjectClass, nme.clone_, MethodType(Nil, ObjectType), Protected) + @tu lazy val Object_finalize: TermSymbol = enterMethod(ObjectClass, nme.finalize_, MethodType(Nil, UnitType), Protected) + @tu lazy val Object_notify: TermSymbol = enterMethod(ObjectClass, nme.notify_, MethodType(Nil, UnitType), Final) + @tu lazy val 
Object_notifyAll: TermSymbol = enterMethod(ObjectClass, nme.notifyAll_, MethodType(Nil, UnitType), Final) + @tu lazy val Object_wait: TermSymbol = enterMethod(ObjectClass, nme.wait_, MethodType(Nil, UnitType), Final) + @tu lazy val Object_waitL: TermSymbol = enterMethod(ObjectClass, nme.wait_, MethodType(LongType :: Nil, UnitType), Final) + @tu lazy val Object_waitLI: TermSymbol = enterMethod(ObjectClass, nme.wait_, MethodType(LongType :: IntType :: Nil, UnitType), Final) + + def ObjectMethods: List[TermSymbol] = List(Object_eq, Object_ne, Object_synchronized, Object_clone, + Object_finalize, Object_notify, Object_notifyAll, Object_wait, Object_waitL, Object_waitLI) + + /** Methods in Object and Any that do not have a side effect */ + @tu lazy val pureMethods: List[TermSymbol] = List(Any_==, Any_!=, Any_equals, Any_hashCode, + Any_toString, Any_##, Any_getClass, Any_isInstanceOf, Any_typeTest, Object_eq, Object_ne) + + @tu lazy val AnyKindClass: ClassSymbol = { + val cls = newCompleteClassSymbol(ScalaPackageClass, tpnme.AnyKind, AbstractFinal | Permanent, Nil, newScope(0)) + if (!ctx.settings.YnoKindPolymorphism.value) + // Enable kind-polymorphism by exposing scala.AnyKind + cls.entered + cls + } + def AnyKindType: TypeRef = AnyKindClass.typeRef + + @tu lazy val andType: TypeSymbol = enterBinaryAlias(tpnme.AND, AndType(_, _)) + @tu lazy val orType: TypeSymbol = enterBinaryAlias(tpnme.OR, OrType(_, _, soft = false)) + + /** Method representing a throw */ + @tu lazy val throwMethod: TermSymbol = enterMethod(OpsPackageClass, nme.THROWkw, + MethodType(List(ThrowableType), NothingType)) + + @tu lazy val NothingClass: ClassSymbol = enterCompleteClassSymbol( + ScalaPackageClass, tpnme.Nothing, AbstractFinal, List(AnyType)) + def NothingType: TypeRef = NothingClass.typeRef + @tu lazy val NullClass: ClassSymbol = { + // When explicit-nulls is enabled, Null becomes a direct subtype of Any and Matchable + val parents = if ctx.explicitNulls then AnyType :: MatchableType :: 
Nil else ObjectType :: Nil + enterCompleteClassSymbol(ScalaPackageClass, tpnme.Null, AbstractFinal, parents) + } + def NullType: TypeRef = NullClass.typeRef + + @tu lazy val InvokerModule = requiredModule("scala.runtime.coverage.Invoker") + @tu lazy val InvokedMethodRef = InvokerModule.requiredMethodRef("invoked") + + @tu lazy val ImplicitScrutineeTypeSym = + newPermanentSymbol(ScalaPackageClass, tpnme.IMPLICITkw, EmptyFlags, TypeBounds.empty).entered + def ImplicitScrutineeTypeRef: TypeRef = ImplicitScrutineeTypeSym.typeRef + + @tu lazy val ScalaPredefModule: Symbol = requiredModule("scala.Predef") + @tu lazy val Predef_conforms : Symbol = ScalaPredefModule.requiredMethod(nme.conforms_) + @tu lazy val Predef_classOf : Symbol = ScalaPredefModule.requiredMethod(nme.classOf) + @tu lazy val Predef_identity : Symbol = ScalaPredefModule.requiredMethod(nme.identity) + @tu lazy val Predef_undefined: Symbol = ScalaPredefModule.requiredMethod(nme.???) + @tu lazy val ScalaPredefModuleClass: ClassSymbol = ScalaPredefModule.moduleClass.asClass + + @tu lazy val SubTypeClass: ClassSymbol = requiredClass("scala.<:<") + @tu lazy val SubType_refl: Symbol = SubTypeClass.companionModule.requiredMethod(nme.refl) + + @tu lazy val DummyImplicitClass: ClassSymbol = requiredClass("scala.DummyImplicit") + + @tu lazy val ScalaRuntimeModule: Symbol = requiredModule("scala.runtime.ScalaRunTime") + def runtimeMethodRef(name: PreName): TermRef = ScalaRuntimeModule.requiredMethodRef(name) + def ScalaRuntime_drop: Symbol = runtimeMethodRef(nme.drop).symbol + @tu lazy val ScalaRuntime__hashCode: Symbol = ScalaRuntimeModule.requiredMethod(nme._hashCode_) + @tu lazy val ScalaRuntime_toArray: Symbol = ScalaRuntimeModule.requiredMethod(nme.toArray) + @tu lazy val ScalaRuntime_toObjectArray: Symbol = ScalaRuntimeModule.requiredMethod(nme.toObjectArray) + + @tu lazy val BoxesRunTimeModule: Symbol = requiredModule("scala.runtime.BoxesRunTime") + @tu lazy val BoxesRunTimeModule_externalEquals: Symbol = 
BoxesRunTimeModule.info.decl(nme.equals_).suchThat(toDenot(_).info.firstParamTypes.size == 2).symbol + @tu lazy val ScalaStaticsModule: Symbol = requiredModule("scala.runtime.Statics") + def staticsMethodRef(name: PreName): TermRef = ScalaStaticsModule.requiredMethodRef(name) + def staticsMethod(name: PreName): TermSymbol = ScalaStaticsModule.requiredMethod(name) + + @tu lazy val DottyArraysModule: Symbol = requiredModule("scala.runtime.Arrays") + def newGenericArrayMethod(using Context): TermSymbol = DottyArraysModule.requiredMethod("newGenericArray") + def newArrayMethod(using Context): TermSymbol = DottyArraysModule.requiredMethod("newArray") + + def getWrapVarargsArrayModule: Symbol = ScalaRuntimeModule + + // The set of all wrap{X, Ref}Array methods, where X is a value type + val WrapArrayMethods: PerRun[collection.Set[Symbol]] = new PerRun({ + val methodNames = ScalaValueTypes.map(ast.tpd.wrapArrayMethodName) `union` Set(nme.wrapRefArray) + methodNames.map(getWrapVarargsArrayModule.requiredMethod(_)) + }) + + @tu lazy val ListClass: Symbol = requiredClass("scala.collection.immutable.List") + @tu lazy val ListModule: Symbol = requiredModule("scala.collection.immutable.List") + @tu lazy val NilModule: Symbol = requiredModule("scala.collection.immutable.Nil") + @tu lazy val ConsClass: Symbol = requiredClass("scala.collection.immutable.::") + @tu lazy val SeqFactoryClass: Symbol = requiredClass("scala.collection.SeqFactory") + + @tu lazy val SingletonClass: ClassSymbol = + // needed as a synthetic class because Scala 2.x refers to it in classfiles + // but does not define it as an explicit class. 
+ enterCompleteClassSymbol( + ScalaPackageClass, tpnme.Singleton, PureInterfaceCreationFlags | Final, + List(AnyType), EmptyScope) + @tu lazy val SingletonType: TypeRef = SingletonClass.typeRef + + @tu lazy val CollectionSeqType: TypeRef = requiredClassRef("scala.collection.Seq") + @tu lazy val SeqType: TypeRef = requiredClassRef("scala.collection.immutable.Seq") + def SeqClass(using Context): ClassSymbol = SeqType.symbol.asClass + @tu lazy val Seq_apply : Symbol = SeqClass.requiredMethod(nme.apply) + @tu lazy val Seq_head : Symbol = SeqClass.requiredMethod(nme.head) + @tu lazy val Seq_drop : Symbol = SeqClass.requiredMethod(nme.drop) + @tu lazy val Seq_lengthCompare: Symbol = SeqClass.requiredMethod(nme.lengthCompare, List(IntType)) + @tu lazy val Seq_length : Symbol = SeqClass.requiredMethod(nme.length) + @tu lazy val Seq_toSeq : Symbol = SeqClass.requiredMethod(nme.toSeq) + @tu lazy val SeqModule: Symbol = requiredModule("scala.collection.immutable.Seq") + + + @tu lazy val StringOps: Symbol = requiredClass("scala.collection.StringOps") + @tu lazy val StringOps_format: Symbol = StringOps.requiredMethod(nme.format) + + @tu lazy val ArrayType: TypeRef = requiredClassRef("scala.Array") + def ArrayClass(using Context): ClassSymbol = ArrayType.symbol.asClass + @tu lazy val Array_apply : Symbol = ArrayClass.requiredMethod(nme.apply) + @tu lazy val Array_update : Symbol = ArrayClass.requiredMethod(nme.update) + @tu lazy val Array_length : Symbol = ArrayClass.requiredMethod(nme.length) + @tu lazy val Array_clone : Symbol = ArrayClass.requiredMethod(nme.clone_) + @tu lazy val ArrayConstructor: Symbol = ArrayClass.requiredMethod(nme.CONSTRUCTOR) + + @tu lazy val ArrayModule: Symbol = requiredModule("scala.Array") + def ArrayModuleClass: Symbol = ArrayModule.moduleClass + + @tu lazy val IArrayModule: Symbol = requiredModule("scala.IArray") + def IArrayModuleClass: Symbol = IArrayModule.moduleClass + + @tu lazy val UnitType: TypeRef = valueTypeRef("scala.Unit", 
java.lang.Void.TYPE, UnitEnc, nme.specializedTypeNames.Void) + def UnitClass(using Context): ClassSymbol = UnitType.symbol.asClass + def UnitModuleClass(using Context): Symbol = UnitType.symbol.asClass.linkedClass + @tu lazy val BooleanType: TypeRef = valueTypeRef("scala.Boolean", java.lang.Boolean.TYPE, BooleanEnc, nme.specializedTypeNames.Boolean) + def BooleanClass(using Context): ClassSymbol = BooleanType.symbol.asClass + @tu lazy val Boolean_! : Symbol = BooleanClass.requiredMethod(nme.UNARY_!) + @tu lazy val Boolean_&& : Symbol = BooleanClass.requiredMethod(nme.ZAND) // ### harmonize required... calls + @tu lazy val Boolean_|| : Symbol = BooleanClass.requiredMethod(nme.ZOR) + @tu lazy val Boolean_== : Symbol = + BooleanClass.info.member(nme.EQ).suchThat(_.info.firstParamTypes match { + case List(pt) => pt.isRef(BooleanClass) + case _ => false + }).symbol + @tu lazy val Boolean_!= : Symbol = + BooleanClass.info.member(nme.NE).suchThat(_.info.firstParamTypes match { + case List(pt) => pt.isRef(BooleanClass) + case _ => false + }).symbol + + @tu lazy val ByteType: TypeRef = valueTypeRef("scala.Byte", java.lang.Byte.TYPE, ByteEnc, nme.specializedTypeNames.Byte) + def ByteClass(using Context): ClassSymbol = ByteType.symbol.asClass + @tu lazy val ShortType: TypeRef = valueTypeRef("scala.Short", java.lang.Short.TYPE, ShortEnc, nme.specializedTypeNames.Short) + def ShortClass(using Context): ClassSymbol = ShortType.symbol.asClass + @tu lazy val CharType: TypeRef = valueTypeRef("scala.Char", java.lang.Character.TYPE, CharEnc, nme.specializedTypeNames.Char) + def CharClass(using Context): ClassSymbol = CharType.symbol.asClass + @tu lazy val IntType: TypeRef = valueTypeRef("scala.Int", java.lang.Integer.TYPE, IntEnc, nme.specializedTypeNames.Int) + def IntClass(using Context): ClassSymbol = IntType.symbol.asClass + @tu lazy val Int_- : Symbol = IntClass.requiredMethod(nme.MINUS, List(IntType)) + @tu lazy val Int_+ : Symbol = IntClass.requiredMethod(nme.PLUS, 
List(IntType)) + @tu lazy val Int_/ : Symbol = IntClass.requiredMethod(nme.DIV, List(IntType)) + @tu lazy val Int_* : Symbol = IntClass.requiredMethod(nme.MUL, List(IntType)) + @tu lazy val Int_== : Symbol = IntClass.requiredMethod(nme.EQ, List(IntType)) + @tu lazy val Int_>= : Symbol = IntClass.requiredMethod(nme.GE, List(IntType)) + @tu lazy val Int_<= : Symbol = IntClass.requiredMethod(nme.LE, List(IntType)) + @tu lazy val LongType: TypeRef = valueTypeRef("scala.Long", java.lang.Long.TYPE, LongEnc, nme.specializedTypeNames.Long) + def LongClass(using Context): ClassSymbol = LongType.symbol.asClass + @tu lazy val Long_+ : Symbol = LongClass.requiredMethod(nme.PLUS, List(LongType)) + @tu lazy val Long_* : Symbol = LongClass.requiredMethod(nme.MUL, List(LongType)) + @tu lazy val Long_/ : Symbol = LongClass.requiredMethod(nme.DIV, List(LongType)) + + @tu lazy val FloatType: TypeRef = valueTypeRef("scala.Float", java.lang.Float.TYPE, FloatEnc, nme.specializedTypeNames.Float) + def FloatClass(using Context): ClassSymbol = FloatType.symbol.asClass + @tu lazy val DoubleType: TypeRef = valueTypeRef("scala.Double", java.lang.Double.TYPE, DoubleEnc, nme.specializedTypeNames.Double) + def DoubleClass(using Context): ClassSymbol = DoubleType.symbol.asClass + + @tu lazy val BoxedUnitClass: ClassSymbol = requiredClass("scala.runtime.BoxedUnit") + def BoxedUnit_UNIT(using Context): TermSymbol = BoxedUnitClass.linkedClass.requiredValue("UNIT") + def BoxedUnit_TYPE(using Context): TermSymbol = BoxedUnitClass.linkedClass.requiredValue("TYPE") + + @tu lazy val BoxedBooleanClass: ClassSymbol = requiredClass("java.lang.Boolean") + @tu lazy val BoxedByteClass : ClassSymbol = requiredClass("java.lang.Byte") + @tu lazy val BoxedShortClass : ClassSymbol = requiredClass("java.lang.Short") + @tu lazy val BoxedCharClass : ClassSymbol = requiredClass("java.lang.Character") + @tu lazy val BoxedIntClass : ClassSymbol = requiredClass("java.lang.Integer") + @tu lazy val BoxedLongClass : 
ClassSymbol = requiredClass("java.lang.Long") + @tu lazy val BoxedFloatClass : ClassSymbol = requiredClass("java.lang.Float") + @tu lazy val BoxedDoubleClass : ClassSymbol = requiredClass("java.lang.Double") + + @tu lazy val BoxedBooleanModule: TermSymbol = requiredModule("java.lang.Boolean") + @tu lazy val BoxedByteModule : TermSymbol = requiredModule("java.lang.Byte") + @tu lazy val BoxedShortModule : TermSymbol = requiredModule("java.lang.Short") + @tu lazy val BoxedCharModule : TermSymbol = requiredModule("java.lang.Character") + @tu lazy val BoxedIntModule : TermSymbol = requiredModule("java.lang.Integer") + @tu lazy val BoxedLongModule : TermSymbol = requiredModule("java.lang.Long") + @tu lazy val BoxedFloatModule : TermSymbol = requiredModule("java.lang.Float") + @tu lazy val BoxedDoubleModule : TermSymbol = requiredModule("java.lang.Double") + @tu lazy val BoxedUnitModule : TermSymbol = requiredModule("java.lang.Void") + + @tu lazy val ByNameParamClass2x: ClassSymbol = enterSpecialPolyClass(tpnme.BYNAME_PARAM_CLASS, Covariant, Seq(AnyType)) + + @tu lazy val RepeatedParamClass: ClassSymbol = enterSpecialPolyClass(tpnme.REPEATED_PARAM_CLASS, Covariant, Seq(ObjectType, SeqType)) + + // fundamental classes + @tu lazy val StringClass: ClassSymbol = requiredClass("java.lang.String") + def StringType: Type = StringClass.typeRef + @tu lazy val StringModule: Symbol = StringClass.linkedClass + @tu lazy val String_+ : TermSymbol = enterMethod(StringClass, nme.raw.PLUS, methOfAny(StringType), Final) + @tu lazy val String_valueOf_Object: Symbol = StringModule.info.member(nme.valueOf).suchThat(_.info.firstParamTypes match { + case List(pt) => pt.isAny || pt.stripNull.isAnyRef + case _ => false + }).symbol + + @tu lazy val JavaCloneableClass: ClassSymbol = requiredClass("java.lang.Cloneable") + @tu lazy val NullPointerExceptionClass: ClassSymbol = requiredClass("java.lang.NullPointerException") + @tu lazy val IndexOutOfBoundsException: ClassSymbol = 
requiredClass("java.lang.IndexOutOfBoundsException") + @tu lazy val ClassClass: ClassSymbol = requiredClass("java.lang.Class") + @tu lazy val BoxedNumberClass: ClassSymbol = requiredClass("java.lang.Number") + @tu lazy val ClassCastExceptionClass: ClassSymbol = requiredClass("java.lang.ClassCastException") + @tu lazy val ClassCastExceptionClass_stringConstructor: TermSymbol = ClassCastExceptionClass.info.member(nme.CONSTRUCTOR).suchThat(_.info.firstParamTypes match { + case List(pt) => + pt.stripNull.isRef(StringClass) + case _ => false + }).symbol.asTerm + @tu lazy val ArithmeticExceptionClass: ClassSymbol = requiredClass("java.lang.ArithmeticException") + @tu lazy val ArithmeticExceptionClass_stringConstructor: TermSymbol = ArithmeticExceptionClass.info.member(nme.CONSTRUCTOR).suchThat(_.info.firstParamTypes match { + case List(pt) => + pt.stripNull.isRef(StringClass) + case _ => false + }).symbol.asTerm + + @tu lazy val JavaSerializableClass: ClassSymbol = requiredClass("java.io.Serializable") + + @tu lazy val ComparableClass: ClassSymbol = requiredClass("java.lang.Comparable") + + @tu lazy val SystemClass: ClassSymbol = requiredClass("java.lang.System") + @tu lazy val SystemModule: Symbol = SystemClass.linkedClass + + @tu lazy val NoSuchElementExceptionClass = requiredClass("java.util.NoSuchElementException") + def NoSuchElementExceptionType = NoSuchElementExceptionClass.typeRef + @tu lazy val IllegalArgumentExceptionClass = requiredClass("java.lang.IllegalArgumentException") + def IllegalArgumentExceptionType = IllegalArgumentExceptionClass.typeRef + + // in scalac modified to have Any as parent + + @tu lazy val ThrowableType: TypeRef = requiredClassRef("java.lang.Throwable") + def ThrowableClass(using Context): ClassSymbol = ThrowableType.symbol.asClass + @tu lazy val ExceptionClass: ClassSymbol = requiredClass("java.lang.Exception") + @tu lazy val RuntimeExceptionClass: ClassSymbol = requiredClass("java.lang.RuntimeException") + + @tu lazy val 
SerializableType: TypeRef = JavaSerializableClass.typeRef + def SerializableClass(using Context): ClassSymbol = SerializableType.symbol.asClass + + @tu lazy val JavaBigIntegerClass: ClassSymbol = requiredClass("java.math.BigInteger") + @tu lazy val JavaBigDecimalClass: ClassSymbol = requiredClass("java.math.BigDecimal") + @tu lazy val JavaCalendarClass: ClassSymbol = requiredClass("java.util.Calendar") + @tu lazy val JavaDateClass: ClassSymbol = requiredClass("java.util.Date") + @tu lazy val JavaFormattableClass: ClassSymbol = requiredClass("java.util.Formattable") + + @tu lazy val JavaEnumClass: ClassSymbol = { + val cls = requiredClass("java.lang.Enum") + // jl.Enum has a single constructor protected(name: String, ordinal: Int). + // We remove the arguments from the primary constructor, and enter + // a new constructor symbol with 2 arguments, so that both + // `X extends jl.Enum[X]` and `X extends jl.Enum[X](name, ordinal)` + // pass typer and go through jl.Enum-specific checks in RefChecks. 
+ cls.infoOrCompleter match { + case completer: ClassfileLoader => + cls.info = new ClassfileLoader(completer.classfile) { + override def complete(root: SymDenotation)(using Context): Unit = { + super.complete(root) + val constr = cls.primaryConstructor + val noArgInfo = constr.info match { + case info: PolyType => + info.resType match { + case meth: MethodType => + info.derivedLambdaType( + resType = meth.derivedLambdaType( + paramNames = Nil, paramInfos = Nil)) + } + } + val argConstr = constr.copy().entered + constr.info = noArgInfo + constr.termRef.recomputeDenot() + } + } + cls + } + } + def JavaEnumType = JavaEnumClass.typeRef + + @tu lazy val StringBuilderClass: ClassSymbol = requiredClass("scala.collection.mutable.StringBuilder") + @tu lazy val MatchErrorClass : ClassSymbol = requiredClass("scala.MatchError") + @tu lazy val ConversionClass : ClassSymbol = requiredClass("scala.Conversion").typeRef.symbol.asClass + + @tu lazy val StringAddClass : ClassSymbol = requiredClass("scala.runtime.StringAdd") + @tu lazy val StringAdd_+ : Symbol = StringAddClass.requiredMethod(nme.raw.PLUS) + + @tu lazy val StringContextClass: ClassSymbol = requiredClass("scala.StringContext") + @tu lazy val StringContext_s : Symbol = StringContextClass.requiredMethod(nme.s) + @tu lazy val StringContext_raw: Symbol = StringContextClass.requiredMethod(nme.raw_) + @tu lazy val StringContext_f : Symbol = StringContextClass.requiredMethod(nme.f) + @tu lazy val StringContext_parts: Symbol = StringContextClass.requiredMethod(nme.parts) + @tu lazy val StringContextModule: Symbol = StringContextClass.companionModule + @tu lazy val StringContextModule_apply: Symbol = StringContextModule.requiredMethod(nme.apply) + @tu lazy val StringContextModule_standardInterpolator: Symbol = StringContextModule.requiredMethod(nme.standardInterpolator) + @tu lazy val StringContextModule_processEscapes: Symbol = StringContextModule.requiredMethod(nme.processEscapes) + + @tu lazy val PartialFunctionClass: 
ClassSymbol = requiredClass("scala.PartialFunction") + @tu lazy val PartialFunction_isDefinedAt: Symbol = PartialFunctionClass.requiredMethod(nme.isDefinedAt) + @tu lazy val PartialFunction_applyOrElse: Symbol = PartialFunctionClass.requiredMethod(nme.applyOrElse) + + @tu lazy val AbstractPartialFunctionClass: ClassSymbol = requiredClass("scala.runtime.AbstractPartialFunction") + @tu lazy val FunctionXXLClass: ClassSymbol = requiredClass("scala.runtime.FunctionXXL") + @tu lazy val ScalaSymbolClass: ClassSymbol = requiredClass("scala.Symbol") + @tu lazy val DynamicClass: ClassSymbol = requiredClass("scala.Dynamic") + @tu lazy val OptionClass: ClassSymbol = requiredClass("scala.Option") + @tu lazy val SomeClass: ClassSymbol = requiredClass("scala.Some") + @tu lazy val NoneModule: Symbol = requiredModule("scala.None") + + @tu lazy val EnumClass: ClassSymbol = requiredClass("scala.reflect.Enum") + @tu lazy val Enum_ordinal: Symbol = EnumClass.requiredMethod(nme.ordinal) + + @tu lazy val EnumValueSerializationProxyClass: ClassSymbol = requiredClass("scala.runtime.EnumValueSerializationProxy") + @tu lazy val EnumValueSerializationProxyConstructor: TermSymbol = + EnumValueSerializationProxyClass.requiredMethod(nme.CONSTRUCTOR, List(ClassType(TypeBounds.empty), IntType)) + + @tu lazy val ProductClass: ClassSymbol = requiredClass("scala.Product") + @tu lazy val Product_canEqual : Symbol = ProductClass.requiredMethod(nme.canEqual_) + @tu lazy val Product_productArity : Symbol = ProductClass.requiredMethod(nme.productArity) + @tu lazy val Product_productElement : Symbol = ProductClass.requiredMethod(nme.productElement) + @tu lazy val Product_productElementName: Symbol = ProductClass.requiredMethod(nme.productElementName) + @tu lazy val Product_productPrefix : Symbol = ProductClass.requiredMethod(nme.productPrefix) + + @tu lazy val IteratorClass: ClassSymbol = requiredClass("scala.collection.Iterator") + def IteratorModule(using Context): Symbol = IteratorClass.companionModule 
+ + @tu lazy val ModuleSerializationProxyClass: ClassSymbol = requiredClass("scala.runtime.ModuleSerializationProxy") + @tu lazy val ModuleSerializationProxyConstructor: TermSymbol = + ModuleSerializationProxyClass.requiredMethod(nme.CONSTRUCTOR, List(ClassType(TypeBounds.empty))) + + @tu lazy val MirrorClass: ClassSymbol = requiredClass("scala.deriving.Mirror") + @tu lazy val Mirror_ProductClass: ClassSymbol = requiredClass("scala.deriving.Mirror.Product") + @tu lazy val Mirror_Product_fromProduct: Symbol = Mirror_ProductClass.requiredMethod(nme.fromProduct) + @tu lazy val Mirror_SumClass: ClassSymbol = requiredClass("scala.deriving.Mirror.Sum") + @tu lazy val Mirror_SingletonClass: ClassSymbol = requiredClass("scala.deriving.Mirror.Singleton") + @tu lazy val Mirror_SingletonProxyClass: ClassSymbol = requiredClass("scala.deriving.Mirror.SingletonProxy") + + @tu lazy val LanguageModule: Symbol = requiredModule("scala.language") + @tu lazy val LanguageModuleClass: Symbol = LanguageModule.moduleClass.asClass + @tu lazy val LanguageExperimentalModule: Symbol = requiredModule("scala.language.experimental") + @tu lazy val LanguageDeprecatedModule: Symbol = requiredModule("scala.language.deprecated") + @tu lazy val NonLocalReturnControlClass: ClassSymbol = requiredClass("scala.runtime.NonLocalReturnControl") + @tu lazy val SelectableClass: ClassSymbol = requiredClass("scala.Selectable") + @tu lazy val WithoutPreciseParameterTypesClass: Symbol = requiredClass("scala.Selectable.WithoutPreciseParameterTypes") + + @tu lazy val ManifestClass: ClassSymbol = requiredClass("scala.reflect.Manifest") + @tu lazy val ManifestFactoryModule: Symbol = requiredModule("scala.reflect.ManifestFactory") + @tu lazy val ClassManifestFactoryModule: Symbol = requiredModule("scala.reflect.ClassManifestFactory") + @tu lazy val OptManifestClass: ClassSymbol = requiredClass("scala.reflect.OptManifest") + @tu lazy val NoManifestModule: Symbol = requiredModule("scala.reflect.NoManifest") + + @tu lazy 
val ReflectPackageClass: Symbol = requiredPackage("scala.reflect.package").moduleClass + @tu lazy val ClassTagClass: ClassSymbol = requiredClass("scala.reflect.ClassTag") + @tu lazy val ClassTagModule: Symbol = ClassTagClass.companionModule + @tu lazy val ClassTagModule_apply: Symbol = ClassTagModule.requiredMethod(nme.apply) + + @tu lazy val TypeTestClass: ClassSymbol = requiredClass("scala.reflect.TypeTest") + @tu lazy val TypeTest_unapply: Symbol = TypeTestClass.requiredMethod(nme.unapply) + @tu lazy val TypeTestModule_identity: Symbol = TypeTestClass.companionModule.requiredMethod(nme.identity) + + @tu lazy val QuotedExprClass: ClassSymbol = requiredClass("scala.quoted.Expr") + + @tu lazy val QuotesClass: ClassSymbol = requiredClass("scala.quoted.Quotes") + @tu lazy val Quotes_reflect: Symbol = QuotesClass.requiredValue("reflect") + @tu lazy val Quotes_reflect_asTerm: Symbol = Quotes_reflect.requiredMethod("asTerm") + @tu lazy val Quotes_reflect_Apply: Symbol = Quotes_reflect.requiredValue("Apply") + @tu lazy val Quotes_reflect_Apply_apply: Symbol = Quotes_reflect_Apply.requiredMethod(nme.apply) + @tu lazy val Quotes_reflect_TypeApply: Symbol = Quotes_reflect.requiredValue("TypeApply") + @tu lazy val Quotes_reflect_TypeApply_apply: Symbol = Quotes_reflect_TypeApply.requiredMethod(nme.apply) + @tu lazy val Quotes_reflect_Assign: Symbol = Quotes_reflect.requiredValue("Assign") + @tu lazy val Quotes_reflect_Assign_apply: Symbol = Quotes_reflect_Assign.requiredMethod(nme.apply) + @tu lazy val Quotes_reflect_Inferred: Symbol = Quotes_reflect.requiredValue("Inferred") + @tu lazy val Quotes_reflect_Inferred_apply: Symbol = Quotes_reflect_Inferred.requiredMethod(nme.apply) + @tu lazy val Quotes_reflect_Literal: Symbol = Quotes_reflect.requiredValue("Literal") + @tu lazy val Quotes_reflect_Literal_apply: Symbol = Quotes_reflect_Literal.requiredMethod(nme.apply) + @tu lazy val Quotes_reflect_TreeMethods: Symbol = Quotes_reflect.requiredMethod("TreeMethods") + @tu lazy 
val Quotes_reflect_TreeMethods_asExpr: Symbol = Quotes_reflect_TreeMethods.requiredMethod("asExpr") + @tu lazy val Quotes_reflect_TypeRepr: Symbol = Quotes_reflect.requiredValue("TypeRepr") + @tu lazy val Quotes_reflect_TypeRepr_of: Symbol = Quotes_reflect_TypeRepr.requiredMethod("of") + @tu lazy val Quotes_reflect_TypeRepr_typeConstructorOf: Symbol = Quotes_reflect_TypeRepr.requiredMethod("typeConstructorOf") + @tu lazy val Quotes_reflect_TypeReprMethods: Symbol = Quotes_reflect.requiredValue("TypeReprMethods") + @tu lazy val Quotes_reflect_TypeReprMethods_asType: Symbol = Quotes_reflect_TypeReprMethods.requiredMethod("asType") + @tu lazy val Quotes_reflect_TypeTreeType: Symbol = Quotes_reflect.requiredType("TypeTree") + @tu lazy val Quotes_reflect_TermType: Symbol = Quotes_reflect.requiredType("Term") + @tu lazy val Quotes_reflect_BooleanConstant: Symbol = Quotes_reflect.requiredValue("BooleanConstant") + @tu lazy val Quotes_reflect_ByteConstant: Symbol = Quotes_reflect.requiredValue("ByteConstant") + @tu lazy val Quotes_reflect_ShortConstant: Symbol = Quotes_reflect.requiredValue("ShortConstant") + @tu lazy val Quotes_reflect_IntConstant: Symbol = Quotes_reflect.requiredValue("IntConstant") + @tu lazy val Quotes_reflect_LongConstant: Symbol = Quotes_reflect.requiredValue("LongConstant") + @tu lazy val Quotes_reflect_FloatConstant: Symbol = Quotes_reflect.requiredValue("FloatConstant") + @tu lazy val Quotes_reflect_DoubleConstant: Symbol = Quotes_reflect.requiredValue("DoubleConstant") + @tu lazy val Quotes_reflect_CharConstant: Symbol = Quotes_reflect.requiredValue("CharConstant") + @tu lazy val Quotes_reflect_StringConstant: Symbol = Quotes_reflect.requiredValue("StringConstant") + @tu lazy val Quotes_reflect_UnitConstant: Symbol = Quotes_reflect.requiredValue("UnitConstant") + @tu lazy val Quotes_reflect_NullConstant: Symbol = Quotes_reflect.requiredValue("NullConstant") + @tu lazy val Quotes_reflect_ClassOfConstant: Symbol = 
Quotes_reflect.requiredValue("ClassOfConstant") + + + @tu lazy val QuoteUnpicklerClass: ClassSymbol = requiredClass("scala.quoted.runtime.QuoteUnpickler") + @tu lazy val QuoteUnpickler_unpickleExprV2: Symbol = QuoteUnpicklerClass.requiredMethod("unpickleExprV2") + @tu lazy val QuoteUnpickler_unpickleTypeV2: Symbol = QuoteUnpicklerClass.requiredMethod("unpickleTypeV2") + + @tu lazy val QuoteMatchingClass: ClassSymbol = requiredClass("scala.quoted.runtime.QuoteMatching") + @tu lazy val QuoteMatching_ExprMatch: Symbol = QuoteMatchingClass.requiredMethod("ExprMatch") + @tu lazy val QuoteMatching_TypeMatch: Symbol = QuoteMatchingClass.requiredMethod("TypeMatch") + + @tu lazy val ToExprModule: Symbol = requiredModule("scala.quoted.ToExpr") + @tu lazy val ToExprModule_BooleanToExpr: Symbol = ToExprModule.requiredMethod("BooleanToExpr") + @tu lazy val ToExprModule_ByteToExpr: Symbol = ToExprModule.requiredMethod("ByteToExpr") + @tu lazy val ToExprModule_ShortToExpr: Symbol = ToExprModule.requiredMethod("ShortToExpr") + @tu lazy val ToExprModule_IntToExpr: Symbol = ToExprModule.requiredMethod("IntToExpr") + @tu lazy val ToExprModule_LongToExpr: Symbol = ToExprModule.requiredMethod("LongToExpr") + @tu lazy val ToExprModule_FloatToExpr: Symbol = ToExprModule.requiredMethod("FloatToExpr") + @tu lazy val ToExprModule_DoubleToExpr: Symbol = ToExprModule.requiredMethod("DoubleToExpr") + @tu lazy val ToExprModule_CharToExpr: Symbol = ToExprModule.requiredMethod("CharToExpr") + @tu lazy val ToExprModule_StringToExpr: Symbol = ToExprModule.requiredMethod("StringToExpr") + + @tu lazy val QuotedRuntimeModule: Symbol = requiredModule("scala.quoted.runtime.Expr") + @tu lazy val QuotedRuntime_exprQuote : Symbol = QuotedRuntimeModule.requiredMethod("quote") + @tu lazy val QuotedRuntime_exprSplice : Symbol = QuotedRuntimeModule.requiredMethod("splice") + @tu lazy val QuotedRuntime_exprNestedSplice : Symbol = QuotedRuntimeModule.requiredMethod("nestedSplice") + + @tu lazy val 
QuotedRuntime_SplicedTypeAnnot: ClassSymbol = requiredClass("scala.quoted.runtime.SplicedType") + + @tu lazy val QuotedRuntimePatterns: Symbol = requiredModule("scala.quoted.runtime.Patterns") + @tu lazy val QuotedRuntimePatterns_patternHole: Symbol = QuotedRuntimePatterns.requiredMethod("patternHole") + @tu lazy val QuotedRuntimePatterns_patternHigherOrderHole: Symbol = QuotedRuntimePatterns.requiredMethod("patternHigherOrderHole") + @tu lazy val QuotedRuntimePatterns_higherOrderHole: Symbol = QuotedRuntimePatterns.requiredMethod("higherOrderHole") + @tu lazy val QuotedRuntimePatterns_patternTypeAnnot: ClassSymbol = QuotedRuntimePatterns.requiredClass("patternType") + @tu lazy val QuotedRuntimePatterns_fromAboveAnnot: ClassSymbol = QuotedRuntimePatterns.requiredClass("fromAbove") + + @tu lazy val QuotedTypeClass: ClassSymbol = requiredClass("scala.quoted.Type") + @tu lazy val QuotedType_splice: Symbol = QuotedTypeClass.requiredType(tpnme.Underlying) + + @tu lazy val QuotedTypeModule: Symbol = QuotedTypeClass.companionModule + @tu lazy val QuotedTypeModule_of: Symbol = QuotedTypeModule.requiredMethod("of") + + @tu lazy val CanEqualClass: ClassSymbol = getClassIfDefined("scala.Eql").orElse(requiredClass("scala.CanEqual")).asClass + def CanEqual_canEqualAny(using Context): TermSymbol = + val methodName = if CanEqualClass.name == tpnme.Eql then nme.eqlAny else nme.canEqualAny + CanEqualClass.companionModule.requiredMethod(methodName) + + @tu lazy val CanThrowClass: ClassSymbol = requiredClass("scala.CanThrow") + @tu lazy val throwsAlias: Symbol = ScalaRuntimePackageVal.requiredType(tpnme.THROWS) + + @tu lazy val TypeBoxClass: ClassSymbol = requiredClass("scala.runtime.TypeBox") + @tu lazy val TypeBox_CAP: TypeSymbol = TypeBoxClass.requiredType(tpnme.CAP) + + @tu lazy val MatchCaseClass: ClassSymbol = requiredClass("scala.runtime.MatchCase") + @tu lazy val NotGivenClass: ClassSymbol = requiredClass("scala.util.NotGiven") + @tu lazy val NotGiven_value: Symbol = 
NotGivenClass.companionModule.requiredMethod(nme.value) + + @tu lazy val ValueOfClass: ClassSymbol = requiredClass("scala.ValueOf") + + @tu lazy val FromDigitsClass: ClassSymbol = requiredClass("scala.util.FromDigits") + @tu lazy val FromDigits_WithRadixClass: ClassSymbol = requiredClass("scala.util.FromDigits.WithRadix") + @tu lazy val FromDigits_DecimalClass: ClassSymbol = requiredClass("scala.util.FromDigits.Decimal") + @tu lazy val FromDigits_FloatingClass: ClassSymbol = requiredClass("scala.util.FromDigits.Floating") + + @tu lazy val XMLTopScopeModule: Symbol = requiredModule("scala.xml.TopScope") + + @tu lazy val MainAnnotationClass: ClassSymbol = requiredClass("scala.annotation.MainAnnotation") + @tu lazy val MainAnnotationInfo: ClassSymbol = requiredClass("scala.annotation.MainAnnotation.Info") + @tu lazy val MainAnnotationParameter: ClassSymbol = requiredClass("scala.annotation.MainAnnotation.Parameter") + @tu lazy val MainAnnotationParameterAnnotation: ClassSymbol = requiredClass("scala.annotation.MainAnnotation.ParameterAnnotation") + @tu lazy val MainAnnotationCommand: ClassSymbol = requiredClass("scala.annotation.MainAnnotation.Command") + + @tu lazy val CommandLineParserModule: Symbol = requiredModule("scala.util.CommandLineParser") + @tu lazy val CLP_ParseError: ClassSymbol = CommandLineParserModule.requiredClass("ParseError").typeRef.symbol.asClass + @tu lazy val CLP_parseArgument: Symbol = CommandLineParserModule.requiredMethod("parseArgument") + @tu lazy val CLP_parseRemainingArguments: Symbol = CommandLineParserModule.requiredMethod("parseRemainingArguments") + @tu lazy val CLP_showError: Symbol = CommandLineParserModule.requiredMethod("showError") + + @tu lazy val TupleTypeRef: TypeRef = requiredClassRef("scala.Tuple") + def TupleClass(using Context): ClassSymbol = TupleTypeRef.symbol.asClass + @tu lazy val Tuple_cons: Symbol = TupleClass.requiredMethod("*:") + @tu lazy val EmptyTupleModule: Symbol = requiredModule("scala.EmptyTuple") + @tu lazy 
val NonEmptyTupleTypeRef: TypeRef = requiredClassRef("scala.NonEmptyTuple") + def NonEmptyTupleClass(using Context): ClassSymbol = NonEmptyTupleTypeRef.symbol.asClass + lazy val NonEmptyTuple_tail: Symbol = NonEmptyTupleClass.requiredMethod("tail") + @tu lazy val PairClass: ClassSymbol = requiredClass("scala.*:") + + @tu lazy val TupleXXLClass: ClassSymbol = requiredClass("scala.runtime.TupleXXL") + def TupleXXLModule(using Context): Symbol = TupleXXLClass.companionModule + + def TupleXXL_fromIterator(using Context): Symbol = TupleXXLModule.requiredMethod("fromIterator") + + @tu lazy val RuntimeTupleMirrorTypeRef: TypeRef = requiredClassRef("scala.runtime.TupleMirror") + + @tu lazy val RuntimeTuplesModule: Symbol = requiredModule("scala.runtime.Tuples") + @tu lazy val RuntimeTuplesModuleClass: Symbol = RuntimeTuplesModule.moduleClass + @tu lazy val RuntimeTuples_consIterator: Symbol = RuntimeTuplesModule.requiredMethod("consIterator") + @tu lazy val RuntimeTuples_concatIterator: Symbol = RuntimeTuplesModule.requiredMethod("concatIterator") + @tu lazy val RuntimeTuples_apply: Symbol = RuntimeTuplesModule.requiredMethod("apply") + @tu lazy val RuntimeTuples_cons: Symbol = RuntimeTuplesModule.requiredMethod("cons") + @tu lazy val RuntimeTuples_size: Symbol = RuntimeTuplesModule.requiredMethod("size") + @tu lazy val RuntimeTuples_tail: Symbol = RuntimeTuplesModule.requiredMethod("tail") + @tu lazy val RuntimeTuples_concat: Symbol = RuntimeTuplesModule.requiredMethod("concat") + @tu lazy val RuntimeTuples_toArray: Symbol = RuntimeTuplesModule.requiredMethod("toArray") + @tu lazy val RuntimeTuples_productToArray: Symbol = RuntimeTuplesModule.requiredMethod("productToArray") + @tu lazy val RuntimeTuples_isInstanceOfTuple: Symbol = RuntimeTuplesModule.requiredMethod("isInstanceOfTuple") + @tu lazy val RuntimeTuples_isInstanceOfEmptyTuple: Symbol = RuntimeTuplesModule.requiredMethod("isInstanceOfEmptyTuple") + @tu lazy val RuntimeTuples_isInstanceOfNonEmptyTuple: Symbol = 
RuntimeTuplesModule.requiredMethod("isInstanceOfNonEmptyTuple") + + @tu lazy val TupledFunctionTypeRef: TypeRef = requiredClassRef("scala.util.TupledFunction") + def TupledFunctionClass(using Context): ClassSymbol = TupledFunctionTypeRef.symbol.asClass + def RuntimeTupleFunctionsModule(using Context): Symbol = requiredModule("scala.runtime.TupledFunctions") + + @tu lazy val CapsModule: Symbol = requiredModule("scala.caps") + @tu lazy val captureRoot: TermSymbol = CapsModule.requiredValue("*") + @tu lazy val CapsUnsafeModule: Symbol = requiredModule("scala.caps.unsafe") + @tu lazy val Caps_unsafeBox: Symbol = CapsUnsafeModule.requiredMethod("unsafeBox") + @tu lazy val Caps_unsafeUnbox: Symbol = CapsUnsafeModule.requiredMethod("unsafeUnbox") + @tu lazy val Caps_unsafeBoxFunArg: Symbol = CapsUnsafeModule.requiredMethod("unsafeBoxFunArg") + + // Annotation base classes + @tu lazy val AnnotationClass: ClassSymbol = requiredClass("scala.annotation.Annotation") + @tu lazy val ClassfileAnnotationClass: ClassSymbol = requiredClass("scala.annotation.ClassfileAnnotation") + @tu lazy val StaticAnnotationClass: ClassSymbol = requiredClass("scala.annotation.StaticAnnotation") + @tu lazy val RefiningAnnotationClass: ClassSymbol = requiredClass("scala.annotation.RefiningAnnotation") + + // Annotation classes + @tu lazy val AnnotationDefaultAnnot: ClassSymbol = requiredClass("scala.annotation.internal.AnnotationDefault") + @tu lazy val BeanPropertyAnnot: ClassSymbol = requiredClass("scala.beans.BeanProperty") + @tu lazy val BooleanBeanPropertyAnnot: ClassSymbol = requiredClass("scala.beans.BooleanBeanProperty") + @tu lazy val BodyAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Body") + @tu lazy val CapabilityAnnot: ClassSymbol = requiredClass("scala.annotation.capability") + @tu lazy val ChildAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Child") + @tu lazy val ContextResultCountAnnot: ClassSymbol = 
requiredClass("scala.annotation.internal.ContextResultCount") + @tu lazy val ProvisionalSuperClassAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ProvisionalSuperClass") + @tu lazy val DeprecatedAnnot: ClassSymbol = requiredClass("scala.deprecated") + @tu lazy val DeprecatedOverridingAnnot: ClassSymbol = requiredClass("scala.deprecatedOverriding") + @tu lazy val ImplicitAmbiguousAnnot: ClassSymbol = requiredClass("scala.annotation.implicitAmbiguous") + @tu lazy val ImplicitNotFoundAnnot: ClassSymbol = requiredClass("scala.annotation.implicitNotFound") + @tu lazy val InlineParamAnnot: ClassSymbol = requiredClass("scala.annotation.internal.InlineParam") + @tu lazy val ErasedParamAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ErasedParam") + @tu lazy val InvariantBetweenAnnot: ClassSymbol = requiredClass("scala.annotation.internal.InvariantBetween") + @tu lazy val MainAnnot: ClassSymbol = requiredClass("scala.main") + @tu lazy val MappedAlternativeAnnot: ClassSymbol = requiredClass("scala.annotation.internal.MappedAlternative") + @tu lazy val MigrationAnnot: ClassSymbol = requiredClass("scala.annotation.migration") + @tu lazy val NowarnAnnot: ClassSymbol = requiredClass("scala.annotation.nowarn") + @tu lazy val TransparentTraitAnnot: ClassSymbol = requiredClass("scala.annotation.transparentTrait") + @tu lazy val NativeAnnot: ClassSymbol = requiredClass("scala.native") + @tu lazy val RepeatedAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Repeated") + @tu lazy val SourceFileAnnot: ClassSymbol = requiredClass("scala.annotation.internal.SourceFile") + @tu lazy val ScalaSignatureAnnot: ClassSymbol = requiredClass("scala.reflect.ScalaSignature") + @tu lazy val ScalaLongSignatureAnnot: ClassSymbol = requiredClass("scala.reflect.ScalaLongSignature") + @tu lazy val ScalaStrictFPAnnot: ClassSymbol = requiredClass("scala.annotation.strictfp") + @tu lazy val ScalaStaticAnnot: ClassSymbol = requiredClass("scala.annotation.static") + @tu 
lazy val SerialVersionUIDAnnot: ClassSymbol = requiredClass("scala.SerialVersionUID") + @tu lazy val TailrecAnnot: ClassSymbol = requiredClass("scala.annotation.tailrec") + @tu lazy val ThreadUnsafeAnnot: ClassSymbol = requiredClass("scala.annotation.threadUnsafe") + @tu lazy val ConstructorOnlyAnnot: ClassSymbol = requiredClass("scala.annotation.constructorOnly") + @tu lazy val CompileTimeOnlyAnnot: ClassSymbol = requiredClass("scala.annotation.compileTimeOnly") + @tu lazy val SwitchAnnot: ClassSymbol = requiredClass("scala.annotation.switch") + @tu lazy val ExperimentalAnnot: ClassSymbol = requiredClass("scala.annotation.experimental") + @tu lazy val ThrowsAnnot: ClassSymbol = requiredClass("scala.throws") + @tu lazy val TransientAnnot: ClassSymbol = requiredClass("scala.transient") + @tu lazy val UncheckedAnnot: ClassSymbol = requiredClass("scala.unchecked") + @tu lazy val UncheckedStableAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedStable") + @tu lazy val UncheckedVarianceAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedVariance") + @tu lazy val VolatileAnnot: ClassSymbol = requiredClass("scala.volatile") + @tu lazy val WithPureFunsAnnot: ClassSymbol = requiredClass("scala.annotation.internal.WithPureFuns") + @tu lazy val FieldMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.field") + @tu lazy val GetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.getter") + @tu lazy val ParamMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.param") + @tu lazy val SetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.setter") + @tu lazy val ShowAsInfixAnnot: ClassSymbol = requiredClass("scala.annotation.showAsInfix") + @tu lazy val FunctionalInterfaceAnnot: ClassSymbol = requiredClass("java.lang.FunctionalInterface") + @tu lazy val TargetNameAnnot: ClassSymbol = requiredClass("scala.annotation.targetName") + @tu lazy val VarargsAnnot: ClassSymbol = 
requiredClass("scala.annotation.varargs") + @tu lazy val SinceAnnot: ClassSymbol = requiredClass("scala.annotation.since") + @tu lazy val RequiresCapabilityAnnot: ClassSymbol = requiredClass("scala.annotation.internal.requiresCapability") + @tu lazy val RetainsAnnot: ClassSymbol = requiredClass("scala.annotation.retains") + @tu lazy val RetainsByNameAnnot: ClassSymbol = requiredClass("scala.annotation.retainsByName") + + @tu lazy val JavaRepeatableAnnot: ClassSymbol = requiredClass("java.lang.annotation.Repeatable") + + // A list of meta-annotations that are relevant for fields and accessors + @tu lazy val FieldAccessorMetaAnnots: Set[Symbol] = + Set(FieldMetaAnnot, GetterMetaAnnot, ParamMetaAnnot, SetterMetaAnnot) + + // A list of annotations that are commonly used to indicate that a field/method argument or return + // type is not null. These annotations are used by the nullification logic in JavaNullInterop to + // improve the precision of type nullification. + // We don't require that any of these annotations be present in the class path, but we want to + // create Symbols for the ones that are present, so they can be checked during nullification. 
@tu lazy val NotNullAnnots: List[ClassSymbol] = getClassesIfDefined(
  "javax.annotation.Nonnull" ::
  "javax.validation.constraints.NotNull" ::
  "androidx.annotation.NonNull" ::
  "android.support.annotation.NonNull" ::
  "android.annotation.NonNull" ::
  "com.android.annotations.NonNull" ::
  "org.eclipse.jdt.annotation.NonNull" ::
  "edu.umd.cs.findbugs.annotations.NonNull" ::
  "org.checkerframework.checker.nullness.qual.NonNull" ::
  "org.checkerframework.checker.nullness.compatqual.NonNullDecl" ::
  "org.jetbrains.annotations.NotNull" ::
  "org.springframework.lang.NonNull" ::
  "org.springframework.lang.NonNullApi" ::
  "org.springframework.lang.NonNullFields" ::
  "lombok.NonNull" ::
  "reactor.util.annotation.NonNull" ::
  "reactor.util.annotation.NonNullApi" ::
  "io.reactivex.annotations.NonNull" :: Nil)

// convenient one-parameter method types
def methOfAny(tp: Type): MethodType = MethodType(List(AnyType), tp)
def methOfAnyVal(tp: Type): MethodType = MethodType(List(AnyValType), tp)
def methOfAnyRef(tp: Type): MethodType = MethodType(List(ObjectType), tp)

// Derived types

def RepeatedParamType: TypeRef = RepeatedParamClass.typeRef

/** `Class[arg]`, or just `Class` after erasure */
def ClassType(arg: Type)(using Context): Type = {
  val ctype = ClassClass.typeRef
  if (ctx.phase.erasedTypes) ctype else ctype.appliedTo(arg)
}

/** The enumeration type, given a value of the enumeration */
def EnumType(sym: Symbol)(using Context): TypeRef =
  // given (in java): "class A { enum E { VAL1 } }"
  //  - sym: the symbol of the actual enumeration value (VAL1)
  //  - .owner: the ModuleClassSymbol of the enumeration (object E)
  //  - .linkedClass: the ClassSymbol of the enumeration (class E)
  sym.owner.linkedClass.typeRef

/** Extractor/constructor for function types `(args) => resultType`,
 *  including contextual and erased variants.
 */
object FunctionOf {
  def apply(args: List[Type], resultType: Type, isContextual: Boolean = false, isErased: Boolean = false)(using Context): Type =
    FunctionType(args.length, isContextual, isErased).appliedTo(args ::: resultType :: Nil)
  def unapply(ft: Type)(using Context): Option[(List[Type], Type, Boolean, Boolean)] = {
    val tsym = ft.typeSymbol
    if isFunctionClass(tsym) && ft.isRef(tsym) then
      val targs = ft.dealias.argInfos
      if (targs.isEmpty) None
      else Some(targs.init, targs.last, tsym.name.isContextFunction, tsym.name.isErasedFunction)
    else None
  }
}

/** Extractor/constructor for `PartialFunction[arg, result]` applications */
object PartialFunctionOf {
  def apply(arg: Type, result: Type)(using Context): Type =
    PartialFunctionClass.typeRef.appliedTo(arg :: result :: Nil)
  def unapply(pft: Type)(using Context): Option[(Type, List[Type])] =
    if (pft.isRef(PartialFunctionClass)) {
      val targs = pft.dealias.argInfos
      if (targs.length == 2) Some((targs.head, targs.tail)) else None
    }
    else None
}

/** Extractor/constructor for array types; uses `JavaArrayType` after erasure */
object ArrayOf {
  def apply(elem: Type)(using Context): Type =
    if (ctx.erasedTypes) JavaArrayType(elem)
    else ArrayType.appliedTo(elem :: Nil)
  def unapply(tp: Type)(using Context): Option[Type] = tp.dealias match {
    case AppliedType(at, arg :: Nil) if at.isRef(ArrayType.symbol) => Some(arg)
    case JavaArrayType(tp) if ctx.erasedTypes => Some(tp)
    case _ => None
  }
}

/** Extractor/constructor for match-type cases `MatchCase[pat, body]` */
object MatchCase {
  def apply(pat: Type, body: Type)(using Context): Type =
    MatchCaseClass.typeRef.appliedTo(pat, body)
  def unapply(tp: Type)(using Context): Option[(Type, Type)] = tp match {
    case AppliedType(tycon, pat :: body :: Nil) if tycon.isRef(MatchCaseClass) =>
      Some((pat, body))
    case _ =>
      None
  }
  def isInstance(tp: Type)(using Context): Boolean = tp match {
    case AppliedType(tycon: TypeRef, _) =>
      tycon.name == tpnme.MatchCase && // necessary pre-filter to avoid forcing symbols
      tycon.isRef(MatchCaseClass)
    case _ => false
  }
}

/** An extractor for multi-dimensional arrays.
 *  Note that this will also extract the high bound if an
 *  element type is a wildcard upper-bounded by an array. E.g.
 *
 *     Array[? <: Array[? <: Number]]
 *
 *  would match
 *
 *     MultiArrayOf(<Number>, 2)
 */
object MultiArrayOf {
  def apply(elem: Type, ndims: Int)(using Context): Type =
    if (ndims == 0) elem else ArrayOf(apply(elem, ndims - 1))
  def unapply(tp: Type)(using Context): Option[(Type, Int)] = tp match {
    case ArrayOf(elemtp) =>
      def recur(elemtp: Type): Option[(Type, Int)] = elemtp.dealias match {
        case tp @ TypeBounds(lo, hi @ MultiArrayOf(finalElemTp, n)) =>
          Some(finalElemTp, n)
        case MultiArrayOf(finalElemTp, n) => Some(finalElemTp, n + 1)
        case _ => Some(elemtp, 1)
      }
      recur(elemtp)
    case _ =>
      None
  }
}

/** Extractor for context function types representing by-name parameters, of the form
 *  `() ?=> T`.
 *  Under purefunctions, this becomes `() ?-> T` or `{r1, ..., rN} () ?-> T`.
 */
object ByNameFunction:
  def apply(tp: Type)(using Context): Type = tp match
    case tp @ EventuallyCapturingType(tp1, refs) if tp.annot.symbol == RetainsByNameAnnot =>
      CapturingType(apply(tp1), refs)
    case _ =>
      defn.ContextFunction0.typeRef.appliedTo(tp :: Nil)
  def unapply(tp: Type)(using Context): Option[Type] = tp match
    case tp @ AppliedType(tycon, arg :: Nil) if defn.isByNameFunctionClass(tycon.typeSymbol) =>
      Some(arg)
    case tp @ AnnotatedType(parent, _) =>
      unapply(parent)
    case _ =>
      None

final def isByNameFunctionClass(sym: Symbol): Boolean =
  sym eq ContextFunction0

def isByNameFunction(tp: Type)(using Context): Boolean = tp match
  case ByNameFunction(_) => true
  case _ => false

/** Is `sym` the `S` successor type of `scala.compiletime.ops.int`? */
final def isCompiletime_S(sym: Symbol)(using Context): Boolean =
  sym.name == tpnme.S && sym.owner == CompiletimeOpsIntModuleClass

// Operation names defined by the scala.compiletime.ops.* modules,
// grouped by the module that owns them.
private val compiletimePackageAnyTypes: Set[Name] = Set(
  tpnme.Equals, tpnme.NotEquals, tpnme.IsConst, tpnme.ToString
)
private val compiletimePackageNumericTypes: Set[Name] = Set(
  tpnme.Plus, tpnme.Minus, tpnme.Times, tpnme.Div, tpnme.Mod,
  tpnme.Lt, tpnme.Gt, tpnme.Ge, tpnme.Le,
  tpnme.Abs, tpnme.Negate, tpnme.Min, tpnme.Max
)
private val compiletimePackageIntTypes: Set[Name] = compiletimePackageNumericTypes ++ Set[Name](
  tpnme.ToString, // ToString is moved to ops.any and deprecated for ops.int
  tpnme.NumberOfLeadingZeros, tpnme.ToLong, tpnme.ToFloat, tpnme.ToDouble,
  tpnme.Xor, tpnme.BitwiseAnd, tpnme.BitwiseOr, tpnme.ASR, tpnme.LSL, tpnme.LSR
)
private val compiletimePackageLongTypes: Set[Name] = compiletimePackageNumericTypes ++ Set[Name](
  tpnme.NumberOfLeadingZeros, tpnme.ToInt, tpnme.ToFloat, tpnme.ToDouble,
  tpnme.Xor, tpnme.BitwiseAnd, tpnme.BitwiseOr, tpnme.ASR, tpnme.LSL, tpnme.LSR
)
private val compiletimePackageFloatTypes: Set[Name] = compiletimePackageNumericTypes ++ Set[Name](
  tpnme.ToInt, tpnme.ToLong, tpnme.ToDouble
)
private val compiletimePackageDoubleTypes: Set[Name] = compiletimePackageNumericTypes ++ Set[Name](
  tpnme.ToInt, tpnme.ToLong, tpnme.ToFloat
)
private val compiletimePackageBooleanTypes: Set[Name] = Set(tpnme.Not, tpnme.Xor, tpnme.And, tpnme.Or)
private val compiletimePackageStringTypes: Set[Name] = Set(
  tpnme.Plus, tpnme.Length, tpnme.Substring, tpnme.Matches, tpnme.CharAt
)
// Union of all compiletime op names; used as a cheap pre-filter in
// isCompiletimeAppliedType before the owner checks below.
private val compiletimePackageOpTypes: Set[Name] =
  Set(tpnme.S)
    ++ compiletimePackageAnyTypes
    ++ compiletimePackageIntTypes
    ++ compiletimePackageLongTypes
    ++ compiletimePackageFloatTypes
    ++ compiletimePackageDoubleTypes
    ++ compiletimePackageBooleanTypes
    ++ compiletimePackageStringTypes

/** Is `sym` one of the scala.compiletime.ops type-level operations? */
final def isCompiletimeAppliedType(sym: Symbol)(using Context): Boolean =
  compiletimePackageOpTypes.contains(sym.name)
  && (
    isCompiletime_S(sym)
    || sym.owner == CompiletimeOpsAnyModuleClass && compiletimePackageAnyTypes.contains(sym.name)
    || sym.owner == CompiletimeOpsIntModuleClass && compiletimePackageIntTypes.contains(sym.name)
    || sym.owner == CompiletimeOpsLongModuleClass && compiletimePackageLongTypes.contains(sym.name)
    || sym.owner == CompiletimeOpsFloatModuleClass && compiletimePackageFloatTypes.contains(sym.name)
    || sym.owner == CompiletimeOpsDoubleModuleClass && compiletimePackageDoubleTypes.contains(sym.name)
    || sym.owner == CompiletimeOpsBooleanModuleClass && compiletimePackageBooleanTypes.contains(sym.name)
    || sym.owner == CompiletimeOpsStringModuleClass && compiletimePackageStringTypes.contains(sym.name)
  )

// ----- Scala-2 library patches --------------------------------------

/** The `scala.runtime.stdLibPatches` package contains objects
 *  that contain definitions that get added as members to standard library
 *  objects with the same name.
 */
@tu lazy val StdLibPatchesPackage: TermSymbol = requiredPackage("scala.runtime.stdLibPatches")
@tu private lazy val ScalaPredefModuleClassPatch: Symbol = getModuleIfDefined("scala.runtime.stdLibPatches.Predef").moduleClass
@tu private lazy val LanguageModuleClassPatch: Symbol = getModuleIfDefined("scala.runtime.stdLibPatches.language").moduleClass

/** If `sym` is a patched library class, the source file of its patch class,
 *  otherwise `NoSource`
 */
def patchSource(sym: Symbol)(using Context): SourceFile =
  if sym == ScalaPredefModuleClass then ScalaPredefModuleClassPatch.source
  else if sym == LanguageModuleClass then LanguageModuleClassPatch.source
  else NoSource

/** A finalizer that patches standard library classes.
 *  It copies all non-private, non-synthetic definitions from `patchCls`
 *  to `denot` while changing their owners to `denot`. Before that it deletes
 *  any definitions of `denot` that have the same name as one of the copied
 *  definitions.
 *
 *  If an object is present in both the original class and the patch class,
 *  it is not overwritten. Instead its members are copied recursively.
 *
 *  To avoid running into cycles on bootstrap, patching happens only if `patchCls`
 *  is read from a classfile.
 */
def patchStdLibClass(denot: ClassDenotation)(using Context): Unit =
  def patch2(denot: ClassDenotation, patchCls: Symbol): Unit =
    val scope = denot.info.decls.openForMutations

    // An object present in both original and patch: recurse into it instead of overwriting
    def recurse(patch: Symbol) = patch.is(Module) && scope.lookup(patch.name).exists

    def makeClassSymbol(patch: Symbol, parents: List[Type], selfInfo: TypeOrSymbol) =
      newClassSymbol(
        owner = denot.symbol,
        name = patch.name.asTypeName,
        flags = patch.flags,
        // need to rebuild a fresh ClassInfo
        infoFn = cls => ClassInfo(
          prefix = denot.symbol.thisType,
          cls = cls,
          declaredParents = parents, // assume parents in patch don't refer to symbols in the patch
          decls = newScope,
          selfInfo =
            if patch.is(Module)
            then TermRef(denot.symbol.thisType, patch.name.sourceModuleName)
            else selfInfo // assume patch self type annotation does not refer to symbols in the patch
        ),
        privateWithin = patch.privateWithin,
        coord = denot.symbol.coord,
        assocFile = denot.symbol.associatedFile
      )

    def makeNonClassSymbol(patch: Symbol) =
      if patch.is(Inline) then
        // Inline symbols contain trees in annotations, which is coupled
        // with the underlying symbol.
        // Changing owner for inline symbols is a simple workaround.
        patch.denot = patch.denot.copySymDenotation(owner = denot.symbol)
        patch
      else
        // change `info` which might contain reference to the patch
        patch.copy(
          owner = denot.symbol,
          info =
            if patch.is(Module)
            then TypeRef(denot.symbol.thisType, patch.name.moduleClassName)
            else patch.info // assume non-object info does not refer to symbols in the patch
        )

    if patchCls.exists then
      val patches = patchCls.info.decls.filter(patch =>
        !patch.isConstructor && !patch.isOneOf(PrivateOrSynthetic))
      // First pass: unlink any original definition shadowed by a patch
      for patch <- patches if !recurse(patch) do
        val e = scope.lookupEntry(patch.name)
        if e != null then scope.unlink(e)
      // Second pass: enter re-owned copies of the patch definitions
      for patch <- patches do
        patch.ensureCompleted()
        if !recurse(patch) then
          val sym =
            patch.info match
              case ClassInfo(_, _, parents, _, selfInfo) =>
                makeClassSymbol(patch, parents, selfInfo)
              case _ =>
                makeNonClassSymbol(patch)
          end match
          sym.annotations = patch.annotations
          scope.enter(sym)
        if patch.isClass then
          patch2(scope.lookup(patch.name).asClass, patch)

  def patchWith(patchCls: Symbol) =
    denot.sourceModule.info = denot.typeRef // we run into a cyclic reference when patching if this line is omitted
    patch2(denot, patchCls)

  if denot.name == tpnme.Predef.moduleClassName && denot.symbol == ScalaPredefModuleClass then
    patchWith(ScalaPredefModuleClassPatch)
  else if denot.name == tpnme.language.moduleClassName && denot.symbol == LanguageModuleClass then
    patchWith(LanguageModuleClassPatch)
end patchStdLibClass

// ----- Symbol sets ---------------------------------------------------

@tu lazy val topClasses: Set[Symbol] = Set(AnyClass, MatchableClass, ObjectClass, AnyValClass)

@tu lazy val untestableClasses: Set[Symbol] = Set(NothingClass, NullClass, SingletonClass)

@tu lazy val AbstractFunctionType: Array[TypeRef] = mkArityArray("scala.runtime.AbstractFunction", MaxImplementedFunctionArity, 0).asInstanceOf[Array[TypeRef]]
val AbstractFunctionClassPerRun: PerRun[Array[Symbol]] = new PerRun(AbstractFunctionType.map(_.symbol.asClass))
def AbstractFunctionClass(n: Int)(using Context): Symbol = AbstractFunctionClassPerRun()(using ctx)(n)

@tu lazy val caseClassSynthesized: List[Symbol] = List(
  Any_hashCode, Any_equals, Any_toString, Product_canEqual, Product_productArity,
  Product_productPrefix, Product_productElement, Product_productElementName)

// Maps a primitive class to the scala.runtime.Lazy* holder used for lazy vals
// of that type; any other class falls back to LazyRef.
val LazyHolder: PerRun[Map[Symbol, Symbol]] = new PerRun({
  def holderImpl(holderType: String) = requiredClass("scala.runtime." + holderType)
  Map[Symbol, Symbol](
    IntClass     -> holderImpl("LazyInt"),
    LongClass    -> holderImpl("LazyLong"),
    BooleanClass -> holderImpl("LazyBoolean"),
    FloatClass   -> holderImpl("LazyFloat"),
    DoubleClass  -> holderImpl("LazyDouble"),
    ByteClass    -> holderImpl("LazyByte"),
    CharClass    -> holderImpl("LazyChar"),
    ShortClass   -> holderImpl("LazyShort")
  )
  .withDefaultValue(holderImpl("LazyRef"))
})

@tu lazy val TupleType: Array[TypeRef | Null] = mkArityArray("scala.Tuple", MaxTupleArity, 1)

def isSpecializedTuple(cls: Symbol)(using Context): Boolean =
  cls.isClass && TupleSpecializedClasses.exists(tupleCls => cls.name.isSpecializedNameOf(tupleCls.name))

def SpecializedTuple(base: Symbol, args: List[Type])(using Context): Symbol =
  base.owner.requiredClass(base.name.specializedName(args))

/** Cached function types of arbitrary arities.
 *  Function types are created on demand with newFunctionNTrait, which is
 *  called from a synthesizer installed in ScalaPackageClass.
 */
private class FunType(prefix: String):
  // grown on demand; doubled whenever an arity beyond the current capacity is requested
  private var classRefs: Array[TypeRef | Null] = new Array(22)
  def apply(n: Int): TypeRef =
    while n >= classRefs.length do
      val classRefs1 = new Array[TypeRef | Null](classRefs.length * 2)
      Array.copy(classRefs, 0, classRefs1, 0, classRefs.length)
      classRefs = classRefs1
    val funName = s"scala.$prefix$n"
    if classRefs(n) == null then
      classRefs(n) =
        if prefix.startsWith("Impure")
        then staticRef(funName.toTypeName).symbol.typeRef
        else requiredClassRef(funName)
    classRefs(n).nn
end FunType

// Index into funTypeArray: bit 0 = contextual, bit 1 = erased, bit 2 = impure
private def funTypeIdx(isContextual: Boolean, isErased: Boolean, isImpure: Boolean): Int =
  (if isContextual then 1 else 0)
  + (if isErased then 2 else 0)
  + (if isImpure then 4 else 0)

private val funTypeArray: IArray[FunType] =
  val arr = Array.ofDim[FunType](8)
  val choices = List(false, true)
  for contxt <- choices; erasd <- choices; impure <- choices do
    var str = "Function"
    if contxt then str = "Context" + str
    if erasd then str = "Erased" + str
    if impure then str = "Impure" + str
    arr(funTypeIdx(contxt, erasd, impure)) = FunType(str)
  IArray.unsafeFromArray(arr)

def FunctionSymbol(n: Int, isContextual: Boolean = false, isErased: Boolean = false, isImpure: Boolean = false)(using Context): Symbol =
  funTypeArray(funTypeIdx(isContextual, isErased, isImpure))(n).symbol

@tu lazy val Function0_apply: Symbol = Function0.requiredMethod(nme.apply)
@tu lazy val ContextFunction0_apply: Symbol = ContextFunction0.requiredMethod(nme.apply)

@tu lazy val Function0: Symbol = FunctionSymbol(0)
@tu lazy val Function1: Symbol = FunctionSymbol(1)
@tu lazy val Function2: Symbol = FunctionSymbol(2)
@tu lazy val ContextFunction0: Symbol = FunctionSymbol(0, isContextual = true)

def FunctionType(n: Int, isContextual: Boolean = false, isErased: Boolean = false, isImpure: Boolean = false)(using Context): TypeRef =
  FunctionSymbol(n, isContextual && !ctx.erasedTypes, isErased, isImpure).typeRef

lazy val PolyFunctionClass = requiredClass("scala.PolyFunction")
def PolyFunctionType = PolyFunctionClass.typeRef

/** If `cls` is a class in the scala package, its name, otherwise EmptyTypeName */
def scalaClassName(cls: Symbol)(using Context): TypeName = cls.denot match
  case clsd: ClassDenotation if clsd.owner eq ScalaPackageClass =>
    clsd.name.asInstanceOf[TypeName]
  case _ =>
    EmptyTypeName

/** If type `ref` refers to a class in the scala package, its name, otherwise EmptyTypeName */
def scalaClassName(ref: Type)(using Context): TypeName = scalaClassName(ref.classSymbol)

// Is `cls` a scala-package class named `prefix` followed only by digits (e.g. Tuple3)?
private def isVarArityClass(cls: Symbol, prefix: String) =
  cls.isClass
  && cls.owner.eq(ScalaPackageClass)
  && cls.name.testSimple(name =>
    name.startsWith(prefix)
    && name.length > prefix.length
    && digitsOnlyAfter(name, prefix.length))

private def digitsOnlyAfter(name: SimpleName, idx: Int): Boolean =
  idx == name.length || name(idx).isDigit && digitsOnlyAfter(name, idx + 1)

def isBottomClass(cls: Symbol): Boolean =
  if ctx.mode.is(Mode.SafeNulls) && !ctx.phase.erasedTypes
  then cls == NothingClass
  else isBottomClassAfterErasure(cls)

def isBottomClassAfterErasure(cls: Symbol): Boolean = cls == NothingClass || cls == NullClass

/** Is any function class where
 *  - FunctionXXL
 *  - FunctionN for N >= 0
 *  - ContextFunctionN for N >= 0
 *  - ErasedFunctionN for N > 0
 *  - ErasedContextFunctionN for N > 0
 */
def isFunctionClass(cls: Symbol): Boolean = scalaClassName(cls).isFunction

/** Is a function class, or an impure function type alias */
def isFunctionSymbol(sym: Symbol): Boolean =
  sym.isType && (sym.owner eq ScalaPackageClass) && sym.name.isFunction

/** Is a function class where
 *  - FunctionN for N >= 0 and N != XXL
 */
def isPlainFunctionClass(cls: Symbol) = isVarArityClass(cls, str.Function)

/** Is a context function class.
 *  - ContextFunctionN for N >= 0
 *  - ErasedContextFunctionN for N > 0
 */
def isContextFunctionClass(cls: Symbol): Boolean = scalaClassName(cls).isContextFunction

/** Is an erased function class.
 *  - ErasedFunctionN for N > 0
 *  - ErasedContextFunctionN for N > 0
 */
def isErasedFunctionClass(cls: Symbol): Boolean = scalaClassName(cls).isErasedFunction

/** Is either FunctionXXL or a class that will be erased to FunctionXXL
 *  - FunctionXXL
 *  - FunctionN for N >= 22
 *  - ContextFunctionN for N >= 22
 */
def isXXLFunctionClass(cls: Symbol): Boolean = {
  val name = scalaClassName(cls)
  (name eq tpnme.FunctionXXL) || name.functionArity > MaxImplementedFunctionArity
}

/** Is a synthetic function class
 *  - FunctionN for N > 22
 *  - ContextFunctionN for N >= 0
 *  - ErasedFunctionN for N > 0
 *  - ErasedContextFunctionN for N > 0
 */
def isSyntheticFunctionClass(cls: Symbol): Boolean = scalaClassName(cls).isSyntheticFunction

def isAbstractFunctionClass(cls: Symbol): Boolean = isVarArityClass(cls, str.AbstractFunction)
def isTupleClass(cls: Symbol): Boolean = isVarArityClass(cls, str.Tuple)
def isProductClass(cls: Symbol): Boolean = isVarArityClass(cls, str.Product)

def isBoxedUnitClass(cls: Symbol): Boolean =
  cls.isClass && (cls.owner eq ScalaRuntimePackageClass) && cls.name == tpnme.BoxedUnit

/** Returns the erased type of the function class `cls`
 *  - FunctionN for N > 22 becomes FunctionXXL
 *  - FunctionN for 22 > N >= 0 remains as FunctionN
 *  - ContextFunctionN for N > 22 becomes FunctionXXL
 *  - ContextFunctionN for N <= 22 becomes FunctionN
 *  - ErasedFunctionN becomes Function0
 *  - ImplicitErasedFunctionN becomes Function0
 *  - anything else becomes a NoType
 */
def functionTypeErasure(cls: Symbol): Type =
  val arity = scalaClassName(cls).functionArity
  if cls.name.isErasedFunction then FunctionType(0)
  else if arity > 22 then FunctionXXLClass.typeRef
  else if arity >= 0 then FunctionType(arity)
  else NoType

// Root imports: java.lang._ for Java sources; java.lang._, scala._ and
// (unless -Yno-predef) Predef._ for Scala sources.
private val JavaImportFns: List[RootRef] = List(
  RootRef(() => JavaLangPackageVal.termRef)
)

private val ScalaImportFns: List[RootRef] =
  JavaImportFns :+
  RootRef(() => ScalaPackageVal.termRef)

private val PredefImportFns: RootRef =
  RootRef(() => ScalaPredefModule.termRef, isPredef=true)

@tu private lazy val JavaRootImportFns: List[RootRef] =
  if ctx.settings.YnoImports.value then Nil
  else JavaImportFns

@tu private lazy val ScalaRootImportFns: List[RootRef] =
  if ctx.settings.YnoImports.value then Nil
  else if ctx.settings.YnoPredef.value then ScalaImportFns
  else ScalaImportFns :+ PredefImportFns

@tu private lazy val JavaRootImportTypes: List[TermRef] = JavaRootImportFns.map(_.refFn())
@tu private lazy val ScalaRootImportTypes: List[TermRef] = ScalaRootImportFns.map(_.refFn())
@tu private lazy val JavaUnqualifiedOwnerTypes: Set[NamedType] = unqualifiedTypes(JavaRootImportTypes)
@tu private lazy val ScalaUnqualifiedOwnerTypes: Set[NamedType] = unqualifiedTypes(ScalaRootImportTypes)

/** Are we compiling a java source file? */
private def isJavaContext(using Context): Boolean =
  ctx.compilationUnit.isJava

private def unqualifiedTypes(refs: List[TermRef]) =
  val types = refs.toSet[NamedType]
  types ++ types.map(_.symbol.moduleClass.typeRef)

/** Lazy references to the root imports */
def rootImportFns(using Context): List[RootRef] =
  if isJavaContext then JavaRootImportFns
  else ScalaRootImportFns

/** Root types imported by default */
def rootImportTypes(using Context): List[TermRef] =
  if isJavaContext then JavaRootImportTypes
  else ScalaRootImportTypes

/** Modules whose members are in the default namespace and their module classes */
def unqualifiedOwnerTypes(using Context): Set[NamedType] =
  if isJavaContext then JavaUnqualifiedOwnerTypes
  else ScalaUnqualifiedOwnerTypes

/** Names of the root import symbols that can be hidden by other imports */
@tu lazy val ShadowableImportNames: Set[TermName] = Set("Predef".toTermName)

/** Class symbols for which no class exists at runtime */
@tu lazy val NotRuntimeClasses: Set[Symbol] = Set(AnyClass, MatchableClass, AnyValClass, NullClass, NothingClass)

@tu lazy val SpecialClassTagClasses: Set[Symbol] = Set(UnitClass, AnyClass, AnyValClass)

@tu lazy val SpecialManifestClasses: Set[Symbol] = Set(AnyClass, AnyValClass, ObjectClass, NullClass, NothingClass)

/** Classes that are known not to have an initializer irrespective of
 *  whether NoInits is set. Note: FunctionXXLClass is in this set
 *  because if it is compiled by Scala2, it does not get a NoInit flag.
 *  But since it is introduced only at erasure, there's no chance
 *  for augmentScala2Traits to do anything on a class that inherits it. So
 *  it also misses an implementation class, which means that the usual scheme
 *  of calling a superclass init in the implementation class of a Scala2
 *  trait gets screwed up. Therefore, it is mandatory that FunctionXXL
 *  is treated as a NoInit trait.
 */
@tu lazy val NoInitClasses: Set[Symbol] = NotRuntimeClasses + FunctionXXLClass

def isPolymorphicAfterErasure(sym: Symbol): Boolean =
  (sym eq Any_isInstanceOf) || (sym eq Any_asInstanceOf) || (sym eq Object_synchronized)

/** Is this type a `TupleN` type?
 *
 *  @return true if the dealiased type of `tp` is `TupleN[T1, T2, ..., Tn]`
 */
def isTupleNType(tp: Type)(using Context): Boolean = {
  val tp1 = tp.dealias
  val arity = tp1.argInfos.length
  arity <= MaxTupleArity && {
    val tupletp = TupleType(arity)
    tupletp != null && tp1.isRef(tupletp.symbol)
  }
}

// The TupleN class type for small arities, nested `*:` pairs otherwise
def tupleType(elems: List[Type]): Type = {
  val arity = elems.length
  if 0 < arity && arity <= MaxTupleArity then
    val tupletp = TupleType(arity)
    if tupletp != null then tupletp.appliedTo(elems)
    else TypeOps.nestedPairs(elems)
  else TypeOps.nestedPairs(elems)
}

// Decomposes a tuple type (either `*:`-nested or TupleN) into its element
// types; stops after `bound` `*:` steps; None if `tp` is not a tuple type.
def tupleTypes(tp: Type, bound: Int = Int.MaxValue)(using Context): Option[List[Type]] = {
  @tailrec def rec(tp: Type, acc: List[Type], bound: Int): Option[List[Type]] = tp.normalized.dealias match {
    case _ if bound < 0 => Some(acc.reverse)
    case tp: AppliedType if PairClass == tp.classSymbol => rec(tp.args(1), tp.args.head :: acc, bound - 1)
    case tp: AppliedType if isTupleNType(tp) => Some(acc.reverse ::: tp.args)
    case tp: TermRef if tp.symbol == defn.EmptyTupleModule => Some(acc.reverse)
    case _ => None
  }
  rec(tp.stripTypeVar, Nil, bound)
}

def isProductSubType(tp: Type)(using Context): Boolean = tp.derivesFrom(ProductClass)

/** Is `tp` (an alias) of either a scala.FunctionN or a scala.ContextFunctionN
 *  instance?
 */
def isNonRefinedFunction(tp: Type)(using Context): Boolean =
  val arity = functionArity(tp)
  val sym = tp.dealias.typeSymbol

  arity >= 0
  && isFunctionClass(sym)
  && tp.isRef(
    FunctionType(arity, sym.name.isContextFunction, sym.name.isErasedFunction).typeSymbol,
    skipRefined = false)
end isNonRefinedFunction

/** Is `tp` a representation of a (possibly dependent) function type or an alias of such? */
def isFunctionType(tp: Type)(using Context): Boolean =
  isNonRefinedFunction(tp.dropDependentRefinement)

def isFunctionOrPolyType(tp: Type)(using Context): Boolean =
  isFunctionType(tp) || (tp.typeSymbol eq defn.PolyFunctionClass)

// Enters specialized accessor symbols (e.g. `_1$mcI$sp`) into `cls`
private def withSpecMethods(cls: ClassSymbol, bases: List[Name], paramTypes: Set[TypeRef]) =
  for base <- bases; tp <- paramTypes do
    cls.enter(newSymbol(cls, base.specializedName(List(tp)), Method, ExprType(tp)))
  cls

@tu lazy val Tuple1: ClassSymbol = withSpecMethods(requiredClass("scala.Tuple1"), List(nme._1), Tuple1SpecializedParamTypes)
@tu lazy val Tuple2: ClassSymbol = withSpecMethods(requiredClass("scala.Tuple2"), List(nme._1, nme._2), Tuple2SpecializedParamTypes)

@tu lazy val TupleSpecializedClasses: Set[Symbol] = Set(Tuple1, Tuple2)
@tu lazy val Tuple1SpecializedParamTypes: Set[TypeRef] = Set(IntType, LongType, DoubleType)
@tu lazy val Tuple2SpecializedParamTypes: Set[TypeRef] = Set(IntType, LongType, DoubleType, CharType, BooleanType)
@tu lazy val Tuple1SpecializedParamClasses: PerRun[Set[Symbol]] = new PerRun(Tuple1SpecializedParamTypes.map(_.symbol))
@tu lazy val Tuple2SpecializedParamClasses: PerRun[Set[Symbol]] = new PerRun(Tuple2SpecializedParamTypes.map(_.symbol))

// Specialized type parameters defined for scala.Function{0,1,2}.
+ @tu lazy val Function1SpecializedParamTypes: collection.Set[TypeRef] = + Set(IntType, LongType, FloatType, DoubleType) + @tu lazy val Function2SpecializedParamTypes: collection.Set[TypeRef] = + Set(IntType, LongType, DoubleType) + @tu lazy val Function0SpecializedReturnTypes: collection.Set[TypeRef] = + ScalaNumericValueTypeList.toSet + UnitType + BooleanType + @tu lazy val Function1SpecializedReturnTypes: collection.Set[TypeRef] = + Set(UnitType, BooleanType, IntType, FloatType, LongType, DoubleType) + @tu lazy val Function2SpecializedReturnTypes: collection.Set[TypeRef] = + Function1SpecializedReturnTypes + + @tu lazy val Function1SpecializedParamClasses: PerRun[collection.Set[Symbol]] = + new PerRun(Function1SpecializedParamTypes.map(_.symbol)) + @tu lazy val Function2SpecializedParamClasses: PerRun[collection.Set[Symbol]] = + new PerRun(Function2SpecializedParamTypes.map(_.symbol)) + @tu lazy val Function0SpecializedReturnClasses: PerRun[collection.Set[Symbol]] = + new PerRun(Function0SpecializedReturnTypes.map(_.symbol)) + @tu lazy val Function1SpecializedReturnClasses: PerRun[collection.Set[Symbol]] = + new PerRun(Function1SpecializedReturnTypes.map(_.symbol)) + @tu lazy val Function2SpecializedReturnClasses: PerRun[collection.Set[Symbol]] = + new PerRun(Function2SpecializedReturnTypes.map(_.symbol)) + + def isSpecializableTuple(base: Symbol, args: List[Type])(using Context): Boolean = + args.length <= 2 && base.isClass && TupleSpecializedClasses.exists(base.asClass.derivesFrom) && args.match + case List(x) => Tuple1SpecializedParamClasses().contains(x.classSymbol) + case List(x, y) => Tuple2SpecializedParamClasses().contains(x.classSymbol) && Tuple2SpecializedParamClasses().contains(y.classSymbol) + case _ => false + && base.owner.denot.info.member(base.name.specializedName(args)).exists // when dotc compiles the stdlib there are no specialised classes + + def isSpecializableFunction(cls: ClassSymbol, paramTypes: List[Type], retType: Type)(using Context): 
Boolean = + paramTypes.length <= 2 + && (cls.derivesFrom(FunctionSymbol(paramTypes.length)) || isByNameFunctionClass(cls)) + && isSpecializableFunctionSAM(paramTypes, retType) + + /** If the Single Abstract Method of a Function class has this type, is it specializable? */ + def isSpecializableFunctionSAM(paramTypes: List[Type], retType: Type)(using Context): Boolean = + paramTypes.length <= 2 && (paramTypes match { + case Nil => + Function0SpecializedReturnClasses().contains(retType.typeSymbol) + case List(paramType0) => + Function1SpecializedParamClasses().contains(paramType0.typeSymbol) && + Function1SpecializedReturnClasses().contains(retType.typeSymbol) + case List(paramType0, paramType1) => + Function2SpecializedParamClasses().contains(paramType0.typeSymbol) && + Function2SpecializedParamClasses().contains(paramType1.typeSymbol) && + Function2SpecializedReturnClasses().contains(retType.typeSymbol) + case _ => + false + }) + + @tu lazy val Function0SpecializedApplyNames: collection.Set[TermName] = + for r <- Function0SpecializedReturnTypes + yield nme.apply.specializedFunction(r, Nil).asTermName + + @tu lazy val Function1SpecializedApplyNames: collection.Set[TermName] = + for + r <- Function1SpecializedReturnTypes + t1 <- Function1SpecializedParamTypes + yield + nme.apply.specializedFunction(r, List(t1)).asTermName + + @tu lazy val Function2SpecializedApplyNames: collection.Set[TermName] = + for + r <- Function2SpecializedReturnTypes + t1 <- Function2SpecializedParamTypes + t2 <- Function2SpecializedParamTypes + yield + nme.apply.specializedFunction(r, List(t1, t2)).asTermName + + @tu lazy val FunctionSpecializedApplyNames: collection.Set[Name] = + Function0SpecializedApplyNames ++ Function1SpecializedApplyNames ++ Function2SpecializedApplyNames + + def functionArity(tp: Type)(using Context): Int = tp.dropDependentRefinement.dealias.argInfos.length - 1 + + /** Return underlying context function type (i.e. 
instance of a ContextFunctionN class) + * or NoType if none exists. The following types are considered as underlying types: + * - the alias of an alias type + * - the instance or origin of a TypeVar (i.e. the result of a stripTypeVar) + * - the upper bound of a TypeParamRef in the current constraint + */ + def asContextFunctionType(tp: Type)(using Context): Type = + tp.stripTypeVar.dealias match + case tp1: TypeParamRef if ctx.typerState.constraint.contains(tp1) => + asContextFunctionType(TypeComparer.bounds(tp1).hiBound) + case tp1 => + if tp1.typeSymbol.name.isContextFunction && isFunctionType(tp1) then tp1 + else NoType + + /** Is `tp` a context function type? */ + def isContextFunctionType(tp: Type)(using Context): Boolean = + asContextFunctionType(tp).exists + + /** An extractor for context function types `As ?=> B`, possibly with + * dependent refinements. Optionally returns a triple consisting of the argument + * types `As`, the result type `B` and whether the type is an erased context function. 
+ */ + object ContextFunctionType: + def unapply(tp: Type)(using Context): Option[(List[Type], Type, Boolean)] = + if ctx.erasedTypes then + atPhase(erasurePhase)(unapply(tp)) + else + val tp1 = asContextFunctionType(tp) + if tp1.exists then + val args = tp1.dropDependentRefinement.argInfos + Some((args.init, args.last, tp1.typeSymbol.name.isErasedFunction)) + else None + + def isErasedFunctionType(tp: Type)(using Context): Boolean = + tp.dealias.typeSymbol.name.isErasedFunction && isFunctionType(tp) + + /** A whitelist of Scala-2 classes that are known to be pure */ + def isAssuredNoInits(sym: Symbol): Boolean = + (sym `eq` SomeClass) || isTupleClass(sym) + + /** If `cls` is Tuple1..Tuple22, add the corresponding *: type as last parent to `parents` */ + def adjustForTuple(cls: ClassSymbol, tparams: List[TypeSymbol], parents: List[Type]): List[Type] = { + if !isTupleClass(cls) then parents + else if tparams.isEmpty then parents :+ TupleTypeRef + else + assert(parents.head.typeSymbol == ObjectClass) + TypeOps.nestedPairs(tparams.map(_.typeRef)) :: parents.tail + } + + /** If it is BoxedUnit, remove `java.io.Serializable` from `parents`. */ + def adjustForBoxedUnit(cls: ClassSymbol, parents: List[Type]): List[Type] = + if (isBoxedUnitClass(cls)) parents.filter(_.typeSymbol != JavaSerializableClass) + else parents + + private val HasProblematicGetClass: Set[Name] = Set( + tpnme.AnyVal, tpnme.Byte, tpnme.Short, tpnme.Char, tpnme.Int, tpnme.Long, tpnme.Float, tpnme.Double, + tpnme.Unit, tpnme.Boolean) + + /** When typing a primitive value class or AnyVal, we ignore the `getClass` + * member: it's supposed to be an override of the `getClass` defined on `Any`, + * but in dotty `Any#getClass` is polymorphic so it ends up being an overload. 
+ * This is especially problematic because it means that when writing: + * + * 1.asInstanceOf[Int & AnyRef].getClass + * + * the `getClass` that returns `Class[Int]` defined in Int can be selected, + * but this call is specified to return `classOf[Integer]`, see + * tests/run/t5568.scala. + * + * FIXME: remove all the `getClass` methods defined in the standard library + * so we don't have to hot-patch it like this. + */ + def hasProblematicGetClass(className: Name): Boolean = + HasProblematicGetClass.contains(className) + + /** Is synthesized symbol with alphanumeric name allowed to be used as an infix operator? */ + def isInfix(sym: Symbol)(using Context): Boolean = + (sym eq Object_eq) || (sym eq Object_ne) + + @tu lazy val assumedTransparentTraits = + Set[Symbol](ComparableClass, ProductClass, SerializableClass, + // add these for now, until we had a chance to retrofit 2.13 stdlib + // we should do a more thorough sweep through it then. + requiredClass("scala.collection.SortedOps"), + requiredClass("scala.collection.StrictOptimizedSortedSetOps"), + requiredClass("scala.collection.generic.DefaultSerializable"), + requiredClass("scala.collection.generic.IsIterable"), + requiredClass("scala.collection.generic.IsIterableOnce"), + requiredClass("scala.collection.generic.IsMap"), + requiredClass("scala.collection.generic.IsSeq"), + requiredClass("scala.collection.generic.Subtractable"), + requiredClass("scala.collection.immutable.StrictOptimizedSeqOps") + ) + + // ----- primitive value class machinery ------------------------------------------ + + class PerRun[T](generate: Context ?=> T) { + private var current: RunId = NoRunId + private var cached: T = _ + def apply()(using Context): T = { + if (current != ctx.runId) { + cached = generate + current = ctx.runId + } + cached + } + } + + @tu lazy val ScalaNumericValueTypeList: List[TypeRef] = List( + ByteType, ShortType, CharType, IntType, LongType, FloatType, DoubleType) + + @tu private lazy val ScalaNumericValueTypes: 
collection.Set[TypeRef] = ScalaNumericValueTypeList.toSet + @tu private lazy val ScalaValueTypes: collection.Set[TypeRef] = ScalaNumericValueTypes `union` Set(UnitType, BooleanType) + + val ScalaNumericValueClasses: PerRun[collection.Set[Symbol]] = new PerRun(ScalaNumericValueTypes.map(_.symbol)) + val ScalaValueClasses: PerRun[collection.Set[Symbol]] = new PerRun(ScalaValueTypes.map(_.symbol)) + + val ScalaBoxedClasses: PerRun[collection.Set[Symbol]] = new PerRun( + Set(BoxedByteClass, BoxedShortClass, BoxedCharClass, BoxedIntClass, BoxedLongClass, BoxedFloatClass, BoxedDoubleClass, BoxedUnitClass, BoxedBooleanClass) + ) + + private val valueTypeEnc = mutable.Map[TypeName, PrimitiveClassEnc]() + private val typeTags = mutable.Map[TypeName, Name]().withDefaultValue(nme.specializedTypeNames.Object) + +// private val unboxedTypeRef = mutable.Map[TypeName, TypeRef]() +// private val javaTypeToValueTypeRef = mutable.Map[Class[?], TypeRef]() +// private val valueTypeNamesToJavaType = mutable.Map[TypeName, Class[?]]() + + private def valueTypeRef(name: String, jtype: Class[?], enc: Int, tag: Name): TypeRef = { + val vcls = requiredClassRef(name) + valueTypeEnc(vcls.name) = enc + typeTags(vcls.name) = tag +// unboxedTypeRef(boxed.name) = vcls +// javaTypeToValueTypeRef(jtype) = vcls +// valueTypeNamesToJavaType(vcls.name) = jtype + vcls + } + + /** The type of the boxed class corresponding to primitive value type `tp`. 
*/ + def boxedType(tp: Type)(using Context): TypeRef = { + val cls = tp.classSymbol + if (cls eq ByteClass) BoxedByteClass + else if (cls eq ShortClass) BoxedShortClass + else if (cls eq CharClass) BoxedCharClass + else if (cls eq IntClass) BoxedIntClass + else if (cls eq LongClass) BoxedLongClass + else if (cls eq FloatClass) BoxedFloatClass + else if (cls eq DoubleClass) BoxedDoubleClass + else if (cls eq UnitClass) BoxedUnitClass + else if (cls eq BooleanClass) BoxedBooleanClass + else sys.error(s"Not a primitive value type: $tp") + }.typeRef + + def unboxedType(tp: Type)(using Context): TypeRef = { + val cls = tp.classSymbol + if (cls eq BoxedByteClass) ByteType + else if (cls eq BoxedShortClass) ShortType + else if (cls eq BoxedCharClass) CharType + else if (cls eq BoxedIntClass) IntType + else if (cls eq BoxedLongClass) LongType + else if (cls eq BoxedFloatClass) FloatType + else if (cls eq BoxedDoubleClass) DoubleType + else if (cls eq BoxedUnitClass) UnitType + else if (cls eq BoxedBooleanClass) BooleanType + else sys.error(s"Not a boxed primitive value type: $tp") + } + + /** The JVM tag for `tp` if it's a primitive, `java.lang.Object` otherwise. 
*/ + def typeTag(tp: Type)(using Context): Name = typeTags(scalaClassName(tp)) + +// /** The `Class[?]` of a primitive value type name */ +// def valueTypeNameToJavaType(name: TypeName)(using Context): Option[Class[?]] = +// valueTypeNamesToJavaType.get(if (name.firstPart eq nme.scala) name.lastPart.toTypeName else name) + + type PrimitiveClassEnc = Int + + val ByteEnc: Int = 2 + val ShortEnc: Int = ByteEnc * 3 + val CharEnc: Int = 5 + val IntEnc: Int = ShortEnc * CharEnc + val LongEnc: Int = IntEnc * 7 + val FloatEnc: Int = LongEnc * 11 + val DoubleEnc: Int = FloatEnc * 13 + val BooleanEnc: Int = 17 + val UnitEnc: Int = 19 + + def isValueSubType(tref1: TypeRef, tref2: TypeRef)(using Context): Boolean = + valueTypeEnc(tref2.name) % valueTypeEnc(tref1.name) == 0 + def isValueSubClass(sym1: Symbol, sym2: Symbol): Boolean = + valueTypeEnc(sym2.asClass.name) % valueTypeEnc(sym1.asClass.name) == 0 + + @tu lazy val specialErasure: SimpleIdentityMap[Symbol, ClassSymbol] = + SimpleIdentityMap.empty[Symbol] + .updated(AnyClass, ObjectClass) + .updated(MatchableClass, ObjectClass) + .updated(AnyValClass, ObjectClass) + .updated(SingletonClass, ObjectClass) + .updated(TupleClass, ProductClass) + .updated(NonEmptyTupleClass, ProductClass) + .updated(PairClass, ObjectClass) + + // ----- Initialization --------------------------------------------------- + + /** Lists core classes that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */ + @tu lazy val syntheticScalaClasses: List[TypeSymbol] = + List( + AnyClass, + MatchableClass, + AnyRefAlias, + AnyKindClass, + andType, + orType, + RepeatedParamClass, + ByNameParamClass2x, + AnyValClass, + NullClass, + NothingClass, + SingletonClass) + + @tu lazy val syntheticCoreClasses: List[Symbol] = syntheticScalaClasses ++ List( + EmptyPackageVal, + OpsPackageClass) + + /** Lists core methods that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */ + @tu 
lazy val syntheticCoreMethods: List[TermSymbol] = + AnyMethods ++ ObjectMethods ++ List(String_+, throwMethod) + + @tu lazy val reservedScalaClassNames: Set[Name] = syntheticScalaClasses.map(_.name).toSet + + private var isInitialized = false + + def init()(using Context): Unit = { + this.initCtx = ctx + if (!isInitialized) { + // force initialization of every symbol that is synthesized or hijacked by the compiler + val forced = + syntheticCoreClasses ++ syntheticCoreMethods ++ ScalaValueClasses() :+ JavaEnumClass + isInitialized = true + } + addSyntheticSymbolsComments + } + + def addSyntheticSymbolsComments(using Context): Unit = + def add(sym: Symbol, doc: String) = ctx.docCtx.foreach(_.addDocstring(sym, Some(Comment(NoSpan, doc)))) + + add(AnyClass, + """/** Class `Any` is the root of the Scala class hierarchy. Every class in a Scala + | * execution environment inherits directly or indirectly from this class. + | * + | * Starting with Scala 2.10 it is possible to directly extend `Any` using ''universal traits''. + | * A ''universal trait'' is a trait that extends `Any`, only has `def`s as members, and does no initialization. + | * + | * The main use case for universal traits is to allow basic inheritance of methods for [[scala.AnyVal value classes]]. + | * For example, + | * + | * {{{ + | * trait Printable extends Any { + | * def print(): Unit = println(this) + | * } + | * class Wrapper(val underlying: Int) extends AnyVal with Printable + | * + | * val w = new Wrapper(3) + | * w.print() + | * }}} + | * + | * See the [[https://docs.scala-lang.org/overviews/core/value-classes.html Value Classes and Universal Traits]] for more + | * details on the interplay of universal traits and value classes. + | */ + """.stripMargin) + + add(Any_==, + """/** Test two objects for equality. + | * The expression `x == that` is equivalent to `if (x eq null) that eq null else x.equals(that)`. + | * + | * @param that the object to compare against this object for equality. 
+ | * @return `true` if the receiver object is equivalent to the argument; `false` otherwise. + | */ + """.stripMargin) + + add(Any_!=, + """/** Test two objects for inequality. + | * + | * @param that the object to compare against this object for equality. + | * @return `true` if !(this == that), `false` otherwise. + | */ + """.stripMargin) + + add(Any_equals, + """/** Compares the receiver object (`this`) with the argument object (`that`) for equivalence. + | * + | * Any implementation of this method should be an [[https://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]]: + | * + | * - It is reflexive: for any instance `x` of type `Any`, `x.equals(x)` should return `true`. + | * - It is symmetric: for any instances `x` and `y` of type `Any`, `x.equals(y)` should return `true` if and + | * only if `y.equals(x)` returns `true`. + | * - It is transitive: for any instances `x`, `y`, and `z` of type `Any` if `x.equals(y)` returns `true` and + | * `y.equals(z)` returns `true`, then `x.equals(z)` should return `true`. + | * + | * If you override this method, you should verify that your implementation remains an equivalence relation. + | * Additionally, when overriding this method it is usually necessary to override `hashCode` to ensure that + | * objects which are "equal" (`o1.equals(o2)` returns `true`) hash to the same [[scala.Int]]. + | * (`o1.hashCode.equals(o2.hashCode)`). + | * + | * @param that the object to compare against this object for equality. + | * @return `true` if the receiver object is equivalent to the argument; `false` otherwise. + | */ + """.stripMargin) + + add(Any_hashCode, + """/** Calculate a hash code value for the object. + | * + | * The default hashing algorithm is platform dependent. + | * + | * Note that it is allowed for two objects to have identical hash codes (`o1.hashCode.equals(o2.hashCode)`) yet + | * not be equal (`o1.equals(o2)` returns `false`). A degenerate implementation could always return `0`. 
+ | * However, it is required that if two objects are equal (`o1.equals(o2)` returns `true`) that they have + | * identical hash codes (`o1.hashCode.equals(o2.hashCode)`). Therefore, when overriding this method, be sure + | * to verify that the behavior is consistent with the `equals` method. + | * + | * @return the hash code value for this object. + | */ + """.stripMargin) + + add(Any_toString, + """/** Returns a string representation of the object. + | * + | * The default representation is platform dependent. + | * + | * @return a string representation of the object. + | */ + """.stripMargin) + + add(Any_##, + """/** Equivalent to `x.hashCode` except for boxed numeric types and `null`. + | * For numerics, it returns a hash value which is consistent + | * with value equality: if two value type instances compare + | * as true, then ## will produce the same hash value for each + | * of them. + | * For `null` returns a hashcode where `null.hashCode` throws a + | * `NullPointerException`. + | * + | * @return a hash value consistent with == + | */ + """.stripMargin) + + add(Any_isInstanceOf, + """/** Test whether the dynamic type of the receiver object is `T0`. + | * + | * Note that the result of the test is modulo Scala's erasure semantics. + | * Therefore the expression `1.isInstanceOf[String]` will return `false`, while the + | * expression `List(1).isInstanceOf[List[String]]` will return `true`. + | * In the latter example, because the type argument is erased as part of compilation it is + | * not possible to check whether the contents of the list are of the specified type. + | * + | * @return `true` if the receiver object is an instance of erasure of type `T0`; `false` otherwise. + | */ + """.stripMargin) + + add(Any_asInstanceOf, + """/** Cast the receiver object to be of type `T0`. + | * + | * Note that the success of a cast at runtime is modulo Scala's erasure semantics. 
+ | * Therefore the expression `1.asInstanceOf[String]` will throw a `ClassCastException` at + | * runtime, while the expression `List(1).asInstanceOf[List[String]]` will not. + | * In the latter example, because the type argument is erased as part of compilation it is + | * not possible to check whether the contents of the list are of the requested type. + | * + | * @throws ClassCastException if the receiver object is not an instance of the erasure of type `T0`. + | * @return the receiver object. + | */ + """.stripMargin) + + add(Any_getClass, + """/** Returns the runtime class representation of the object. + | * + | * @return a class object corresponding to the runtime type of the receiver. + | */ + """.stripMargin) + + add(MatchableClass, + """/** The base trait of types that can be safely pattern matched against. + | * + | * See [[https://docs.scala-lang.org/scala3/reference/other-new-features/matchable.html]]. + | */ + """.stripMargin) + + add(AnyRefAlias, + """/** Class `AnyRef` is the root class of all ''reference types''. + | * All types except the value types descend from this class. + | */ + """.stripMargin) + + add(Object_eq, + """/** Tests whether the argument (`that`) is a reference to the receiver object (`this`). + | * + | * The `eq` method implements an [[https://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]] on + | * non-null instances of `AnyRef`, and has three additional properties: + | * + | * - It is consistent: for any non-null instances `x` and `y` of type `AnyRef`, multiple invocations of + | * `x.eq(y)` consistently returns `true` or consistently returns `false`. + | * - For any non-null instance `x` of type `AnyRef`, `x.eq(null)` and `null.eq(x)` returns `false`. + | * - `null.eq(null)` returns `true`. + | * + | * When overriding the `equals` or `hashCode` methods, it is important to ensure that their behavior is + | * consistent with reference equality. 
Therefore, if two objects are references to each other (`o1 eq o2`), they + | * should be equal to each other (`o1 == o2`) and they should hash to the same value (`o1.hashCode == o2.hashCode`). + | * + | * @param that the object to compare against this object for reference equality. + | * @return `true` if the argument is a reference to the receiver object; `false` otherwise. + | */ + """.stripMargin) + + add(Object_ne, + """/** Equivalent to `!(this eq that)`. + | * + | * @param that the object to compare against this object for reference equality. + | * @return `true` if the argument is not a reference to the receiver object; `false` otherwise. + | */ + """.stripMargin) + + add(Object_synchronized, + """/** Executes the code in `body` with an exclusive lock on `this`. + | * + | * @param body the code to execute + | * @return the result of `body` + | */ + """.stripMargin) + + add(Object_clone, + """/** Create a copy of the receiver object. + | * + | * The default implementation of the `clone` method is platform dependent. + | * + | * @note not specified by SLS as a member of AnyRef + | * @return a copy of the receiver object. + | */ + """.stripMargin) + + add(Object_finalize, + """/** Called by the garbage collector on the receiver object when there + | * are no more references to the object. + | * + | * The details of when and if the `finalize` method is invoked, as + | * well as the interaction between `finalize` and non-local returns + | * and exceptions, are all platform dependent. + | * + | * @note not specified by SLS as a member of AnyRef + | */ + """.stripMargin) + + add(Object_notify, + """/** Wakes up a single thread that is waiting on the receiver object's monitor. + | * + | * @note not specified by SLS as a member of AnyRef + | */ + """.stripMargin) + + add(Object_notifyAll, + """/** Wakes up all threads that are waiting on the receiver object's monitor. 
+ | * + | * @note not specified by SLS as a member of AnyRef + | */ + """.stripMargin) + + add(Object_wait, + """/** See [[https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#wait--]]. + | * + | * @note not specified by SLS as a member of AnyRef + | */ + """.stripMargin) + + add(Object_waitL, + """/** See [[https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#wait-long-]]. + | * + | * @param timeout the maximum time to wait in milliseconds. + | * @note not specified by SLS as a member of AnyRef + | */ + """.stripMargin) + + add(Object_waitLI, + """/** See [[https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#wait-long-int-]] + | * + | * @param timeout the maximum time to wait in milliseconds. + | * @param nanos additional time, in nanoseconds range 0-999999. + | * @note not specified by SLS as a member of AnyRef + | */ + """.stripMargin) + + add(AnyKindClass, + """/** The super-type of all types. + | * + | * See [[https://docs.scala-lang.org/scala3/reference/other-new-features/kind-polymorphism.html]]. + | */ + """.stripMargin) + + add(andType, + """/** The intersection of two types. + | * + | * See [[https://docs.scala-lang.org/scala3/reference/new-types/intersection-types.html]]. + | */ + """.stripMargin) + + add(orType, + """/** The union of two types. + | * + | * See [[https://docs.scala-lang.org/scala3/reference/new-types/union-types.html]]. + | */ + """.stripMargin) + + add(AnyValClass, + """/** `AnyVal` is the root class of all ''value types'', which describe values + | * not implemented as objects in the underlying host system. Value classes + | * are specified in Scala Language Specification, section 12.2. + | * + | * The standard implementation includes nine `AnyVal` subtypes: + | * + | * [[scala.Double]], [[scala.Float]], [[scala.Long]], [[scala.Int]], [[scala.Char]], + | * [[scala.Short]], and [[scala.Byte]] are the ''numeric value types''. 
+ | * + | * [[scala.Unit]] and [[scala.Boolean]] are the ''non-numeric value types''. + | * + | * Other groupings: + | * + | * - The ''subrange types'' are [[scala.Byte]], [[scala.Short]], and [[scala.Char]]. + | * - The ''integer types'' include the subrange types as well as [[scala.Int]] and [[scala.Long]]. + | * - The ''floating point types'' are [[scala.Float]] and [[scala.Double]]. + | * + | * Prior to Scala 2.10, `AnyVal` was a sealed trait. Beginning with Scala 2.10, + | * however, it is possible to define a subclass of `AnyVal` called a ''user-defined value class'' + | * which is treated specially by the compiler. Properly-defined user value classes provide a way + | * to improve performance on user-defined types by avoiding object allocation at runtime, and by + | * replacing virtual method invocations with static method invocations. + | * + | * User-defined value classes which avoid object allocation... + | * + | * - must have a single `val` parameter that is the underlying runtime representation. + | * - can define `def`s, but no `val`s, `var`s, or nested `traits`s, `class`es or `object`s. + | * - typically extend no other trait apart from `AnyVal`. + | * - cannot be used in type tests or pattern matching. + | * - may not override `equals` or `hashCode` methods. + | * + | * A minimal example: + | * {{{ + | * class Wrapper(val underlying: Int) extends AnyVal { + | * def foo: Wrapper = new Wrapper(underlying * 19) + | * } + | * }}} + | * + | * It's important to note that user-defined value classes are limited, and in some circumstances, + | * still must allocate a value class instance at runtime. These limitations and circumstances are + | * explained in greater detail in the [[https://docs.scala-lang.org/overviews/core/value-classes.html Value Classes and Universal Traits]]. + | */ + """.stripMargin) + + add(NullClass, + """/** `Null` is - together with [[scala.Nothing]] - at the bottom of the Scala type hierarchy. 
+ | * + | * `Null` is the type of the `null` literal. It is a subtype of every type + | * except those of value classes. Value classes are subclasses of [[AnyVal]], which includes + | * primitive types such as [[Int]], [[Boolean]], and user-defined value classes. + | * + | * Since `Null` is not a subtype of value types, `null` is not a member of any such type. + | * For instance, it is not possible to assign `null` to a variable of type [[scala.Int]]. + | */ + """.stripMargin) + + add(NothingClass, + """/** `Nothing` is - together with [[scala.Null]] - at the bottom of Scala's type hierarchy. + | * + | * `Nothing` is a subtype of every other type (including [[scala.Null]]); there exist + | * ''no instances'' of this type. Although type `Nothing` is uninhabited, it is + | * nevertheless useful in several ways. For instance, the Scala library defines a value + | * [[scala.collection.immutable.Nil]] of type `List[Nothing]`. Because lists are covariant in Scala, + | * this makes [[scala.collection.immutable.Nil]] an instance of `List[T]`, for any element of type `T`. + | * + | * Another usage for Nothing is the return type for methods which never return normally. + | * One example is method error in [[scala.sys]], which always throws an exception. + | */ + """.stripMargin) + + add(SingletonClass, + """/** `Singleton` is used by the compiler as a supertype for singleton types. This includes literal types, + | * as they are also singleton types. + | * + | * {{{ + | * scala> object A { val x = 42 } + | * defined object A + | * + | * scala> implicitly[A.type <:< Singleton] + | * res12: A.type <:< Singleton = generalized constraint + | * + | * scala> implicitly[A.x.type <:< Singleton] + | * res13: A.x.type <:< Singleton = generalized constraint + | * + | * scala> implicitly[42 <:< Singleton] + | * res14: 42 <:< Singleton = generalized constraint + | * + | * scala> implicitly[Int <:< Singleton] + | * ^ + | * error: Cannot prove that Int <:< Singleton. 
+ | * }}} + | * + | * `Singleton` has a special meaning when it appears as an upper bound on a formal type + | * parameter. Normally, type inference in Scala widens singleton types to the underlying + | * non-singleton type. When a type parameter has an explicit upper bound of `Singleton`, + | * the compiler infers a singleton type. + | * + | * {{{ + | * scala> def check42[T](x: T)(implicit ev: T =:= 42): T = x + | * check42: [T](x: T)(implicit ev: T =:= 42)T + | * + | * scala> val x1 = check42(42) + | * ^ + | * error: Cannot prove that Int =:= 42. + | * + | * scala> def singleCheck42[T <: Singleton](x: T)(implicit ev: T =:= 42): T = x + | * singleCheck42: [T <: Singleton](x: T)(implicit ev: T =:= 42)T + | * + | * scala> val x2 = singleCheck42(42) + | * x2: Int = 42 + | * }}} + | * + | * See also [[https://docs.scala-lang.org/sips/42.type.html SIP-23 about Literal-based Singleton Types]]. + | */ + """.stripMargin) +} diff --git a/tests/pos-with-compiler-cc/dotc/core/DenotTransformers.scala b/tests/pos-with-compiler-cc/dotc/core/DenotTransformers.scala new file mode 100644 index 000000000000..6690cae3a142 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/DenotTransformers.scala @@ -0,0 +1,82 @@ +package dotty.tools.dotc +package core + +import Periods._ +import SymDenotations._ +import Contexts._ +import Types._ +import Symbols._ +import Denotations._ +import Phases._ + +object DenotTransformers { + + /** A transformer group contains a sequence of transformers, + * ordered by the phase where they apply. Transformers are added + * to a group via `install`. 
+ */ + + /** A transformer transforms denotations at a given phase */ + trait DenotTransformer extends Phase { + + /** The last phase during which the transformed denotations are valid */ + def lastPhaseId(using Context): Int = ctx.base.nextDenotTransformerId(id + 1) + + /** The validity period of the transformed denotations in the given context */ + def validFor(using Context): Period = + Period(ctx.runId, id + 1, lastPhaseId) + + /** The transformation method */ + def transform(ref: SingleDenotation)(using Context): SingleDenotation + } + + /** A transformer that only transforms the info field of denotations */ + trait InfoTransformer extends DenotTransformer { + + def transformInfo(tp: Type, sym: Symbol)(using Context): Type + + def transform(ref: SingleDenotation)(using Context): SingleDenotation = { + val sym = ref.symbol + if (sym.exists && !infoMayChange(sym)) ref + else { + val info1 = transformInfo(ref.info, ref.symbol) + if (info1 eq ref.info) ref + else ref match { + case ref: SymDenotation => + ref.copySymDenotation(info = info1).copyCaches(ref, ctx.phase.next) + case _ => + ref.derivedSingleDenotation(ref.symbol, info1) + } + } + } + + /** Denotations with a symbol where `infoMayChange` is false are guaranteed to be + * unaffected by this transform, so `transformInfo` need not be run. This + * can save time, and more importantly, can help avoid forcing symbol completers. + */ + protected def infoMayChange(sym: Symbol)(using Context): Boolean = true + } + + /** A transformer that only transforms SymDenotations. + * Note: Infos of non-sym denotations are left as is. So the transformer should + * be used before erasure only if this is not a problem. After erasure, all + * denotations are SymDenotations, so SymTransformers can be used freely. 
+ */ + trait SymTransformer extends DenotTransformer { + + def transformSym(sym: SymDenotation)(using Context): SymDenotation + + def transform(ref: SingleDenotation)(using Context): SingleDenotation = ref match { + case ref: SymDenotation => transformSym(ref) + case _ => ref + } + } + + /** A `DenotTransformer` trait that has the identity as its `transform` method. + * You might want to inherit from this trait so that new denotations can be + * installed using `installAfter` and `enteredAfter` at the end of the phase. + */ + trait IdentityDenotTransformer extends DenotTransformer { + def transform(ref: SingleDenotation)(using Context): SingleDenotation = ref + } +} diff --git a/tests/pos-with-compiler-cc/dotc/core/Denotations.scala b/tests/pos-with-compiler-cc/dotc/core/Denotations.scala new file mode 100644 index 000000000000..5d99118e56af --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Denotations.scala @@ -0,0 +1,1375 @@ +package dotty.tools +package dotc +package core + +import SymDenotations.{ SymDenotation, ClassDenotation, NoDenotation, LazyType, stillValid, acceptStale, traceInvalid } +import Contexts._ +import Names._ +import NameKinds._ +import StdNames._ +import Symbols.NoSymbol +import Symbols._ +import Types._ +import Periods._ +import Flags._ +import DenotTransformers._ +import Decorators._ +import Signature.MatchDegree._ +import printing.Texts._ +import printing.Printer +import io.AbstractFile +import config.Config +import config.Printers.overload +import util.common._ +import typer.ProtoTypes.NoViewsAllowed +import collection.mutable.ListBuffer +import language.experimental.pureFunctions + +/** Denotations represent the meaning of symbols and named types. + * The following diagram shows how the principal types of denotations + * and their denoting entities relate to each other. Lines ending in + * a down-arrow `v` are member methods. The two methods shown in the diagram are + * "symbol" and "deref". 
Both methods are parameterized by the current context, + * and are effectively indexed by current period. + * + * Lines ending in a horizontal line mean subtyping (right is a subtype of left). + * + * NamedType + * | Symbol---------ClassSymbol + * | | | + * | denot | denot | denot + * v v v + * Denotation-+-----SingleDenotation-+------SymDenotation-+----ClassDenotation + * | | + * +-----MultiDenotation | + * | + * +--UniqueRefDenotation + * +--JointRefDenotation + * + * Here's a short summary of the classes in this diagram. + * + * NamedType A type consisting of a prefix type and a name, with fields + * prefix: Type + * name: Name + * It has two subtypes: TermRef and TypeRef + * Symbol A label for a definition or declaration in one compiler run + * ClassSymbol A symbol representing a class + * Denotation The meaning of a named type or symbol during a period + * MultiDenotation A denotation representing several overloaded members + * SingleDenotation A denotation representing a non-overloaded member or definition, with main fields + * symbol: Symbol + * info: Type + * UniqueRefDenotation A denotation referring to a single definition with some member type + * JointRefDenotation A denotation referring to a member that could resolve to several definitions + * SymDenotation A denotation representing a single definition with its original type, with main fields + * name: Name + * owner: Symbol + * flags: Flags + * privateWithin: Symbol + * annotations: List[Annotation] + * ClassDenotation A denotation representing a single class definition. + */ +object Denotations { + + implicit def eqDenotation: CanEqual[Denotation, Denotation] = CanEqual.derived + + /** A PreDenotation represents a group of single denotations or a single multi-denotation + * It is used as an optimization to avoid forming MultiDenotations too eagerly. 
+ */ + abstract class PreDenotation extends caps.Pure { + + /** A denotation in the group exists */ + def exists: Boolean + + /** First/last denotation in the group */ + def first: Denotation + def last: Denotation + + /** Convert to full denotation by &-ing all elements */ + def toDenot(pre: Type)(using Context): Denotation + + /** Group contains a denotation that refers to given symbol */ + def containsSym(sym: Symbol): Boolean + + /** Group contains a denotation with the same signature as `other` */ + def matches(other: SingleDenotation)(using Context): Boolean + + /** Keep only those denotations in this group which satisfy predicate `p`. */ + def filterWithPredicate(p: SingleDenotation => Boolean): PreDenotation + + /** Keep only those denotations in this group which have a signature + * that's not already defined by `denots`. + */ + def filterDisjoint(denots: PreDenotation)(using Context): PreDenotation + + /** Keep only those inherited members M of this predenotation for which the following is true + * - M is not marked Private + * - If M has a unique symbol, it does not appear in `prevDenots`. + * - M's signature as seen from prefix `pre` does not appear in `ownDenots` + * Return the denotation as seen from `pre`. + * Called from SymDenotations.computeMember. There, `ownDenots` are the denotations found in + * the base class, which shadow any inherited denotations with the same signature. + * `prevDenots` are the denotations that are defined in the class or inherited from + * a base type which comes earlier in the linearization. + */ + def mapInherited(ownDenots: PreDenotation, prevDenots: PreDenotation, pre: Type)(using Context): PreDenotation + + /** Keep only those denotations in this group that have all of the flags in `required`, + * but none of the flags in `excluded`. + */ + def filterWithFlags(required: FlagSet, excluded: FlagSet)(using Context): PreDenotation + + /** Map `f` over all single denotations and aggregate the results with `g`. 
*/ + def aggregate[T](f: SingleDenotation => T, g: (T, T) => T): T + + private var cachedPrefix: Type = _ + private var cachedAsSeenFrom: AsSeenFromResult = _ + private var validAsSeenFrom: Period = Nowhere + + type AsSeenFromResult <: PreDenotation + + /** The denotation with info(s) as seen from prefix type */ + def asSeenFrom(pre: Type)(using Context): AsSeenFromResult = + if (Config.cacheAsSeenFrom) { + if ((cachedPrefix ne pre) || ctx.period != validAsSeenFrom) { + cachedAsSeenFrom = computeAsSeenFrom(pre) + cachedPrefix = pre + validAsSeenFrom = if (pre.isProvisional) Nowhere else ctx.period + } + cachedAsSeenFrom + } + else computeAsSeenFrom(pre) + + protected def computeAsSeenFrom(pre: Type)(using Context): AsSeenFromResult + + /** The union of two groups. */ + def union(that: PreDenotation): PreDenotation = + if (!this.exists) that + else if (!that.exists) this + else DenotUnion(this, that) + } + + /** A denotation is the result of resolving + * a name (either simple identifier or select) during a given period. + * + * Denotations can be combined with `&` and `|`. + * & is conjunction, | is disjunction. + * + * `&` will create an overloaded denotation from two + * non-overloaded denotations if their signatures differ. + * Analogously `|` of two denotations with different signatures will give + * an empty denotation `NoDenotation`. + * + * A denotation might refer to `NoSymbol`. This is the case if the denotation + * was produced from a disjunction of two denotations with different symbols + * and there was no common symbol in a superclass that could substitute for + * both symbols. Here is an example: + * + * Say, we have: + * + * class A { def f: A } + * class B { def f: B } + * val x: A | B = if (test) new A else new B + * val y = x.f + * + * Then the denotation of `y` is `SingleDenotation(NoSymbol, A | B)`. 
+ * + * @param symbol The referencing symbol, or NoSymbol is none exists + */ + abstract class Denotation(val symbol: Symbol, protected var myInfo: Type) extends PreDenotation with printing.Showable { + type AsSeenFromResult <: Denotation + + /** The type info. + * The info is an instance of TypeType iff this is a type denotation + * Uncompleted denotations set myInfo to a LazyType. + */ + final def info(using Context): Type = { + def completeInfo = { // Written this way so that `info` is small enough to be inlined + this.asInstanceOf[SymDenotation].completeFrom(myInfo.asInstanceOf[LazyType]); info + } + if (myInfo.isInstanceOf[LazyType]) completeInfo else myInfo + } + + /** The type info, or, if this is a SymDenotation where the symbol + * is not yet completed, the completer + */ + def infoOrCompleter: Type + + /** The period during which this denotation is valid. */ + def validFor: Period + + /** Is this a reference to a type symbol? */ + def isType: Boolean + + /** Is this a reference to a term symbol? */ + def isTerm: Boolean = !isType + + /** Is this denotation overloaded? */ + final def isOverloaded: Boolean = isInstanceOf[MultiDenotation] + + /** Denotation points to unique symbol; false for overloaded denotations + * and JointRef denotations. + */ + def hasUniqueSym: Boolean + + /** The name of the denotation */ + def name(using Context): Name + + /** The signature of the denotation. */ + def signature(using Context): Signature + + /** Resolve overloaded denotation to pick the ones with the given signature + * when seen from prefix `site`. + * @param relaxed When true, consider only parameter signatures for a match. + */ + def atSignature(sig: Signature, targetName: Name, site: Type = NoPrefix, relaxed: Boolean = false)(using Context): Denotation + + /** The variant of this denotation that's current in the given context. + * If no such denotation exists, returns the denotation with each alternative + * at its first point of definition. 
+ */ + def current(using Context): Denotation + + /** Is this denotation different from NoDenotation or an ErrorDenotation? */ + def exists: Boolean = true + + /** A denotation with the info of this denotation transformed using `f` */ + def mapInfo(f: Type => Type)(using Context): Denotation + + /** If this denotation does not exist, fallback to alternative */ + inline def orElse(inline that: Denotation): Denotation = if (this.exists) this else that + + /** The set of alternative single-denotations making up this denotation */ + final def alternatives: List[SingleDenotation] = altsWith(alwaysTrue) + + /** The alternatives of this denotation that satisfy the predicate `p`. */ + def altsWith(p: Symbol => Boolean): List[SingleDenotation] + + /** The unique alternative of this denotation that satisfies the predicate `p`, + * or NoDenotation if no satisfying alternative exists. + * @throws TypeError if there is at more than one alternative that satisfies `p`. + */ + def suchThat(p: Symbol => Boolean)(using Context): SingleDenotation + + override def filterWithPredicate(p: SingleDenotation => Boolean): Denotation + + /** If this is a SingleDenotation, return it, otherwise throw a TypeError */ + def checkUnique(using Context): SingleDenotation = suchThat(alwaysTrue) + + /** Does this denotation have an alternative that satisfies the predicate `p`? */ + def hasAltWith(p: SingleDenotation => Boolean): Boolean + + /** The denotation made up from the alternatives of this denotation that + * are accessible from prefix `pre`, or NoDenotation if no accessible alternative exists. + */ + def accessibleFrom(pre: Type, superAccess: Boolean = false)(using Context): Denotation + + /** Find member of this denotation with given `name`, all `required` + * flags and no `excluded` flag, and produce a denotation that contains the type of the member + * as seen from given prefix `pre`. 
+ */ + def findMember(name: Name, pre: Type, required: FlagSet, excluded: FlagSet)(using Context): Denotation = + info.findMember(name, pre, required, excluded) + + /** If this denotation is overloaded, filter with given predicate. + * If result is still overloaded throw a TypeError. + * Note: disambiguate is slightly different from suchThat in that + * single-denotations that do not satisfy the predicate are left alone + * (whereas suchThat would map them to NoDenotation). + */ + inline def disambiguate(inline p: Symbol => Boolean)(using Context): SingleDenotation = this match { + case sdenot: SingleDenotation => sdenot + case mdenot => suchThat(p) orElse NoQualifyingRef(alternatives) + } + + /** Return symbol in this denotation that satisfies the given predicate. + * if generateStubs is specified, return a stubsymbol if denotation is a missing ref. + * Throw a `TypeError` if predicate fails to disambiguate symbol or no alternative matches. + */ + def requiredSymbol(kind: String, + name: Name, + site: Denotation = NoDenotation, + args: List[Type] = Nil, + source: AbstractFile | Null = null, + generateStubs: Boolean = true) + (p: Symbol => Boolean) + (using Context): Symbol = + disambiguate(p) match { + case m @ MissingRef(ownerd, name) if generateStubs => + if ctx.settings.YdebugMissingRefs.value then m.ex.printStackTrace() + newStubSymbol(ownerd.symbol, name, source) + case NoDenotation | _: NoQualifyingRef | _: MissingRef => + def argStr = if (args.isEmpty) "" else i" matching ($args%, %)" + val msg = + if (site.exists) i"$site does not have a member $kind $name$argStr" + else i"missing: $kind $name$argStr" + throw new TypeError(msg) + case denot => + denot.symbol + } + + def requiredMethod(pname: PreName)(using Context): TermSymbol = { + val name = pname.toTermName + info.member(name).requiredSymbol("method", name, this)(_.is(Method)).asTerm + } + def requiredMethodRef(name: PreName)(using Context): TermRef = + requiredMethod(name).termRef + + def 
requiredMethod(pname: PreName, argTypes: List[Type])(using Context): TermSymbol = { + val name = pname.toTermName + info.member(name).requiredSymbol("method", name, this, argTypes) { x => + x.is(Method) && { + x.info.paramInfoss match { + case paramInfos :: Nil => paramInfos.corresponds(argTypes)(_ =:= _) + case _ => false + } + } + }.asTerm + } + def requiredMethodRef(name: PreName, argTypes: List[Type])(using Context): TermRef = + requiredMethod(name, argTypes).termRef + + def requiredValue(pname: PreName)(using Context): TermSymbol = { + val name = pname.toTermName + info.member(name).requiredSymbol("field or getter", name, this)(_.info.isParameterless).asTerm + } + def requiredValueRef(name: PreName)(using Context): TermRef = + requiredValue(name).termRef + + def requiredClass(pname: PreName)(using Context): ClassSymbol = { + val name = pname.toTypeName + info.member(name).requiredSymbol("class", name, this)(_.isClass).asClass + } + + def requiredType(pname: PreName)(using Context): TypeSymbol = { + val name = pname.toTypeName + info.member(name).requiredSymbol("type", name, this)(_.isType).asType + } + + /** The alternative of this denotation that has a type matching `targetType` when seen + * as a member of type `site` and that has a target name matching `targetName`, or + * `NoDenotation` if none exists. + */ + def matchingDenotation(site: Type, targetType: Type, targetName: Name)(using Context): SingleDenotation = { + def qualifies(sym: Symbol) = + site.memberInfo(sym).matchesLoosely(targetType) && sym.hasTargetName(targetName) + if (isOverloaded) + atSignature(targetType.signature, targetName, site, relaxed = true) match { + case sd: SingleDenotation => sd.matchingDenotation(site, targetType, targetName) + case md => md.suchThat(qualifies(_)) + } + else if (exists && !qualifies(symbol)) NoDenotation + else asSingleDenotation + } + + /** Form a denotation by conjoining with denotation `that`. + * + * NoDenotations are dropped. 
MultiDenotations are handled by merging + * parts with same signatures. SingleDenotations with equal signatures + * are joined by following this sequence of steps: + * + * 1. If exactly one the denotations has an inaccessible symbol, pick the other one. + * 2. Otherwise, if one of the infos overrides the other one, and the associated + * symbol does not score strictly lower than the other one, + * pick the associated denotation. + * 3. Otherwise, if the two infos can be combined with `infoMeet`, pick that as + * result info, and pick the symbol that scores higher as result symbol, + * or pick `sym1` as a tie breaker. The picked info and symbol are combined + * in a JointDenotation. + * 4. Otherwise, if one of the two symbols scores strongly higher than the + * other one, pick the associated denotation. + * 5. Otherwise return a multi-denotation consisting of both denotations. + * + * Symbol scoring is determined according to the following ranking + * where earlier criteria trump later ones. Cases marked with (*) + * give a strong score advantage, the others a weak one. + * + * 1. The symbol exists, and the other one does not. (*) + * 2. The symbol is not a bridge, but the other one is. (*) + * 3. The symbol is concrete, and the other one is deferred + * 4. The symbol appears before the other in the linearization of `pre` + * 5. The symbol's visibility is strictly greater than the other one's. + * 6. The symbol is a method, but the other one is not. + */ + def meet(that: Denotation, pre: Type, safeIntersection: Boolean = false)(using Context): Denotation = { + /** Try to merge denot1 and denot2 without adding a new signature. 
*/ + def mergeDenot(denot1: Denotation, denot2: SingleDenotation): Denotation = denot1 match { + case denot1 @ MultiDenotation(denot11, denot12) => + val d1 = mergeDenot(denot11, denot2) + if (d1.exists) denot1.derivedUnionDenotation(d1, denot12) + else { + val d2 = mergeDenot(denot12, denot2) + if (d2.exists) denot1.derivedUnionDenotation(denot11, d2) + else NoDenotation + } + case denot1: SingleDenotation => + if (denot1 eq denot2) denot1 + else if denot1.matches(denot2) then mergeSingleDenot(denot1, denot2) + else NoDenotation + } + + /** Try to merge single-denotations. */ + def mergeSingleDenot(denot1: SingleDenotation, denot2: SingleDenotation): Denotation = + val info1 = denot1.info + val info2 = denot2.info + val sym1 = denot1.symbol + val sym2 = denot2.symbol + + /** Does `owner1` come before `owner2` in the linearization of `pre`? */ + def linearScore(owner1: Symbol, owner2: Symbol): Int = + + def searchBaseClasses(bcs: List[ClassSymbol]): Int = bcs match + case bc :: bcs1 => + if bc eq owner1 then 1 + else if bc eq owner2 then -1 + else searchBaseClasses(bcs1) + case Nil => 0 + + if owner1 eq owner2 then 0 + else if owner1.derivesFrom(owner2) then 1 + else if owner2.derivesFrom(owner1) then -1 + else searchBaseClasses(pre.baseClasses) + end linearScore + + /** Similar to SymDenotation#accessBoundary, but without the special cases. */ + def accessBoundary(sym: Symbol) = + if (sym.is(Private)) sym.owner + else sym.privateWithin.orElse( + if (sym.is(Protected)) sym.owner.enclosingPackageClass + else defn.RootClass) + + def isHidden(sym: Symbol) = sym.exists && !sym.isAccessibleFrom(pre) + // In typer phase filter out denotations with symbols that are not + // accessible. After typer, this is not possible since we cannot guarantee + // that the current owner is set correctly. See pos/14660.scala. 
+ val hidden1 = isHidden(sym1) && ctx.isTyper + val hidden2 = isHidden(sym2) && ctx.isTyper + if hidden1 && !hidden2 then denot2 + else if hidden2 && !hidden1 then denot1 + else + // The score that determines which symbol to pick for the result denotation. + // A value > 0 means pick `sym1`, < 0 means pick `sym2`. + // A value of +/- 2 means pick one of the denotations as a tie-breaker + // if a common info does not exist. + val symScore: Int = + if !sym1.exists then -2 + else if !sym2.exists then 2 + else if sym1.is(Bridge) && !sym2.is(Bridge) then -2 + else if sym2.is(Bridge) && !sym1.is(Bridge) then 2 + else if !sym1.isAsConcrete(sym2) then -1 + else if !sym2.isAsConcrete(sym1) then 1 + else + val linScore = linearScore(sym1.owner, sym2.owner) + if linScore != 0 then linScore + else + val boundary1 = accessBoundary(sym1) + val boundary2 = accessBoundary(sym2) + if boundary1.isProperlyContainedIn(boundary2) then -1 + else if boundary2.isProperlyContainedIn(boundary1) then 1 + else if sym2.is(Method) && !sym1.is(Method) then -1 + else if sym1.is(Method) && !sym2.is(Method) then 1 + else 0 + + val relaxedOverriding = ctx.explicitNulls && (sym1.is(JavaDefined) || sym2.is(JavaDefined)) + val matchLoosely = sym1.matchNullaryLoosely || sym2.matchNullaryLoosely + + if symScore <= 0 && info2.overrides(info1, relaxedOverriding, matchLoosely, checkClassInfo = false) then + denot2 + else if symScore >= 0 && info1.overrides(info2, relaxedOverriding, matchLoosely, checkClassInfo = false) then + denot1 + else + val jointInfo = infoMeet(info1, info2, safeIntersection) + if jointInfo.exists then + val sym = if symScore >= 0 then sym1 else sym2 + JointRefDenotation(sym, jointInfo, denot1.validFor & denot2.validFor, pre, denot1.isRefinedMethod || denot2.isRefinedMethod) + else if symScore == 2 then denot1 + else if symScore == -2 then denot2 + else + overload.println(i"overloaded with same signature: ${sym1.showLocated}: $info1 / ${sym2.showLocated}: $info2, info = 
${info1.getClass}, ${info2.getClass}, $jointInfo") + MultiDenotation(denot1, denot2) + end mergeSingleDenot + + if (this eq that) this + else if (!this.exists) that + else if (!that.exists) this + else that match { + case that: SingleDenotation => + val r = mergeDenot(this, that) + if (r.exists) r else MultiDenotation(this, that) + case that @ MultiDenotation(denot1, denot2) => + this.meet(denot1, pre).meet(denot2, pre) + } + } + + final def asSingleDenotation: SingleDenotation = asInstanceOf[SingleDenotation] + final def asSymDenotation: SymDenotation = asInstanceOf[SymDenotation] + + def toText(printer: Printer): Text = printer.toText(this) + + // ------ PreDenotation ops ---------------------------------------------- + + final def toDenot(pre: Type)(using Context): Denotation = this + final def containsSym(sym: Symbol): Boolean = hasUniqueSym && (symbol eq sym) + } + + // ------ Info meets ---------------------------------------------------- + + /** Merge parameter names of lambda types. If names in corresponding positions match, keep them, + * otherwise generate new synthetic names. + */ + private def mergeParamNames(tp1: LambdaType, tp2: LambdaType): List[tp1.ThisName] = + (for ((name1, name2, idx) <- tp1.paramNames.lazyZip(tp2.paramNames).lazyZip(tp1.paramNames.indices)) + yield if (name1 == name2) name1 else tp1.companion.syntheticParamName(idx)).toList + + /** Normally, `tp1 & tp2`, with extra care taken to return `tp1` or `tp2` directly if that's + * a valid answer. Special cases for matching methods and classes, with + * the possibility of returning NoType. Special handling of ExprTypes, where mixed + * intersections widen the ExprType away. 
+ */ + def infoMeet(tp1: Type, tp2: Type, safeIntersection: Boolean)(using Context): Type = + if tp1 eq tp2 then tp1 + else tp1 match + case tp1: TypeBounds => + tp2 match + case tp2: TypeBounds => if safeIntersection then tp1 safe_& tp2 else tp1 & tp2 + case tp2: ClassInfo => tp2 + case _ => NoType + case tp1: ClassInfo => + tp2 match + case tp2: ClassInfo if tp1.cls eq tp2.cls => tp1.derivedClassInfo(tp1.prefix & tp2.prefix) + case tp2: TypeBounds => tp1 + case _ => NoType + case tp1: MethodType => + tp2 match + case tp2: MethodType + if TypeComparer.matchingMethodParams(tp1, tp2) + && tp1.isImplicitMethod == tp2.isImplicitMethod + && tp1.isErasedMethod == tp2.isErasedMethod => + val resType = infoMeet(tp1.resType, tp2.resType.subst(tp2, tp1), safeIntersection) + if resType.exists then + tp1.derivedLambdaType(mergeParamNames(tp1, tp2), tp1.paramInfos, resType) + else NoType + case _ => NoType + case tp1: PolyType => + tp2 match + case tp2: PolyType if tp1.paramNames.hasSameLengthAs(tp2.paramNames) => + val resType = infoMeet(tp1.resType, tp2.resType.subst(tp2, tp1), safeIntersection) + if resType.exists then + tp1.derivedLambdaType( + mergeParamNames(tp1, tp2), + tp1.paramInfos.zipWithConserve(tp2.paramInfos)( _ & _ ), + resType) + else NoType + case _ => NoType + case ExprType(rtp1) => + tp2 match + case ExprType(rtp2) => ExprType(rtp1 & rtp2) + case _ => infoMeet(rtp1, tp2, safeIntersection) + case _ => + tp2 match + case _: MethodType | _: PolyType => NoType + case _ => tp1 & tp2.widenExpr + end infoMeet + + /** A non-overloaded denotation */ + abstract class SingleDenotation(symbol: Symbol, initInfo: Type) extends Denotation(symbol, initInfo) { + protected def newLikeThis(symbol: Symbol, info: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation + + final def name(using Context): Name = symbol.name + + /** For SymDenotation, this is NoPrefix. For other denotations this is the prefix + * under which the denotation was constructed. 
+ * + * Note that `asSeenFrom` might return a `SymDenotation` and therefore in + * general one cannot rely on `prefix` being set, see + * `Config.reuseSymDenotations` for details. + */ + def prefix: Type = NoPrefix + + /** True if the info of this denotation comes from a refinement. */ + def isRefinedMethod: Boolean = false + + /** For SymDenotations, the language-specific signature of the info, depending on + * where the symbol is defined. For non-SymDenotations, the Scala 3 + * signature. + * + * Invariants: + * - Before erasure, the signature of a denotation is always equal to the + * signature of its corresponding initial denotation. + * - Two distinct overloads will have SymDenotations with distinct + * signatures (the SELECTin tag in Tasty relies on this to refer to an + * overload unambiguously). Note that this only applies to + * SymDenotations, in general we cannot assume that distinct + * SingleDenotations will have distinct signatures (cf #9050). + */ + final def signature(using Context): Signature = + signature(sourceLanguage = if isType || !this.isInstanceOf[SymDenotation] then SourceLanguage.Scala3 else SourceLanguage(symbol)) + + /** Overload of `signature` which lets the caller pick the language used + * to compute the signature of the info. Useful to match denotations defined in + * different classes (see `matchesLoosely`). + */ + def signature(sourceLanguage: SourceLanguage)(using Context): Signature = + if (isType) Signature.NotAMethod // don't force info if this is a type denotation + else info match { + case info: MethodOrPoly => + try info.signature(sourceLanguage) + catch { // !!! 
DEBUG + case scala.util.control.NonFatal(ex) => + report.echo(s"cannot take signature of $info") + throw ex + } + case _ => Signature.NotAMethod + } + + def derivedSingleDenotation(symbol: Symbol, info: Type, pre: Type = this.prefix, isRefinedMethod: Boolean = this.isRefinedMethod)(using Context): SingleDenotation = + if ((symbol eq this.symbol) && (info eq this.info) && (pre eq this.prefix) && (isRefinedMethod == this.isRefinedMethod)) this + else newLikeThis(symbol, info, pre, isRefinedMethod) + + def mapInfo(f: Type => Type)(using Context): SingleDenotation = + derivedSingleDenotation(symbol, f(info)) + + inline def orElse(inline that: SingleDenotation): SingleDenotation = if (this.exists) this else that + + def altsWith(p: Symbol => Boolean): List[SingleDenotation] = + if (exists && p(symbol)) this :: Nil else Nil + + def suchThat(p: Symbol => Boolean)(using Context): SingleDenotation = + if (exists && p(symbol)) this else NoDenotation + + def hasAltWith(p: SingleDenotation => Boolean): Boolean = + exists && p(this) + + def accessibleFrom(pre: Type, superAccess: Boolean)(using Context): Denotation = + if (!symbol.exists || symbol.isAccessibleFrom(pre, superAccess)) this else NoDenotation + + def atSignature(sig: Signature, targetName: Name, site: Type, relaxed: Boolean)(using Context): SingleDenotation = + val situated = if site == NoPrefix then this else asSeenFrom(site) + val sigMatches = sig.matchDegree(situated.signature) match + case FullMatch => + true + case MethodNotAMethodMatch => + // See comment in `matches` + relaxed && !symbol.is(JavaDefined) + case ParamMatch => + relaxed + case noMatch => + false + if sigMatches && symbol.hasTargetName(targetName) then this else NoDenotation + + def matchesImportBound(bound: Type)(using Context): Boolean = + if bound.isRef(defn.NothingClass) then false + else if bound.isAny then true + else NoViewsAllowed.normalizedCompatible(info, bound, keepConstraint = false) + + // ------ Transformations 
----------------------------------------- + + private var myValidFor: Period = Nowhere + + def validFor: Period = myValidFor + def validFor_=(p: Period): Unit = { + myValidFor = p + symbol.invalidateDenotCache() + } + + /** The next SingleDenotation in this run, with wrap-around from last to first. + * + * There may be several `SingleDenotation`s with different validity + * representing the same underlying definition at different phases. + * These are called a "flock". Flock members are generated by + * @See current. Flock members are connected in a ring + * with their `nextInRun` fields. + * + * There are the following invariants concerning flock members + * + * 1) validity periods are non-overlapping + * 2) the union of all validity periods is a contiguous + * interval. + */ + protected var nextInRun: SingleDenotation = this + + /** The version of this SingleDenotation that was valid in the first phase + * of this run. + */ + def initial: SingleDenotation = + if (validFor.firstPhaseId <= 1) this + else { + var current = nextInRun + while (current.validFor.code > this.myValidFor.code) current = current.nextInRun + current + } + + def history: List[SingleDenotation] = { + val b = new ListBuffer[SingleDenotation] + var current = initial + while ({ + b += (current) + current = current.nextInRun + current ne initial + }) + () + b.toList + } + + /** Invalidate all caches and fields that depend on base classes and their contents */ + def invalidateInheritedInfo(): Unit = () + + private def updateValidity()(using Context): this.type = { + assert( + ctx.runId >= validFor.runId + || ctx.settings.YtestPickler.value // mixing test pickler with debug printing can travel back in time + || ctx.mode.is(Mode.Printing) // no use to be picky when printing error messages + || symbol.isOneOf(ValidForeverFlags), + s"denotation $this invalid in run ${ctx.runId}. 
ValidFor: $validFor") + var d: SingleDenotation = this + while ({ + d.validFor = Period(ctx.runId, d.validFor.firstPhaseId, d.validFor.lastPhaseId) + d.invalidateInheritedInfo() + d = d.nextInRun + d ne this + }) + () + this + } + + /** Move validity period of this denotation to a new run. Throw a StaleSymbol error + * if denotation is no longer valid. + * However, StaleSymbol error is not thrown in the following situations: + * + * - If acceptStale returns true (e.g. because we are in the IDE), + * update the symbol to the new version if it exists, or return + * the old version otherwise. + * - If the symbol did not have a denotation that was defined at the current phase + * return a NoDenotation instead. + */ + private def bringForward()(using Context): SingleDenotation = { + this match { + case symd: SymDenotation => + if (stillValid(symd)) return updateValidity() + if acceptStale(symd) && symd.initial.validFor.firstPhaseId <= ctx.lastPhaseId then + // New run might have fewer phases than old, so symbol might no longer be + // visible at all. TabCompleteTests have examples where this happens. + return symd.currentSymbol.denot.orElse(symd).updateValidity() + case _ => + } + if (!symbol.exists) return updateValidity() + if (!coveredInterval.containsPhaseId(ctx.phaseId)) return NoDenotation + if (ctx.debug) traceInvalid(this) + staleSymbolError + } + + /** The next defined denotation (following `nextInRun`) or an arbitrary + * undefined denotation, if all denotations in a `nextinRun` cycle are + * undefined. + */ + private def nextDefined: SingleDenotation = { + var p1 = this + var p2 = nextInRun + while (p1.validFor == Nowhere && (p1 ne p2)) { + p1 = p1.nextInRun + p2 = p2.nextInRun.nextInRun + } + p1 + } + + /** Skip any denotations that have been removed by an installAfter or that + * are otherwise undefined. 
+ */ + def skipRemoved(using Context): SingleDenotation = + if (myValidFor.code <= 0) nextDefined else this + + /** Produce a denotation that is valid for the given context. + * Usually called when !(validFor contains ctx.period) + * (even though this is not a precondition). + * If the runId of the context is the same as runId of this denotation, + * the right flock member is located, or, if it does not exist yet, + * created by invoking a transformer (@See Transformers). + * If the runId's differ, but this denotation is a SymDenotation + * and its toplevel owner class or module + * is still a member of its enclosing package, then the whole flock + * is brought forward to be valid in the new runId. Otherwise + * the symbol is stale, which constitutes an internal error. + */ + def current(using Context): SingleDenotation = + util.Stats.record("current") + val currentPeriod = ctx.period + val valid = myValidFor + + def assertNotPackage(d: SingleDenotation, transformer: DenotTransformer) = d match + case d: ClassDenotation => + assert(!d.is(Package), s"illegal transformation of package denotation by transformer $transformer") + case _ => + + def escapeToNext = nextDefined.ensuring(_.validFor != Nowhere) + + def toNewRun = + util.Stats.record("current.bringForward") + if exists then initial.bringForward().current else this + + def goForward = + var cur = this + // search for containing period as long as nextInRun increases. 
+ var next = nextInRun + while next.validFor.code > valid.code && !(next.validFor contains currentPeriod) do + cur = next + next = next.nextInRun + if next.validFor.code > valid.code then + // in this case, next.validFor contains currentPeriod + cur = next + cur + else + //println(s"might need new denot for $cur, valid for ${cur.validFor} at $currentPeriod") + // not found, cur points to highest existing variant + val nextTransformerId = ctx.base.nextDenotTransformerId(cur.validFor.lastPhaseId) + if currentPeriod.lastPhaseId <= nextTransformerId then + cur.validFor = Period(currentPeriod.runId, cur.validFor.firstPhaseId, nextTransformerId) + else + var startPid = nextTransformerId + 1 + val transformer = ctx.base.denotTransformers(nextTransformerId) + //println(s"transforming $this with $transformer") + val savedPeriod = ctx.period + val mutCtx = ctx.asInstanceOf[FreshContext] + try + mutCtx.setPhase(transformer) + next = transformer.transform(cur) + // We temporarily update the context with the new phase instead of creating a + // new one. This is done for performance. We cut down on about 30% of context + // creations that way, and also avoid phase caches in contexts to get large. + // To work correctly, we need to demand that the context with the new phase + // is not retained in the result. 
+ catch case ex: CyclicReference => + // println(s"error while transforming $this") + throw ex + finally + mutCtx.setPeriod(savedPeriod) + if next eq cur then + startPid = cur.validFor.firstPhaseId + else + assertNotPackage(next, transformer) + next.insertAfter(cur) + cur = next + cur.validFor = Period(currentPeriod.runId, startPid, transformer.lastPhaseId) + //printPeriods(cur) + //println(s"new denot: $cur, valid for ${cur.validFor}") + cur.current // multiple transformations could be required + end goForward + + def goBack: SingleDenotation = + // currentPeriod < end of valid; in this case a version must exist + // but to be defensive we check for infinite loop anyway + var cur = this + var cnt = 0 + while !(cur.validFor contains currentPeriod) do + //println(s"searching: $cur at $currentPeriod, valid for ${cur.validFor}") + cur = cur.nextInRun + // Note: One might be tempted to add a `prev` field to get to the new denotation + // more directly here. I tried that, but it degrades rather than improves + // performance: Test setup: Compile everything in dotc and immediate subdirectories + // 10 times. Best out of 10: 18154ms with `prev` field, 17777ms without. + cnt += 1 + if cnt > MaxPossiblePhaseId then + return atPhase(coveredInterval.firstPhaseId)(current) + cur + end goBack + + if valid.code <= 0 then + // can happen if we sit on a stale denotation which has been replaced + // wholesale by an installAfter; in this case, proceed to the next + // denotation and try again. + escapeToNext + else if valid.runId != currentPeriod.runId then + toNewRun + else if currentPeriod.code > valid.code then + goForward + else + goBack + end current + + private def demandOutsideDefinedMsg(using Context): String = + s"demanding denotation of $this at phase ${ctx.phase}(${ctx.phaseId}) outside defined interval: defined periods are${definedPeriodsString}" + + /** Install this denotation to be the result of the given denotation transformer. 
+ * This is the implementation of the same-named method in SymDenotations. + * It's placed here because it needs access to private fields of SingleDenotation. + * @pre Can only be called in `phase.next`. + */ + protected def installAfter(phase: DenotTransformer)(using Context): Unit = { + val targetId = phase.next.id + if (ctx.phaseId != targetId) atPhase(phase.next)(installAfter(phase)) + else { + val current = symbol.current + // println(s"installing $this after $phase/${phase.id}, valid = ${current.validFor}") + // printPeriods(current) + this.validFor = Period(ctx.runId, targetId, current.validFor.lastPhaseId) + if (current.validFor.firstPhaseId >= targetId) + current.replaceWith(this) + else { + current.validFor = Period(ctx.runId, current.validFor.firstPhaseId, targetId - 1) + insertAfter(current) + } + } + // printPeriods(this) + } + + /** Apply a transformation `f` to all denotations in this group that start at or after + * given phase. Denotations are replaced while keeping the same validity periods. + */ + protected def transformAfter(phase: DenotTransformer, f: SymDenotation => SymDenotation)(using Context): Unit = { + var current = symbol.current + while (current.validFor.firstPhaseId < phase.id && (current.nextInRun.validFor.code > current.validFor.code)) + current = current.nextInRun + var hasNext = true + while ((current.validFor.firstPhaseId >= phase.id) && hasNext) { + val current1: SingleDenotation = f(current.asSymDenotation) + if (current1 ne current) { + current1.validFor = current.validFor + current.replaceWith(current1) + } + hasNext = current1.nextInRun.validFor.code > current1.validFor.code + current = current1.nextInRun + } + } + + /** Insert this denotation so that it follows `prev`. */ + private def insertAfter(prev: SingleDenotation) = { + this.nextInRun = prev.nextInRun + prev.nextInRun = this + } + + /** Insert this denotation instead of `old`. 
+ * Also ensure that `old` refers with `nextInRun` to this denotation + * and set its `validFor` field to `Nowhere`. This is necessary so that + * references to the old denotation can be brought forward via `current` + * to a valid denotation. + * + * The code to achieve this is subtle in that it works correctly + * whether the replaced denotation is the only one in its cycle or not. + */ + private[dotc] def replaceWith(newd: SingleDenotation): Unit = { + var prev = this + while (prev.nextInRun ne this) prev = prev.nextInRun + // order of next two assignments is important! + prev.nextInRun = newd + newd.nextInRun = nextInRun + validFor = Nowhere + nextInRun = newd + } + + def staleSymbolError(using Context): Nothing = + throw new StaleSymbol(staleSymbolMsg) + + def staleSymbolMsg(using Context): String = { + def ownerMsg = this match { + case denot: SymDenotation => s"in ${denot.owner}" + case _ => "" + } + s"stale symbol; $this#${symbol.id} $ownerMsg, defined in ${myValidFor}, is referred to in run ${ctx.period}" + } + + /** The period (interval of phases) for which there exists + * a valid denotation in this flock. + */ + def coveredInterval(using Context): Period = { + var cur = this + var cnt = 0 + var interval = validFor + while ({ + cur = cur.nextInRun + cnt += 1 + assert(cnt <= MaxPossiblePhaseId, demandOutsideDefinedMsg) + interval |= cur.validFor + cur ne this + }) + () + interval + } + + /** Show declaration string; useful for showing declarations + * as seen from subclasses. 
+ */ + def showDcl(using Context): String = ctx.printer.dclText(this).show + + override def toString: String = + if (symbol == NoSymbol) symbol.toString + else s"<SingleDenotation of type $infoOrCompleter>" + + def definedPeriodsString: String = { + var sb = new StringBuilder() + var cur = this + var cnt = 0 + while ({ + sb.append(" " + cur.validFor) + cur = cur.nextInRun + cnt += 1 + if (cnt > MaxPossiblePhaseId) { sb.append(" ..."); cur = this } + cur ne this + }) + () + sb.toString + } + + // ------ PreDenotation ops ---------------------------------------------- + + final def first: SingleDenotation = this + final def last: SingleDenotation = this + + def matches(other: SingleDenotation)(using Context): Boolean = + symbol.hasTargetName(other.symbol.targetName) + && matchesLoosely(other) + + /** `matches` without a target name check. + * + * For definitions coming from different languages, we pick a common + * language to compute their signatures. This allows us for example to + * override some Java definitions from Scala even if they have a different + * erasure (see i8615b, i9109b), Erasure takes care of adding any necessary + * bridge to make this work at runtime. + */ + def matchesLoosely(other: SingleDenotation, alwaysCompareTypes: Boolean = false)(using Context): Boolean = + if isType then true + else + val thisLanguage = SourceLanguage(symbol) + val otherLanguage = SourceLanguage(other.symbol) + val commonLanguage = SourceLanguage.commonLanguage(thisLanguage, otherLanguage) + val sig = signature(commonLanguage) + val otherSig = other.signature(commonLanguage) + sig.matchDegree(otherSig) match + case FullMatch => + !alwaysCompareTypes || info.matches(other.info) + case MethodNotAMethodMatch => + !ctx.erasedTypes && { + // A Scala zero-parameter method and a Scala non-method always match. + if !thisLanguage.isJava && !otherLanguage.isJava then + true + // Java allows defining both a field and a zero-parameter method with the same name, + // so they must not match.
+ else if thisLanguage.isJava && otherLanguage.isJava then + false + // A Java field never matches a Scala method. + else if thisLanguage.isJava then + symbol.is(Method) + else // otherLanguage.isJava + other.symbol.is(Method) + } + case ParamMatch => + // The signatures do not tell us enough to be sure about matching + !ctx.erasedTypes && info.matches(other.info) + case noMatch => + false + + def mapInherited(ownDenots: PreDenotation, prevDenots: PreDenotation, pre: Type)(using Context): SingleDenotation = + if hasUniqueSym && prevDenots.containsSym(symbol) then NoDenotation + else if isType then filterDisjoint(ownDenots).asSeenFrom(pre) + else asSeenFrom(pre).filterDisjoint(ownDenots) + + def filterWithPredicate(p: SingleDenotation => Boolean): SingleDenotation = + if (p(this)) this else NoDenotation + def filterDisjoint(denots: PreDenotation)(using Context): SingleDenotation = + if (denots.exists && denots.matches(this)) NoDenotation else this + def filterWithFlags(required: FlagSet, excluded: FlagSet)(using Context): SingleDenotation = + val realExcluded = if ctx.isAfterTyper then excluded else excluded | Invisible + def symd: SymDenotation = this match + case symd: SymDenotation => symd + case _ => symbol.denot + if !required.isEmpty && !symd.isAllOf(required) + || symd.isOneOf(realExcluded) then NoDenotation + else this + def aggregate[T](f: SingleDenotation => T, g: (T, T) => T): T = f(this) + + type AsSeenFromResult = SingleDenotation + + protected def computeAsSeenFrom(pre: Type)(using Context): SingleDenotation = { + val symbol = this.symbol + val owner = this match { + case thisd: SymDenotation => thisd.owner + case _ => if (symbol.exists) symbol.owner else NoSymbol + } + + /** The derived denotation with the given `info` transformed with `asSeenFrom`. + * + * As a performance hack, we might reuse an existing SymDenotation, + * instead of creating a new denotation with a given `prefix`, + * see `Config.reuseSymDenotations`. 
+ */ + def derived(info: Type) = + /** Do we need to return a denotation with a prefix set? */ + def needsPrefix = + // For opaque types, the prefix is used in `ElimOpaques#transform`, + // without this i7159.scala would fail when compiled from tasty. + symbol.is(Opaque) + + val derivedInfo = info.asSeenFrom(pre, owner) + if Config.reuseSymDenotations && this.isInstanceOf[SymDenotation] + && (derivedInfo eq info) && !needsPrefix then + this + else + derivedSingleDenotation(symbol, derivedInfo, pre) + end derived + + // It could happen that we see the symbol with prefix `this` as a member of a different class + // through a self type and that it then has a different info. In this case we have to go + // through the asSeenFrom to switch the type back. Test case is pos/i9352.scala. + def hasOriginalInfo: Boolean = this match + case sd: SymDenotation => true + case _ => info eq symbol.info + + def ownerIsPrefix = pre match + case pre: ThisType => pre.sameThis(owner.thisType) + case _ => false + + if !owner.membersNeedAsSeenFrom(pre) && (!ownerIsPrefix || hasOriginalInfo) + || symbol.is(NonMember) + then this + else if symbol.isAllOf(ClassTypeParam) then + val arg = symbol.typeRef.argForParam(pre, widenAbstract = true) + if arg.exists + then derivedSingleDenotation(symbol, normalizedArgBounds(arg.bounds), pre) + else derived(symbol.info) + else derived(symbol.info) + } + + /** The argument bounds, possibly intersected with the parameter's info TypeBounds, + * if the latter is not F-bounded and does not refer to other type parameters + * of the same class, and the intersection is provably nonempty.
+ */ + private def normalizedArgBounds(argBounds: TypeBounds)(using Context): TypeBounds = + if symbol.isCompleted && !hasBoundsDependingOnParamsOf(symbol.owner) then + val combined @ TypeBounds(lo, hi) = symbol.info.bounds & argBounds + if (lo frozen_<:< hi) then combined + else argBounds + else argBounds + + private def hasBoundsDependingOnParamsOf(cls: Symbol)(using Context): Boolean = + val acc = new TypeAccumulator[Boolean]: + def apply(x: Boolean, tp: Type): Boolean = tp match + case _: LazyRef => true + case tp: TypeRef + if tp.symbol.isAllOf(ClassTypeParam) && tp.symbol.owner == cls => true + case _ => foldOver(x, tp) + acc(false, symbol.info) + } + + abstract class NonSymSingleDenotation(symbol: Symbol, initInfo: Type, override val prefix: Type) extends SingleDenotation(symbol, initInfo) { + def infoOrCompleter: Type = initInfo + def isType: Boolean = infoOrCompleter.isInstanceOf[TypeType] + } + + class UniqueRefDenotation( + symbol: Symbol, + initInfo: Type, + initValidFor: Period, + prefix: Type) extends NonSymSingleDenotation(symbol, initInfo, prefix) { + validFor = initValidFor + override def hasUniqueSym: Boolean = true + protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation = + if isRefinedMethod then + new JointRefDenotation(s, i, validFor, pre, isRefinedMethod) + else + new UniqueRefDenotation(s, i, validFor, pre) + } + + class JointRefDenotation( + symbol: Symbol, + initInfo: Type, + initValidFor: Period, + prefix: Type, + override val isRefinedMethod: Boolean) extends NonSymSingleDenotation(symbol, initInfo, prefix) { + validFor = initValidFor + override def hasUniqueSym: Boolean = false + protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation = + new JointRefDenotation(s, i, validFor, pre, isRefinedMethod) + } + + class ErrorDenotation(using Context) extends NonSymSingleDenotation(NoSymbol, NoType, NoType) { + override def exists: Boolean = false + 
override def hasUniqueSym: Boolean = false + validFor = Period.allInRun(ctx.runId) + protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation = + this + } + + /** An error denotation that provides more info about the missing reference. + * Produced by staticRef, consumed by requiredSymbol. + */ + case class MissingRef(val owner: SingleDenotation, name: Name)(using Context) extends ErrorDenotation { + val ex: Exception = new Exception // DEBUG + } + + /** An error denotation that provides more info about alternatives + * that were found but that do not qualify. + * Produced by staticRef, consumed by requiredSymbol. + */ + case class NoQualifyingRef(alts: List[SingleDenotation])(using Context) extends ErrorDenotation + + /** A double definition + */ + def isDoubleDef(sym1: Symbol, sym2: Symbol)(using Context): Boolean = + (sym1.exists && sym2.exists && + (sym1 `ne` sym2) && (sym1.effectiveOwner `eq` sym2.effectiveOwner) && + !sym1.is(Bridge) && !sym2.is(Bridge)) + + // --- Overloaded denotations and predenotations ------------------------------------------------- + + trait MultiPreDenotation extends PreDenotation { + def denot1: PreDenotation + def denot2: PreDenotation + + assert(denot1.exists && denot2.exists, s"Union of non-existing denotations ($denot1) and ($denot2)") + def first: Denotation = denot1.first + def last: Denotation = denot2.last + def matches(other: SingleDenotation)(using Context): Boolean = + denot1.matches(other) || denot2.matches(other) + def mapInherited(owndenot: PreDenotation, prevdenot: PreDenotation, pre: Type)(using Context): PreDenotation = + derivedUnion(denot1.mapInherited(owndenot, prevdenot, pre), denot2.mapInherited(owndenot, prevdenot, pre)) + def filterWithPredicate(p: SingleDenotation => Boolean): PreDenotation = + derivedUnion(denot1 filterWithPredicate p, denot2 filterWithPredicate p) + def filterDisjoint(denot: PreDenotation)(using Context): PreDenotation = + derivedUnion(denot1 
filterDisjoint denot, denot2 filterDisjoint denot) + def filterWithFlags(required: FlagSet, excluded: FlagSet)(using Context): PreDenotation = + derivedUnion(denot1.filterWithFlags(required, excluded), denot2.filterWithFlags(required, excluded)) + def aggregate[T](f: SingleDenotation => T, g: (T, T) => T): T = + g(denot1.aggregate(f, g), denot2.aggregate(f, g)) + protected def derivedUnion(denot1: PreDenotation, denot2: PreDenotation) = + if ((denot1 eq this.denot1) && (denot2 eq this.denot2)) this + else denot1 union denot2 + } + + final case class DenotUnion(denot1: PreDenotation, denot2: PreDenotation) extends MultiPreDenotation { + def exists: Boolean = true + def toDenot(pre: Type)(using Context): Denotation = + denot1.toDenot(pre).meet(denot2.toDenot(pre), pre) + def containsSym(sym: Symbol): Boolean = + (denot1 containsSym sym) || (denot2 containsSym sym) + type AsSeenFromResult = PreDenotation + def computeAsSeenFrom(pre: Type)(using Context): PreDenotation = + derivedUnion(denot1.asSeenFrom(pre), denot2.asSeenFrom(pre)) + } + + /** An overloaded denotation consisting of the alternatives of both given denotations. 
+ */ + case class MultiDenotation(denot1: Denotation, denot2: Denotation) extends Denotation(NoSymbol, NoType) with MultiPreDenotation { + final def infoOrCompleter: Type = multiHasNot("info") + final def validFor: Period = denot1.validFor & denot2.validFor + final def isType: Boolean = false + final def hasUniqueSym: Boolean = false + final def name(using Context): Name = denot1.name + final def signature(using Context): Signature = Signature.OverloadedSignature + def atSignature(sig: Signature, targetName: Name, site: Type, relaxed: Boolean)(using Context): Denotation = + if (sig eq Signature.OverloadedSignature) this + else derivedUnionDenotation( + denot1.atSignature(sig, targetName, site, relaxed), + denot2.atSignature(sig, targetName, site, relaxed)) + def current(using Context): Denotation = + derivedUnionDenotation(denot1.current, denot2.current) + def altsWith(p: Symbol => Boolean): List[SingleDenotation] = + denot1.altsWith(p) ++ denot2.altsWith(p) + def suchThat(p: Symbol => Boolean)(using Context): SingleDenotation = { + val sd1 = denot1.suchThat(p) + val sd2 = denot2.suchThat(p) + if sd1.exists then + if sd2.exists then + throw TypeError( + em"""Failure to disambiguate overloaded reference with + | ${denot1.symbol.showLocated}: ${denot1.info} and + | ${denot2.symbol.showLocated}: ${denot2.info}""") + else sd1 + else sd2 + } + override def filterWithPredicate(p: SingleDenotation => Boolean): Denotation = + derivedUnionDenotation(denot1.filterWithPredicate(p), denot2.filterWithPredicate(p)) + def hasAltWith(p: SingleDenotation => Boolean): Boolean = + denot1.hasAltWith(p) || denot2.hasAltWith(p) + def accessibleFrom(pre: Type, superAccess: Boolean)(using Context): Denotation = { + val d1 = denot1 accessibleFrom (pre, superAccess) + val d2 = denot2 accessibleFrom (pre, superAccess) + if (!d1.exists) d2 + else if (!d2.exists) d1 + else derivedUnionDenotation(d1, d2) + } + def mapInfo(f: Type => Type)(using Context): Denotation = + 
derivedUnionDenotation(denot1.mapInfo(f), denot2.mapInfo(f)) + def derivedUnionDenotation(d1: Denotation, d2: Denotation): Denotation = + if ((d1 eq denot1) && (d2 eq denot2)) this + else if (!d1.exists) d2 + else if (!d2.exists) d1 + else MultiDenotation(d1, d2) + type AsSeenFromResult = Denotation + def computeAsSeenFrom(pre: Type)(using Context): Denotation = + derivedUnionDenotation(denot1.asSeenFrom(pre), denot2.asSeenFrom(pre)) + override def toString: String = alternatives.mkString(" <and> ") + + private def multiHasNot(op: String): Nothing = + throw new UnsupportedOperationException( + s"multi-denotation with alternatives $alternatives does not implement operation $op") + } + + /** The current denotation of the static reference given by path, + * or a MissingRef or NoQualifyingRef instance, if it does not exist. + * if generateStubs is set, generates stubs for missing top-level symbols + */ + def staticRef(path: Name, generateStubs: Boolean = true, isPackage: Boolean = false)(using Context): Denotation = { + def select(prefix: Denotation, selector: Name): Denotation = { + val owner = prefix.disambiguate(_.info.isParameterless) + def isPackageFromCoreLibMissing: Boolean = + // if the scala package is missing, the stdlib must be missing + owner.symbol == defn.RootClass && selector == nme.scala + if (owner.exists) { + val result = if (isPackage) owner.info.decl(selector) else owner.info.member(selector) + if (result.exists) result + else if (isPackageFromCoreLibMissing) throw new MissingCoreLibraryException(selector.toString) + else { + val alt = + if (generateStubs) missingHook(owner.symbol.moduleClass, selector) + else NoSymbol + if (alt.exists) alt.denot + else MissingRef(owner, selector) + } + } + else owner + } + def recur( + path: Name, + wrap: TermName -> Name = identity[Name] // !cc!
default argument needs to be instantiated, error if [Name] is dropped + ): Denotation = path match { + case path: TypeName => + recur(path.toTermName, n => n.toTypeName) + case ModuleClassName(underlying) => + recur(underlying, n => wrap(ModuleClassName(n))) + case QualifiedName(prefix, selector) => + select(recur(prefix), wrap(selector)) + case qn @ AnyQualifiedName(prefix, _) => + recur(prefix, n => wrap(qn.info.mkString(n).toTermName)) + case path: SimpleName => + def recurSimple(len: Int, wrap: TermName -> Name): Denotation = { + val point = path.lastIndexOf('.', len - 1) + val selector = wrap(path.slice(point + 1, len).asTermName) + val prefix = + if (point > 0) recurSimple(point, identity) + else if (selector.isTermName) defn.RootClass.denot + else defn.EmptyPackageClass.denot + select(prefix, selector) + } + recurSimple(path.length, wrap) + } + + val run = ctx.run + if run == null then recur(path) + else run.staticRefs.getOrElseUpdate(path, recur(path)) + } + + /** If we are looking for a non-existing term name in a package, + * assume it is a package for which we do not have a directory and + * enter it. + */ + def missingHook(owner: Symbol, name: Name)(using Context): Symbol = + if (owner.is(Package) && name.isTermName) + newCompletePackageSymbol(owner, name.asTermName).entered + else + NoSymbol + + /** An exception for accessing symbols that are no longer valid in current run */ + class StaleSymbol(msg: -> String) extends Exception { + util.Stats.record("stale symbol") + override def getMessage(): String = msg + } +} diff --git a/tests/pos-with-compiler-cc/dotc/core/Flags.scala b/tests/pos-with-compiler-cc/dotc/core/Flags.scala new file mode 100644 index 000000000000..8bf65ed8288f --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Flags.scala @@ -0,0 +1,613 @@ +package dotty.tools.dotc +package core + +object Flags { + + object opaques { + + /** A FlagSet represents a set of flags. 
Flags are encoded as follows: + * The first two bits indicate whether a flag set applies to terms, + * to types, or to both. Bits 2..63 are available for properties + * and can be doubly used for terms and types. + */ + opaque type FlagSet = Long + def FlagSet(bits: Long): FlagSet = bits + def toBits(fs: FlagSet): Long = fs + + /** A flag set consisting of a single flag */ + opaque type Flag <: FlagSet = Long + private[Flags] def Flag(bits: Long): Flag = bits + } + export opaques.FlagSet + + type Flag = opaques.Flag + + extension (x: FlagSet) { + + inline def bits: Long = opaques.toBits(x) + + /** The union of the given flag sets. + * Combining two FlagSets with `|` will give a FlagSet + * that has the intersection of the applicability to terms/types + * of the two flag sets. It is checked that the intersection is not empty. + */ + def | (y: FlagSet): FlagSet = + if (x.bits == 0) y + else if (y.bits == 0) x + else { + val tbits = x.bits & y.bits & KINDFLAGS + if (tbits == 0) + assert(false, s"illegal flagset combination: ${x.flagsString} and ${y.flagsString}") + FlagSet(tbits | ((x.bits | y.bits) & ~KINDFLAGS)) + } + + /** The intersection of the given flag sets */ + def & (y: FlagSet): FlagSet = FlagSet(x.bits & y.bits) + + /** The intersection of a flag set with the complement of another flag set */ + def &~ (y: FlagSet): FlagSet = { + val tbits = x.bits & KINDFLAGS + if ((tbits & y.bits) == 0) x + else FlagSet(tbits | ((x.bits & ~y.bits) & ~KINDFLAGS)) + } + + def ^ (y: FlagSet) = + FlagSet((x.bits | y.bits) & KINDFLAGS | (x.bits ^ y.bits) & ~KINDFLAGS) + + /** Does the given flag set contain the given flag? + * This means that both the kind flags and the carrier bits have non-empty intersection. + */ + def is (flag: Flag): Boolean = { + val fs = x.bits & flag.bits + (fs & KINDFLAGS) != 0 && (fs & ~KINDFLAGS) != 0 + } + + /** Does the given flag set contain the given flag + * and at the same time contain none of the flags in the `butNot` set? 
+ */ + def is (flag: Flag, butNot: FlagSet): Boolean = x.is(flag) && !x.isOneOf(butNot) + + /** Does the given flag set have a non-empty intersection with another flag set? + * This means that both the kind flags and the carrier bits have non-empty intersection. + */ + def isOneOf (flags: FlagSet): Boolean = { + val fs = x.bits & flags.bits + (fs & KINDFLAGS) != 0 && (fs & ~KINDFLAGS) != 0 + } + + /** Does the given flag set have a non-empty intersection with another flag set, + * and at the same time contain none of the flags in the `butNot` set? + */ + def isOneOf (flags: FlagSet, butNot: FlagSet): Boolean = x.isOneOf(flags) && !x.isOneOf(butNot) + + /** Does a given flag set have all of the flags of another flag set? + * Pre: The intersection of the term/type flags of both sets must be non-empty. + */ + def isAllOf (flags: FlagSet): Boolean = { + val fs = x.bits & flags.bits + ((fs & KINDFLAGS) != 0 || flags.bits == 0) && + (fs >>> TYPESHIFT) == (flags.bits >>> TYPESHIFT) + } + + /** Does a given flag set have all of the flags in another flag set + * and at the same time contain none of the flags in the `butNot` set? + * Pre: The intersection of the term/type flags of both sets must be non-empty. + */ + def isAllOf (flags: FlagSet, butNot: FlagSet): Boolean = x.isAllOf(flags) && !x.isOneOf(butNot) + + def isEmpty: Boolean = (x.bits & ~KINDFLAGS) == 0 + + /** Is a given flag set a subset of another flag set? */ + def <= (y: FlagSet): Boolean = (x.bits & y.bits) == x.bits + + /** Does the given flag set apply to terms? */ + def isTermFlags: Boolean = (x.bits & TERMS) != 0 + + /** Does the given flag set apply to types?
*/ + def isTypeFlags: Boolean = (x.bits & TYPES) != 0 + + /** The given flag set with all flags transposed to be type flags */ + def toTypeFlags: FlagSet = if (x.bits == 0) x else FlagSet(x.bits & ~KINDFLAGS | TYPES) + + /** The given flag set with all flags transposed to be term flags */ + def toTermFlags: FlagSet = if (x.bits == 0) x else FlagSet(x.bits & ~KINDFLAGS | TERMS) + + /** The given flag set with all flags transposed to be common flags */ + def toCommonFlags: FlagSet = if (x.bits == 0) x else FlagSet(x.bits | KINDFLAGS) + + /** The number of non-kind flags in the given flag set */ + def numFlags: Int = java.lang.Long.bitCount(x.bits & ~KINDFLAGS) + + /** The lowest non-kind bit set in the given flag set */ + def firstBit: Int = java.lang.Long.numberOfTrailingZeros(x.bits & ~KINDFLAGS) + + /** The list of non-empty names of flags with given index idx that are set in the given flag set */ + private def flagString(idx: Int): List[String] = + if ((x.bits & (1L << idx)) == 0) Nil + else { + def halfString(kind: Int) = + if ((x.bits & (1L << kind)) != 0) flagName(idx)(kind) else "" + val termFS = halfString(TERMindex) + val typeFS = halfString(TYPEindex) + val strs = termFS :: (if (termFS == typeFS) Nil else typeFS :: Nil) + strs filter (_.nonEmpty) + } + + /** The list of non-empty names of flags that are set in the given flag set */ + def flagStrings(privateWithin: String = ""): Seq[String] = { + var rawStrings = (2 to MaxFlag).flatMap(x.flagString(_)) // DOTTY problem: cannot drop with (_) + if (!privateWithin.isEmpty && !x.is(Protected)) + rawStrings = rawStrings :+ "private" + val scopeStr = if (x.is(Local)) "this" else privateWithin + if (scopeStr != "") + rawStrings.filter(_ != "").map { + case "private" => s"private[$scopeStr]" + case "protected" => s"protected[$scopeStr]" + case str => str + } + else rawStrings + } + + /** The string representation of the given flag set */ + def flagsString: String = x.flagStrings("").mkString(" ") + } + + // 
Temporary while extension names are in flux + def or(x1: FlagSet, x2: FlagSet) = x1 | x2 + def and(x1: FlagSet, x2: FlagSet) = x1 & x2 + + def termFlagSet(x: Long) = FlagSet(TERMS | x) + + private inline val TYPESHIFT = 2 + private inline val TERMindex = 0 + private inline val TYPEindex = 1 + private inline val TERMS = 1 << TERMindex + private inline val TYPES = 1 << TYPEindex + private inline val KINDFLAGS = TERMS | TYPES + + private inline val FirstFlag = 2 + private inline val FirstNotPickledFlag = 48 + private inline val MaxFlag = 63 + + private val flagName = Array.fill(64, 2)("") + + private def isDefinedAsFlag(idx: Int) = flagName(idx).exists(_.nonEmpty) + + /** The flag set containing all defined flags of either kind whose bits + * lie in the given range + */ + private def flagRange(start: Int, end: Int) = + FlagSet((start until end).foldLeft(KINDFLAGS.toLong) ((bits, idx) => + if (isDefinedAsFlag(idx)) bits | (1L << idx) else bits)) + + /** The union of all flags in given flag set */ + def union(flagss: FlagSet*): FlagSet = { + var flag = EmptyFlags + for (f <- flagss) + flag |= f + flag + } + + def commonFlags(flagss: FlagSet*): FlagSet = union(flagss.map(_.toCommonFlags): _*) + + /** The empty flag set */ + val EmptyFlags: FlagSet = FlagSet(0) + + /** The undefined flag set */ + val UndefinedFlags: FlagSet = FlagSet(~KINDFLAGS) + + /** Three flags with given index between 2 and 63. + * The first applies to both terms and types. the second is a term flag, and + * the third is a type flag. Installs given name(s) as the name(s) of the flags. + * @param name The name to be used for the term flag + * @param typeName The name to be used for the type flag, if it is different from `name`. 
+ */ + private def newFlags(index: Int, name: String, typeName: String = ""): (Flag, Flag, Flag) = { + flagName(index)(TERMindex) = name + flagName(index)(TYPEindex) = if (typeName.isEmpty) name else typeName + val bits = 1L << index + (opaques.Flag(KINDFLAGS | bits), opaques.Flag(TERMS | bits), opaques.Flag(TYPES | bits)) + } + + // ----------------- Available flags ----------------------------------------------------- + + /** Labeled with `private` modifier */ + val (Private @ _, PrivateTerm @ _, PrivateType @ _) = newFlags(2, "private") + + /** Labeled with `protected` modifier */ + val (Protected @ _, _, _) = newFlags(3, "protected") + + /** Labeled with `override` modifier */ + val (Override @ _, _, _) = newFlags(4, "override") + + /** A declared, but not defined member */ + val (Deferred @ _, DeferredTerm @ _, DeferredType @ _) = newFlags(5, "<deferred>") + + /** Labeled with `final` modifier */ + val (Final @ _, _, _) = newFlags(6, "final") + + /** A method symbol / a super trait */ + val (_, Method @ _, _) = newFlags(7, "<method>") + + /** A (term or type) parameter to a class or method */ + val (Param @ _, TermParam @ _, TypeParam @ _) = newFlags(8, "<param>") + + /** Labeled with `implicit` modifier (implicit value) */ + val (Implicit @ _, ImplicitVal @ _, _) = newFlags(9, "implicit") + + /** Labeled with `lazy` (a lazy val) / a trait */ + val (LazyOrTrait @ _, Lazy @ _, Trait @ _) = newFlags(10, "lazy", "<trait>") + + /** A value or variable accessor (getter or setter) */ + val (AccessorOrSealed @ _, Accessor @ _, Sealed @ _) = newFlags(11, "<accessor>", "sealed") + + /** A mutable var, an open class */ + val (MutableOrOpen @ _, Mutable @ _, Open @ _) = newFlags(12, "mutable", "open") + + /** Symbol is local to current class (i.e. private[this] or protected[this] + * pre: Private or Protected are also set + */ + val (Local @ _, _, _) = newFlags(13, "<local>") + + /** A field generated for a primary constructor parameter (no matter if it's a 'val' or not), + * or an accessor of such a field.
+ */ + val (_, ParamAccessor @ _, _) = newFlags(14, "<paramaccessor>") + + /** A value or class implementing a module */ + val (Module @ _, ModuleVal @ _, ModuleClass @ _) = newFlags(15, "module") + + /** A value or class representing a package */ + val (Package @ _, PackageVal @ _, PackageClass @ _) = newFlags(16, "<package>") + + /** A case class or its companion object + * Note: Case is also used to indicate that a symbol is bound by a pattern. + */ + val (Case @ _, CaseVal @ _, CaseClass @ _) = newFlags(17, "case") + + /** A compiler-generated symbol, which is visible for type-checking + * (compare with artifact) + */ + val (Synthetic @ _, _, _) = newFlags(18, "<synthetic>") + + /** Labelled with `inline` modifier */ + val (Inline @ _, _, _) = newFlags(19, "inline") + + /** An outer accessor / a covariant type variable */ + val (OuterOrCovariant @ _, OuterAccessor @ _, Covariant @ _) = newFlags(20, "<outer accessor>", "<covariant>") + + /** The label of a labeled block / a contravariant type variable */ + val (LabelOrContravariant @ _, Label @ _, Contravariant @ _) = newFlags(21, "