diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 66d0e1c73bef..af17a866c5c0 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -2,7 +2,15 @@ name: Dotty CI on: push: + paths-ignore: + # Do not run everything on changes only in docs + - 'scala3doc/**' + - 'scala3doc-testcases/**' pull_request: + paths-ignore: + # Do not run everything on changes only in docs + - 'scala3doc/**' + - 'scala3doc-testcases/**' schedule: - cron: '0 3 * * *' # Every day at 3 AM diff --git a/.github/workflows/scala3doc.yaml b/.github/workflows/scala3doc.yaml new file mode 100644 index 000000000000..bf7ecc10882d --- /dev/null +++ b/.github/workflows/scala3doc.yaml @@ -0,0 +1,46 @@ +name: CI for Scala3doc + +on: + push: + branches: + - master + pull_request: +jobs: + build: + runs-on: ubuntu-latest + container: lampepfl/dotty:2020-04-24 + + steps: + - name: Git Checkout + uses: actions/checkout@v2 + + - name: Cache Coursier + uses: actions/cache@v1 + with: + path: ~/.cache/coursier + key: sbt-coursier-cache + - name: Cache SBT + uses: actions/cache@v1 + with: + path: ~/.sbt + key: sbt-${{ hashFiles('**/build.sbt') }} + + - name: Set up JDK 11 + uses: actions/setup-java@v1 + with: + java-version: 11 + + - name: Compile and test + run: ./project/scripts/sbt scala3doc/test + + - name: Locally publish self + run: ./project/scripts/sbt scala3doc/publishLocal + + - name: Generate test documentation + run: ./project/scripts/sbt scala3doc/generateSelfDocumentation + + - name: Generate Scala 3 documentation + run: ./project/scripts/sbt scala3doc/generateScala3Documentation + + - name: Generate documentation for example project using dotty-sbt + run: ./project/scripts/sbt "sbt-dotty/scripted sbt-dotty/scala3doc" diff --git a/build.sbt b/build.sbt index a8d447d7c001..6a350adb4936 100644 --- a/build.sbt +++ b/build.sbt @@ -20,6 +20,8 @@ val `stdlib-bootstrapped-tasty-tests` = Build.`stdlib-bootstrapped-tasty-tests` val `tasty-core` = Build.`tasty-core` val `tasty-core-bootstrapped` = Build.`tasty-core-bootstrapped` val `tasty-core-scala2` = Build.`tasty-core-scala2` +val scala3doc = Build.scala3doc +val `scala3doc-testcases` = Build.`scala3doc-testcases` val `scala3-bench-run` = Build.`scala3-bench-run` val dist = Build.dist val `community-build` = Build.`community-build` diff --git a/project/Build.scala b/project/Build.scala index c1737766720b..79bd0a5ff89f 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -213,6 +213,12 @@ object Build { } ) + lazy val disableDocSetting = + // Disable scaladoc generation, it's way too slow and we'll replace it + // by dottydoc anyway. We still publish an empty -javadoc.jar to make + // sonatype happy. + sources in (Compile, doc) := Seq() + lazy val commonSettings = publishSettings ++ Seq( scalaSource in Compile := baseDirectory.value / "src", scalaSource in Test := baseDirectory.value / "test", @@ -221,11 +227,6 @@ object Build { resourceDirectory in Compile := baseDirectory.value / "resources", resourceDirectory in Test := baseDirectory.value / "test-resources", - // Disable scaladoc generation, it's way too slow and we'll replace it - // by dottydoc anyway. We still publish an empty -javadoc.jar to make - // sonatype happy. 
- sources in (Compile, doc) := Seq(), - // Prevent sbt from rewriting our dependencies scalaModuleInfo ~= (_.map(_.withOverrideScalaVersion(false))), @@ -244,7 +245,8 @@ object Build { crossPaths := false, // Do not depend on the Scala library autoScalaLibrary := false, - excludeFromIDE := true + excludeFromIDE := true, + disableDocSetting ) // Settings used when compiling dotty (both non-bootstrapped and bootstrapped) @@ -258,6 +260,8 @@ object Build { moduleName ~= { _.stripSuffix("-scala2") }, version := dottyVersion, target := baseDirectory.value / ".." / "out" / "scala-2" / name.value, + + disableDocSetting ) // Settings used when compiling dotty with the reference compiler @@ -267,6 +271,8 @@ object Build { version := dottyNonBootstrappedVersion, scalaVersion := referenceVersion, excludeFromIDE := true, + + disableDocSetting ) // Settings used when compiling dotty with a non-bootstrapped dotty @@ -328,6 +334,8 @@ object Build { }, // sbt-dotty defines `scalaInstance in doc` so we need to override it manually scalaInstance in doc := scalaInstance.value, + + disableDocSetting, ) lazy val commonBenchmarkSettings = Seq( @@ -1153,6 +1161,14 @@ object Build { lazy val `scala3-bench-bootstrapped` = project.in(file("bench")).asDottyBench(Bootstrapped) lazy val `scala3-bench-run` = project.in(file("bench-run")).asDottyBench(Bootstrapped) + val testcasesOutputDir = taskKey[String]("Root directory where test classes are generated") + val testcasesSourceRoot = taskKey[String]("Root directory where test sources are located") + val generateSelfDocumentation = taskKey[Unit]("Generate documentation for scala3doc itself") + val generateScala3Documentation = taskKey[Unit]("Generate documentation for the Dotty libraries") + val generateTestcasesDocumentation = taskKey[Unit]("Generate documentation for the testcases, useful for debugging tests") + lazy val `scala3doc` = project.in(file("scala3doc")).asScala3doc + lazy val `scala3doc-testcases` = project.in(file("scala3doc-testcases")).asScala3docTestcases + // sbt plugin to use Dotty in your own build, see // https://github.com/lampepfl/scala3-example-project for usage. lazy val `sbt-dotty` = project.in(file("sbt-dotty")). @@ -1192,6 +1208,7 @@ object Build { publishLocal in `scala3-staging`, publishLocal in `scala3-tasty-inspector`, publishLocal in `scala3-doc-bootstrapped`, + publishLocal in `scala3doc`, publishLocal in `scala3-bootstrapped` // Needed because sbt currently hardcodes the dotty artifact ).evaluated ) @@ -1393,7 +1410,7 @@ object Build { def asDottyRoot(implicit mode: Mode): Project = project.withCommonSettings. aggregate(`scala3-interfaces`, dottyLibrary, dottyCompiler, tastyCore, dottyDoc, `scala3-sbt-bridge`). bootstrappedAggregate(`scala3-language-server`, `scala3-staging`, `scala3-tasty-inspector`, - `scala3-library-bootstrappedJS`). + `scala3-library-bootstrappedJS`, scala3doc). dependsOn(tastyCore). dependsOn(dottyCompiler). dependsOn(dottyLibrary). @@ -1440,6 +1457,70 @@ object Build { settings(commonBenchmarkSettings). enablePlugins(JmhPlugin) + def asScala3doc: Project = { + def generateDocumentation(targets: String, name: String, outDir: String, params: String = "") = Def.taskDyn { + val sourceMapping = "=https://github.com/lampepfl/dotty/tree/master#L" + run.in(Compile).toTask(s""" -d output/$outDir -t $targets -n "$name" -s $sourceMapping $params""") + } + + project.settings(commonBootstrappedSettings). + dependsOn(`scala3-compiler-bootstrapped`). + dependsOn(`scala3-tasty-inspector`).
+ settings( + // Needed to download dokka and its dependencies + resolvers += Resolver.jcenterRepo, + // Needed to download dokka-site + resolvers += Resolver.bintrayRepo("virtuslab", "dokka"), + libraryDependencies ++= Seq( + "com.virtuslab.dokka" % "dokka-site" % "0.1.9", + "com.vladsch.flexmark" % "flexmark-all" % "0.42.12", + "nl.big-o" % "liqp" % "0.6.7", + "args4j" % "args4j" % "2.33", + + "org.jetbrains.dokka" % "dokka-test-api" % "1.4.10.2" % "test", + "com.novocode" % "junit-interface" % "0.11" % "test", + ), + Test / test := (Test / test).dependsOn(compile.in(Compile).in(`scala3doc-testcases`)).value, + testcasesOutputDir.in(Test) := classDirectory.in(Compile).in(`scala3doc-testcases`).value.getAbsolutePath.toString, + testcasesSourceRoot.in(Test) := (baseDirectory.in(`scala3doc-testcases`).value / "src").getAbsolutePath.toString, + Compile / mainClass := Some("dotty.dokka.Main"), + // There is a bug in dokka that prevents running parallel tests within the same JVM + fork.in(test) := true, + generateSelfDocumentation := Def.taskDyn { + generateDocumentation(classDirectory.in(Compile).value.getAbsolutePath, "scala3doc", "self", "-p documentation") + }.value, + generateScala3Documentation := Def.taskDyn { + val dottyJars = Seq( + // All projects below will be used to generate documentation for Scala 3 + classDirectory.in(`scala3-interfaces`).in(Compile).value, + classDirectory.in(`tasty-core`).in(Compile).value, + classDirectory.in(`scala3-library`).in(Compile).value, + // TODO this one fails to load using TASTY + // classDirectory.in(`stdlib-bootstrapped`).in(Compile).value, + ) + val roots = dottyJars.map(_.toString).mkString(java.io.File.pathSeparator) + + if (dottyJars.isEmpty) Def.task { streams.value.log.error("Dotty lib wasn't found") } + else generateDocumentation(roots, "Scala 3", "stdLib", "-p dotty-docs/docs") + }.value, + generateTestcasesDocumentation := Def.taskDyn { + generateDocumentation(Build.testcasesOutputDir.in(Test).value, "Scala3doc testcases", "testcases") + }.value, + buildInfoKeys in Test := Seq[BuildInfoKey]( + Build.testcasesOutputDir.in(Test), + Build.testcasesSourceRoot.in(Test), + ), + buildInfoPackage in Test := "dotty.dokka", + BuildInfoPlugin.buildInfoScopedSettings(Test), + BuildInfoPlugin.buildInfoDefaultSettings, + // Uncomment to debug dokka processing (requires running a debugger in listen mode on port 5005) + // javaOptions.in(run) += "-agentlib:jdwp=transport=dt_socket,server=n,address=localhost:5005,suspend=y" + ) + } + + def asScala3docTestcases: Project = + project.dependsOn(`scala3-compiler-bootstrapped`).settings(commonBootstrappedSettings) + def asDist(implicit mode: Mode): Project = project. enablePlugins(PackPlugin). withCommonSettings.
diff --git a/sbt-dotty/sbt-test/sbt-dotty/scala3doc/build.sbt b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/build.sbt new file mode 100644 index 000000000000..e50cbeb663d8 --- /dev/null +++ b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/build.sbt @@ -0,0 +1,3 @@ +scalaVersion := sys.props("plugin.scalaVersion") + +useScala3doc := true diff --git a/sbt-dotty/sbt-test/sbt-dotty/scala3doc/project/plugins.sbt b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/project/plugins.sbt new file mode 100644 index 000000000000..c17caab2d98c --- /dev/null +++ b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/project/plugins.sbt @@ -0,0 +1 @@ +addSbtPlugin("ch.epfl.lamp" % "sbt-dotty" % sys.props("plugin.version")) diff --git a/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/AutoParamTupling.scala b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/AutoParamTupling.scala new file mode 100644 index 000000000000..3636fd218ca3 --- /dev/null +++ b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/AutoParamTupling.scala @@ -0,0 +1,25 @@ +package example + +/** + * Automatic Tupling of Function Params: https://dotty.epfl.ch/docs/reference/other-new-features/auto-parameter-tupling.html + */ +object AutoParamTupling { + + def test: Unit = { + + /** + * In order to get thread safety, you need to put @volatile before lazy vals. + * https://dotty.epfl.ch/docs/reference/changed-features/lazy-vals.html + */ + @volatile lazy val xs: List[String] = List("d", "o", "t", "t", "y") + + /** + * Current behaviour in Scala 2.12.2 : + * error: missing parameter type + * Note: The expected type requires a one-argument function accepting a 2-Tuple. + * Consider a pattern matching anonymous function, `{ case (s, i) => ... }` + */ + xs.zipWithIndex.map((s, i) => println(s"$i: $s")) + + } +} diff --git a/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/ContextQueries.scala b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/ContextQueries.scala new file mode 100644 index 000000000000..4eb63b58b66c --- /dev/null +++ b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/ContextQueries.scala @@ -0,0 +1,48 @@ +package example + +import scala.concurrent.{ExecutionContext, Future} +import scala.util.Try + +/** + * Context Queries: + * - http://dotty.epfl.ch/docs/reference/contextual/query-types.html, + * - https://www.scala-lang.org/blog/2016/12/07/implicit-function-types.html + */ +object ContextQueries /* Formerly known as Implicit Function Types */ { + + object context { + // type alias Contextual + type Contextual[T] = ExecutionContext ?=> T + + // sum is expanded to sum(x, y)(ctx) + def asyncSum(x: Int, y: Int): Contextual[Future[Int]] = Future(x + y) + + def asyncMult(x: Int, y: Int)(using ctx: ExecutionContext) = Future(x * y) + } + + object parse { + + type Parseable[T] = ImpliedInstances.StringParser[T] ?=> Try[T] + + def sumStrings(x: String, y: String): Parseable[Int] = { + val parser = implicitly[ImpliedInstances.StringParser[Int]] + val tryA = parser.parse(x) + val tryB = parser.parse(y) + + for { + a <- tryA + b <- tryB + } yield a + b + } + } + + def test: Unit = { + import ExecutionContext.Implicits.global + context.asyncSum(3, 4).foreach(println) + context.asyncMult(3, 4).foreach(println) + + println(parse.sumStrings("3", "4")) + println(parse.sumStrings("3", "a")) + } + +} diff --git a/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/Conversion.scala b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/Conversion.scala new file mode 100644 index 000000000000..abaf0943a51a --- /dev/null +++ 
b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/Conversion.scala @@ -0,0 +1,39 @@ +package example + +import scala.language.implicitConversions + +/** + * Conversions: http://dotty.epfl.ch/docs/reference/contextual/conversions.html + */ +object Conversion { + + case class IntWrapper(a: Int) extends AnyVal + case class DoubleWrapper(b: Double) extends AnyVal + + def convert[T, U](x: T)(using converter: Conversion[T, U]): U = converter(x) + + given IntWrapperToDoubleWrapper as Conversion[IntWrapper, DoubleWrapper] = new Conversion[IntWrapper, DoubleWrapper] { + override def apply(i: IntWrapper): DoubleWrapper = new DoubleWrapper(i.a.toDouble) + } + + def useConversion(using f: Conversion[IntWrapper, DoubleWrapper]) = { + val y: IntWrapper = new IntWrapper(4) + val x: DoubleWrapper = y + x + } + + /* Not working anymore. + def useConversion(implicit f: A => B) = { + val y: A = ... + val x: B = a // error under Dotty + } + */ + + def test: Unit = { + println(useConversion) + println(convert(new IntWrapper(42))) + } + + + +} diff --git a/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/EnumTypes.scala b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/EnumTypes.scala new file mode 100644 index 000000000000..1432943b66d2 --- /dev/null +++ b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/EnumTypes.scala @@ -0,0 +1,44 @@ +package example + +/** + * Enum Types: http://dotty.epfl.ch/docs/reference/enums/adts.html + */ +object EnumTypes { + + enum ListEnum[+A] { + case Cons(h: A, t: ListEnum[A]) + case Empty + } + + enum Planet(mass: Double, radius: Double) { + private final val G = 6.67300E-11 + def surfaceGravity = G * mass / (radius * radius) + def surfaceWeight(otherMass: Double) = otherMass * surfaceGravity + + case Mercury extends Planet(3.303e+23, 2.4397e6) + case Venus extends Planet(4.869e+24, 6.0518e6) + case Earth extends Planet(5.976e+24, 6.37814e6) + case Mars extends Planet(6.421e+23, 3.3972e6) + case Jupiter extends Planet(1.9e+27, 7.1492e7) + case Saturn extends Planet(5.688e+26, 6.0268e7) + case Uranus extends Planet(8.686e+25, 2.5559e7) + case Neptune extends Planet(1.024e+26, 2.4746e7) + } + + def test: Unit = { + + val emptyList = ListEnum.Empty + val list = ListEnum.Cons(1, ListEnum.Cons(2, ListEnum.Cons(3, ListEnum.Empty))) + println(emptyList) + println(s"${list}\n") + + def calculateEarthWeightOnPlanets(earthWeight: Double) = { + val mass = earthWeight/Planet.Earth.surfaceGravity + for (p <- Planet.values) + println(s"Your weight on $p is ${p.surfaceWeight(mass)}") + } + + calculateEarthWeightOnPlanets(80) + } + +} diff --git a/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/ImpliedInstances.scala b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/ImpliedInstances.scala new file mode 100644 index 000000000000..8db09da27477 --- /dev/null +++ b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/ImpliedInstances.scala @@ -0,0 +1,41 @@ +package example + +import scala.util.{Success, Try} + +/** + * Implied Instances: + * - https://dotty.epfl.ch/docs/reference/contextual/instance-defs.html + */ +object ImpliedInstances { + + sealed trait StringParser[A] { + def parse(s: String): Try[A] + } + + object StringParser { + + def apply[A](using parser: StringParser[A]): StringParser[A] = parser + + private def baseParser[A](f: String ⇒ Try[A]): StringParser[A] = new StringParser[A] { + override def parse(s: String): Try[A] = f(s) + } + + given stringParser as StringParser[String] = baseParser(Success(_)) + given intParser as StringParser[Int] = 
baseParser(s ⇒ Try(s.toInt)) + + given optionParser[A](using parser: => StringParser[A]) as StringParser[Option[A]] = new StringParser[Option[A]] { + override def parse(s: String): Try[Option[A]] = s match { + case "" ⇒ Success(None) // implicit parser not used. + case str ⇒ parser.parse(str).map(x ⇒ Some(x)) // implicit parser is evaluated at here + } + } + } + + def test: Unit = { + println(implicitly[StringParser[Option[Int]]].parse("21")) + println(implicitly[StringParser[Option[Int]]].parse("")) + println(implicitly[StringParser[Option[Int]]].parse("21a")) + + println(implicitly[StringParser[Option[Int]]](StringParser.optionParser[Int]).parse("42")) + } +} diff --git a/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/IntersectionTypes.scala b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/IntersectionTypes.scala new file mode 100644 index 000000000000..dcec7e76242d --- /dev/null +++ b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/IntersectionTypes.scala @@ -0,0 +1,36 @@ +package example + +/** + * Intersection Types: https://dotty.epfl.ch/docs/reference/new-types/intersection-types.html + */ +object IntersectionTypes { + + sealed trait X { + def x: Double + def tpe: X + } + + sealed trait Y { + def y: Double + def tpe: Y + } + + type P = Y & X + type PP = X & Y + + final case class Point(x: Double, y: Double) extends X with Y { + override def tpe: X & Y = ??? + } + + def test: Unit = { + + def euclideanDistance(p1: X & Y, p2: X & Y) = { + Math.sqrt(Math.pow(p2.y - p1.y, 2) + Math.pow(p2.x - p1.x, 2)) + } + + val p1: P = Point(3, 4) + val p2: PP = Point(6, 8) + println(euclideanDistance(p1, p2)) + + } +} diff --git a/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/Main.scala b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/Main.scala new file mode 100644 index 000000000000..5accf1dfd2ab --- /dev/null +++ b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/Main.scala @@ -0,0 +1,39 @@ +package example + +object Main { + + def main(args: Array[String]): Unit = { + + runExample("Trait Params")(TraitParams.test) + + runExample("Enum Types")(EnumTypes.test) + + runExample("Context Queries")(ContextQueries.test) + + runExample("Implied Instances")(ImpliedInstances.test) + + runExample("Conversion")(Conversion.test) + + runExample("Union Types")(UnionTypes.test) + + runExample("Intersection Types")(IntersectionTypes.test) + + runExample("Type Lambda")(TypeLambdas.test) + + runExample("Multiversal Equality")(MultiversalEquality.test) + + runExample("Auto Param Tupling")(AutoParamTupling.test) + + runExample("Structural Types")(StructuralTypes.test) + + runExample("Pattern Matching")(PatternMatching.test) + + } + + private def runExample(name: String)(f: => Unit) = { + println(Console.MAGENTA + s"$name example:" + Console.RESET) + f + println() + } + +} diff --git a/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/MultiversalEquality.scala b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/MultiversalEquality.scala new file mode 100644 index 000000000000..5619a09cbe3b --- /dev/null +++ b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/MultiversalEquality.scala @@ -0,0 +1,40 @@ +package example + +import scala.language.strictEquality + +/** + * Multiversal Equality: https://dotty.epfl.ch/docs/reference/contextual/multiversal-equality.html + * scala.Eq definition: https://github.com/lampepfl/dotty/blob/master/library/src/scala/Eql.scala + */ +object MultiversalEquality { + + def test: Unit = { + + // Values of types Int and String cannot be 
compared with == or !=, + // unless we add the derived delegate instance like: + given Eql[Int, String] = Eql.derived + println(3 == "3") + + // By default, all numbers are comparable, because of: + // implicit def eqlNumber: Eql[Number, Number] = derived + println(3 == 5.1) + + // By default, all Sequences are comparable, because of: + // implicit def eqlSeq[T, U](implicit eq: Eql[T, U]): Eql[GenSeq[T], GenSeq[U]] = derived + println(List(1, 2) == Vector(1, 2)) + + class A(a: Int) + class B(b: Int) + + val a = new A(4) + val b = new B(4) + + // scala.language.strictEquality is enabled, therefore we need some extra delegate instances + // to compare instances of A and B. + given Eql[A, B] = Eql.derived + given Eql[B, A] = Eql.derived + + println(a != b) + println(b == a) + } +} diff --git a/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/PatternMatching.scala b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/PatternMatching.scala new file mode 100644 index 000000000000..7e310cf3ea5e --- /dev/null +++ b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/PatternMatching.scala @@ -0,0 +1,104 @@ +package example + +/** + * Pattern Matching: https://dotty.epfl.ch/docs/reference/changed-features/pattern-matching.html + */ +object PatternMatching { + + object booleanPattern { + + object Even { + def unapply(s: String): Boolean = s.length % 2 == 0 + } + + } + + object productPattern { + + class Person(name: String, age: Int) extends Product { + // If we do not define these, we get a compile error. + // Note that the order is swapped here. + def _1 = age + def _2 = name + + // Not used by pattern matching: Product is only used as a marker trait. + def canEqual(that: Any): Boolean = ??? + def productArity: Int = ??? + def productElement(n: Int): Any = ??? + } + + object Person { + def unapply(a: (String, Int)): Person = new Person(a._1, a._2) + } + + } + + object seqPattern { + + // adapted from http://danielwestheide.com/blog/2012/11/28/the-neophytes-guide-to-scala-part-2-extracting-sequences.html + object Names { + def unapplySeq(name: String): Option[Seq[String]] = { + val names = name.trim.split(" ") + if (names.size < 2) None + else Some(names.last :: names.head :: names.drop(1).dropRight(1).toList) + } + } + + } + + object namePattern { + + class Name(val name: String) { + def get: String = name + def isEmpty = name.isEmpty + } + + object Name { + def unapply(s: String): Name = new Name(s) + } + + } + + def test: Unit = { + + import booleanPattern._ + + "even" match { + case s @ Even() => println(s"$s has an even number of characters") + case s => println(s"$s has an odd number of characters") + } + + // http://dotty.epfl.ch/docs/reference/changed/vararg-patterns.html + def containsConsecutive(list: List[Int]): Boolean = list match { + case List(a, b, xs: _ *) => if (a == b) true else containsConsecutive(b :: xs.toList) + case List(a, _ : _*) => false + case Nil => false + } + + println(containsConsecutive(List(1, 2, 3, 4, 5))) + println(containsConsecutive(List(1, 2, 3, 3, 5))) + + import productPattern._ + ("john", 42) match { + case Person(n, a) => println(s"name: $n, age: $a") + } + + import seqPattern._ + + def greet(fullName: String) = fullName match { + case Names(lastName, firstName, _: _*) => "Good morning, " + firstName + " " + lastName + "!" + case _ => "Welcome! Please make sure to fill in your name!"
+ } + + println(greet("Alan Turing")) + println(greet("john")) + println(greet("Wolfgang Amadeus Mozart")) + + import namePattern._ + "alice" match { + case Name(n) => println(s"name is $n") + case _ => println("empty name") + } + + } +} diff --git a/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/StructuralTypes.scala b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/StructuralTypes.scala new file mode 100644 index 000000000000..83c94cf84992 --- /dev/null +++ b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/StructuralTypes.scala @@ -0,0 +1,29 @@ +package example + +/** + * Structural Types: https://dotty.epfl.ch/docs/reference/changed-features/structural-types.html + */ +object StructuralTypes { + + case class Record(elems: (String, Any)*) extends Selectable { + def selectDynamic(name: String): Any = elems.find(_._1 == name).get._2 + } + + type Person = Record { + val name: String + val age: Int + } + + val person = Record("name" -> "Emma", "age" -> 42, "salary" -> 320L).asInstanceOf[Person] + + val invalidPerson = Record("name" -> "John", "salary" -> 42).asInstanceOf[Person] + + def test: Unit = { + println(person.name) + println(person.age) + + println(invalidPerson.name) + // age field is java.util.NoSuchElementException: None.get + //println(invalidPerson.age) + } +} diff --git a/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/TraitParams.scala b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/TraitParams.scala new file mode 100644 index 000000000000..453fcaa6e2a9 --- /dev/null +++ b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/TraitParams.scala @@ -0,0 +1,23 @@ +package example + +/** + * Trait Parameters: https://dotty.epfl.ch/docs/reference/other-new-features/trait-parameters.html + */ +object TraitParams { + + trait Base(val msg: String) + class A extends Base("Hello") + class B extends Base("Dotty!") + + // Union types only exist in Dotty, so there's no chance that this will accidentally be compiled with Scala 2 + private def printMessages(msgs: (A | B)*) = println(msgs.map(_.msg).mkString(" ")) + + def test: Unit = { + + printMessages(new A, new B) + + // Sanity check the classpath: this won't run if the dotty jar is not present. 
+ val x: Int => Int = z => z + x(1) + } +} diff --git a/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/TypeLambdas.scala b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/TypeLambdas.scala new file mode 100644 index 000000000000..87a5e60625b9 --- /dev/null +++ b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/TypeLambdas.scala @@ -0,0 +1,21 @@ +package example + +/** + * Type Lambdas: https://dotty.epfl.ch/docs/reference/new-types/type-lambdas.html + */ +object TypeLambdas { + + type T[+X, Y] = Map[Y, X] + + type Tuple = [X] =>> (X, X) + + def test: Unit = { + + val m: T[String, Int] = Map(1 -> "1") + println(m) + + val tuple: Tuple[String] = ("a", "b") + println(tuple) + } + +} diff --git a/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/UnionTypes.scala b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/UnionTypes.scala new file mode 100644 index 000000000000..b773c22f70fa --- /dev/null +++ b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/UnionTypes.scala @@ -0,0 +1,47 @@ +package example + +/** + * Union Types: https://dotty.epfl.ch/docs/reference/new-types/union-types.html + */ +object UnionTypes { + + sealed trait Division + final case class DivisionByZero(msg: String) extends Division + final case class Success(double: Double) extends Division + + // You can create type aliases for your union types (sum types). + type DivisionResult = DivisionByZero | Success + + sealed trait List[+A] + final case class Empty() extends List[Nothing] + final case class Cons[+A](h: A, t: List[A]) extends List[A] + + private def safeDivide(a: Double, b: Double): DivisionResult = { + if (b == 0) DivisionByZero("DivisionByZeroException") else Success(a / b) + } + + private def either(division: Division) = division match { + case DivisionByZero(m) => Left(m) + case Success(d) => Right(d) + } + + def test: Unit = { + + val divisionResultSuccess: DivisionResult = safeDivide(4, 2) + + // commutative + val divisionResultFailure: Success | DivisionByZero = safeDivide(4, 0) + + // calling `either` function with union typed value. + println(either(divisionResultSuccess)) + + // calling `either` function with union typed value. + println(either(divisionResultFailure)) + + val list: Cons[Int] | Empty = Cons(1, Cons(2, Cons(3, Empty()))) + val emptyList: Empty | Cons[Any] = Empty() + println(list) + println(emptyList) + + } +} diff --git a/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/hello/Hello.scala b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/hello/Hello.scala new file mode 100644 index 000000000000..0b5f90467eff --- /dev/null +++ b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/src/main/scala/hello/Hello.scala @@ -0,0 +1,8 @@ +package hello +/** Hello, world! 
*/ +object Hello { + def main(args: Array[String]): Unit = { + val dotty: Int | String = "dotty" + println(s"Hello $dotty!") + } +} diff --git a/sbt-dotty/sbt-test/sbt-dotty/scala3doc/test b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/test new file mode 100644 index 000000000000..96f0be01f7c3 --- /dev/null +++ b/sbt-dotty/sbt-test/sbt-dotty/scala3doc/test @@ -0,0 +1,2 @@ +> doc + diff --git a/sbt-dotty/src/dotty/tools/sbtplugin/DottyPlugin.scala b/sbt-dotty/src/dotty/tools/sbtplugin/DottyPlugin.scala index 125dbebab2e3..b0a0d91e5986 100644 --- a/sbt-dotty/src/dotty/tools/sbtplugin/DottyPlugin.scala +++ b/sbt-dotty/src/dotty/tools/sbtplugin/DottyPlugin.scala @@ -18,6 +18,9 @@ object DottyPlugin extends AutoPlugin { val isDotty = settingKey[Boolean]("Is this project compiled with Dotty?") val isDottyJS = settingKey[Boolean]("Is this project compiled with Dotty and Scala.js?") + val useScala3doc = settingKey[Boolean]("Use Scala3doc as the documentation tool") + val scala3docOptions = settingKey[Seq[String]]("Options for Scala3doc") + // NOTE: // - this is a def to support `scalaVersion := dottyLatestNightlyBuild` // - if this was a taskKey, then you couldn't do `scalaVersion := dottyLatestNightlyBuild` @@ -353,10 +356,30 @@ object DottyPlugin extends AutoPlugin { Def.valueStrict { scalaInstance.taskValue } }.value, - // We need more stuff on the classpath to run the `doc` task. + // Configuration for the doctool + resolvers ++= (if(!useScala3doc.value) Nil else Seq( + Resolver.jcenterRepo, + Resolver.bintrayRepo("virtuslab", "dokka"), + )), + useScala3doc := false, + scala3docOptions := Nil, + Compile / doc / scalacOptions := { + // We are passing scala3doc argument list as single argument to scala instance starting with magic prefix "--+DOC+" + val s3dOpts = scala3docOptions.value.map("--+DOC+" + _) + val s3cOpts = (Compile / doc / scalacOptions).value + if (isDotty.value && useScala3doc.value) { + s3dOpts ++ s3cOpts + } else { + s3cOpts + } + }, + // We need to add doctool classes to the classpath so they can be called scalaInstance in doc := Def.taskDyn { if (isDotty.value) - dottyScalaInstanceTask(scala3Artefact(scalaVersion.value, "doc")) + if (useScala3doc.value) + dottyScalaInstanceTask("scala3doc") + else + dottyScalaInstanceTask(scala3Artefact(scalaVersion.value, "doc")) else Def.valueStrict { (scalaInstance in doc).taskValue } }.value, diff --git a/scala3doc-testcases/src/example/Documentation2.scala b/scala3doc-testcases/src/example/Documentation2.scala new file mode 100644 index 000000000000..52eb51f348cc --- /dev/null +++ b/scala3doc-testcases/src/example/Documentation2.scala @@ -0,0 +1,12 @@ +package example + +class ReturnTypeClass[T] { +} + +class UserDocLinkingClass { + def linkMeFromUserDoc() = ??? +} + +object ReturnObjectWithType { + type returnType = Int +} \ No newline at end of file diff --git a/scala3doc-testcases/src/example/Inheritance.scala b/scala3doc-testcases/src/example/Inheritance.scala new file mode 100644 index 000000000000..dad9a2d5c630 --- /dev/null +++ b/scala3doc-testcases/src/example/Inheritance.scala @@ -0,0 +1,21 @@ +package example + +import example.level2.Documentation + +abstract class DocumentationInheritance[T, A <: Int, B >: String, -X, +Y] extends Documentation[T, A, B, X, Y] {} + +class DocumentationInheritanceMethod: + def wierdMethod[T, A <: Int, B >: String](t: T, a: A): B = ??? + def threOtherWay[A <: Nothing, B >: Any](a: A, c: B): Unit = ??? 
+ +class A: + def this(s: String) = this() + def this(i: Int) = this() + type I = Int + given Unit = () + extension (u: Unit) def foo = "foo" + object X + class B extends C: + class D extends C + +class C extends A diff --git a/scala3doc-testcases/src/example/level2/Documentation.scala b/scala3doc-testcases/src/example/level2/Documentation.scala new file mode 100644 index 000000000000..e6f8a3f73f98 --- /dev/null +++ b/scala3doc-testcases/src/example/level2/Documentation.scala @@ -0,0 +1,161 @@ +package example +/** Test +*/ +package level2 + +import scala.collection._ +import scala.deprecated +import scala.annotation._ +import scala.math.{Pi, max} + +/** This class is used for testing tasty doc generation + * @constructor create new object + * @author Bryan Abate + * @param c1 class parameter 1 + * @param c2 class parameter 2 + * @tparam T class type parameter + */ +@strictfp +abstract class Documentation[T, A <: Int, B >: String, -X, +Y](c1: String, val c2: List[T]) extends Seq[T] with Product with Serializable{ + + /** Auxiliary constructor + * @param ac auxiliary parameter + */ + def this(ac: String) = this(ac, Nil) + + def this() = this("", Nil) + + def this(x: T) = this() + + class innerDocumentationClass { + + } + + sealed trait CaseImplementThis(id: Int) + case class IAmACaseClass(x: T, id: Int) extends CaseImplementThis(id) + case object IAmACaseObject extends CaseImplementThis(0) + + object testObject { + + } + + def defReturningInnerClass(): innerDocumentationClass = ??? + + /** Test methods with params + * + * @param x parameter 1 + * @param y parameter 2 + * + * @return something is returned + */ + def methodsWithParams(x : T, y: Int) : List[Map[Int, T]] = ??? + + def methodsWithImplicit(x: Int)(implicit imp: Int, notImp: String) = ??? + + def methodsWithCallByName(x: => Int) = ??? + + def methodsWithDefault(x: Int = 42) = ??? + + class Graph { + type Node = Int + } + def linkingGraph(g: Graph): g.Node = ??? + + val refinementTest: + Graph { + //def x(a: String, b: Double)(c: Float): Int + def x: Int + def x2: innerDocumentationClass + type Y = String + val z: Boolean + } + + /** Test value + */ + @showAsInfix + val v : Int = ??? + + protected def protectedMethod = ??? + private def privateMethod = ??? + + protected val protectedVal = ??? + private val privateVal = ??? + + def abstractDefinition : Int + + def apply(idx: Int) = ??? + def iterator = ??? + override def length = ??? + + /** method: [[example.UserDocLinkingClass.linkMeFromUserDoc]] + * + * method:[[example.level2.Documentation.apply]] + * + * class: [[example.UserDocLinkingClass]] + */ + def linkMethodInDoc() = ??? + + /** An example documention with markdown formatting + * + * **I'm bold** + * + * *I'm italic* + * + * `some code` + * ```scala + * def someScalaCode(x: String) = println("Hello " + x) + * ``` + * + *# Title of level 1 + *# Title of level 1 + * + * 1. I'm a list + * + * + * * Multilevel List + * 1. level 2 + * 1. level 2 2 + * * level 1 again + * + * * multilevel try2 + * * try2 level2 + */ + def docWithMd = ??? + + def functionWithType[U >: String]() : U + + val complexTypeVal : Int | List[List[T]] & String | (Double | Int, Double) | ((Int) => (String)) + + type typeExample[X] >: X <: String //TypeBound + + type abstractType + + def useOfOutsideType(): ReturnTypeClass[T] = ??? + def useOfOutsideTypeInsideObject(): ReturnObjectWithType.returnType = ??? + def useOfSameLevelOutsideType(): SameLevelTypeLinking = ??? + + protected[example] val valWithScopeModifier = ??? 
+ protected[this] val valWithScopeModifierThis = ??? + + var iAmAVar = ??? +} + +/** Companion object + */ +object Documentation { + val valInsideDocObject = ??? +} + +sealed abstract class ClassExtendingDocumentation[T, A <: Int, B >: String, -X, +Y] extends Documentation[T, A, B, X, Y] {} + +trait TraitTest { + +} + +val valueInAPackage = 0 + +def defInAPackage(abc: String): List[Int] = ??? + +trait TraitWithCompanion{} + +object TraitWithCompanion{} \ No newline at end of file diff --git a/scala3doc-testcases/src/example/level2/SameLevelTypeLinking.scala b/scala3doc-testcases/src/example/level2/SameLevelTypeLinking.scala new file mode 100644 index 000000000000..5294185e487a --- /dev/null +++ b/scala3doc-testcases/src/example/level2/SameLevelTypeLinking.scala @@ -0,0 +1,5 @@ +package example.level2 + +class SameLevelTypeLinking { + +} \ No newline at end of file diff --git a/scala3doc-testcases/src/example/level2/level3/level4/ClassLevel4.scala b/scala3doc-testcases/src/example/level2/level3/level4/ClassLevel4.scala new file mode 100644 index 000000000000..40d962fc43e1 --- /dev/null +++ b/scala3doc-testcases/src/example/level2/level3/level4/ClassLevel4.scala @@ -0,0 +1,13 @@ +package example.level2.level3.level4 + +import example.level2.Documentation + +sealed abstract class ClassLevel4[T, A <: Int, B >: String, -X, +Y]() extends Documentation[T, A, B, X, Y] { + + /** + * [[example.level2.Documentation]] + * [[example.level2.Documentation$.valInsideDocObject]] + * [[example.level2.Documentation.abstractType]] + */ + def linkingToDocMethodInUserDoc = ??? +} \ No newline at end of file diff --git a/scala3doc-testcases/src/tests/FilterTest.scala b/scala3doc-testcases/src/tests/FilterTest.scala new file mode 100644 index 000000000000..f3eba9e99539 --- /dev/null +++ b/scala3doc-testcases/src/tests/FilterTest.scala @@ -0,0 +1,127 @@ +package tests + +trait FilterTestBaseTrait: + /** doc */ + protected def protectetDefInheriteTrait(a: Int): String = ??? + /** doc */ + private def privateDefInheritedTrait(a: Int): String = ??? + /** doc */ + def publicDefInheritedTrait(a: Int): String = ??? + + /** doc */ + object PublicObjectInheritedTrait + /** doc */ + protected object ProtectedObjectInheritedTrait + + /** doc */ + protected val protectetValInheritedTrait = 123 + /** doc */ + private val privateValInheritedTrait = 344 + /** doc */ + val publicValInheritedTrait = 567 + +class FilterTestBase: + /** doc */ + sealed abstract class BInherited + /** doc */ + abstract case class CInherited(s: String) + /** doc */ + sealed case class DInherited(c: String) + /** doc */ + final case class EInherited(c: String) + /** doc */ + private class PrivateInherited + /** doc */ + protected class ProtectedInherited + /** doc */ + protected def protectetDefInherited(a: Int): String = ??? + /** doc */ + private def privateDefInherited(a: Int): String = ??? + /** doc */ + def publicDefInherited(a: Int): String = ??? 
+ + /** doc */ + object PublicObjectInherited + /** doc */ + protected object ProtectedObjectInherited + + /** doc */ + protected val protectetValInherited = 123 + /** doc */ + private val privateValInherited = 344 + /** doc */ + val publicValInherited = 567 + + /** doc */ + protected type protectedTypeInherited = 123 + /** doc */ + private type privateTypeInherited = 344 + /** doc */ + type publicTypeInherited = 567 + + /** doc */ + protected given Set[String | Int] = Set(1, "ala") + /** doc */ + given Map[String, Double] = Map.empty + + /** doc */ + protected given namedSet as Set[String | Int] = Set(1, "ala") + /** doc */ + given namedMap as Map[String, Double] = Map.empty + +class FilterTest extends FilterTestBase with FilterTestBaseTrait: + /** doc */ + sealed abstract class B + /** doc */ + abstract case class C(s: String) + /** doc */ + sealed case class D(c: String) + /** doc */ + final case class E(c: String) + /** doc */ + private class Private + /** doc */ + protected class Protected + + /** doc */ + object PublicObject + /** doc */ + protected object ProtectedObject + + /** doc */ + protected def protectetDef(a: B): String = ??? + /** doc */ + private def privateDef(a: C): String = ??? + /** doc */ + def publicDef(a: D): FilterTest = ??? + + + /** doc */ + protected val protectetVal = 123 + /** doc */ + private val privateVal= 344 + /** doc */ + val publicVal = 567 + + /** doc */ + protected type protectedType = 123 + /** doc */ + private type privateType= 344 + /** doc */ + type publicType = 567 + + /** doc */ + protected given Seq[String | Int | Double] = List(1) + /** doc */ + given List[String] = "ula" :: Nil + + /** doc */ + given namedList as List[String] = "ula" :: Nil + /** doc */ + protected given namedSeq as Seq[String | Int | Double] = List(1) + +extension (e: FilterTest): + def extensionMethod(name: FilterTest): FilterTest = ??? + +extension (e: FilterTestBase): + def extensionMethodBase(name: FilterTest): FilterTest = ??? \ No newline at end of file diff --git a/scala3doc-testcases/src/tests/annotations.scala b/scala3doc-testcases/src/tests/annotations.scala new file mode 100644 index 000000000000..6cbc58da107e --- /dev/null +++ b/scala3doc-testcases/src/tests/annotations.scala @@ -0,0 +1,28 @@ +package tests +package annotations + +import scala.annotation.StaticAnnotation + + +class SomeObject(val s: String) + +class MyAnnotation extends StaticAnnotation + +class AnnotationWithArg(val s: String, val o: SomeObject) extends StaticAnnotation + +class AnnotationWithMultiArg(val i: Int, val s: String, val c: Char*) extends StaticAnnotation + +class EnumAnnotation(val e: Enum) extends StaticAnnotation + +class ClassAnnotation[T](val c: Class[T]) extends StaticAnnotation + +@AnnotationWithMultiArg(2, "cda", 'a', 'b', 'c') @MyAnnotation class AnnotatedClass + + +class AnnotatedParams(@MyAnnotation val a: String, @AnnotationWithMultiArg(2, "cda", 'a', 'b', 'c') val b: Int) + +class AnnotatedMethods +{ + @MyAnnotation @AnnotationWithMultiArg(2, "cda", 'a', 'b', 'c') def a: String + = ??? 
+} \ No newline at end of file diff --git a/scala3doc-testcases/src/tests/annotationsExample.scala b/scala3doc-testcases/src/tests/annotationsExample.scala new file mode 100644 index 000000000000..04055c5c840d --- /dev/null +++ b/scala3doc-testcases/src/tests/annotationsExample.scala @@ -0,0 +1,35 @@ +package tests +package annotationsExample + +import scala.annotation.StaticAnnotation + +enum Enum { + case A extends Enum + case B extends Enum + case C extends Enum +} + +class SomeObject(val s: String) + +class MyAnnotation extends StaticAnnotation + +class AnnotationWithArg(val s: String, val o: SomeObject) extends StaticAnnotation + +class AnnotationWithMultiArg(val i: Int, val s: String, val c: Char*) extends StaticAnnotation + +class EnumAnnotation(val e: Enum) extends StaticAnnotation + +class ClassAnnotation[T](val c: Class[T]) extends StaticAnnotation + +@MyAnnotation@AnnotationWithMultiArg(2, "cda", 'a', 'b', 'c')@EnumAnnotation(Enum.A)class AnnotatedClass + + +class AnnotatedParams(@MyAnnotation val a: String, @AnnotationWithMultiArg(2, "cda", 'a', 'b', 'c') val b: Int) + +class AnnotatedMethods +{ + @MyAnnotation + @AnnotationWithMultiArg(2, "cda", 'a', 'b', 'c') + def a: String + = ??? +} \ No newline at end of file diff --git a/scala3doc-testcases/src/tests/classModifiers.scala b/scala3doc-testcases/src/tests/classModifiers.scala new file mode 100644 index 000000000000..a189c0e0c53b --- /dev/null +++ b/scala3doc-testcases/src/tests/classModifiers.scala @@ -0,0 +1,13 @@ +package tests + +package classModifiers + +sealed abstract class B + +abstract case class C(s: String) + +sealed case class D(c: String) + +final case class E(c: String) + +open class F diff --git a/scala3doc-testcases/src/tests/classSignatureTestSource.scala b/scala3doc-testcases/src/tests/classSignatureTestSource.scala new file mode 100644 index 000000000000..b14083c70a7c --- /dev/null +++ b/scala3doc-testcases/src/tests/classSignatureTestSource.scala @@ -0,0 +1,63 @@ +package tests.classSignatureTestSource + +import scala.collection._ +import scala.deprecated +import scala.annotation._ +import scala.math.{Pi, max} +import example.level2.Documentation + +abstract class Documentation[T, A <: Int, B >: String, -X, +Y](c1: String, val c2: List[T]) extends Seq[T] with Product with Serializable +{ + def this(ac: String) + = this(ac, Nil) + + def this() + = this("", Nil) + + def this(x: T) + = this() + + class innerDocumentationClass + { + + } + + sealed trait CaseImplementThis(id: Int) + + case class IAmACaseClass(x: T, id: Int) extends CaseImplementThis/*<-*/(id)/*->*/ + + case object IAmACaseObject extends CaseImplementThis/*<-*/(0)/*->*/ + + object testObject + + class Graph + { + type Node = Int + } + + type typeExample[X] >: X <: String + + type abstractType +} + +object Documentation +{ + // TODO We do not see members from companions val valInsideDocObject = ??? +} + +sealed abstract class ClassExtendingDocumentation[T, A <: Int, B >: String, -X, +Y] extends Documentation[T, A, B, X, Y] +{} + +trait TraitTest +{ + +} + +trait TraitWithCompanion{} //expected: trait TraitWithCompanion + +object TraitWithCompanion +{} + +// TODO #25 do we need to add 'val' in case class signatures? 
+case class ManyModifiers(/*<-*/val /*->*/x: Int, var y: Double, z: String) +class ManyModifiers2(val x: Int, var y: Double, z: String) \ No newline at end of file diff --git a/scala3doc-testcases/src/tests/companionObjectSignatures.scala b/scala3doc-testcases/src/tests/companionObjectSignatures.scala new file mode 100644 index 000000000000..6e58794108ec --- /dev/null +++ b/scala3doc-testcases/src/tests/companionObjectSignatures.scala @@ -0,0 +1,37 @@ +package tests +package companionObjectSignatures + +case class CaseClass(parameterOfClassConstructor: String) +{ + val classProperty1: String + = ??? + val classProperty2: String + = ??? + def methodInClass1(): String + = ??? + + def methodInClass2(): CaseClass + = ??? +} + +case object CaseClass +{ + val parameterOfObject: String + = "asd" + + def methodInCompanion1(): String + = ??? + + def methodInCompanion2(): CaseClass + = ??? +} + +case class WithoutExplicitCompanion(parameter: Int) + +class StandardClass + +object StandardClass + +class StandardClassWithCaseCompanion + +case object StandardClassWithCaseCompanion \ No newline at end of file diff --git a/scala3doc-testcases/src/tests/enumSignatures.scala b/scala3doc-testcases/src/tests/enumSignatures.scala new file mode 100644 index 000000000000..131d357ede96 --- /dev/null +++ b/scala3doc-testcases/src/tests/enumSignatures.scala @@ -0,0 +1,27 @@ +package tests + +package enumSignatures + +enum Enum1 +{ + case A + case B + case C +} +enum Enum2(val i: Int): + case A(val s: String) extends Enum2(1) + case B(val t: String) extends Enum2(2) + case C(val u: String) extends Enum2(3) + +enum Enum3(val param: Int): + case A extends Enum3(1) with A + case B extends Enum3(2) + case C extends Enum3(3) + +enum Enum4[+T]: + case G(s: String) + case B extends Enum4[Int] with A + case C[V](s:String) extends Enum4[V] + case D[T](s: String) extends Enum4[T] + +trait A \ No newline at end of file diff --git a/scala3doc-testcases/src/tests/extensionMethodSignatures.scala b/scala3doc-testcases/src/tests/extensionMethodSignatures.scala new file mode 100644 index 000000000000..58d374670d05 --- /dev/null +++ b/scala3doc-testcases/src/tests/extensionMethodSignatures.scala @@ -0,0 +1,45 @@ +package tests +package extensionMethodSignatures + +class ClassOne +{ + // Commented cases won't work for now + // extension ClassTwoOps on (c: ClassTwo): + // def getA() = c.a + extension (c: ClassTwo) + def getB(): String + = c.b + + extension (c: ClassTwo) + def getGivenParams(a: Int, b: Int, d: Int)(e: String): Int + = 56 + + extension (c: ClassTwo) + def |||:(a: Int, b: Int, d: Int)(e: String): Int + = 56 + + extension (b: Int) + def secondGroup(): String + = ??? + + extension (c:ClassTwo) + : + def getString(a: String): String + = a + + def getInt(): Int + = 5 + + extension (s: String) + def someMethod(): String + = ??? + def otherMethod(a: Int): Int + = ??? 
+} + +case class ClassTwo(a: String, b: String) +{ + +} + +class ClassOneTwo extends ClassOne \ No newline at end of file diff --git a/scala3doc-testcases/src/tests/fieldsSignatures.scala b/scala3doc-testcases/src/tests/fieldsSignatures.scala new file mode 100644 index 000000000000..a9bb4eb42e05 --- /dev/null +++ b/scala3doc-testcases/src/tests/fieldsSignatures.scala @@ -0,0 +1,41 @@ +package tests +package fieldsSignatures + +case class A(cA: String, var cB: Int) +{ + val A: Int + = 1 + val B: Int + = 2 + var other: Int + = 4 +} + +trait C +{ + val d: Int +} + +abstract class D extends C +{ + override val d: Int + = 1 +} + +trait C2 +{ + def d: Int +} + + +abstract class D2 extends C +{ + override val d: Int + = 1 +} + +object Documentation +{ + val valInsideDocObject: Nothing + = ??? +} \ No newline at end of file diff --git a/scala3doc-testcases/src/tests/genericMethods.scala b/scala3doc-testcases/src/tests/genericMethods.scala new file mode 100644 index 000000000000..3ccadbcecc18 --- /dev/null +++ b/scala3doc-testcases/src/tests/genericMethods.scala @@ -0,0 +1,16 @@ +package tests.genericMethods + +class Types: + def orTypes(base: Int | String, nested: List[Int | Double] | Seq[String]): Unit + = ??? + def andTypes(base: Int & String, nested: List[Int & Double] & Seq[String]): Unit + = ??? + + def mixedAndAndOr(base: Int & String, nested: List[Int | Double] & Seq[String]): Unit + = ??? + + def literal(i: 1, d: 3.3, c: 'c'): 34 + = 34 + + def byName(a: => Int, b: => String | Int): Unit + = ??? \ No newline at end of file diff --git a/scala3doc-testcases/src/tests/genericSignatures.scala b/scala3doc-testcases/src/tests/genericSignatures.scala new file mode 100644 index 000000000000..550bb6b55028 --- /dev/null +++ b/scala3doc-testcases/src/tests/genericSignatures.scala @@ -0,0 +1,17 @@ +package tests.genericSignatures + +class D[T] + +// TODO #26 this is not supported :( class D1[T ] + +class D2[T]() + +class DD[+T] + +class A + +class E[T] extends D[T] + +class F[+T, -F] extends DD[T] + +class A2[E, +T <: DD[E]] diff --git a/scala3doc-testcases/src/tests/givenSignatures.scala b/scala3doc-testcases/src/tests/givenSignatures.scala new file mode 100644 index 000000000000..4364a7e4c0a8 --- /dev/null +++ b/scala3doc-testcases/src/tests/givenSignatures.scala @@ -0,0 +1,44 @@ +package tests + +package givenSignatures + + + +class GivenClass { + trait B + trait C[T] + val r: Int = 5 + type R = Int + given R = r + trait Ord[T] { + def compare(x: T, y: T): Int + extension (x: T) def < (y: T) = compare(x, y) < 0 + extension (x: T) def > (y: T) = compare(x, y) > 0 + } + given intOrd as Ord[Int] { + def compare(x: Int, y: Int) = + if (x < y) -1 else if (x > y) +1 else 0 + } + + given asd(using int: Int) as B + + given asd2[T] as C[T] + + given listOrd[T](using ord: Ord[T]) as Ord[List[T]] { + + def compare(xs: List[T], ys: List[T]): Int = (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = ord.compare(x, y) + if (fst != 0) fst else compare(xs1, ys1) + } + + given IntOps as Int.type = Int + + given GivenType = GivenType() + + class GivenType +} + diff --git a/scala3doc-testcases/src/tests/implicitConversions.scala b/scala3doc-testcases/src/tests/implicitConversions.scala new file mode 100644 index 000000000000..35da6a67a65f --- /dev/null +++ b/scala3doc-testcases/src/tests/implicitConversions.scala @@ -0,0 +1,73 @@ +package tests + +package implicitConversions + +given Conversion[A, B] { + def apply(a: A): B = ??? 
+} + +extension (a: A) def extended_bar(): String = ??? + +class A { + implicit def conversion(c: C): D = ??? + implicit def conversion: Conversion[C,D] = ??? + implicit val a: Conversion[C,D] = ??? + + extension (c: C) def extended_bar(): String = ??? + + class C { + def bar: String = ??? + } + + class D extends E() { + def bar2: String = ??? + + val string: String = ??? + + class Bar() + + type ImplicitType >: String + + extension (e: E) def extended_bar(): String = ??? + } + + class E { + def inherited: Int = ??? + } +} + +class B { + def foo: Int = ??? + + var b: String = ??? +} + +class C { + def extensionInCompanion: String = ??? +} + +object C { + implicit def companionConversion(c: C): B = ??? + + extension (c: C) def extensionInCompanion: String = ??? +} + +package nested { + extension (opt: Opt[Int]) def sum: Int = ??? + class Opt[A] + + class Lst[A] + object Lst { + extension (lst: Lst[Int]) def sum: Int = ??? + } + + object Wrapper { + class Foo + class Bar { + def bar = "bar" + } + implicit def foobar(foo: Foo): Bar = Bar() + } + + class Z +} \ No newline at end of file diff --git a/scala3doc-testcases/src/tests/implicitMembers.scala b/scala3doc-testcases/src/tests/implicitMembers.scala new file mode 100644 index 000000000000..8dd04d947310 --- /dev/null +++ b/scala3doc-testcases/src/tests/implicitMembers.scala @@ -0,0 +1,20 @@ +package tests + +package implicitMembers + +class OuterClass: + class ImplicitMemberTarget + + object ImplicitMemberTarget: + extension (a: ImplicitMemberTarget): + def extensionFromCompanion: String = + "ImplicitMemberTarget" + + // does not work + extension (a: ImplicitMemberTarget): + def extensionFromOuterClass: String = + "ImplicitMemberTarget" + +extension (a: OuterClass#ImplicitMemberTarget): + def extensionFromPackage: String = + "ImplicitMemberTarget" \ No newline at end of file diff --git a/scala3doc-testcases/src/tests/markdown-tests.scala b/scala3doc-testcases/src/tests/markdown-tests.scala new file mode 100644 index 000000000000..ba455fdb6be1 --- /dev/null +++ b/scala3doc-testcases/src/tests/markdown-tests.scala @@ -0,0 +1,44 @@ +package tests + +/** + * * a + * * b + * * c + * + * - a + * - b + * - c + * + * 1. a + * 1. b + * 1. c + * + * + * If the following list was indented one space less, it wouldn't parse + * properly. That is, the first nested list would not be nested. This is + * because ATTW we trim _up to two_ spaces between the star "gutter" and actual + * comment bodies. + * + * * a + * - a.a + * - a.b + * - a.c + * * b + * 1. b.1 + * 2. b.2 + * 3. b.3 + * * b.3.a + * * b.3.b + * * b.3.c + */ +class MdLists + +/** + * | day | time | spent | + * |:------------|:-----:|--------:| + * | nov. 2. tue | 10:00 | 4h 40m | + * | nov. 3. thu | 11:00 | 4h | + * | nov. 7. mon | 10:20 | 4h 20m | + * | total: || 13h | + */ +class MdTables diff --git a/scala3doc-testcases/src/tests/mergedPackage1.scala b/scala3doc-testcases/src/tests/mergedPackage1.scala new file mode 100644 index 000000000000..264936c03565 --- /dev/null +++ b/scala3doc-testcases/src/tests/mergedPackage1.scala @@ -0,0 +1,9 @@ +package tests + +package mergedPackage + +val propertyFromPackage1: String + = ??? + +def methodFromPackage1(): Int + = ??? 
\ No newline at end of file diff --git a/scala3doc-testcases/src/tests/mergedPackage2.scala b/scala3doc-testcases/src/tests/mergedPackage2.scala new file mode 100644 index 000000000000..698278b07bd7 --- /dev/null +++ b/scala3doc-testcases/src/tests/mergedPackage2.scala @@ -0,0 +1,9 @@ +package tests + +package mergedPackage + +val propertyFromPackage2: Int + = ??? + +def methodFromPackage2(): String + = ??? \ No newline at end of file diff --git a/scala3doc-testcases/src/tests/mergedPackage3.scala b/scala3doc-testcases/src/tests/mergedPackage3.scala new file mode 100644 index 000000000000..61eb99148061 --- /dev/null +++ b/scala3doc-testcases/src/tests/mergedPackage3.scala @@ -0,0 +1,8 @@ +package tests + +package object mergedPackage{ + val propertyFromPackageObject: Double + = ??? + def functionFromPackageObject(): String => String + = ??? +} \ No newline at end of file diff --git a/scala3doc-testcases/src/tests/methodsAndConstructors.scala b/scala3doc-testcases/src/tests/methodsAndConstructors.scala new file mode 100644 index 000000000000..f30056fa6500 --- /dev/null +++ b/scala3doc-testcases/src/tests/methodsAndConstructors.scala @@ -0,0 +1,49 @@ +package tests.methodsAndConstructors + +class A +class B extends A +class C +class D[T] +class E[T] extends D[T] + +class Constructors(a: String): + def this() + = this("Ala") + + def this(a: A)(b: A) + = this("Ala") + +/** Some methods to tests */ +class Methods: + def nobraces: A + = ??? + + def simple(): B + = ??? + + def oneParam(a: A): B + = ??? + + def multipleParams(a: A, b: B): C + = ??? + + def vararg(a: A*): C + = ??? + + def multipleList(a: A)(b: B): C + = ??? + + def generic[T](a: D[T]): D[T] + = ??? + + def generic2[T, V](a: D[T], b: E[V]): D[T] + = ??? + + def primitives(a: Int, b: Double, c: Short): Byte + = 0 + + def strings(a: String): String + = "" + + def arrays(a: Array[String], b: Array[Int]): Array[Double] + = ??? diff --git a/scala3doc-testcases/src/tests/modifiersSignatureTestSource.scala b/scala3doc-testcases/src/tests/modifiersSignatureTestSource.scala new file mode 100644 index 000000000000..064720a582ad --- /dev/null +++ b/scala3doc-testcases/src/tests/modifiersSignatureTestSource.scala @@ -0,0 +1,40 @@ +package tests.modifiersSignatureTestSource + +case class Case() + +final class Final() + +abstract class Abstract() + +class Empty() + +sealed class Sealed() + +open class Open + +opaque type Opaque + = Open + +abstract class Methods() +{ + def method1(): Unit + + inline def inlineMethod(): Unit + = Unit + + implicit def toImplicitString(): String + = "asd" +} + +class ImplementedMethods() extends Methods/*<-*/()/*->*/ +{ + override def method1(): Unit + = ??? + +} + +case class ReimplementedMethods() extends ImplementedMethods/*<-*/()/*->*/ +{ + override def method1(): Unit + = ??? 
+} diff --git a/scala3doc-testcases/src/tests/nested.scala b/scala3doc-testcases/src/tests/nested.scala new file mode 100644 index 000000000000..2f5fe3931e95 --- /dev/null +++ b/scala3doc-testcases/src/tests/nested.scala @@ -0,0 +1,59 @@ +package tests.nested + +class A +{ + object B + { + def bb(): Int + = 123 + val a: Double + = 0.3 + } + + class C + { + def cc: Int + = 123 + } + + trait D + { + type AA = Int + } +} + +object R +{ + object B + { + def bb(): Int + = 123 + val a: Double + = 0.3 + } + + class C + { + def cc: Int + = 123 + } + + trait D + { + type AA = Int + } +} + +class X +{ + object Y +} + +// bug found in dotty code, still fails with type +sealed trait ErrorKind +object ErrorKind +{ + // This below produce some strange type + // case object Parser extends ErrorKind + // case object Typer extends ErrorKind +} \ No newline at end of file diff --git a/scala3doc-testcases/src/tests/objectSignatures.scala b/scala3doc-testcases/src/tests/objectSignatures.scala new file mode 100644 index 000000000000..3ec55e1dffba --- /dev/null +++ b/scala3doc-testcases/src/tests/objectSignatures.scala @@ -0,0 +1,19 @@ +package tests +package objectSignatures + +class A[T] +{ + val a: String = "asd" + def method3() = "asd" +} + +object A + +trait C + +object Base + +object A2 extends A[String] with C + +// We are not going to add final below +// final object B diff --git a/scala3doc-testcases/src/tests/opaqueTypes.scala b/scala3doc-testcases/src/tests/opaqueTypes.scala new file mode 100644 index 000000000000..33cc7ab9ff91 --- /dev/null +++ b/scala3doc-testcases/src/tests/opaqueTypes.scala @@ -0,0 +1,9 @@ +package tests + +package opaqueTypes + +opaque type Permissions + = Int +opaque type PermissionChoice + = Int +//opaque type Permission <: Permissions & PermissionChoice = Int TODO: #112 \ No newline at end of file diff --git a/scala3doc-testcases/src/tests/package.scala b/scala3doc-testcases/src/tests/package.scala new file mode 100644 index 000000000000..7fac475792cb --- /dev/null +++ b/scala3doc-testcases/src/tests/package.scala @@ -0,0 +1,4 @@ +/** This should be moved to its own project */ +package object tests { + +} diff --git a/scala3doc-testcases/src/tests/packageObjectSymbolSignatures.scala b/scala3doc-testcases/src/tests/packageObjectSymbolSignatures.scala new file mode 100644 index 000000000000..9151ca3de508 --- /dev/null +++ b/scala3doc-testcases/src/tests/packageObjectSymbolSignatures.scala @@ -0,0 +1,18 @@ +package tests + +package object packageObjectSymbolSignatures +{ + val property1: String + = ??? + + type MyType = String + + val property2: Int + = ??? + + def method1(): Int + = ??? + + def method2(): String + = ??? +} \ No newline at end of file diff --git a/scala3doc-testcases/src/tests/packageSymbolSignatures.scala b/scala3doc-testcases/src/tests/packageSymbolSignatures.scala new file mode 100644 index 000000000000..13ec6149c88e --- /dev/null +++ b/scala3doc-testcases/src/tests/packageSymbolSignatures.scala @@ -0,0 +1,19 @@ +package tests + +package packageSymbolSignatures + +val packageSymbol1: String + = ??? + +val packageSymbol2: Int + = ??? + +def packageMethod1: String + = ??? + +def packageMethod2: Int + = ??? 
+ +type PackageInteger = Int + +class A diff --git a/scala3doc-testcases/src/tests/signatureTestSource.scala b/scala3doc-testcases/src/tests/signatureTestSource.scala new file mode 100644 index 000000000000..7ab9e436d833 --- /dev/null +++ b/scala3doc-testcases/src/tests/signatureTestSource.scala @@ -0,0 +1,39 @@ +package tests.signatureTestSource +class A +class B extends A +class C +class D[T] +class E[T] extends D[T] + +class SignatureTestClass(a: String) +: + def simple(): B + = ??? + def oneParam(a: A): B + = ??? + def multipleParams(a: A, b: B): C + = ??? + def likeVararg(a: Seq[A]): C + = ??? + def vararg(a: A*): C + = ??? + def multipleList(a: A)(b: B): C + = ??? + + def generic[T](a: D[T]): D[T] + = ??? + def generic2[T, V](a: D[T], b: E[V]): D[T] + = ??? + + def primitives(a: Int, b: Double, c: Short): Byte + = 0 + def strings(a: String): String + = "" + def arrays(a: Array[String], b: Array[Int]): Array[Double] + = ??? + def bounds1[T <: String](a: T, b: T): Unit + = ??? + def bounds2[T >: String](a: T, b: T): Unit + = ??? + def this() + = this("Ala") \ No newline at end of file diff --git a/scala3doc-testcases/src/tests/structuralTypes.scala b/scala3doc-testcases/src/tests/structuralTypes.scala new file mode 100644 index 000000000000..8f9a1a9ecf11 --- /dev/null +++ b/scala3doc-testcases/src/tests/structuralTypes.scala @@ -0,0 +1,14 @@ +package tests + +package structuralTypes + +type Person = Record { val name: String; val age: Int; type Height = Int; def sth(a: Int, b: String): Int; } + +type R = { type T; val x: Int; type U <: this.T; def foo(): Int; } + +class Record(elems: (String, Any)*) extends Selectable { + val fields: Map[String, Any] + = elems.toMap + def selectDynamic(name: String): Any + = fields(name) +} \ No newline at end of file diff --git a/scala3doc-testcases/src/tests/tests.scala b/scala3doc-testcases/src/tests/tests.scala new file mode 100644 index 000000000000..cdaf7947eae5 --- /dev/null +++ b/scala3doc-testcases/src/tests/tests.scala @@ -0,0 +1,190 @@ +package tests + +/** # AN IMPORTANT TEST CLASS + * + * Very important, in fact + * ----------------------- + * + * ### So important it gets three headers + * + * This is an *important* _test_ class. + * Important enough to get multiple sentences in its summary. + * + * And `this` is inline code. + * + * And this is the **strong** __emphasis__ test. + * + * And this + * ```scala + * is.an("actual code block") + * with.multiple("lines") + * ``` + * + * And this + * + * is.an("indented code block") + * with.multiple("lines") + * + * And this + * > is + * > > a + * > blockquote + * + * And this is a link: [[method]]. + * + * This is another way to spell the same link: [[#method]]. + * + * And this is another link: [[AA]]. + * + * And this is another link: [[AA$]]. + * + * And this is yet another link: [[tests.Methods]]. + * + * Yet another: [[tests.Methods.simple]]. + * + * And yet another: [[example.level2.Documentation]]. + * + * This is my friend: [[tests\.B\]]]. + * + * And this is his companion: [[tests.B$ link to the companion]]. + * + * @author Gal Anonim + * @version 1.0.0 + * @result A class doesn't actually have a result. + * @constructor A class has a constructor, and this one is important. + */ +class A { + + /** This is a method. + * + * This is a link: [[AA]]. + * + * This is another link: [[AA$]]. + * + * And yet another: [[B]]. 
+ */ + def method(s: String): String = s + + class AA + + object AA +} + +/** Companion object to test linking */ +object A + +/** = An important Wiki test class = + * + * == Very important, in fact == + * + * === So important it gets three headers === + * + * This is an ''important'' '''test''' __class__. And `this` is inline code. + * + * While + * {{{ + * this.is("a code block") + * }}} + * + * And this is a link: [[otherMethod]]. + * + * And this is another link: [[BB]]. + * + * And this is yet another link: [[tests.Methods]]. + * + * Yet another: [[tests.Methods.simple]]. + * + * And yet another: [[example.level2.Documentation]]. + * + * This is my friend: [[tests.A]]. + * + * And this is his companion: [[tests.A$]]. + * @syntax wiki + */ +class B extends A { + /** This is a method. */ + def otherMethod(s: String): String = s + + class BB +} + +/** Companion object to test linking. + * + * This is my member: [[B$.Z]] + * + * And this is my term member: [[B$.Z$]] + * + * This is my member, addressed differently: [[this.Z]] + * + * And this is my term member, addressed differently: [[this.Z$]] + */ +object B { + type Z = Int + val Z: Int = 0 +} + +class C { + object CC + class CC +} +class D[T] +class E[T] extends D[T] + +/** A class with a semi-non-trivial constructor. + * + * @param a Hello! + */ +class Constructors(a: String): + def this() = this("Ala") + def this(a: A)(b: A) = this("Ala") + +/** Some methods to tests */ +class Methods: + def nobraces: A = ??? + /** Class doc test. + */ + def simple(): B = ??? + def oneParam(a: A): B = ??? + def multipleParams(a: A, b: B): C = ??? + def vararg(a: A*): C = ??? + def multipleList(a: A)(b: B): C = ??? + def generic[T](a: D[T]): D[T] = ??? + + /** A generic method. + * + * @author Gal Anonim + * @author Gol Anonim + * @version 1.0.0 + * @since 0.1.0 + * @todo Consider cleaning up this documentation + * @todo Add more documentation + * @note This method is more important than it looks. + * @note Much more important. + * @param a A param! + * @param b Another param. + * @tparam T A type param! + * @tparam V + * Another type param. + * ``` + * with.a("near-pathological").description + * ``` + * But, surprisingly, still displayed quite well. + * + * Even though this line should be separated from previous one. + * + * @throws scala.Error Throws errors. + * @example + * ``` + * (m : Methods).generic2(d(), e()): B + * ``` + * @example + * ``` + * (m : Methods).generic2(d2(), e2()): B + * ``` + * @return Nothing at all! + */ + def generic2[T, V](a: D[T], b: E[V]): D[T] = ??? + + def primitives(a: Int, b: Double, c: Short): Byte = 0 + def strings(a: String): String = "" + def arrays(a: Array[String], b: Array[Int]): Array[Double] = ??? 
diff --git a/scala3doc-testcases/src/tests/traitSignatures.scala b/scala3doc-testcases/src/tests/traitSignatures.scala new file mode 100644 index 000000000000..1ac63e3b1787 --- /dev/null +++ b/scala3doc-testcases/src/tests/traitSignatures.scala @@ -0,0 +1,10 @@ +package tests +package traitSignatures + +trait A + +trait B extends A + +trait C(a: Int) + +trait D(b: Double) extends C with A \ No newline at end of file diff --git a/scala3doc-testcases/src/tests/typeLambdas.scala b/scala3doc-testcases/src/tests/typeLambdas.scala new file mode 100644 index 000000000000..78e270aae76f --- /dev/null +++ b/scala3doc-testcases/src/tests/typeLambdas.scala @@ -0,0 +1,13 @@ +package tests + +package typeLambdas + +type L1 = Int +type U1 = String +type R1 = Double + +type Id[T <: AnyKind] + +type TL1 = Id[[X, Y] =>> Map[X,Y]] + +type TL2 = Id[[X >: Int] =>> [Y <: String] =>> Map[X, Y]] \ No newline at end of file diff --git a/scala3doc-testcases/src/tests/typesSignatures.scala b/scala3doc-testcases/src/tests/typesSignatures.scala new file mode 100644 index 000000000000..2c8caf314080 --- /dev/null +++ b/scala3doc-testcases/src/tests/typesSignatures.scala @@ -0,0 +1,51 @@ +package tests +package typesSignatures + +class A +{ + type A = Int + type B[+T] = Seq[T] + type C[A, B <: A] = Seq[B] +} + +trait V +{ + type Ala[+J] <: Int + type Ola[+T] + type X +} + +class Generic[T] + +class Base +{ + type A + type B = Int + + // Tests not support multiline signatures + type MatchT[T] = T match { case String => Char case Int => Byte } + + // Tests do not support multiline signatures + type Elem[X] = X match { case String => Char case Array[t] => t case Iterable[t] => t } + + type F = [X] => (x: X) => List[X] + + type G = Int => Int + + type H = () => String + + type I = (Int, String, Int) => (String, Int) +} + +class Operators +{ + type Binary = String =:= Int + + // Infix annotation is not well supported in Dotty + // import scala.annotation.infix + // @infix type op[A, B] = Int + // type Binary2 = String op Int + + import scala.compiletime.ops.boolean._ + type Unary = ![true] +} \ No newline at end of file diff --git a/scala3doc-testcases/src/tests/visibility.scala b/scala3doc-testcases/src/tests/visibility.scala new file mode 100644 index 000000000000..a87dce1171b9 --- /dev/null +++ b/scala3doc-testcases/src/tests/visibility.scala @@ -0,0 +1,72 @@ +package tests +package visibility + +private object PrivateTopLevelObject //unexpected + +private[tests] object PrivateInOuterPackageTopLevelObject //unexpected + +private[visibility] object PrivateInInnerPackageTopLevelObject //unexpected + +private[this] object LocallyPrivateTopLevelObject //unexpected + +protected object ProtectedTopLevelObject //unexpected + +protected[tests] object ProtectedInOuterPackageTopLevelObject //unexpected + +protected[visibility] object ProtectedInInnerPackageTopLevelObject //unexpected + +protected[this] object LocallyProtectedTopLevelObject //unexpected + +private def privateTopLevelMethod: Int //unexpected + = 1 + +protected def protectedTopLevelMethod: Int //unexpected + = 1 + +class InClassVisibility() +{ + private def privateMethod: Int //unexpected + = ??? + + private[tests] def privateInOuterPackageMethod: Int //unexpected + = ??? + + private[visibility] def privateInInnerPackageMethod: Int //unexpected + = ??? + + private[InClassVisibility] def privateInClassMethod: Int //unexpected + = ??? + + private[this] def locallyPrivateMethod: Int //unexpected + = ??? + + protected def protectedMethod: Int + = ??? 
+ + protected[tests] def protectedInOuterPackageMethod: Int //unexpected + = ??? + + protected[visibility] def protectedInInnerPackageMethod: Int //unexpected + = ??? + + protected[InClassVisibility] def protectedInClassMethod: Int + = ??? + + protected[this] def locallyProtectedMethod: Int //unexpected + = ??? +} + +trait InTraitVisibility +{ + protected[InTraitVisibility] def protectedInTraitMethod: Int + = ??? +} + +object InObjectVisibility +{ + protected def protectedObjectMethod: Int //unexpected + = ??? + + protected[InObjectVisibility] def protectedInObjectScopeMethod: Int //unexpected + = ??? +} diff --git a/scala3doc-testcases/src/tests/wiki-tests.scala b/scala3doc-testcases/src/tests/wiki-tests.scala new file mode 100644 index 000000000000..e2fcd005feea --- /dev/null +++ b/scala3doc-testcases/src/tests/wiki-tests.scala @@ -0,0 +1,36 @@ +package tests + +/** Wiki list syntax tests. + * + * - a + * - b + * - c + * + * 1. a + * 1. b + * 1. c + * + * a. a + * a. b + * a. c + * + * i. a + * i. b + * i. c + * + * + * - a + * - a.a + * - a.b + * - a.c + * - b + * 1. b.1 + * 1. b.2 + * 1. b.3 + * a. b.3.a + * a. b.3.b + * a. b.3.c + * + * @syntax wiki + */ +class WikiLists diff --git a/scala3doc/.gitignore b/scala3doc/.gitignore new file mode 100644 index 000000000000..893c4fbd144f --- /dev/null +++ b/scala3doc/.gitignore @@ -0,0 +1,2 @@ +# root where we generate documentation +output \ No newline at end of file diff --git a/scala3doc/.scalafmt.conf b/scala3doc/.scalafmt.conf new file mode 100644 index 000000000000..9df0d88b69ce --- /dev/null +++ b/scala3doc/.scalafmt.conf @@ -0,0 +1 @@ +version = "2.6.3" diff --git a/scala3doc/README.md b/scala3doc/README.md new file mode 100644 index 000000000000..19cb405e35e1 --- /dev/null +++ b/scala3doc/README.md @@ -0,0 +1,164 @@ +# Scala3doc + +Scala3doc (name subject to change) is the documentation tool for +[Dotty](https://github.com/lampepfl/dotty), which is scheduled to become +Scala 3. It's based on [Dokka](https://github.com/Kotlin/dokka), the +documentation tool for Kotlin. It uses the TastyInspector to access definitions, +which is an officially supported way to access Dotty's perspective of a +codebase. + +We're aiming to support all the features Scaladoc did, plus new and exciting ones such as: + +- Markdown syntax! +- displaying project and API documentation together on one site! +- and more! + +## Running the project + +Use the following commands to generate documentation for this project and for Dotty, respectively: + +``` +sbt generateSelfDocumentation +sbt generateDottyLibDocumentation +``` + +To actually view the documentation, the easiest way is to run the following in project root: + +``` +cd output +python3 -m http.server 8080 +``` + +And afterwards point your browser to `http://localhost:8080/self` or +`http://localhost:8080/stdLib` for this project and for Dotty documentation +respectively. + +It's not strictly necessary to go through an HTTP server, but because of CORS +the documentation won't work completely if you don't. + +## Developing + +At least two of our contributors use [Metals](https://scalameta.org/metals/) to +work on the project. + +For every PR, we build documentation for Scala3doc and Dotty. For example, for +PR 123 you can find them at: + ++ https://scala3doc.s3.eu-central-1.amazonaws.com/pr-123/self/main/index.html ++ https://scala3doc.s3.eu-central-1.amazonaws.com/pr-123/stdLib/main/index.html + +Note that these correspond to the contents of `output` directory - that's +precisely what they are. 
+
+You can also find the result of building the same sites for the latest `master` at:
+
++ https://scala3doc.s3.eu-central-1.amazonaws.com/pr-master/self/main/index.html
++ https://scala3doc.s3.eu-central-1.amazonaws.com/pr-master/stdLib/main/index.html
+
+### Testing
+
+Most tests rely on comparing signatures (of classes, methods, objects etc.) extracted from the generated documentation
+to signatures found in source files. Such tests are defined using the [MultipleFileTest](src/test/scala/dotty/dokka/MultipleFileTest.scala) class
+and its subtypes (such as [SingleFileTest](src/test/scala/dotty/dokka/SingleFileTest.scala)).
+
+WARNING: As the classes mentioned above are likely to evolve, the description below might easily get out of date.
+In case of any discrepancies, rely on the source files instead.
+
+`MultipleFileTest` requires that you specify the names of the files used to extract signatures,
+the names of the directories containing the corresponding TASTY files,
+and the kinds of signatures from the source files (corresponding to the keywords used to declare them, like `def`, `class`, `object` etc.)
+whose presence in the generated documentation will be checked (other signatures, when missing, will be ignored).
+The mentioned source files should be located directly inside the `src/main/scala/tests` directory,
+but the file names passed as parameters should contain neither this path prefix nor the `.scala` suffix.
+The TASTY folders are expected to be located in `target/${dottyVersion}/classes/tests` (after successful compilation of the project)
+and, similarly, only their names relative to this path should be provided as test parameters.
+For `SingleFileTest` the name of the source file and the TASTY folder are expected to be the same.
+
+By default it's expected that all signatures from the source files will be present in the documentation,
+but not vice versa (because the documentation can also contain inherited signatures).
+To validate that a signature present in the source does not exist in the documentation
+(because it should be hidden from users), add an `//unexpected` comment after the signature, in the same line.
+This will cause an error if a signature with the same name appears in the documentation
+(even if some elements of the signature are slightly different - to avoid accidentally passing tests).
+If the signature in the documentation is expected to slightly differ from how it's defined in the source code,
+you can add an `//expected: ` comment (also in the same line and followed by a space) followed by the expected signature.
+Alternatively, you can use `/*<-*/` and `/*->*/` as opening and closing markers around parts of a signature that are present in the source but undesired in the documentation (at least at the current stage of development), e.g.
+
+```
+def foo/*<-*/()/*->*/: Int
+```
+
+will make the expected signature be
+
+```
+def foo: Int
+```
+
+instead of
+
+```
+def foo(): Int
+```
+
+
+Because of the way signatures are parsed from the sources, they are expected to span to the end of a line (including comments, except the special ones mentioned above, which change the behaviour of tests), so if a definition has an implementation, the implementation should be placed on a separate line, e.g.
+
+```
+def foo: Int
+ = 1
+
+class Bar
+{
+  //...
+}
+```
+
+Otherwise the implementation would be treated as a part of the signature.
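Putting these conventions together, a minimal, hypothetical testcase could look like the sketch below. The package, class, and member names are made up for illustration and are not part of this patch, and the `//expected:` line only demonstrates the comment syntax, not how the tool actually renders type aliases:

```scala
// hypothetical testcase, for illustration only
package tests.exampleConventions

class Example:
  type MyAlias = String

  // a plain signature is compared verbatim with the generated documentation
  def visible(a: Int): Int
    = ???

  // must not appear in the documentation at all
  protected[tests] def hidden: Int //unexpected
    = ???

  // the empty parameter list should be absent from the documented signature
  def trimmed/*<-*/()/*->*/: Int
    = ???

  // the documented signature is expected to differ from the one in the source
  def aliased(a: MyAlias): Int //expected: def aliased(a: String): Int
    = ???
```

Note that the implementations are kept on separate lines and that no ordinary comments are placed on the signature lines themselves, since those would otherwise be treated as part of the expected signature.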
+
+## Roadmap
+
+1. Publish an initial version of the tool together with an SBT plugin
+1. Replace Dottydoc as the dedicated tool for documenting Dotty code
+
+   This includes:
+   + supporting Dotty's doc pages
+   + releasing together with Dotty as the dedicated documentation tool
+
+1. Support all kinds of Dotty definitions and generate documentation for the
+   standard library
+1. Reach feature parity with Scaladoc
+
+## Contributing
+
+We're happy that you'd like to help us!
+
+We have two issue labels you should take a look at: `good first issue` and
+`self-contained`. The first is easy pickings: you'll be able to contribute without
+needing to dive too deep into the project. The second is the reverse: it's an issue
+that you may find interesting, complex and self-contained enough that you can
+keep chipping away at it without needing to worry too much about merge
+conflicts.
+
+To contribute to the project with your code, fork this repo and create a pull request from a fresh branch.
+To keep the history of commits clean, make sure your commits are squashed into one
+and all your changes are applied on top of the latest master branch (if not, rebase on it instead of merging it).
+Make sure all the tests pass (simply run `sbt test` to verify that).
+
+## FAQ
+
+### Why depend on Dokka?
+
+We have two primary reasons for depending on Dokka. One of them is division of
+labour - Dokka already has a team of maintainers, and it supports an excellent
+API which has already allowed us to generate documentation with it quite easily. By
+depending on Dokka, we will be able to share a large portion of the maintenance
+burden. The second reason is very pragmatic - on our own, it'd be difficult for
+us to reach even feature parity with Scaladoc, simply because of workforce
+constraints. Meanwhile, Dokka maintainers from VirtusLab reached out to us with
+an offer of help, which we were happy to take.
+
+### Why use TASTy?
+
+A documentation tool needs to access compiler information about the project - it
+needs to list all definitions, resolve them by name, and query their members.
+Tasty Reflect is the dedicated way of accessing this information in Scala 3.
diff --git a/scala3doc/documentation/docs/index.md b/scala3doc/documentation/docs/index.md
new file mode 120000
index 000000000000..fe840054137e
--- /dev/null
+++ b/scala3doc/documentation/docs/index.md
@@ -0,0 +1 @@
+../../README.md
\ No newline at end of file
diff --git a/scala3doc/documentation/index.md b/scala3doc/documentation/index.md
new file mode 100644
index 000000000000..c4e9b0ce3b6d
--- /dev/null
+++ b/scala3doc/documentation/index.md
@@ -0,0 +1,27 @@
+# Scala3doc
+
+**Documentation tool for Scala 3**
+
+We are using [TASTy](https://github.com/lampepfl/dotty/blob/master/tasty/src/dotty/tools/tasty/TastyFormat.scala) to generate documentation. We aim to have all the known and loved features from Scaladoc, as well as new features such as:
+
+- integrated documentation and API
+- options for basic pluggability
+- and much more
+
+**Yes, this page was generated using scala3doc**
+
+You can learn more from our [documentation](scala3doc/index.html).
+
+## Getting started
+
+For now, the recommended way to try out our project is to:
+ - Clone our [repository](https://github.com/lampepfl/scala3doc)
+ - Run `sbt main -n <name> -o <output> -t <tasty files> -cp <classpath> -s { <sources> }` where
+    - `<name>`: name of the module in the generated documentation
+    - `<output>`: location where the documentation should be created
+    - `<tasty files>`: list of dirs or jars that contain the tasty files that should be documented
+    - `<classpath>`: classpath that was used to generate the tasty files
+    - `<sources>`: links to the source files of the module, used to link symbols on pages to their source files. They need to be supplied in the form:
+      local_dir=remote_dir#line_suffix e.g. src/main/scala=https://github.com/lampepfl/scala3doc/tree/master/src/main/scala#L
+
+We also support a `-d <directory>` argument to provide static documentation. You can find more about that feature [here](static-page.html).
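To make the parameters above concrete, here is a hypothetical invocation; the module name, output directory, tasty-file location and classpath below are made-up placeholders rather than values taken from this patch:

```
sbt main -n "mylib" -o output/mylib -t target/classes/mylib -cp target/classes/mylib:lib/scala3-library.jar -s { src/main/scala=https://github.com/lampepfl/scala3doc/tree/master/src/main/scala#L }
```

Depending on your shell and sbt version, you may need to quote the whole `main ...` part as a single sbt command, e.g. `sbt "main -n ..."`.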
diff --git a/scala3doc/dotty-docs/docs/.gitignore b/scala3doc/dotty-docs/docs/.gitignore
new file mode 100644
index 000000000000..6f8e43ce5ec4
--- /dev/null
+++ b/scala3doc/dotty-docs/docs/.gitignore
@@ -0,0 +1,6 @@
+# Jekyll specific ignores
+vendor/
+.bundle/
+Gemfile.lock
+_site/
+.sass-cache/
diff --git a/scala3doc/dotty-docs/docs/_layouts/base.html b/scala3doc/dotty-docs/docs/_layouts/base.html
new file mode 100644
index 000000000000..72b8c37e6c37
--- /dev/null
+++ b/scala3doc/dotty-docs/docs/_layouts/base.html
@@ -0,0 +1,31 @@
+---
+extraCSS:
+  - css/bootstrap.min.css
+  - css/dottydoc.css
+  - css/color-brewer.css
+extraJS:
+  - js/jquery.min.js
+  - js/highlight.pack.js
+  - js/bootstrap.min.js
+---
+
+
+
+
+
+
+
+{{ content }}
+
+
\ No newline at end of file
diff --git a/scala3doc/dotty-docs/docs/_layouts/blog-page.html b/scala3doc/dotty-docs/docs/_layouts/blog-page.html
new file mode 100644
index 000000000000..62724cd9c443
--- /dev/null
+++ b/scala3doc/dotty-docs/docs/_layouts/blog-page.html
@@ -0,0 +1,35 @@
+---
+layout: main
+---
+
+
+

{{ page.title }}

+ + {% if page.subTitle %} +
+ {{ page.subTitle }} +
+ {% endif %} +
+ + {{ content }} + + {% if page.author and page.authorImg %} +
+
+ + + {{ page.author }} + +
+ {% endif %} +
diff --git a/scala3doc/dotty-docs/docs/_layouts/doc-page.html b/scala3doc/dotty-docs/docs/_layouts/doc-page.html new file mode 100644 index 000000000000..694da12f1f20 --- /dev/null +++ b/scala3doc/dotty-docs/docs/_layouts/doc-page.html @@ -0,0 +1,15 @@ +--- +layout: main +--- +
+
+

{{ page.title }}

+ +
+ {{ content }} +
diff --git a/scala3doc/dotty-docs/docs/_layouts/index.html b/scala3doc/dotty-docs/docs/_layouts/index.html new file mode 100644 index 000000000000..5be2d9794299 --- /dev/null +++ b/scala3doc/dotty-docs/docs/_layouts/index.html @@ -0,0 +1,4 @@ +--- +layout: main +--- +

{{ content }}

diff --git a/scala3doc/dotty-docs/docs/_layouts/main.html b/scala3doc/dotty-docs/docs/_layouts/main.html new file mode 100644 index 000000000000..9637365c0231 --- /dev/null +++ b/scala3doc/dotty-docs/docs/_layouts/main.html @@ -0,0 +1,12 @@ +--- +layout: base +--- +
+ {{ content }} +
+ + diff --git a/scala3doc/dotty-docs/docs/_layouts/search.html b/scala3doc/dotty-docs/docs/_layouts/search.html new file mode 100644 index 000000000000..355a7e83eeb4 --- /dev/null +++ b/scala3doc/dotty-docs/docs/_layouts/search.html @@ -0,0 +1,197 @@ +--- +layout: main +title: Search +--- + +
+ +

+ + + +
+
+

Entity Results

+
    +
    +
    +

    Member Results

    +
      +
      +
      +
      + + diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2015-10-23-dotty-compiler-bootstraps.md b/scala3doc/dotty-docs/docs/blog/_posts/2015-10-23-dotty-compiler-bootstraps.md new file mode 100644 index 000000000000..9d49fb1eee47 --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2015-10-23-dotty-compiler-bootstraps.md @@ -0,0 +1,72 @@ +--- +layout: blog-page +author: Martin Odersky and Dmitry Petrashko +title: "We got liftoff!" +subTitle: The Dotty compiler for Scala bootstraps. +excerpt_separator: +date: 2015-10-23 +--- + +The [Dotty project](https://github.com/lampepfl/dotty) +is a platform to develop new technology for Scala +tooling and to try out concepts of future Scala language versions. +Its compiler is a new design intended to reflect the +lessons we learned from work with the Scala compiler. A clean redesign +today will let us iterate faster with new ideas in the future. + +Today we reached an important milestone: the Dotty compiler can +compile itself, and the compiled compiler can act as a drop-in for the +original one. This is what one calls a *bootstrap*. + + + +## Why is this important? + +The main reason is that this gives us a some validation of the +*trustworthiness* of the compiler itself. Compilers are complex beasts, +and many things can go wrong. By far the worst things that can go +wrong are bugs where incorrect code is produced. It's not fun debugging code that looks perfectly +fine, yet gets translated to something subtly wrong by the compiler. + +Having the compiler compile itself is a good test to demonstrate that +the generated code has reached a certain level of quality. Not only is +a compiler a large program (44k lines in the case of Dotty), it is +also one that exercises a large part of the language in quite +intricate ways. Moreover, bugs in the code of a compiler don't tend to +go unnoticed, precisely because every part of a compiler feeds into +other parts and all together are necessary to produce a correct +translation. + +## Are we done yet? + +Far from it! The compiler is still very rough. A lot more work is +needed to + + - make it more robust, in particular when analyzing incorrect programs, + - improve error messages and warnings, + - improve the efficiency of some of the generated code, + - improve compilation speed, + - embed it in external tools such as sbt, REPL, IDEs, + - remove restrictions on what Scala code can be compiled, + - help in migrating Scala code that will have to be changed. + +## What are the next steps? + +Over the coming weeks and months, we plan to work on the following topics: + + - Make snapshot releases. + - Work on SBT integration of the compiler. + - Work on IDE support. + - Investigate the best way to obtaining a REPL. + - Work on the build infrastructure. + +If you want to get your hands dirty with any of this, now is a good +moment to get involved! Join the team of contributors, including +Dmitry Petrashko ([@DarkDimius](https://github.com/DarkDimius)), +Guillaume Martres ([@smarter](https://github.com/smarter)), +Ondrey Lhotak ([@olhotak](https://github.com/olhotak)), +Samuel Gruetter ([@samuelgruetter](https://github.com/samuelgruetter)), +Vera Salvis ([@vsalvis](https://github.com/vsalvis)), +and Jason Zaugg ([@retronym](https://github.com/retronym)). + +To get started: . 
diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2016-01-02-new-year-resolutions.md b/scala3doc/dotty-docs/docs/blog/_posts/2016-01-02-new-year-resolutions.md new file mode 100644 index 000000000000..f5115687732a --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2016-01-02-new-year-resolutions.md @@ -0,0 +1,66 @@ +--- +layout: blog-page +title: New Year Resolutions +author: Martin Odersky +authorImg: images/martin.jpg +date: 2016-01-02 +--- + +For most of us, the change of the year is an occasion for thinking +about what we missed doing last year and where we want to improve. I decided +there are a couple of things where I would like to do better in 2016 +than in 2015. The first is that I would like to do more blogging and +writing in general. I have been pretty silent for most of the last +year. This was mostly caused by the fact that I had been heads down to +work on DOT, Scala's foundations, and _Dotty_, the new Scala compiler +platform we are working on. It's been a lot of work, but we are finally +getting good results. DOT now has a mechanized proof of type soundness +and the Dotty compiler [can now compile +itself](http://www.scala-lang.org/blog/2015/10/23/dotty-compiler-bootstraps.html) +as well as large parts of Scala's standard library. + +The Dotty compiler has a completely new and quite unusual +architecture, which makes it resemble a functional database or a +functional reactive program. My [talk at the JVM language +summit](https://www.youtube.com/watch?v=WxyyJyB_Ssc) gives an +overview. In the coming months I want to write together with my +collaborators a series of blog posts + that explain details of the code base. The +aim of these posts will be to present the new architectural patterns +to a larger audience and also to help existing and potential +contributors get familiar with the code base. + +My second resolution is to take a larger effort to promote simplicity +in Scala. I believe the recent [blog post by Jim +Plush](http://jimplush.com/talk/2015/12/19/moving-a-team-from-scala-to-golang/) should be a wakeup call for our +community. Scala is a very powerful and un-opinionated language. This +means we have a large spectrum of choice how to write a Scala +application or library. It's very important for all of us to use this +power wisely, and to promote simplicity of usage wherever possible. +Unfortunately, most of us fall all too easily into the complexity +trap, as Alex Payne's tweet sums it up very nicely. + + + + +I have been as guilty of complication as everybody else. Is +`CanBuildFrom` the most appropriate solution to deal with the +constraints of embedding special types such as arrays and strings in a +collection library? It achieves its purpose of providing a uniform +user-level API on disparate datatypes. But I now think with more +effort we might be able come up with a solution that works as well and +is simpler. Another example, where I have doubts if not regrets are +the `/:` and `:\` operators in scala.collections. They are cute +synonyms for folds, and I am still fond of the analogy with falling +dominoes they evoke. But in retrospect I think maybe they did give a +bad example for others to go overboard with symbolic operators. + +So my main agenda for the coming year is to work on making Scala +simpler: The language, its foundations, its libraries. I hope you +will join me in that venture. + +With that thought, I wish you a happy new year 2016. 
+ + + + diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2016-02-03-essence-of-scala.md b/scala3doc/dotty-docs/docs/blog/_posts/2016-02-03-essence-of-scala.md new file mode 100644 index 000000000000..428b104b0569 --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2016-02-03-essence-of-scala.md @@ -0,0 +1,145 @@ +--- +layout: blog-page +title: The Essence of Scala +author: Martin Odersky +authorImg: images/martin.jpg +date: 2016-02-03 +--- + +What do you get if you boil Scala on a slow flame and wait until all +incidental features evaporate and only the most concentrated essence +remains? After doing this for 8 years we believe we have the answer: +it's DOT, the calculus of dependent object types, that underlies Scala. + +A [paper on DOT](http://infoscience.epfl.ch/record/215280) will be +presented in April at [Wadlerfest](http://events.inf.ed.ac.uk/wf2016), +an event celebrating Phil Wadler's 60th birthday. There's also a prior +technical report ([From F to DOT](http://arxiv.org/abs/1510.05216)) +by Tiark Rompf and Nada Amin describing a slightly different version +of the calculus. Each paper describes a proof of type soundness that +has been machine-checked for correctness. + +## The DOT calculus + +A calculus is a kind of mini-language that is small enough to be +studied formally. Translated to Scala notation, the language covered +by DOT is described by the following abstract grammar: +``` +Value v = (x: T) => t Function + new { x: T => ds } Object + +Definition d = def a = t Method definition + type A = T Type + +Term t = v Value + x Variable + t1(t2) Application + t.a Selection + { val x = t1; t2 } Local definition + +Type T = Any Top type + Nothing Bottom type + x.A Selection + (x: T1) => T2 Function + { def a: T } Method declaration + { type T >: T1 <: T2 } Type declaration + T1 & T2 Intersection + { x => T } Recursion +``` +The grammar uses several kinds of names: +``` +x for (immutable) variables +a for (parameterless) methods +A for types +``` +The full calculus adds to this syntax formal _typing rules_ that +assign types `T` to terms `t` and formal _evaluation rules_ that +describe how a program is evaluated. The following _type soundness_ +property was shown with a mechanized, (i.e. machine-checked) proof: + +> If a term `t` has type `T`, and the evaluation of `t` terminates, then + the result of the evaluation will be a value `v` of type `T`. + +## Difficulties + +Formulating the precise soundness theorem and proving it was unexpectedly hard, +because it uncovered some technical challenges that had not been +studied in depth before. In DOT - as well as in many programming languages - +you can have conflicting definitions. For instance, you might have an abstract +type declaration in a base class with two conflicting aliases in subclasses: +```scala +trait Base { type A } +trait Sub1 extends Base { type A = String } +trait Sub2 extends Base { type A = Int } +trait Bad extends Sub1 with Sub2 +``` +Now, if you combine `Sub1` and `Sub2` in trait `Bad` you get a conflict, +since the type `A` is supposed to be equal to both `String` and `Int`. If you do +not detect the conflict and assume the equalities at face value you +get `String = A = Int`, hence by transitivity `String = Int`! Once you +are that far, you can of course engineer all sorts of situations where +a program will typecheck but cause a wrong execution at runtime. In +other words, type soundness is violated. 
+ +Now, the problem is that one cannot always detect these +inconsistencies, at least not by a local analysis that does not need +to look at the whole program. What's worse, once you have an +inconsistent set of definitions you can use these definitions to +"prove" their own consistency - much like a mathematical theory that +assumes `true = false` can "prove" every proposition including its own +correctness. + +The crucial reason why type soundness still holds is this: If one +compares `T` with an alias, one does so always relative to some _path_ +`x` that refers to the object containing `T`. So it's really `x.T = +Int`. Now, we can show that during evaluation every such path refers +to some object that was created with a `new`, and that, furthermore, +every such object has consistent type definitions. The tricky bit is +to carefully distinguish between the full typing rules, which allow +inconsistencies, and the typing rules arising from runtime values, +which do not. + +## Why is This Important? + +There are at least four reasons why insights obtained in the DOT +project are important. + + 1. They give us a well-founded explanation of _nominal typing_. + Nominal typing means that a type is distinguished from others + simply by having a different name. + For instance, given two trait definitions + ```scala + trait A extends AnyRef { def f: Int } + trait B extends AnyRef { def f: Int } + ``` + we consider `A` and `B` to be different types, even though both + traits have the same parents and both define the same members. + The opposite of + nominal typing is structural typing, which treats types + that have the same structure as being the same. Most programming + languages are at least in part nominal whereas most formal type systems, + including DOT, are structural. But the abstract types in DOT + provide a way to express nominal types such as classes and traits. + The Wadlerfest paper contains examples that show how + one can express classes for standard types such as `Boolean` and `List` in DOT. + + 2. They give us a stable basis on which we can study richer languages + that resemble Scala more closely. For instance, we can encode + type parameters as type members of objects in DOT. This encoding + can give us a better understanding of the interactions of + subtyping and generics. It can explain why variance rules + are the way they are and what the precise typing rules for + wildcard parameters `[_ <: T]`, `[_ >: T]` should be. + + 3. DOT also provides a blueprint for Scala compilation. The new Scala + compiler _Dotty_ has internal data structures that closely resemble DOT. + In particular, type parameters are immediately mapped to type members, + in the way we propose to encode them also in the calculus. + + 4. Finally, the proof principles explored in the DOT work give us guidelines + to assess and treat other possible soundness issues. We now know much + better what conditions must be fulfilled to ensure type soundness. + This lets us put other constructs of the Scala language to the test, + either to increase our confidence that they are indeed sound, or + to show that they are unsound. In my next blog I will + present some of the issues we have discovered through that exercise. 
diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2016-02-17-scaling-dot-soundness.md b/scala3doc/dotty-docs/docs/blog/_posts/2016-02-17-scaling-dot-soundness.md new file mode 100644 index 000000000000..7619545b844e --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2016-02-17-scaling-dot-soundness.md @@ -0,0 +1,156 @@ +--- +layout: blog-page +title: Scaling DOT to Scala - Soundness +author: Martin Odersky +authorImg: images/martin.jpg +date: 2016-02-17 +--- + +In my [last +blog post](http://www.scala-lang.org/blog/2016/02/03/essence-of-scala.html) +I introduced DOT, a minimal calculus that underlies much of Scala. +DOT is much more than an academic exercise, because it gives us +guidelines on how to design a sound type system for full Scala. + +## Recap: The Problem of Bad Bounds + +As was argued in the previous blog post, the danger a path-dependent type +system like Scala's faces is inconsistent bounds or aliases. For +instance, you might have a type alias +```scala +type T = String +``` +in scope in some part of the program, but in another part the same +type member `T` is known as +```scala +type T = Int +``` +If you connect the two parts, you end up allowing assigning a `String` +to an `Int` and vice versa, which is unsound - it will crash at +runtime with a `ClassCastException`. The problem is that there +is no obvious, practical, compile time analysis for DOT or +Scala that ensures that all types have good bounds. Types can contain +abstract type members with bounds that can be refined elsewhere and +several independent refinements might lead together to a bad bound +problem. Barring a whole program analysis there is no specific +point in the program where we can figure this out straightforwardly. + +In DOT, the problem is resolved by insisting that every path prefix `p` +of a type `p.T` is at runtime a concrete value. That way, we only have +to check for good bounds when objects are _created_ with `new`, and +that check is easy: When objects are created, we know their class and +we can insist that all nested types in that class are aliases or +have consistent bounds. So far so good. + +## Loopholes Caused by Scaling Up + +But if we want to scale up the DOT result for full Scala, several +loopholes open up. These come all down to the fact that the prefix of +a type selection might _not_ be a value that's constructed with a +`new` at run time. The loopholes can be classified into three +categories: + + 1. The prefix value might be lazy, and never instantiated to anything, as in: + ```scala + lazy val p: S = p + ... p.T ... + ``` + Note that trying to access the lazy value `p` would result in an infinite loop. But using `p` in a type does not force its evaluation, so we might never evaluate `p`. Since `p` is not initialized with a `new`, bad bounds for `T` would go undetected. + + 2. The prefix value might be initialized to `null`, as in + ```scala + val p: S = null + ... p.T ... + ``` + The problem here is similar to the first one. `p` is not initialized + with a `new` so we know nothing about the bounds of `T`. + + 3. The prefix might be a type `T` in a type projection `T # A`, where `T` + is not associated with a runtime value. + +We can in fact construct soundness issues in all of these cases. Look +at the discussion for issues [#50](https://github.com/lampepfl/dotty/issues/50) +and [#1050](https://github.com/lampepfl/dotty/issues/1050) in the +[Dotty](https://github.com/lampepfl/dotty/issues/1050) repository +on GitHub. 
All issues work fundamentally in the same way: Construct a type `S` +which has a type member `T` with bad bounds, say: + +```scala +Any <: T <: Nothing +``` + +Then, use the left subtyping to turn an expression of type `Any` into +an expression of type `T` and use the right subtyping to turn that +expression into an expression of type `Nothing`: + +```scala +def f(x: Any): p.T = x +def g(x: p.T): Nothing = x +``` + +Taken together, `g(f(x))` will convert every expression into an +expression of type `Nothing`. Since `Nothing` is a subtype of every +other type, this means you can convert an arbitrary expression to have +any type you choose. Such a feat is an impossible promise, of +course. The promise is usually broken at run-time by failing with a +`ClassCastException`. + +## Plugging the Loopholes + +To get back to soundness we need to plug the loopholes. Some of the +necessary measures are taken in pull request [#1051](https://github.com/lampepfl/dotty/issues/1051). +That pull request + + - tightens the rules for overrides of lazy values: lazy values + cannot override or implement non-lazy values, + - tightens the rules which lazy values can appear in paths: they + must be final and must have concrete types with known consistent bounds, + - allows type projections `T # A` only if `T` is a concrete type + with known consistent bounds. + +It looks like this is sufficient to plug soundness problems (1) and +(3). To plug (2), we need to make the type system track nullability in +more detail than we do it now. Nullability tracking is a nice feature +in its own right, but now we have an added incentive for implementing +it: it would help to ensure type soundness. + +There's one sub-case of nullability checking which is much harder to do +than the others. An object reference `x.f` might be `null` at run time +because the field `f` is not yet initialized. This can lead to a +soundness problem, but in a more roundabout way than the other issues +we have identified. In fact, Scala guarantees that in a program that +runs to completion without aborting, every field will eventually be +initialized, so every non-null field will have good bounds. Therefore, +the only way an initialized field `f` could cause a soundness problem +is if the program in question would never get to initialize `f`, +either because it goes into an infinite loop or because it aborts with +an exception or `System.exit` call before reaching the initialization +point of `f`. It's a valid question whether type soundness guarantees +should extend to this class of "strange" programs. We might want to +draw the line here and resort to runtime checks or exclude "strange" +programs from any soundness guarantees we can give. The research community +has coined the term [soundiness](http://soundiness.org/) for +this kind of approach and has [advocated](http://cacm.acm.org/magazines/2015/2/182650-in-defense-of-soundiness/fulltext) for it. + +The necessary restrictions on type projection `T # A` are problematic +because they invalidate some idioms in type-level programming. For +instance, the cute trick of making Scala's type system Turing complete +by having it [simulate SK +combinators](https://michid.wordpress.com/2010/01/29/scala-type-level-encoding-of-the-ski-calculus/) +would no longer work since that one relies on unrestricted type +projections. The same holds for some of the encodings of type-level +arithmetic. 
+ +To ease the transition, we will continue for a while to allow unrestricted type +projections under a flag, even though they are potentially +unsound. In the current Dotty compiler, that flag is a language import +`-language:Scala2Compat`, but it could be something different for other +compilers, e.g. `-unsafe`. Maybe we can find rules that are less +restrictive than the ones we have now, and are still sound. But one +aspect should be non-negotiable: Any fundamental deviations from the +principles laid down by DOT needs to be proven mechanically correct +just like DOT was. We have achieved a lot with the DOT proofs, so we +should make sure not to back-slide. And if the experience of the past +10 years has taught us one thing, it is that the meta theory of type +systems has many more surprises in store than one might think. That's +why mechanical proofs are essential. diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2016-05-05-multiversal-equality.md b/scala3doc/dotty-docs/docs/blog/_posts/2016-05-05-multiversal-equality.md new file mode 100644 index 000000000000..236225eec318 --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2016-05-05-multiversal-equality.md @@ -0,0 +1,90 @@ +--- +layout: blog-page +title: Multiversal Equality for Scala +author: Martin Odersky +authorImg: images/martin.jpg +date: 2016-05-05 +--- + +I have been working recently on making equality tests using `==` and `!=` safer in Scala. This has led to a [Language Enhancement Proposal](https://github.com/lampepfl/dotty/issues/1247) which I summarize in this blog. + +## Why Change Equality? + +Scala prides itself of its strong static type system. Its type discipline is particularly useful when it comes to refactoring. Indeed, it's possible to write programs in such a way that refactoring problems show up with very high probability as type errors. This is essential for being able to refactor with the confidence that nothing will break. And the ability to do such refactorings is in turn very important for keeping code bases from rotting. + +Of course, getting such a robust code base requires the cooperation of the developers. They should avoid type `Any`, casts, [stringly typed](http://c2.com/cgi/wiki?StringlyTyped) logic, and more generally any operation over loose types that do not capture the important properties of a value. Unfortunately, there is one area in Scala where such loose types are very hard to avoid: That's equality. Comparisons with `==` and `!=` are _universal_. They compare any two values, no matter what their types are. This causes real problems for writing code and more problems for refactoring it. + +For instance, one might want to introduce a proxy for some data structure so that instead of accessing the data structure directly one goes through the proxy. The proxy and the underlying data would have different types. Normally this should be an easy refactoring. If one passes by accident a proxy for the underlying type or _vice versa_ the type checker will flag the error. However, if one accidentally compares a proxy with the underlying type using `==` or a pattern match, the program is still valid, but will just always say `false`. This is a real worry in practice. I recently abandoned a desirable extensive refactoring because I feared that it would be too hard to track down such errors. + +## Where Are We Today? + +The problems of universal equality in Scala are of course well-known. Some libraries have tried to fix it by adding another equality operator with more restricted typing. 
Most often this safer equality is written `===`. While `===` is certainly useful, I am not a fan of adding another equality operator to the language and core libraries. It would be much better if we could fix `==` instead. This would be both simpler and would catch all potential equality problems including those related to pattern matching. + +How can `==` be fixed? It looks much harder to do this than adding an alternate equality operator. First, we have to keep backwards compatibility. The ability to compare everything to everything is by now baked into lots of code and libraries. Second, with just one equality operator we need to make this operator work in all cases where it makes sense. An alternative `===` operator can choose to refuse some comparisons that should be valid because there's always `==` to fall back to. With a unique `==` operator we do not have this luxury. + +The current status in Scala is that the compiler will give warnings for _some_ comparisons that are always `false`. But the coverage is weak. For instance this will give a warning: + +```scala +scala> 1 == "abc" +:12: warning: comparing values of types Int and String using `==' will always yield false +``` + +But this will not: + +```scala +scala> "abc" == 1 +res2: Boolean = false +``` + +There are also cases where a warning is given for a valid equality test that actually makes sense because the result could be `true`. In summary, the current checking catches some obvious bugs, which is nice. But it is far too weak and fickle to be an effective refactoring aid. + + +## What's Proposed? + +I believe to do better, we need to enlist the cooperation of developers. Ultimately it's the developer who provides implementations of equality methods and who is therefore best placed to characterize which equalities make sense. Sometimes this characterization can be involved. For instance, an `Int` can be compared to other primitive numeric values or to instances of type `java.lang.Number` but any other comparison will always yield `false`. Or, it makes sense to compare two `Option` values if and only if it makes sense to compare the optional element values. + +The best known way to characterize such relationships is with type classes. Implicit values of a trait `Eq[T, U]` can capture the property that values of type `T` can be compared to values of type `U`. Here's the definition of `Eq` + +```scala +package scala + +trait Eq[-T, -U] +``` + +That is, `Eq` is a pure marker trait with two type parameters and without any members. Developers can define equality classes by giving implicit `Eq` instances. Here is a simple one: + +```scala +implicit def eqString: Eq[String, String] = Eq +``` + +This states that strings can be only compared to strings, not to values of other types. Here's a more complicated `Eq` instance: + +```scala +implicit def eqOption[T, U](implicit _eq: Eq[T, U]): Eq[Option[T], Option[U]] = Eq +``` + +This states that `Option` values can be compared if their elements can be compared. + +It's foreseen that such `Eq` instances can be generated automatically. If we add an annotation `@equalityClass` to `Option` like this + +```scala +@equalityClass class Option[+T] { ... } +``` + +then the `eqOption` definition above would be generated automatically in `Option`'s companion object. + +Given a set of `Eq` instances, the idea is that the Scala compiler will check every time it encounters a _potentially problematic_ comparison between values of types `T` and `U` that there is an implicit instance of `Eq[T, U]`. 
A comparison is _potentially problematic_ if it is between incompatible types. As long as `T <: U` or `U <: T` the equality could make sense because both sides can potentially be the same value. + +So this means we still keep universal equality as it is in Scala now - we don't have a choice here anyway, because of backwards compatibility. But we render it safe by checking that for each comparison the corresponding `Eq` instance exists. + +What about types for which no `Eq` instance exists? To maintain backwards compatibility, we allow comparisons of such types as well, by means of a fall-back `eqAny` instance. But we do not allow comparisons between types that have an `Eq` instance and types that have none. Details are explained in the [proposal](https://github.com/lampepfl/dotty/issues/1247). + +## Properties + +Here are some nice properties of the proposal + +1. It is _opt-in_. To get safe checking, developers have to annotate with `@equalityClass` classes that should allow comparisons only between their instances, or they have to define implicit `Eq` instances by hand. 2. It is backwards compatible. Without developer-provided `Eq` instances, equality works as before. +3. It carries no run-time cost compared to universal equality. Indeed the run-time behavior of equality is not affected at all. +4. It has no problems with parametricity, variance, or bottom types. 5. Depending on the actual `Eq` instances given, it can be very precise. That is, no comparisons that might yield `true` need to be rejected, and most comparisons that will always yield `false` are in fact rejected. + +The scheme effectively leads to a partition of the former universe of types into sets of types. Values with types in the same partition can be compared among themselves but values with types in different partitions cannot. An `@equalityClass` annotation on a type creates a new partition. All types that do not have any `Eq` instances (except `eqAny`, that is) form together another partition. So instead of a single _universe_ of values that can be compared to each other we get a _multiverse_ of partitions. Hence the name of the proposal: **Multiversal Equality**. diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2016-12-05-implicit-function-types.md b/scala3doc/dotty-docs/docs/blog/_posts/2016-12-05-implicit-function-types.md new file mode 100644 index 000000000000..ba28159c0fa3 --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2016-12-05-implicit-function-types.md @@ -0,0 +1,365 @@ +--- +layout: blog-page +title: Implicit Function Types +author: Martin Odersky +authorImg: images/martin.jpg +date: 2016-12-05 +--- + +I just made the [first pull request](https://github.com/lampepfl/dotty/pull/1775) to add _implicit function types_ to +Scala. I am pretty excited about it, because - citing the explanation +of the pull request - "_This is the first step to bring contextual +abstraction to Scala_". What do I mean by this? + +**Abstraction**: The ability to name a concept and use just the name afterwards. + +**Contextual**: A piece of a program produces results or outputs in +some context. Our programming languages are very good at describing +and abstracting what outputs are produced. But there's hardly anything +yet available to abstract over the inputs that programs get from their +context. 
Many interesting scenarios fall into that category, +including: + + - passing configuration data to the parts of a system that need them, + - managing capabilities for security critical tasks, + - wiring components up with dependency injection, + - defining the meanings of operations with type classes, + - more generally, passing any sort of context to a computation. + +Implicit function types are a surprisingly simple and general way to +make coding patterns solving these tasks abstractable, reducing +boilerplate code and increasing applicability. + +**First Step**: My pull request is a first implementation. It solves the + problem in principle, but introduces some run-time overhead. The + next step will be to eliminate the run-time overhead through some + simple optimizations. + +## Implicit Parameters + +In a functional setting, the inputs to a computation are most +naturally expressed as _parameters_. One could simply augment +functions to take additional parameters that represent configurations, +capabilities, dictionaries, or whatever contextual data the functions +need. The only downside with this is that often there's a large +distance in the call graph between the definition of a contextual +element and the site where it is used. Consequently, it becomes +tedious to define all those intermediate parameters and to pass them +along to where they are eventually consumed. + +Implicit parameters solve one half of the problem. Implicit +parameters do not have to be propagated using boilerplate code; the +compiler takes care of that. This makes them practical in many +scenarios where plain parameters would be too cumbersome. For +instance, type classes would be a lot less popular if one would have +to pass all dictionaries by hand. Implicit parameters are also very +useful as a general context passing mechanism. For instance in the +_Dotty_ compiler, almost every function takes an implicit context +parameter which defines all elements relating to the current state of +the compilation. This is in my experience much better than the cake +pattern because it is lightweight and can express context changes in a +purely functional way. + +The main downside of implicit parameters is the verbosity of their +declaration syntax. It's hard to illustrate this with a smallish example, +because it really only becomes a problem at scale, but let's try anyway. + +Let's say we want to write some piece of code that's designed to run +in a transaction. For the sake of illustration here's a simple transaction class: +```scala +class Transaction { + private val log = new ListBuffer[String] + def println(s: String): Unit = log += s + + private var aborted = false + private var committed = false + + def abort(): Unit = { aborted = true } + def isAborted = aborted + + def commit(): Unit = + if (!aborted && !committed) { + Console.println("******* log ********") + log.foreach(Console.println) + committed = true + } +} +``` +The transaction encapsulates a log, to which one can print messages. +It can be in one of three states: running, committed, or aborted. +If the transaction is committed, it prints the stored log to the console. + +The `transaction` method lets one run some given code `op` inside +a newly created transaction: +```scala + def transaction[T](op: Transaction => T) = { + val trans: Transaction = new Transaction + op(trans) + trans.commit() + } +``` +The current transaction needs to be passed along a call chain to all +the places that need to access it. 
To illustrate this, here are three +functions `f1`, `f2` and `f3` which call each other, and also access +the current transaction. The most convenient way to achieve this is +by passing the current transaction as an implicit parameter. +```scala + def f1(x: Int)(implicit thisTransaction: Transaction): Int = { + thisTransaction.println(s"first step: $x") + f2(x + 1) + } + def f2(x: Int)(implicit thisTransaction: Transaction): Int = { + thisTransaction.println(s"second step: $x") + f3(x * x) + } + def f3(x: Int)(implicit thisTransaction: Transaction): Int = { + thisTransaction.println(s"third step: $x") + if (x % 2 != 0) thisTransaction.abort() + x + } +``` +The main program calls `f1` in a fresh transaction context and prints +its result: +```scala + def main(args: Array[String]) = { + transaction { + implicit thisTransaction => + val res = f1(args.length) + println(if (thisTransaction.isAborted) "aborted" else s"result: $res") + } + } +``` +Two sample calls of the program (let's call it `TransactionDemo`) are here: +```shell +scala TransactionDemo 1 2 3 +result: 16 +******* log ******** +first step: 3 +second step: 4 +third step: 16 + +scala TransactionDemo 1 2 3 4 +aborted +``` +So far, so good. The code above is quite compact as far as expressions +are concerned. In particular, it's nice that, being implicit +parameters, none of the transaction values had to be passed along +explicitly in a call. But on the definition side, things are less +rosy: Every one of the functions `f1` to `f3` needed an additional +implicit parameter: +```scala +(implicit thisTransaction: Transaction) +``` +A three-times repetition might not look so bad here, but it certainly +smells of boilerplate. In real-sized projects, this can get much worse. +For instance, the _Dotty_ compiler uses implicit abstraction +over contexts for most of its parts. Consequently it ends up with currently +no fewer than 2641 occurrences of the text string +```scala +(implicit ctx: Context) +``` +It would be nice if we could get rid of them. + +## Implicit Functions + +Let's massage the definition of `f1` a bit by moving the last parameter section to the right of the equals sign: +```scala + def f1(x: Int) = { implicit thisTransaction: Transaction => + thisTransaction.println(s"first step: $x") + f2(x + 1) + } +``` +The right hand side of this new version of `f1` is now an implicit +function value. What's the type of this value? Previously, it was +`Transaction => Int`, that is, the knowledge that the function has an +implicit parameter got lost in the type. The main extension implemented by +the pull request is to introduce implicit function types that mirror +the implicit function values which we have already. Concretely, the new +type of `f1` is: +```scala +implicit Transaction => Int +``` +Just like the normal function type syntax `A => B`, desugars to `scala.Function1[A, B]` +the implicit function type syntax `implicit A => B` desugars to `scala.ImplicitFunction1[A, B]`. +The same holds at other function arities. With Dotty's [pull request #1758](https://github.com/lampepfl/dotty/pull/1758) +merged there is no longer an upper limit of 22 for such functions. + +The type `ImplicitFunction1` can be thought of being defined as follows: +```scala +trait ImplicitFunction1[-T0, R] extends Function1[T0, R] { + override def apply(implicit x: T0): R +} +``` +However, you won't find a classfile for this trait because all implicit function traits +get mapped to normal functions during type erasure. 
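+
+Before turning to the typing rules, here is a minimal, self-contained sketch (with made-up names) of the new syntax in action; it anticipates the two rules spelled out below:
+
+```scala
+// Minimal sketch with hypothetical names, using the implicit function
+// type syntax introduced above:
+def greeting: implicit String => String =
+  s"hello, ${implicitly[String]}"   // becomes an implicit closure (second rule below)
+
+implicit val who: String = "dotty"
+val msg = greeting                  // expands to greeting.apply(who): "hello, dotty" (first rule below)
+```
+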
+ +There are two rules that guide type checking of implicit function types. +The first rule says that an implicit function is applied to implicit arguments +in the same way an implicit method is. More precisely, if `t` is an expression +of an implicit function type +```scala +t: implicit (T1, ..., Tn) => R +``` +such that `t` is not an implicit closure itself and `t` is not the +prefix of a call `t.apply(...)`, then an `apply` is implicitly +inserted, so `t` becomes `t.apply`. We have already seen that the +definition of `t.apply` is an implicit method as given in the +corresponding implicit function trait. Hence, it will in turn be +applied to a matching sequence of implicit arguments. The end effect is +that references to implicit functions get applied to implicit arguments in the +same way as references to implicit methods. + +The second rule is the dual of the first. If the expected type +of an expression `t` is an implicit function type +```scala +implicit (T1, ..., Tn) => R +``` +then `t` is converted to an implicit closure, unless it is already one. +More precisely, `t` is mapped to the implicit closure +```scala +implicit ($ev1: T1, ..., $evn: Tn) => t +``` +The parameter names of this closure are compiler-generated identifiers +which should not be accessed from user code. That is, the only way to +refer to an implicit parameter of a compiler-generated function is via +`implicitly`. + +It is important to note that this second conversion needs to be applied +_before_ the expression `t` is typechecked. This is because the +conversion establishes the necessary context to make type checking `t` +succeed by defining the required implicit parameters. + +There is one final tweak to make this all work: When using implicit parameters +for nested functions it was so far important to give all implicit parameters +of the same type the same name, or else one would get ambiguities. For instance, consider the +following fragment: +```scala +def f(implicit c: C) = { + def g(implicit c: C) = ... implicitly[C] ... + ... +} +``` +If we had named the inner parameter `d` instead of `c` we would +have gotten an implicit ambiguity at the call of `implicitly` because +both `c` and `d` would be eligible: +```scala +def f(implicit c: C) = { + def g(implicit d: C) = ... implicitly[C] ... // error! + ... +} +``` +The problem is that parameters in implicit closures now have +compiler-generated names, so the programmer cannot enforce the proper +naming scheme to avoid all ambiguities. We fix the problem by +introducing a new disambiguation rule which makes nested occurrences +of an implicit take precedence over outer ones. This rule, which +applies to all implicit parameters and implicit locals, is conceptually +analogous to the rule that prefers implicits defined in companion +objects of subclasses over those defined in companion objects of +superclass. With that new disambiguation rule the example code above +now compiles. + +That's the complete set of rules needed to deal with implicit function types. + +## How to Remove Boilerplate + +The main advantage of implicit function types is that, being types, +they can be abstracted. That is, one can define a name for an implicit +function type and then use just the name instead of the full type. +Let's revisit our previous example and see how it can be made more +concise using this technique. 
+ +We first define a type `Transactional` for functions that take an implicit parameter of type `Transaction`: +```scala +type Transactional[T] = implicit Transaction => T +``` +Making the return type of `f1` to `f3` a `Transactional[Int]`, we can +eliminate their implicit parameter sections: +```scala + def f1(x: Int): Transactional[Int] = { + thisTransaction.println(s"first step: $x") + f2(x + 1) + } + def f2(x: Int): Transactional[Int] = { + thisTransaction.println(s"second step: $x") + f3(x * x) + } + def f3(x: Int): Transactional[Int] = { + thisTransaction.println(s"third step: $x") + if (x % 2 != 0) thisTransaction.abort() + x + } +``` +You might ask, how does `thisTransaction` typecheck, since there is no +longer a parameter with that name? In fact, `thisTransaction` is now a +global definition: +```scala + def thisTransaction: Transactional[Transaction] = implicitly[Transaction] +``` +You might ask: a `Transactional[Transaction]`, is that not circular? To see more clearly, let's expand +the definition according to the rules given in the last section. `thisTransaction` +is of implicit function type, so the right hand side is expanded to the +implicit closure +```scala + implicit ($ev0: Transaction) => implicitly[Transaction] +``` +The right hand side of this closure, `implicitly[Transaction]`, needs +an implicit parameter of type `Transaction`, so the closure is further +expanded to +```scala + implicit ($ev0: Transaction) => implicitly[Transaction]($ev0) +``` +Now, `implicitly` is defined in `scala.Predef` like this: +```scala + def implicitly[T](implicit x: T) = x +``` +If we plug that definition into the closure above and simplify, we get: +```scala + implicit ($ev0: Transaction) => $ev0 +``` +So, `thisTransaction` is just the implicit identity function on `transaction`! +In other words, if we use `thisTransaction` in the body of `f1` to `f3`, it will +pick up and return the unnamed implicit parameter that's in scope. + +Finally, here are the `transaction` and `main` method that complete +the example. Since `transactional`'s parameter `op` is now a +`Transactional`, we can eliminate the `Transaction` argument to `op` +and the `Transaction` lambda in `main`; both will be added by the compiler. +```scala + def transaction[T](op: Transactional[T]) = { + implicit val trans: Transaction = new Transaction + op + trans.commit() + } + def main(args: Array[String]) = { + transaction { + val res = f1(args.length) + println(if (thisTransaction.isAborted) "aborted" else s"result: $res") + } + } +``` +## Categorically Speaking + +There are many interesting connections with category theory to explore +here. On the one hand, implicit functions are used for tasks that are +sometimes covered with monads such as the reader monad. There's an +argument to be made that implicits have better composability than +monads and why that is. + +On the other hand, it turns out that implicit functions can also be +given a co-monadic interpretation, and the interplay between monads and +comonads is very interesting in its own right. + +But these discussions will have to wait for another time, as +this blog post is already too long. + +## Conclusion + +Implicit function types are unique way to abstract over the context in +which some piece of code is run. I believe they will deeply influence +the way we write Scala in the future. They are very powerful +abstractions, in the sense that just declaring a type of a function +will inject certain implicit values into the scope of the function's +implementation. 
Can this be abused, making code more obscure? +Absolutely, like every other powerful abstraction technique. To keep +your code sane, please keep the [Principle of Least Power](http://www.lihaoyi.com/post/StrategicScalaStylePrincipleofLeastPower.html) in mind. diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2017-05-31-first-dotty-milestone-release.md b/scala3doc/dotty-docs/docs/blog/_posts/2017-05-31-first-dotty-milestone-release.md new file mode 100644 index 000000000000..b3c6ae9771ae --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2017-05-31-first-dotty-milestone-release.md @@ -0,0 +1,121 @@ +--- +layout: blog-page +title: Announcing Dotty 0.1.2-RC1, a major step towards Scala 3 +author: Dmytro Petrashko +authorImg: images/petrashko.jpg +date: 2017-05-31 +--- + +Today, we are excited to release Dotty version 0.1.2-RC1. This release +serves as a technology preview that demonstrates new language features +and the compiler supporting them. + + + + +## Why is this important? + +_Dotty_ is the project name for a language and compiler that +is slated to become Scala 3.0. This is an ongoing development, +transparently developed as open source software. The Dotty project +started more than 4 years ago. It reached a major milestone in 2015 by +achieving +[bootstrap](https://dotty.epfl.ch/blog/2015/10/23/dotty-compiler-bootstraps.html), +that is, showing that the new compiler could compile itself. Today we +have reached another milestone with this first release of the +codebase. Developments will not stop here, but they will in the future +all be part of regular time-based releases. + +## How can you try it out? + +You have several alternatives: use the `sbt-dotty` plugin, get a standalone +installation, or try it online on [Scastie]. + +### sbt +Using sbt 0.13.13 or newer, do: + +```shell +sbt new lampepfl/dotty.g8 +``` + +This will setup a new sbt project with Dotty as compiler. For more details on +using Dotty with sbt, see the +[example project](https://github.com/lampepfl/dotty-example-project). + +### Standalone installation + +Releases are available for download on the _Releases_ +section of the Dotty repository: +https://github.com/lampepfl/dotty/releases + +We also provide a [homebrew](https://brew.sh/) package that can be installed by running + +```shell +brew install lampepfl/brew/dotty +``` + +### Scastie + +[Scastie], the online Scala playground, +supports Dotty. +You can try it out there without installing anything. + +# What’s in the 0.1.2-RC1 technology preview? +This technology preview demonstrates new language features planned for Scala 3: + + - [Intersection Types](https://dotty.epfl.ch/docs/reference/new-types/intersection-types.html) + - [Union Types](https://dotty.epfl.ch/docs/reference/new-types/union-types.html) + - [Trait Parameters](https://dotty.epfl.ch/docs/reference/other-new-features/trait-parameters.html) + - [Enumerations](https://dotty.epfl.ch/docs/reference/enums/enums.html) + - [Algebraic Data Types](https://dotty.epfl.ch/docs/reference/enums/adts.html) + - [By-Name Implicits](https://dotty.epfl.ch/docs/reference/other-new-features/implicit-by-name-parameters.html) + +We also ship with tools that help you try out the Dotty platform: + + - [IDE features for Visual Studio Code](https://dotty.epfl.ch/docs/usage/ide-support.html) + - [sbt support, including retro-compatibility with Scala 2](https://github.com/lampepfl/dotty-example-project) + +See here for the full [release notes](https://dotty.epfl.ch/docs/release-notes/0.1.2.html). 
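+
+To give a flavour of two of the features listed above, here is a small illustrative sketch (not taken from the release notes) using intersection and union types:
+
+```scala
+trait Resettable { def reset(): Unit }
+trait Growable[T] { def add(t: T): Unit }
+
+// Intersection type: the argument must be both Resettable and Growable[String]
+def f(x: Resettable & Growable[String]): Unit = {
+  x.reset()
+  x.add("first")
+}
+
+// Union type: the argument is either an Int or a String
+def show(id: Int | String): String = id match {
+  case i: Int    => s"number $i"
+  case s: String => s
+}
+```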
+ +## Release schedule + +Starting from this release, we are adopting a time-based release schedule: +- Nightly builds will be published, for those wanting to be at the forefront of + development. +- Every 6 weeks, a release candidate will be cut based on the latest nightly build. +- Every 6 weeks, the latest release candidate becomes a release. + +At the end of 6 weeks, the milestone will be promoted to a stable release. +See here for the full [version number explanation](https://dotty.epfl.ch/docs/usage/version-numbers.html). + +## What are the next steps? + +Over the coming weeks and months, we plan to work on the following topics: + + - [Integrate Local optimizations developed in Dotty linker](https://github.com/lampepfl/dotty/pull/2513); + - [Add Language-level support for HMaps and HLists](https://github.com/lampepfl/dotty/pull/2199); + - [Port global optimizations from Dotty linker](https://github.com/lampepfl/dotty/pull/1840). + +If you want to get your hands dirty with any of this, now is a good +moment to get involved! Join the team of contributors, including +Martin Odersky ([@odersky](https://twitter.com/odersky)) +Dmitry Petrashko ([@DarkDimius](https://twitter.com/DarkDimius)), +Guillaume Martres ([@smarter](https://github.com/smarter)), +Felix Mulder ([@felixmulder](https://twitter.com/felixmulder)), +Nicolas Stucki ([@nicolasstucki](https://github.com/nicolasstucki)), +Liu Fengyun ([@liufengyun](https://github.com/liufengyun)), +Olivier Blanvillain ([@OlivierBlanvillain](https://github.com/OlivierBlanvillain)), +and others! + +## Library authors: Join our community build + +Dotty now has a set of libraries that are built against every nightly snapshot. +Currently this includes scalatest, squants and algebra. +Join our [community build](https://github.com/lampepfl/dotty-community-build) + to make sure that our regression suite includes your library. + + +To get started, see . + + +[Scastie]: https://scastie.scala-lang.org/?target=dotty diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2017-07-12-second-dotty-milestone-release.md b/scala3doc/dotty-docs/docs/blog/_posts/2017-07-12-second-dotty-milestone-release.md new file mode 100644 index 000000000000..0ce22338099d --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2017-07-12-second-dotty-milestone-release.md @@ -0,0 +1,368 @@ +--- +layout: blog-page +title: Announcing Dotty 0.2.0-RC1, with new optimizations, improved stability and IDE support +author: Dmytro Petrashko +authorImg: images/petrashko.jpg +date: 2017-07-12 +--- + +Today, we are excited to release Dotty version 0.2.0-RC1. This release +serves as a technology preview that demonstrates new language features +and the compiler supporting them. + +This release is based on the [previous milestone](/blog/2017/05/31/first-dotty-milestone-release.html). +The highlights of this release are: + - substantial improvement of quality of generated code for pattern matching + - improvements in VS Code IDE stability + - support Windows in VS Code IDE + - improved compatibility with scalac + - initial support for reproducible builds + + + + +This is our second scheduled release according to our [6-week release schedule](https://dotty.epfl.ch/docs/usage/version-numbers.html). + +## What’s in the 0.2.0-RC1 technology preview? 
+The [previous technology preview](/blog/2017/05/31/first-dotty-milestone-release.html) has shipped new language features planned for Scala 3: +[Intersection Types](https://dotty.epfl.ch/docs/reference/new-types/intersection-types.html), +[Union Types](https://dotty.epfl.ch/docs/reference/new-types/union-types.html), +[Trait Parameters](https://dotty.epfl.ch/docs/reference/other-new-features/trait-parameters.html), +[Enumerations](https://dotty.epfl.ch/docs/reference/enums/enums.html), +[Algebraic Data Types](https://dotty.epfl.ch/docs/reference/enums/adts.html), +[By-Name Implicits](https://dotty.epfl.ch/docs/reference/other-new-features/implicit-by-name-parameters.html). + +This technology preview is geared towards improving stability and reliability. It includes: + + - [Local optimizations upstreamed from the Dotty Linker](https://github.com/lampepfl/dotty/pull/2513), [2647](https://github.com/lampepfl/dotty/pull/2647) by ([@OlivierBlanvillain](https://github.com/OlivierBlanvillain)). See more details below. + - [Optimizing Pattern Matcher](https://github.com/lampepfl/dotty/pull/2829) by ([@odersky](https://github.com/odersky)) + - [Idempotency checks](https://github.com/lampepfl/dotty/pull/2756) are the first step to reproducible builds + - [Faster Base class sets](https://github.com/lampepfl/dotty/pull/2676) by ([@odersky](https://github.com/odersky)) and ([@darkdimius](https://twitter.com/darkdimius)) + - Numerous fixes to IDE and Dotty Language Server covering: + + - [Windows support for VS Code plugin](https://github.com/lampepfl/dotty/pull/2776) + - [Fix hover-on-type for implicitly converted expressions](https://github.com/lampepfl/dotty/pull/2836) + - [Fixes to find all references in external projects](https://github.com/lampepfl/dotty/pull/2810), [2773](https://github.com/lampepfl/dotty/pull/2773/files) + - [Fix conflict with dragos-vscode-scala](https://github.com/lampepfl/dotty/pull/2777) + - [Fix ide crash on non-parsable file](https://github.com/lampepfl/dotty/pull/2752) + - [Fix hover functionality for enum classes](https://github.com/lampepfl/dotty/pull/2722) + - [Report errors on Dotty Language Server initialization](https://github.com/lampepfl/dotty/pull/2708) + - [Fixes to sbt setting up Dotty IDE](https://github.com/lampepfl/dotty/pull/2690) + - General stability improvements [2838](https://github.com/lampepfl/dotty/pull/2838), [2787](https://github.com/lampepfl/dotty/pull/2787), [2692](https://github.com/lampepfl/dotty/pull/2692) + + - Scalac compatibility improvements: + + - [Support Scala 2.12 traits](https://github.com/lampepfl/dotty/pull/2685) + - [Fixes to handling of Scala 2 classfiles](https://github.com/lampepfl/dotty/pull/2834/files) + - [Scalac parser crashes on Dotty.jar](https://github.com/lampepfl/dotty/pull/2719) + + - Java compatibility improvements: + + - [Fixes to handing of Java generic signatures](https://github.com/lampepfl/dotty/pull/2831) + - [java.lang.System.out is final but that's a lie](https://github.com/lampepfl/dotty/pull/2781) + + - Improved error messages: + + - [Nicer error message for "implicit function type needs non-empty parameter list"](https://github.com/lampepfl/dotty/pull/2821) + - [Nicer error message for nonsensical modifier combination](https://github.com/lampepfl/dotty/pull/2807/files), [2747](https://github.com/lampepfl/dotty/pull/2747) + - [Nicer error message for supercall inside 
@inline method](https://github.com/lampepfl/dotty/pull/2740) + - [Check that case classes don't inherit case classes](https://github.com/lampepfl/dotty/pull/2790) + - [Check that named parameters don't conflict with positional ones](https://github.com/lampepfl/dotty/pull/2785) + + - Improved command line handling: + + - [Support params in a file like @file.txt](https://github.com/lampepfl/dotty/pull/2765) + + - Type system stability: + + - [Handle wildcard types in unions and intersections](https://github.com/lampepfl/dotty/pull/2742) + + - Fixes to implicit search: + + - [Fix shadowing of higher order implicits](https://github.com/lampepfl/dotty/pull/2739) + + +### Better generated code: + +As was [spotted](https://twitter.com/gkossakowski/status/870243464528744449) by [@gkossakowski](https://twitter.com/gkossakowski) +in the previous release Dotty was on par with Scala 2.11 in speed. But why is that? +The reason is that Dotty compiled by Dotty had really horrible code generated for pattern matching. + +Let's illustrate on a simple example: + +```scala +case class CC(a: Int, b: Object) + + def foo(x: Any): Int = { + val (a, b) = x match { + case CC(s @ 1, CC(t, _)) => + (s , 2) + case _ => (42, 43) + } + a + b + } + + def booleans(a: Object) = { + val (b1, b2) = (a.isInstanceOf[CC], a.isInstanceOf[List[Int]]) + (b1, b2) match { + case (true, true) => true + case (false, false) => true + case _ => false + } + } +``` + + +The Dotty that was released in the previous milestone didn't contain any optimizations and generated inefficient code for it. +The java-with-goto code below is equivalent to what Dotty generated. + +```java +// output of dotc 0.1.2-RC1 + public int foo(Object x) { + var3_2 = x; + if (!(var3_2 instanceof CC)) ** GOTO lbl-1000 + var4_3 = (CC)var3_2; + if (CC$.MODULE$.unapply((CC)var3_2) == null) ** GOTO lbl-1000 + var5_4 = CC$.MODULE$.unapply((CC)var3_2); + s = var5_4._1(); + var7_6 = var5_4._2(); + if (1 != s) ** GOTO lbl-1000 + var8_7 = s; + if (!(var7_6 instanceof CC)) ** GOTO lbl-1000 + var9_8 = (CC)var7_6; + if (CC$.MODULE$.unapply((CC)var7_6) != null) { + var10_9 = CC$.MODULE$.unapply((CC)var7_6); + var11_10 = var10_9._2(); + v0 = Tuple2..MODULE$.apply((Object)BoxesRunTime.boxToInteger((int)1), (Object)BoxesRunTime.boxToInteger((int)2)); + } else lbl-1000: // 5 sources: + { + v0 = Tuple2..MODULE$.apply((Object)BoxesRunTime.boxToInteger((int)42), (Object)BoxesRunTime.boxToInteger((int)43)); + } + var2_11 = v0; + a = BoxesRunTime.unboxToInt((Object)var2_11._1()); + b = BoxesRunTime.unboxToInt((Object)var2_11._2()); + return a + b; + } + + public boolean booleans(Object a) { + Tuple2 tuple2 = Tuple2..MODULE$.apply((Object)BoxesRunTime.boxToBoolean((boolean)(a instanceof CC)), (Object)BoxesRunTime.boxToBoolean((boolean)(a instanceof List))); + boolean b1 = BoxesRunTime.unboxToBoolean((Object)tuple2._1()); + boolean b2 = BoxesRunTime.unboxToBoolean((Object)tuple2._2()); + Tuple2 tuple22 = Tuple2..MODULE$.apply((Object)BoxesRunTime.boxToBoolean((boolean)b1), (Object)BoxesRunTime.boxToBoolean((boolean)b2)); + Option option = Tuple2..MODULE$.unapply(tuple22); + if (!option.isEmpty()) { + Tuple2 tuple23 = (Tuple2)option.get(); + boolean bl = BoxesRunTime.unboxToBoolean((Object)tuple23._1()); + boolean bl2 = BoxesRunTime.unboxToBoolean((Object)tuple23._2()); + if (bl) { + boolean bl3 = bl; + if (bl2) { + boolean bl4 = bl2; + return true; + } + } + } + Option option2 = Tuple2..MODULE$.unapply(tuple22); + if (option2.isEmpty()) return false; + Tuple2 
tuple24 = (Tuple2)option2.get(); + boolean bl = BoxesRunTime.unboxToBoolean((Object)tuple24._1()); + boolean bl5 = BoxesRunTime.unboxToBoolean((Object)tuple24._2()); + if (bl) return false; + boolean bl6 = bl; + if (bl5) return false; + boolean bl7 = bl5; + return true; + } +``` + +Due to the new optimizing pattern matcher, Dotty now is able to generate the code below without `-optimise` + +```java +// output of 0.2.0-RC1 without -optimise + public int foo(Object x) { + var3_2 = x; + if (!(var3_2 instanceof CC)) ** GOTO lbl-1000 + var4_3 = CC$.MODULE$.unapply((CC)var3_2); + s = var5_4 = var4_3._1(); + if (1 == var5_4 && (var7_6 = var4_3._2()) instanceof CC) { + t = CC$.MODULE$.unapply((CC)var7_6)._1(); + v0 = Tuple2..MODULE$.apply((Object)BoxesRunTime.boxToInteger((int)1), (Object)BoxesRunTime.boxToInteger((int)2)); + } else lbl-1000: // 2 sources: + { + v0 = Tuple2..MODULE$.apply((Object)BoxesRunTime.boxToInteger((int)42), (Object)BoxesRunTime.boxToInteger((int)43)); + } + var2_8 = v0; + a = BoxesRunTime.unboxToInt((Object)var2_8._1()); + b = BoxesRunTime.unboxToInt((Object)var2_8._2()); + return a + b; + } + + public boolean booleans(Object a) { + Tuple2 tuple2 = Tuple2..MODULE$.apply((Object)BoxesRunTime.boxToBoolean((boolean)(a instanceof CC)), (Object)BoxesRunTime.boxToBoolean((boolean)(a instanceof List))); + boolean b1 = BoxesRunTime.unboxToBoolean((Object)tuple2._1()); + boolean b2 = BoxesRunTime.unboxToBoolean((Object)tuple2._2()); + Tuple2 tuple22 = Tuple2..MODULE$.apply((Object)BoxesRunTime.boxToBoolean((boolean)b1), (Object)BoxesRunTime.boxToBoolean((boolean)b2)); + if (tuple22 != null) { + boolean bl; + boolean bl2 = BoxesRunTime.unboxToBoolean((Object)tuple22._1()); + if (!bl2) { + bl = bl2; + } else { + if (BoxesRunTime.unboxToBoolean((Object)tuple22._2())) { + return true; + } + bl = bl2; + } + if (!bl) { + if (false != BoxesRunTime.unboxToBoolean((Object)tuple22._2())) return false; + return true; + } + } + return false; + } +``` + +You can clearly see that it's shorter ;-) and it actually does less work. +If you additionally enable local optimizations, you get decent generated code: + +```java +// output of 0.2.0-RC1 with -optimise + + public int foo(Object x) { + int n; + Tuple2 tuple2; + CC cC; + Object object; + if (x instanceof CC && 1 == (n = (cC = (CC)x)._1()) && (object = cC._2()) instanceof CC) { + ((CC)object)._1(); + tuple2 = new Tuple2((Object)BoxesRunTime.boxToInteger((int)1), (Object)BoxesRunTime.boxToInteger((int)2)); + } else { + tuple2 = new Tuple2((Object)BoxesRunTime.boxToInteger((int)42), (Object)BoxesRunTime.boxToInteger((int)43)); + } + Tuple2 tuple22 = tuple2; + return BoxesRunTime.unboxToInt((Object)tuple22._1()) + BoxesRunTime.unboxToInt((Object)tuple22._2()); + } + + public boolean booleans(Object a) { + boolean bl = a instanceof CC; + boolean bl2 = a instanceof List; + new Tuple2((Object)BoxesRunTime.boxToBoolean((boolean)bl), (Object)BoxesRunTime.boxToBoolean((boolean)bl2)); + new Tuple2((Object)BoxesRunTime.boxToBoolean((boolean)bl), (Object)BoxesRunTime.boxToBoolean((boolean)bl2)); + if (bl && bl2) { + return true; + } + boolean bl3 = bl; + if (bl3) return false; + if (bl2) return false; + return true; + } +``` + +This code still has a major inefficiency; it allocates tuples. 
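+
+In source terms, the missing optimization amounts to something like this hand-rewritten version of `foo`, which never materializes the intermediate tuple (a sketch for comparison, not compiler output):
+
+```scala
+// Equivalent to foo above, but written without the intermediate tuple:
+def foo(x: Any): Int = x match {
+  case CC(1, CC(_, _)) => 1 + 2
+  case _               => 42 + 43
+}
+```
+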
+We plan to continue the migration of local optimizations from the Dotty Linker that should allow us to generate code that is as +good the code generated by the Dotty Linker with global analysis disabled: + +```java + // output of Dotty linker https://github.com/dotty-linker/dotty/tree/opto + public int foo(Object x) { + CC cC; + int n = 0; + int n2 = 0; + if (x instanceof CC && 1 == (cC = (CC)x)._1() && cC._2() instanceof CC) { + n = 1; + n2 = 2; + } else { + n = 42; + n2 = 43; + } + return n + n2; + } + + public boolean booleans(Object a) { + boolean bl = a instanceof CC; + boolean bl2 = a instanceof List; + if (bl && bl2 || !bl && !bl2) { + return true; + } + return false; + } +``` + +## How can you try it out? +We ship with tools that help you try out the Dotty platform: + + - [IDE features for Visual Studio Code](https://dotty.epfl.ch/docs/usage/ide-support.html) + - [sbt support, including retro-compatibility with Scala 2](https://github.com/lampepfl/dotty-example-project) + + +You have several alternatives; use the `sbt-dotty` plugin, get a standalone +installation, or try it online on [Scastie]. + +### sbt +Using sbt 0.13.13 or newer, do: + +``` +sbt new lampepfl/dotty.g8 +``` + +This will setup a new sbt project with Dotty as compiler. For more details on +using Dotty with sbt, see the +[example project](https://github.com/lampepfl/dotty-example-project). + +### Standalone installation + +Releases are available for download on the _Releases_ +section of the Dotty repository: +[https://github.com/lampepfl/dotty/releases](https://github.com/lampepfl/dotty/releases) + +We also provide a [homebrew](https://brew.sh/) package that can be installed by running: + +``` +brew install lampepfl/brew/dotty +``` + +In case you have already installed Dotty via brew, you should instead update it: + +``` +brew upgrade dotty +``` + +### Scastie + +[Scastie], the online Scala playground, +supports Dotty. +You can try it out there without installing anything. + + +## What are the next steps? + +Over the coming weeks and months, we plan to work on the following topics: + + - [Add support for using Dotty generated classes with Scala 2.12](https://github.com/lampepfl/dotty/pull/2827) + - [Add Language-level support for HMaps and HLists](https://github.com/lampepfl/dotty/pull/2199); + - Upstream more optimizations from Dotty Linker + - [Add support for existing in the same classpath with Scala 2.12](https://github.com/lampepfl/dotty/pull/2827) + +If you want to get your hands dirty with any of this, now is a good +moment to get involved! Join the team of contributors, including +Martin Odersky ([@odersky](https://twitter.com/odersky)) +Dmitry Petrashko ([@DarkDimius](https://twitter.com/DarkDimius)), +Guillaume Martres ([@smarter](https://github.com/smarter)), +Felix Mulder ([@felixmulder](https://twitter.com/felixmulder)), +Nicolas Stucki ([@nicolasstucki](https://github.com/nicolasstucki)), +Liu Fengyun ([@liufengyun](https://github.com/liufengyun)), +Olivier Blanvillain ([@OlivierBlanvillain](https://github.com/OlivierBlanvillain)), +and others! + +## Library authors: Join our community build + +Dotty now has a set of libraries that are built against every nightly snapshot. +Currently this includes scalatest, squants and algebra. +Join our [community build](https://github.com/lampepfl/dotty-community-build) + to make sure that our regression suite includes your library. 
+ + +To get started, see [https://github.com/lampepfl/dotty](https://github.com/lampepfl/dotty). + + +[Scastie]: https://scastie.scala-lang.org/?target=dotty diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2017-09-07-third-dotty-milestone-release.md b/scala3doc/dotty-docs/docs/blog/_posts/2017-09-07-third-dotty-milestone-release.md new file mode 100644 index 000000000000..edb7e1fa732f --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2017-09-07-third-dotty-milestone-release.md @@ -0,0 +1,151 @@ +--- +layout: blog-page +title: Announcing Dotty 0.3.0-RC2 +author: Allan Renucci +authorImg: images/allan.jpg +date: 2017-09-07 +--- + +Today, we are excited to release Dotty version 0.3.0-RC2. This release +serves as a technology preview that demonstrates new language features +and the compiler supporting them. + +If you’re not familiar with Dotty, it's a platform to try out new language concepts and compiler +technologies for Scala. The focus is mainly on simplification. We remove extraneous syntax +(e.g. no XML literals), and try to boil down Scala’s types into a smaller set of more fundamental +constructs. The theory behind these constructs is researched in +[DOT](https://infoscience.epfl.ch/record/215280), a calculus for dependent object types. +You can learn more about Dotty on our [website](http://dotty.epfl.ch). + + + +This is our third scheduled release according to our [6-week release schedule](https://dotty.epfl.ch/docs/usage/version-numbers.html). +The [previous technology preview](/blog/2017/07/12/second-dotty-milestone-release.html) improved +stability and reliability: + + - substantial improvement of quality of generated code for pattern matching + - improvements in VS Code IDE stability + - support Windows in VS Code IDE + - improved compatibility with scalac + - initial support for reproducible builds + +## What’s in the 0.3.0-RC2 technology preview? +This technology preview further improves stability and reliability. Some highlighted PRs are: + + - IDE bug fixes: + [#2986](https://github.com/lampepfl/dotty/pull/2986), + [#2932](https://github.com/lampepfl/dotty/pull/2932), + [#2885](https://github.com/lampepfl/dotty/pull/2885), + [#2876](https://github.com/lampepfl/dotty/pull/2876), + [#2870](https://github.com/lampepfl/dotty/pull/2870), + [#2872](https://github.com/lampepfl/dotty/pull/2872) by [@odersky] and [@smarter]. + + +## How can you try it out? +We ship with tools that help you try out the Dotty platform: + + - [IDE features for Visual Studio Code](https://dotty.epfl.ch/docs/usage/ide-support.html) + - [sbt support, including retro-compatibility with Scala 2](https://github.com/lampepfl/dotty-example-project) + + +You have several alternatives; use the `sbt-dotty` plugin, get a standalone +installation, or try it online on [Scastie]. + +### sbt +Using sbt 0.13.13 or newer, do: + +``` +sbt new lampepfl/dotty.g8 +``` + +This will setup a new sbt project with Dotty as compiler. For more details on +using Dotty with sbt, see the +[example project](https://github.com/lampepfl/dotty-example-project). 
+ +### Standalone installation +Releases are available for download on the _Releases_ +section of the Dotty repository: +[https://github.com/lampepfl/dotty/releases](https://github.com/lampepfl/dotty/releases) + +We also provide a [homebrew](https://brew.sh/) package that can be installed by running: + +``` +brew install lampepfl/brew/dotty +``` + +In case you have already installed Dotty via brew, you should instead update it: + +``` +brew upgrade dotty +``` + +### Scastie +[Scastie], the online Scala playground, supports Dotty. +You can try it out there without installing anything. + + +## What are the next steps? +Over the coming weeks and months, we plan to work on the following topics: + + - [Add support for using Dotty generated classes with Scala 2.12](https://github.com/lampepfl/dotty/pull/2827) + - [Add Language-level support for HMaps and HLists](https://github.com/lampepfl/dotty/pull/2199); + - Upstream more optimizations from Dotty Linker + - [Add support for existing in the same classpath with Scala 2.12](https://github.com/lampepfl/dotty/pull/2827) + - [Add native Dotty REPL](https://github.com/lampepfl/dotty/pull/2991) + +## Questions / Reporting Bugs +If you have questions or any sort of feedback, feel free to send us a message on our +[Gitter channel](https://gitter.im/lampepfl/dotty). If you encounter a bug, please +[open an issue on GitHub](https://github.com/lampepfl/dotty/issues/new). + +## Contributing +Thank you to all the contributors who made this release possible! + +According to `git shortlog -sn --no-merges 0.2.0-RC1..0.3.0-RC2` these are: + +``` + 138 Martin Odersky + 36 Nicolas Stucki + 12 Guillaume Martres + 7 Dmitry Petrashko + 5 liu fengyun + 4 Allan Renucci + 4 Felix Mulder + 2 Lorand Szakacs + 1 Lukas Ciszewski + 1 Max Ovsiankin + 1 Lanny Ripple + 1 Serhii Pererva + 1 Georg Schmid +``` + +If you want to get your hands dirty with any of this, now is a good moment to get involved! +You can have a look at our [Getting Started page](https://dotty.epfl.ch/docs/contributing/getting-started.html), +our [Awesome Error Messages](http://scala-lang.org/blog/2016/10/14/dotty-errors.html) or some of +the simple [Dotty issues](https://github.com/lampepfl/dotty/issues?q=is%3Aissue+is%3Aopen+label%3Aexp%3Anovice). +They make perfect entry-points into hacking on the compiler. + +We are looking forward to having you join the team of contributors. + +## Library authors: Join our community build +Dotty now has a set of libraries that are built against every nightly snapshot. +Currently this includes scalatest, squants and algebra. +Join our [community build](https://github.com/lampepfl/dotty-community-build) +to make sure that our regression suite includes your library. + +To get started, see [https://github.com/lampepfl/dotty](https://github.com/lampepfl/dotty). 
+ + +[Scastie]: https://scastie.scala-lang.org/?target=dotty + +[@odersky]: https://github.com/odersky +[@DarkDimius]: https://github.com/DarkDimius +[@smarter]: https://github.com/smarter +[@felixmulder]: https://github.com/felixmulder +[@nicolasstucki]: https://github.com/nicolasstucki +[@liufengyun]: https://github.com/liufengyun +[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain +[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain +[@biboudis]: https://github.com/biboudis +[@biboudis]: https://github.com/biboudis +[@allanrenucci]: https://github.com/allanrenucci diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2017-10-16-fourth-dotty-milestone-release.md b/scala3doc/dotty-docs/docs/blog/_posts/2017-10-16-fourth-dotty-milestone-release.md new file mode 100644 index 000000000000..d5e4cc446e2b --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2017-10-16-fourth-dotty-milestone-release.md @@ -0,0 +1,159 @@ +--- +layout: blog-page +title: Announcing Dotty 0.4.0-RC1 +author: Allan Renucci +authorImg: images/allan.jpg +date: 2017-10-16 +--- + +Today, we are excited to release Dotty version 0.4.0-RC1. This release +serves as a technology preview that demonstrates new language features +and the compiler supporting them. + +If you’re not familiar with Dotty, it's a platform to try out new language concepts and compiler +technologies for Scala. The focus is mainly on simplification. We remove extraneous syntax +(e.g. no XML literals), and try to boil down Scala’s types into a smaller set of more fundamental +constructs. The theory behind these constructs is researched in +[DOT](https://infoscience.epfl.ch/record/215280), a calculus for dependent object types. +You can learn more about Dotty on our [website](http://dotty.epfl.ch). + + + +This is our fourth scheduled release according to our [6-week release schedule](https://dotty.epfl.ch/docs/usage/version-numbers.html). +The [previous technology preview](/blog/2017/09/07/third-dotty-milestone-release.html) improved +stability and reliability. + +## What’s new in the 0.4.0-RC1 technology preview? + +### Rewritten REPL [#2991](https://github.com/lampepfl/dotty/pull/2991) +The original Dotty REPL was a proof of concept hacked together +from +[an ancient version of the scalac REPL](https://github.com/lampepfl/dotty/pull/1082#issuecomment-183905504). +It worked by creating Scala source files from the user input using string +concatenation, this made it easy to adapt it for Dotty since it did not rely on +the internals of scalac, but it was also fragile and hard to reason about. +The [new REPL](https://github.com/lampepfl/dotty/pull/2991) instead works by +manipulating ASTs (Abstract Syntax Trees), this is more robust and will make it +easier to develop new features: we have already implemented auto-completion +support (by reusing the APIs we had created for +the [Dotty IDE](https://dotty.epfl.ch/docs/usage/ide-support.html)) and we have +plans for displaying API documentation in the REPL. + +Note that the user interface of the REPL has not changed: like in the old REPL +we use code adapted from the [Ammonite REPL](http://ammonite.io/#Ammonite-REPL) +to provide syntax highlighting, multi-line editing, history, etc. 
+ +### Scala 2.12 support [#2827](https://github.com/lampepfl/dotty/pull/2827) +Since our first release, it has been possible to use Scala 2 libraries in a +Dotty project as explained in the +[dotty-example-project](https://github.com/smarter/dotty-example-project#getting-your-project-to-compile-with-dotty). +Previously, we supported libraries compiled by Scala 2.11, but starting with this +release we support Scala 2.12 instead. If your Dotty project has Scala 2 +dependencies this change should be transparent for you assuming all your +dependencies have been published for 2.12. + +### Performance work +Over the last few weeks, we started working on compilation speed with some good results: +compiling [ScalaPB](https://github.com/dotty-staging/scalapb) is now 20% faster +than with Dotty 0.3.0-RC2. You can follow along our progress on +http://dotty-bench.epfl.ch/. + + +## Trying out Dotty +### Scastie +[Scastie], the online Scala playground, supports Dotty. +This is an easy way to try Dotty without installing anything. + +### sbt +Using sbt 0.13.13 or newer, do: + +``` +sbt new lampepfl/dotty.g8 +``` + +This will setup a new sbt project with Dotty as compiler. For more details on +using Dotty with sbt, see the +[example project](https://github.com/lampepfl/dotty-example-project). + +### IDE support +It is very easy to start using the Dotty IDE in any Dotty project by following +the [IDE guide](https://dotty.epfl.ch/docs/usage/ide-support.html). + + +### Standalone installation +Releases are available for download on the _Releases_ +section of the Dotty repository: +[https://github.com/lampepfl/dotty/releases](https://github.com/lampepfl/dotty/releases) + +We also provide a [homebrew](https://brew.sh/) package that can be installed by running: + +``` +brew install lampepfl/brew/dotty +``` + +In case you have already installed Dotty via brew, you should instead update it: + +``` +brew upgrade dotty +``` + +## Let us know what you think! +If you have questions or any sort of feedback, feel free to send us a message on our +[Gitter channel](https://gitter.im/lampepfl/dotty). If you encounter a bug, please +[open an issue on GitHub](https://github.com/lampepfl/dotty/issues/new). + +## Contributing +Thank you to all the contributors who made this release possible! + +According to `git shortlog -sn --no-merges 0.3.0-RC2..0.4.0-RC1` these are: + +``` + 226 Martin Odersky + 112 Felix Mulder + 104 Nicolas Stucki + 41 Allan Renucci + 41 Guillaume Martres + 33 liu fengyun + 8 Olivier Blanvillain + 4 Aggelos Biboudis + 3 Dmitry Petrashko + 2 Raymond Tay + 2 esarbe + 2 Enno Runne + 1 Brandon Elam Barker + 1 Raphael Bosshard + 1 Jacob J + 1 Aleksander Boruch-Gruszecki + 1 Jim Van Horn + 1 Matthias Sperl + 1 Michal Gutowski +``` + +If you want to get your hands dirty and contribute to Dotty, now is a good time to get involved! +You can have a look at our [Getting Started page for new contributors](https://dotty.epfl.ch/docs/contributing/getting-started.html), +the [Awesome Error Messages](http://scala-lang.org/blog/2016/10/14/dotty-errors.html) project or some of +the simple [Dotty issues](https://github.com/lampepfl/dotty/issues?q=is%3Aissue+is%3Aopen+label%3Aexp%3Anovice). +They make perfect entry-points into hacking on the compiler. + +We are looking forward to having you join the team of contributors. + +## Library authors: Join our community build +Dotty now has a set of widely-used community libraries that are built against every nightly Dotty +snapshot. 
Currently this includes ScalaPB, algebra, scalatest, scopt and squants. +Join our [community build](https://github.com/lampepfl/dotty-community-build) +to make sure that our regression suite includes your library. + +To get started, see [https://github.com/lampepfl/dotty](https://github.com/lampepfl/dotty). + + +[Scastie]: https://scastie.scala-lang.org/?target=dotty + +[@odersky]: https://github.com/odersky +[@DarkDimius]: https://github.com/DarkDimius +[@smarter]: https://github.com/smarter +[@felixmulder]: https://github.com/felixmulder +[@nicolasstucki]: https://github.com/nicolasstucki +[@liufengyun]: https://github.com/liufengyun +[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain +[@biboudis]: https://github.com/biboudis +[@allanrenucci]: https://github.com/allanrenucci diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2017-12-01-fifth-dotty-milestone-release.md b/scala3doc/dotty-docs/docs/blog/_posts/2017-12-01-fifth-dotty-milestone-release.md new file mode 100644 index 000000000000..a927748210f3 --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2017-12-01-fifth-dotty-milestone-release.md @@ -0,0 +1,223 @@ +--- +layout: blog-page +title: Announcing Dotty 0.5.0-RC1 +author: Allan Renucci +authorImg: images/allan.jpg +date: 2017-12-01 +--- + +Today, we are excited to release Dotty version 0.5.0-RC1. This release +serves as a technology preview that demonstrates new language features +and the compiler supporting them. + +If you’re not familiar with Dotty, it's a platform to try out new language concepts and compiler +technologies for Scala. The focus is mainly on simplification. We remove extraneous syntax +(e.g. no XML literals), and try to boil down Scala’s types into a smaller set of more fundamental +constructs. The theory behind these constructs is researched in +[DOT](https://infoscience.epfl.ch/record/215280), a calculus for dependent object types. +You can learn more about Dotty on our [website](http://dotty.epfl.ch). + + + +This is our fifth scheduled release according to our [6-week release schedule](https://dotty.epfl.ch/docs/usage/version-numbers.html). +The [previous technology preview](/blog/2017/10/16/fourth-dotty-milestone-release.html) added +support for Scala 2.12 and came with a brand new REPL. + +## What’s new in the 0.5.0-RC1 technology preview? + +### Reworked implicit search [#3421](https://github.com/lampepfl/dotty/pull/3421) +The treatment of ambiguity errors has changed. If an ambiguity is encountered +in some recursive step of an implicit search, the ambiguity is propagated to the caller. +Example: Say you have the following definitions: + +```scala +class A +class B extends C +class C +implicit def a1: A +implicit def a2: A +implicit def b(implicit a: A): B +implicit def c: C +``` + +and the query `implicitly[C]`. + +This query would now be classified as ambiguous. This makes sense, after all +there are two possible solutions, `b(a1)` and `b(a2)`, neither of which is better +than the other and both of which are better than the third solution, `c`. +By contrast, Scala 2 would have rejected the search for `A` as +ambiguous, and subsequently have classified the query `b(implictly[A])` as a normal fail, +which means that the alternative `c` would be chosen as solution! 
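+
+For reference, here is that example again in a directly compilable form (trivial bodies added for illustration):
+
+```scala
+object AmbiguityDemo {
+  class A
+  class B extends C
+  class C
+
+  implicit def a1: A = new A
+  implicit def a2: A = new A
+  implicit def b(implicit a: A): B = new B
+  implicit def c: C = new C
+
+  implicitly[C]  // with the reworked search: ambiguous, since both b(a1) and b(a2) would fit;
+                 // Scala 2 would instead have selected c
+}
+```
+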
+ +Scala 2's somewhat puzzling behavior with respect to ambiguity has been exploited to implement +the analogue of a "negated" search in implicit resolution, where a query `Q1` fails if some other +query `Q2` succeeds and `Q1` succeeds if `Q2` fails. With the new cleaned up behavior these +techniques no longer work. But there is now a new special type `scala.implicits.Not` which +implements negation directly. For any query type `Q`: `Not[Q]` succeeds if and only if the +implicit search for `Q` fails. + +### Dependent function types [#3464](https://github.com/lampepfl/dotty/pull/3464) +A dependent function type describes functions where the result type may depend +on the function's parameter values. Example: + +```scala +class Entry { type Key; key: Key } + +def extractKey(e: Entry): e.Key = e.key // a dependent method +val extractor: (e: Entry) => e.Key = extractKey // a dependent function value +``` + +Scala already has _dependent methods_, i.e. methods where the result +type refers to some of the parameters of the method. Method +`extractKey` is an example. Its result type, `e.key` refers its +parameter `e` (we also say, `e.Key` _depends_ on `e`). But so far it +was not possible to turn such methods into function values, so that +they can be passed as parameters to other functions, or returned as +results. Dependent methods could not be turned into functions simply +because there was no type that could describe them. + +In Dotty this is now possible. The type of the `extractor` value above is + +```scala +(e: Entry) => e.Key +``` + +This type describes function values that take any argument `x` of type +`Entry` and return a result of type `x.Key`. + +### TASTY frontend +[TASTY](https://docs.google.com/document/d/1Wp86JKpRxyWTqUU39H40ZdXOlacTNs20aTj7anZLQDw/edit) is a +new serialization format for typed syntax trees of Scala programs. When compiled by Dotty, a program +classfile will include its TASTY representation in addition to its bytecode. + +The TASTY frontend uses ASTs from the TASTY in classfiles as input instead of source files. There +are currently two backends using the TASTY frontend: + + - A Dotty class file decompiler that let you decompile code previously compiled to TASTY: + + ```shell + dotc -decompile -classpath + ``` + + - A Dotty TASTY compiler that will recompile code previously compiled to TASTY: + + ```shell + dotc -from-tasty -classpath + ``` + + This is the first step toward linking and whole word optimisations, recompiling code to a + different backends... + +### Generic java signatures [#3234](https://github.com/lampepfl/dotty/pull/3234) +Dotty now emits generic signatures for classes and methods. Those signatures are used by compilers, +debuggers and to support runtime reflection. For example: + +```scala +scala> class Foo[T, U] +// defined class Foo + +scala> classOf[Foo[_, _]].getTypeParameters.map(_.getName).mkString(", ") +val res0: String = "T, U" +``` + +## Trying out Dotty +### Scastie +[Scastie], the online Scala playground, supports Dotty. +This is an easy way to try Dotty without installing anything. + +### sbt +Using sbt 0.13.13 or newer, do: + +```shell +sbt new lampepfl/dotty.g8 +``` + +This will setup a new sbt project with Dotty as compiler. For more details on +using Dotty with sbt, see the +[example project](https://github.com/lampepfl/dotty-example-project). + +### IDE support +It is very easy to start using the Dotty IDE in any Dotty project by following +the [IDE guide](https://dotty.epfl.ch/docs/usage/ide-support.html). 
+ + +### Standalone installation +Releases are available for download on the _Releases_ +section of the Dotty repository: +[https://github.com/lampepfl/dotty/releases](https://github.com/lampepfl/dotty/releases) + +We also provide a [homebrew](https://brew.sh/) package that can be installed by running: + +```shell +brew install lampepfl/brew/dotty +``` + +In case you have already installed Dotty via brew, you should instead update it: + +```shell +brew upgrade dotty +``` + +## Let us know what you think! +If you have questions or any sort of feedback, feel free to send us a message on our +[Gitter channel](https://gitter.im/lampepfl/dotty). If you encounter a bug, please +[open an issue on GitHub](https://github.com/lampepfl/dotty/issues/new). + +## Contributing +Thank you to all the contributors who made this release possible! + +According to `git shortlog -sn --no-merges 0.4.0-RC1..0.5.0-RC1` these are: + +``` + 112 Nicolas Stucki + 108 Martin Odersky + 33 Allan Renucci + 18 Guillaume Martres + 17 Martin Duhem + 13 liu fengyun + 9 Miron Aseev + 4 Matt D'Souza + 4 Raphael Bosshard + 2 k0ala + 2 Vitor Vieira + 2 Fengyun Liu + 2 Michal Gutowski + 2 Robert Soeldner + 2 Aurélien Richez + 1 rsoeldner + 1 Hermes Espínola González + 1 Jean Detoeuf + 1 Karol Chmist + 1 Olivier Blanvillain + 1 William Narmontas + 1 Yevgen Nerush + 1 gan74 + 1 gosubpl +``` + +If you want to get your hands dirty and contribute to Dotty, now is a good time to get involved! +You can have a look at our [Getting Started page for new contributors](https://dotty.epfl.ch/docs/contributing/getting-started.html), +the [Awesome Error Messages](http://scala-lang.org/blog/2016/10/14/dotty-errors.html) project or some of +the simple [Dotty issues](https://github.com/lampepfl/dotty/issues?q=is%3Aissue+is%3Aopen+label%3Aexp%3Anovice). +They make perfect entry-points into hacking on the compiler. + +We are looking forward to having you join the team of contributors. + +## Library authors: Join our community build +Dotty now has a set of widely-used community libraries that are built against every nightly Dotty +snapshot. Currently this includes ScalaPB, algebra, scalatest, scopt and squants. +Join our [community build](https://github.com/lampepfl/dotty-community-build) +to make sure that our regression suite includes your library. + + +[Scastie]: https://scastie.scala-lang.org/?target=dotty + +[@odersky]: https://github.com/odersky +[@DarkDimius]: https://github.com/DarkDimius +[@smarter]: https://github.com/smarter +[@felixmulder]: https://github.com/felixmulder +[@nicolasstucki]: https://github.com/nicolasstucki +[@liufengyun]: https://github.com/liufengyun +[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain +[@biboudis]: https://github.com/biboudis +[@allanrenucci]: https://github.com/allanrenucci diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2018-03-05-seventh-dotty-milestone-release.md b/scala3doc/dotty-docs/docs/blog/_posts/2018-03-05-seventh-dotty-milestone-release.md new file mode 100644 index 000000000000..a24e4c595026 --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2018-03-05-seventh-dotty-milestone-release.md @@ -0,0 +1,283 @@ +--- +layout: blog-page +title: Announcing Dotty 0.6.0 and 0.7.0-RC1 +author: Allan Renucci +authorImg: images/allan.jpg +date: 2018-03-05 +--- + +Today, we are excited to release Dotty versions 0.6.0 and 0.7.0-RC1. 
These releases +serve as a technology preview that demonstrates new language features and the compiler supporting them. + +If you’re not familiar with Dotty, it's a platform to try out new language concepts and compiler +technologies for Scala. The focus is mainly on simplification. We remove extraneous syntax +(e.g. no XML literals), and try to boil down Scala’s types into a smaller set of more fundamental +constructs. The theory behind these constructs is researched in +[DOT](https://infoscience.epfl.ch/record/215280), a calculus for dependent object types. +You can learn more about Dotty on our [website](http://dotty.epfl.ch). + + + +This is our seventh scheduled release according to our [6-week release schedule](https://dotty.epfl.ch/docs/usage/version-numbers.html). +The [previous technology preview](https://github.com/lampepfl/dotty/releases/tag/0.6.0-RC1) focussed +on bug fixes and stability work. + +## What’s new in the 0.7.0-RC1 technology preview? + +### Enum Simplification [#4003](https://github.com/lampepfl/dotty/pull/4003) +The previously introduced syntax and rules for enum were arguably too complex. We can considerably +simplify them by taking away one capability: that cases can have bodies which can define members. +Arguably, if we choose an ADT decomposition of a problem, it's good style to write all methods using +pattern matching instead of overriding individual cases. So this removes an unnecessary choice. +We now treat enums unequivocally as classes. They can have methods and other statements just like +other classes can. Cases in enums are seen as a form of constructors. We do not need a +distinction between enum class and enum object anymore. Enums can have companion objects just like +normal classes can, of course. + +Let's consider how `Option` can be represented as an enum. Previously using an enum class: +```scala +enum class Option[+T] { + def isDefined: Boolean +} + +object Option { + case Some[+T](x: T) { + def isDefined = true + } + case None { + def isDefined = false + } + + def apply[T](x: T): Option[T] = if (x == null) None else Some(x) +} +``` + +And now: +```scala +enum Option[+T] { + case Some(x: T) + case None + + def isDefined: Boolean = this match { + case None => false + case Some(_) => true + } +} + +object Option { + def apply[T](x: T): Option[T] = if (x == null) None else Some(x) +} +``` + + +For more information about [Enumerations](https://dotty.epfl.ch/docs/reference/enums/enums.html) +and how to use them to model [Algebraic Data Types](https://dotty.epfl.ch/docs/reference/enums/adts.html), +visit the respective sections in our documentation. + + +### Erased terms [#3342](https://github.com/lampepfl/dotty/pull/3342) +The `erased` modifier can be used on parameters, `val` and `def` to enforce that no reference to +those terms is ever used. As they are never used, they can safely be removed during compilation. + +One particular use case is to add implicit type constraints that are only relevant at compilation +time. For example, let's consider the following implementation of `flatten`. 
+ +```scala +class List[X] { + def flatten[Y](implicit erased ev: X <:< List[Y]): List[Y] = { + val buffer = new mutable.ListBuffer[Y] + this.foreach(e => buffer ++= e.asInstanceOf[List[Y]]) + buffer.toList + } +} + +List(List(1, 2), List(3)).flatten // List(1, 2, 3) +List(1, 2, 3).flatten // error: Cannot prove that Int <:< List[Y] +``` + +The implicit evidence `ev` is only used to constrain the type parameter `X` of `List` such that we +can safely cast from `X` to `List[_]`. The usage of the `erased` modifier ensures that the evidence +is not used and can be safely removed at compilation time. + +For more information, visit the [Erased Terms](https://dotty.epfl.ch/docs/reference/metaprogramming/erased-terms.html) +section of our documentation. + +**Note**: Erased terms replace _phantom types_: they have similar semantics, but with the added +advantage that any type can be an erased parameter. See [#3410](https://github.com/lampepfl/dotty/pull/3410). + + +### Improved IDE support [#3960](https://github.com/lampepfl/dotty/pull/3960) +The Dotty language server now supports context sensitive IDE completions. Completions now include +local and imported definitions. Members completions take possible implicit conversions into account. + + + +We also improved the `find references` functionality. It is more robust and much faster! + +Try it out in [Visual Studio Code](https://dotty.epfl.ch/docs/usage/ide-support.html)! + +### Better and safer types in pattern matching (improved GADT support) + +Consider the following implementation of an evaluator for a very simple +language containing only integer literals (`Lit`) and pairs (`Pair`): +```scala +sealed trait Exp +case class Lit(value: Int) extends Exp +case class Pair(fst: Exp, snd: Exp) extends Exp + +object Evaluator { + def eval(e: Exp): Any = e match { + case Lit(x) => + x + case Pair(a, b) => + (eval(a), eval(b)) + } + + eval(Lit(1)) // 1: Any + eval(Pair(Pair(Lit(1), Lit(2)), Lit(3))) // ((1, 2), 3) : Any +} +``` + +This code is correct but it's not very type-safe since `eval` returns a value +of type `Any`, we can do better by adding a type parameter to `Exp` that +represents the result type of evaluating the expression: + +```scala +sealed trait Exp[T] +case class Lit(value: Int) extends Exp[Int] +case class Pair[A, B](fst: Exp[A], snd: Exp[B]) extends Exp[(A, B)] + +object Evaluator { + def eval[T](e: Exp[T]): T = e match { + case Lit(x) => + // In this case, T = Int + x + case Pair(a, b) => + // In this case, T = (A, B) where A is the type of a and B is the type of b + (eval(a), eval(b)) + } + + eval(Lit(1)) // 1: Int + eval(Pair(Pair(Lit(1), Lit(2)), Lit(3))) // ((1, 2), 3) : ((Int, Int), Int) +} +``` + +Now the expression `Pair(Pair(Lit(1), Lit(2)), Lit(3)))` has type `Exp[((Int, +Int), Int)]` and calling `eval` on it will return a value of type `((Int, +Int), Int)` instead of `Any`. + +Something subtle is going on in the definition of `eval` here: its result type +is `T` which is a type parameter that could be instantiated to anything, and +yet in the `Lit` case we are able to return a value of type `Int`, and in the +`Pair` case a value of a tuple type. In each case the typechecker has been able +to constrain the type of `T` through unification (e.g. if `e` matches `Lit(x)` +then `Lit` is a subtype of `Exp[T]`, so `T` must be equal to `Int`). 
This is +usually referred to as **GADT support** in Scala since it closely mirrors the +behavior of [Generalized Algebraic Data +Types](https://en.wikipedia.org/wiki/Generalized_algebraic_data_type) in +Haskell and other languages. + +GADTs have been a part of Scala for a long time, but in Dotty 0.7.0-RC1 we +significantly improved their implementation to catch more issues at +compile-time. For example, writing `(eval(a), eval(a))` instead of `(eval(a), +eval(b))` in the example above should be an error, but it was not caught by +Scala 2 or previous versions of Dotty, whereas we now get a type mismatch error +as expected. More work remains to be done to fix the remaining [GADT-related +issues](https://github.com/lampepfl/dotty/issues?utf8=%E2%9C%93&q=is%3Aissue+is%3Aopen+gadt), +but so far no show-stopper has been found. + +## Trying out Dotty +### Scastie +[Scastie], the online Scala playground, supports Dotty. +This is an easy way to try Dotty without installing anything. + +### sbt +Using sbt 0.13.13 or newer, do: + +```shell +sbt new lampepfl/dotty.g8 +``` + +This will setup a new sbt project with Dotty as compiler. For more details on +using Dotty with sbt, see the +[example project](https://github.com/lampepfl/dotty-example-project). + +### IDE support +It is very easy to start using the Dotty IDE in any Dotty project by following +the [IDE guide](https://dotty.epfl.ch/docs/usage/ide-support.html). + + +### Standalone installation +Releases are available for download on the _Releases_ +section of the Dotty repository: +[https://github.com/lampepfl/dotty/releases](https://github.com/lampepfl/dotty/releases) + +We also provide a [homebrew](https://brew.sh/) package that can be installed by running: + +```shell +brew install lampepfl/brew/dotty +``` + +In case you have already installed Dotty via brew, you should instead update it: + +```shell +brew upgrade dotty +``` + +## Let us know what you think! +If you have questions or any sort of feedback, feel free to send us a message on our +[Gitter channel](https://gitter.im/lampepfl/dotty). If you encounter a bug, please +[open an issue on GitHub](https://github.com/lampepfl/dotty/issues/new). + +## Contributing +Thank you to all the contributors who made this release possible! + +According to `git shortlog -sn --no-merges 0.6.0..0.7.0-RC1` these are: + +``` + 182 Martin Odersky + 94 Nicolas Stucki + 48 Olivier Blanvillain + 38 liu fengyun + 16 Allan Renucci + 15 Guillaume Martres + 11 Aggelos Biboudis + 5 Abel Nieto + 5 Paolo G. Giarrusso + 4 Fengyun Liu + 2 Georg Schmid + 1 Jonathan Skowera + 1 Fedor Shiriaev + 1 Alexander Slesarenko + 1 benkobalog + 1 Jimin Hsieh +``` + +If you want to get your hands dirty and contribute to Dotty, now is a good time to get involved! +Head to our [Getting Started page for new contributors](https://dotty.epfl.ch/docs/contributing/getting-started.html), +and have a look at some of the [good first issues](https://github.com/lampepfl/dotty/issues?q=is%3Aissue+is%3Aopen+label%3Aexp%3Anovice). +They make perfect entry-points into hacking on the compiler. + +We are looking forward to having you join the team of contributors. + +## Library authors: Join our community build +Dotty now has a set of widely-used community libraries that are built against every nightly Dotty +snapshot. Currently this includes ScalaPB, algebra, scalatest, scopt and squants. 
+Join our [community build](https://github.com/lampepfl/dotty-community-build) +to make sure that our regression suite includes your library. + + +[Scastie]: https://scastie.scala-lang.org/?target=dotty + +[@odersky]: https://github.com/odersky +[@DarkDimius]: https://github.com/DarkDimius +[@smarter]: https://github.com/smarter +[@felixmulder]: https://github.com/felixmulder +[@nicolasstucki]: https://github.com/nicolasstucki +[@liufengyun]: https://github.com/liufengyun +[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain +[@biboudis]: https://github.com/biboudis +[@allanrenucci]: https://github.com/allanrenucci +[@Blaisorblade]: https://github.com/Blaisorblade +[@Duhemm]: https://github.com/duhemm + diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2018-04-27-eighth-dotty-milestone-release.md b/scala3doc/dotty-docs/docs/blog/_posts/2018-04-27-eighth-dotty-milestone-release.md new file mode 100644 index 000000000000..324d0f1349fa --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2018-04-27-eighth-dotty-milestone-release.md @@ -0,0 +1,214 @@ +--- +layout: blog-page +title: Announcing Dotty 0.7.0 and 0.8.0-RC1 +author: Allan Renucci +authorImg: images/allan.jpg +date: 2018-04-27 +--- + +Today, we are excited to release Dotty versions 0.7.0 and 0.8.0-RC1. These releases +serve as a technology preview that demonstrates new language features and the compiler supporting them. + +Dotty is the project name for technologies that are considered for inclusion in Scala 3. Scala has +pioneered the fusion of object-oriented and functional programming in a typed setting. Scala 3 will +be a big step towards realizing the full potential of these ideas. Its main objectives are to + +- become more opinionated by promoting programming idioms we found to work well, +- simplify where possible, +- eliminate inconsistencies and surprising behaviors, +- build on strong foundations to ensure the design hangs well together, +- consolidate language constructs to improve the language’s consistency, safety, ergonomics, and performance. + +You can learn more about Dotty on our [website](https://dotty.epfl.ch). + + + +This is our eighth scheduled release according to our [6-week release schedule](https://dotty.epfl.ch/docs/usage/version-numbers.html). +The [previous technology preview](https://github.com/lampepfl/dotty/releases/tag/0.7.0-RC1) simplified +enums, introduced erased terms, improved IDE support and improved pattern matching for GADT. + +## What’s new in the 0.8.0-RC1 technology preview? + +### sbt 1 support [#3872](https://github.com/lampepfl/dotty/pull/3872) +Starting with Dotty 0.8.0, we will only support versions of sbt >= 1.1.4. Migrating to sbt 1 +lets us use the new improved incremental compiler for Scala called [Zinc](https://github.com/sbt/zinc), +and enables integration with tools such as [Bloop](https://scalacenter.github.io/bloop/). 
+ +If you are already using Dotty with sbt 0.13, follow these simple steps to upgrade: + +- update sbt version to 1.1.4 in `project/build.properties` +- update sbt-dotty plugin to the latest version: + ```scala + addSbtPlugin("ch.epfl.lamp" % "sbt-dotty" % "0.2.2") + ``` +- replace usages of `.withDottyCompat()` by `.withDottyCompat(scalaVersion.value)` + +### Unchecked warnings [#4045](https://github.com/lampepfl/dotty/pull/4045) +Dotty now emits `unchecked` warnings like `scalac` whenever a type test is performed but cannot be +fully checked at runtime because of type erasure. For example: + +```scala +scala> def foo(x: Any) = x.isInstanceOf[List[String]] +1 |def foo(x: Any) = x.isInstanceOf[List[String]] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | the type test for List[String] cannot be checked at runtime +``` + +In some cases, the Dotty compiler is smarter than `scalac` and will not emit a warning: +```scala +trait Marker + +def foo[T](x: T) = x match { + case _: T with Marker => // scalac emits a spurious warning + case _ => +} +``` + +### Kind Polymorphism [#4108](https://github.com/lampepfl/dotty/pull/4108) +Normally type parameters in Scala are partitioned into kinds. First-level types are types of values. +Higher-kinded types are type constructors such as `List` or `Map`. The kind of a type is indicated +by the top type of which it is a subtype. Normal types are subtypes of `Any`, covariant single +argument type constructors such as List are subtypes of `[+X] =>> Any`, and the `Map` type +constructor is a subtype of `[X, +Y] =>> Any`. + +Sometimes we would like to have type parameters that can have more than one kind, for instance to +define an implicit value that works for parameters of any kind. This is now possible through a form +of (subtype) kind polymorphism. Kind polymorphism relies on the special type `scala.AnyKind` that +can be used as an upper bound of a type. + +```scala +def f[T <: AnyKind] = .. +``` + +The actual type arguments of f can then be types of arbitrary kinds. So the following would all be +legal: + +```scala +f[Int] +f[List] +f[Map] +f[[X] =>> String] +``` + +**Note**: This feature is considered experimental and is only enabled under a compiler flag +(i.e. `-Ykind-polymorphism`). For more information, visit the [Kind Polymorphism](https://dotty.epfl.ch/docs/reference/other-new-features/kind-polymorphism.html) +section of our documentation. + +### Improved support for SAM type [#4152](https://github.com/lampepfl/dotty/pull/4152) +This release includes fixes to [SAM types](https://www.scala-lang.org/news/2.12.0/#lambda-syntax-for-sam-types) +that greatly improve interoperability with Java 8 lambdas. One can now easily write Scala code that +uses Java streams: + +```scala +val myList = + java.util.Arrays.asList("a1", "a2", "b1", "c2", "c1") + +myList + .stream + .filter(s => s.startsWith("c")) + .map(_.toUpperCase) + .sorted + .forEach(println(_)) + +// prints: +// C1 +// C2 +``` + +## Trying out Dotty +### Scastie +[Scastie], the online Scala playground, supports Dotty. +This is an easy way to try Dotty without installing anything. + +### sbt +Using sbt 1.1.4 or newer, do: + +```shell +sbt new lampepfl/dotty.g8 +``` + +This will setup a new sbt project with Dotty as compiler. For more details on +using Dotty with sbt, see the +[example project](https://github.com/lampepfl/dotty-example-project). 
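+
+If you prefer to wire Dotty into an existing sbt build rather than using the template, the setup
+boils down to the `sbt-dotty` plugin plus a Dotty `scalaVersion`. The sketch below is only
+illustrative (the plugin coordinates are the ones quoted earlier in this post; the version numbers
+will evolve):
+
+```scala
+// project/plugins.sbt
+addSbtPlugin("ch.epfl.lamp" % "sbt-dotty" % "0.2.2")
+
+// build.sbt
+scalaVersion := "0.8.0-RC1" // any published Dotty version; sbt-dotty teaches sbt to resolve it
+```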
+ +### IDE support +It is very easy to start using the Dotty IDE in any Dotty project by following +the [IDE guide](https://dotty.epfl.ch/docs/usage/ide-support.html). + + +### Standalone installation +Releases are available for download on the _Releases_ +section of the Dotty repository: +[https://github.com/lampepfl/dotty/releases](https://github.com/lampepfl/dotty/releases) + +We also provide a [homebrew](https://brew.sh/) package that can be installed by running: + +```shell +brew install lampepfl/brew/dotty +``` + +In case you have already installed Dotty via brew, you should instead update it: + +```shell +brew upgrade dotty +``` + +## Let us know what you think! +If you have questions or any sort of feedback, feel free to send us a message on our +[Gitter channel](https://gitter.im/lampepfl/dotty). If you encounter a bug, please +[open an issue on GitHub](https://github.com/lampepfl/dotty/issues/new). + +## Contributing +Thank you to all the contributors who made this release possible! + +According to `git shortlog -sn --no-merges 0.7.0..0.8.0-RC1` these are: + +``` + 95 Martin Odersky + 91 liu fengyun + 91 Nicolas Stucki + 84 Allan Renucci + 73 Guillaume Martres + 67 Martin Duhem + 18 Jendrik Wenke + 16 Paolo G. Giarrusso + 8 Robert Stoll + 6 Thierry Treyer + 4 Aggelos Biboudis + 1 tokkiyaa + 1 Rajesh Veeranki + 1 Maxime Kjaer + 1 Saurabh Rawat + 1 Joan + 1 Jorge Vicente Cantero + 1 Jasper Moeys + 1 Piotr Gabara +``` + +If you want to get your hands dirty and contribute to Dotty, now is a good time to get involved! +Head to our [Getting Started page for new contributors](https://dotty.epfl.ch/docs/contributing/getting-started.html), +and have a look at some of the [good first issues](https://github.com/lampepfl/dotty/issues?q=is%3Aissue+is%3Aopen+label%3Aexp%3Anovice). +They make perfect entry-points into hacking on the compiler. + +We are looking forward to having you join the team of contributors. + +## Library authors: Join our community build +Dotty now has a set of widely-used community libraries that are built against every nightly Dotty +snapshot. Currently this includes ScalaPB, algebra, scalatest, scopt and squants. +Join our [community build](https://github.com/lampepfl/dotty-community-build) +to make sure that our regression suite includes your library. + + +[Scastie]: https://scastie.scala-lang.org/?target=dotty + +[@odersky]: https://github.com/odersky +[@DarkDimius]: https://github.com/DarkDimius +[@smarter]: https://github.com/smarter +[@felixmulder]: https://github.com/felixmulder +[@nicolasstucki]: https://github.com/nicolasstucki +[@liufengyun]: https://github.com/liufengyun +[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain +[@biboudis]: https://github.com/biboudis +[@allanrenucci]: https://github.com/allanrenucci +[@Blaisorblade]: https://github.com/Blaisorblade +[@Duhemm]: https://github.com/Duhemm diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2018-07-06-ninth-dotty-milestone-release.md b/scala3doc/dotty-docs/docs/blog/_posts/2018-07-06-ninth-dotty-milestone-release.md new file mode 100644 index 000000000000..cefd8bd39e4d --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2018-07-06-ninth-dotty-milestone-release.md @@ -0,0 +1,207 @@ +--- +layout: blog-page +title: Announcing Dotty 0.9.0-RC1 +author: Allan Renucci +authorImg: images/allan.jpg +date: 2018-07-06 +--- + +Today, we are excited to release Dotty version 0.9.0-RC1. 
This release serves as a technology +preview that demonstrates new language features and the compiler supporting them. + +Dotty is the project name for technologies that are considered for inclusion in Scala 3. Scala has +pioneered the fusion of object-oriented and functional programming in a typed setting. Scala 3 will +be a big step towards realizing the full potential of these ideas. Its main objectives are to + +- become more opinionated by promoting programming idioms we found to work well, +- simplify where possible, +- eliminate inconsistencies and surprising behaviors, +- build on strong foundations to ensure the design hangs well together, +- consolidate language constructs to improve the language’s consistency, safety, ergonomics, and performance. + +You can learn more about Dotty on our [website](https://dotty.epfl.ch). + + + +This is our ninth scheduled release according to our [6-week release schedule](https://dotty.epfl.ch/docs/usage/version-numbers.html). +The [previous technology preview](https://github.com/lampepfl/dotty/releases/tag/0.8.0-RC1) added +support for sbt 1, introduced improved unchecked warnings and improved SAM type support. + +## What’s new in the 0.9.0-RC1 technology preview? + +### Improved REPL [#4680](https://github.com/lampepfl/dotty/pull/4680) +The REPL now uses [JLine 3](https://github.com/jline/jline3) under the hood which improves on +many aspects such as, auto-completions and multi-line editing. The REPL now also works on Windows! + + +### Documentation support in the IDE [#4461](https://github.com/lampepfl/dotty/pull/4461), [#4648](https://github.com/lampepfl/dotty/pull/4648) +The Dotty IDE will now display documentation while hovering over symbols that were previously +compiled by the Dotty compiler. In the future, we plan to let users query the documentation +in the REPL as well. + + +### Drop requirement that implicit functions must be non-empty [#4549](https://github.com/lampepfl/dotty/pull/4549) +We remove the arbitrary restriction that parameters of implicit functions must by non-empty. +We can now write: +```scala +type IntProducer = implicit () => Int + +def prod1: IntProducer = 1 +val prod2: IntProducer = 2 +``` + +An interesting observation is that by-name parameters can now be encoded as implicit function types: +```scala +def timed[T](op: => T): T = ... +def timed[T](op: implicit () => T): T = ... + +timed { + fetch(url) +} +``` + +Both definitions above are equivalent. + + +### Emit feature warnings for implicit conversions [#4229](https://github.com/lampepfl/dotty/pull/4229) +Implicit conversions are easily the most misused feature in Scala. We now emit feature warnings +when encountering an implicit conversion definition, just like Scala 2 does. + +In addition, we also emit a feature warning when an implicit conversion is used, +unless the conversion is: + +- an implicit class +- co-defined with the type to which it converts +- predefined in `scala.Predef` or is the `scala.reflect.Selectable.reflectiveSelect` conversion + (we might extend this to more conversions). + + +### Optimise s and raw interpolators [#3961](https://github.com/lampepfl/dotty/pull/3961) +`s` and `raw` string interpolators were known to be slower than their not type-safe counterparts: +```scala +s"Hello $name!" + +// compared to: +"Hello " + name + "!" +``` +The compiler will now desugar the former into the latter. 
Special thanks to +[Wojtek Swiderski](https://github.com/Wojtechnology) who contributed this feature to the Dotty +compiler! + + +### Support for compiler plugins [#3438](https://github.com/lampepfl/dotty/pull/3438) +Dotty now supports Compiler plugins. Compiler plugins let you customize the compiler pipeline +without having to modify the compiler source code. A major difference compared to Scala 2 is +that Dotty plugins must run after the type checker. Being able to influence normal type checking +is a powerful feature but for production usages, a predictable and consistent type checker is +more important. + +Starting with 1.1.5 Dotty compiler plugins can be used with `sbt`. Please refer to the `sbt` +[documentation](https://www.scala-sbt.org/1.x/docs/Compiler-Plugins.html) for more information. + +For more information, visit the [Compiler Plugin](https://dotty.epfl.ch/docs/reference/changed-features/compiler-plugins.html) +section of our documentation. + +## Trying out Dotty + +### sbt +Using sbt 1.1.5 or newer, do: + +```shell +sbt new lampepfl/dotty.g8 +``` + +This will setup a new sbt project with Dotty as compiler. For more details on +using Dotty with sbt, see the +[example project](https://github.com/lampepfl/dotty-example-project). + +### IDE support +Start using the Dotty IDE in any Dotty project by following +the [IDE guide](https://dotty.epfl.ch/docs/usage/ide-support.html). + + +### Standalone installation +Releases are available for download on the _Releases_ +section of the Dotty repository: +[https://github.com/lampepfl/dotty/releases](https://github.com/lampepfl/dotty/releases) + +We also provide a [homebrew](https://brew.sh/) package that can be installed by running: + +```shell +brew install lampepfl/brew/dotty +``` + +In case you have already installed Dotty via brew, you should instead update it: + +```shell +brew upgrade dotty +``` + +### Scastie +[Scastie], the online Scala playground, supports Dotty. This is an easy way to try Dotty without +installing anything. Note however that Scastie only supports Dotty 0.7.0-RC1. + +## Let us know what you think! +If you have questions or any sort of feedback, feel free to send us a message on our +[Gitter channel](https://gitter.im/lampepfl/dotty). If you encounter a bug, please +[open an issue on GitHub](https://github.com/lampepfl/dotty/issues/new). + +## Contributing +Thank you to all the contributors who made this release possible! + +According to `git shortlog -sn --no-merges 0.8.0..0.9.0-RC1` these are: + +``` + 200 Nicolas Stucki + 155 Martin Odersky + 71 Allan Renucci + 42 Paolo G. Giarrusso + 27 Aggelos Biboudis + 25 Guillaume Martres + 22 Martin Duhem + 10 Sebastian Nadorp + 10 Wojtek Swiderski + 6 Olivier Blanvillain + 5 benkobalog + 4 Ingar Abrahamsen + 3 Ankit Soni + 2 Liu Fengyun + 2 Olivier ROLAND + 2 Fabian Page + 1 Roberto Bonvallet + 1 Fengyun Liu + 1 Zoltán Elek + 1 benkbalog + 1 Glavo + 1 dieutth + 1 fschueler + 1 mentegy +``` + +If you want to get your hands dirty and contribute to Dotty, now is a good time to get involved! +Head to our [Getting Started page for new contributors](https://dotty.epfl.ch/docs/contributing/getting-started.html), +and have a look at some of the [good first issues](https://github.com/lampepfl/dotty/issues?q=is%3Aissue+is%3Aopen+label%3Aexp%3Anovice). +They make perfect entry-points into hacking on the compiler. + +We are looking forward to having you join the team of contributors. 
+ +## Library authors: Join our community build +Dotty now has a set of widely-used community libraries that are built against every nightly Dotty +snapshot. Currently this includes ScalaPB, algebra, scalatest, scopt and squants. +Join our [community build](https://github.com/lampepfl/dotty-community-build) +to make sure that our regression suite includes your library. + + +[Scastie]: https://scastie.scala-lang.org/?target=dotty + +[@odersky]: https://github.com/odersky +[@DarkDimius]: https://github.com/DarkDimius +[@smarter]: https://github.com/smarter +[@felixmulder]: https://github.com/felixmulder +[@nicolasstucki]: https://github.com/nicolasstucki +[@liufengyun]: https://github.com/liufengyun +[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain +[@biboudis]: https://github.com/biboudis +[@allanrenucci]: https://github.com/allanrenucci +[@Blaisorblade]: https://github.com/Blaisorblade +[@Duhemm]: https://github.com/Duhemm diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2018-10-10-10th-dotty-milestone-release.md b/scala3doc/dotty-docs/docs/blog/_posts/2018-10-10-10th-dotty-milestone-release.md new file mode 100644 index 000000000000..8bede806ac53 --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2018-10-10-10th-dotty-milestone-release.md @@ -0,0 +1,249 @@ +--- +layout: blog-page +title: Announcing Dotty 0.10.0-RC1 +author: Allan Renucci +authorImg: images/allan.jpg +date: 2018-10-10 +--- + +After a long summer break, we are excited to release Dotty version 0.10.0-RC1. +This release serves as a technology preview that demonstrates new language features and the +compiler supporting them. + +Dotty is the project name for technologies that are considered for inclusion in Scala 3. Scala has +pioneered the fusion of object-oriented and functional programming in a typed setting. Scala 3 will +be a big step towards realising the full potential of these ideas. Its main objectives are to + +- become more opinionated by promoting programming idioms we found to work well, +- simplify where possible, +- eliminate inconsistencies and surprising behaviours, +- build on strong foundations to ensure the design hangs well together, +- consolidate language constructs to improve the language’s consistency, safety, ergonomics, and + performance. + +You can learn more about Dotty on our [website](https://dotty.epfl.ch). + + + +This is our 10th scheduled release according to our +[6-week release schedule](https://dotty.epfl.ch/docs/usage/version-numbers.html). + +## What’s new in the 0.10.0-RC1 technology preview? + +### Java 9+ + +Dotty now supports the latest versions of Java including Java 11! + +### Type-level programming: Match Types + +We've introduced a new form of types called match types. A match types is a mechanism for checking a +type against a pattern. A match type reduces to one of a number of right hand sides, depending on a +scrutinee type. E.g: + +```scala +type Elem[X] = X match { + case String => Char + case Array[t] => t + case Iterable[t] => t +} +``` + +This defines a type that, depending on the scrutinee type `X`, can reduce to one of its right hand +sides. For instance, + +```scala +Elem[String] =:= Char +Elem[Array[Int]] =:= Int +Elem[List[Float]] =:= Float +Elem[Nil] =:= Nothing +``` + +Here `=:=` is understood to mean that left and right hand sides are mutually subtypes of each other. + +This feature is still experimental and subject to changes. 
For more information, visit the +[Match Types](https://dotty.epfl.ch/docs/reference/new-types/match-types.html) section of our documentation. + +### Documentation in the REPL + +The previous release added documentation support for the IDE. Users can now query the documentation +of sources previously compiled with Dotty within the REPL: + +```scala +scala> /** An object */ object O { /** A def */ def foo = 0 } +// defined object O + +scala> :doc O +/** An object */ + +scala> :doc O.foo +/** A def */ +``` + +### Tail-recursive methods can now be polymorphic + +Previously, a tail recursive call would be optimised only if the type arguments of the method +or the enclosing class did not change at call site. E.g. + +```scala +@tailrec def loop[T](x: T): Int = { + ... + loop[Int](1) +} +``` + +```shell + loop[Int](1) + ^^^^^^^^^^^^ + Cannot rewrite recursive call: it changes type arguments on a polymorphic recursive call +``` + +This restriction has now been removed. We also improve upon `scalac` which is not able to optimise +methods that change the type of `this` on a polymorphic recursive call. +[Examples](https://github.com/lampepfl/dotty/blob/7a45a4a386d33180e5b7b21aa74271a77cce4707/tests/neg-tailcall/tailrec.scala#L43-L44) +can be found in our test suite. + +### Experimental support for generic Tuples + +We augmented the `Tuple` class with generic methods such as `head`, `tail`, `apply`, `*:` and `++`. + +```scala +scala> val t0 = (1, "2", 3L) +val t0: (Int, String, Long) = (1,2,3) + +scala> val head = t0.head +val head: Int = 1 + +scala> val tail = t0.tail +val tail: (String, Long) = (2,3) + +scala> val t1 = 0.0 *: t0 +val t1: (Double, Int, String, Long) = (0.0,1,2,3) + +scala> val t2 = t0 ++ t0 +val t2: (Int, String, Long, Int, String, Long) = (1,2,3,1,2,3) +``` + +### And much more! + +Please read our [release notes](https://github.com/lampepfl/dotty/releases/tag/0.10.0-RC1) +for more details! + +## Breaking changes + +Dotty 0.10.0-RC1 requires sbt-dotty 0.2.4 and sbt 1.2.3 or newer. + +## Trying out Dotty + +### sbt + +You can setup a new sbt project with Dotty as the compiler by running: + +```shell +sbt new lampepfl/dotty.g8 +``` + +For more details on using Dotty with sbt, see the +[example project](https://github.com/lampepfl/dotty-example-project). + +### [Mill](http://www.lihaoyi.com/mill/) + +The Mill build tool version 0.2.6 introduced experimental support for Dotty. For more details on +using Dotty with Mill, see the +[example project](https://github.com/lampepfl/dotty-example-project/tree/mill). + +### IDE support + +Start using the Dotty IDE in any Dotty project by following +the [IDE guide](https://dotty.epfl.ch/docs/usage/ide-support.html). + +### Standalone installation + +Releases are available for download on the _Releases_ +section of the Dotty repository: +[https://github.com/lampepfl/dotty/releases](https://github.com/lampepfl/dotty/releases) + +For macOS users, we also provide a [homebrew](https://brew.sh/) package that can be installed by +running: + +```shell +brew install lampepfl/brew/dotty +``` + +In case you have already installed Dotty via `brew`, you should instead update it: + +```shell +brew upgrade dotty +``` + +## Let us know what you think! + +If you have questions or any sort of feedback, feel free to send us a message on our +[Gitter channel](https://gitter.im/lampepfl/dotty). If you encounter a bug, please +[open an issue on GitHub](https://github.com/lampepfl/dotty/issues/new). 
+ +## Contributing + +Thank you to all the contributors who made this release possible! + +According to `git shortlog -sn --no-merges 0.9.0..0.10.0-RC1` these are: + +``` + 219 Martin Odersky + 142 Nicolas Stucki + 67 Paolo G. Giarrusso + 52 Allan Renucci + 48 Guillaume Martres + 39 Martin Duhem + 23 Liu Fengyun + 15 Olivier Blanvillain + 10 Dmytro Melnychenko + 10 Abel Nieto + 10 Sébastien Doeraene + 7 Jaemin Hong + 7 Eugene Melekhov + 5 Saloni Vithalani + 3 Daniel Li + 3 Dale Wijnand + 3 Jasper Moeys + 2 lloydmeta + 2 Aggelos Biboudis + 2 Greg Pevnev + 1 Adriaan Moors + 1 Lukas Rytz + 1 Kazuhiro Sera + 1 Justin du Coeur, AKA Mark Waks + 1 Jan Rock + 1 Fengyun Liu + 1 Szymon Pajzert + 1 Chris Birchall + 1 benkobalog + 1 Martijn Hoekstra +``` + +If you want to get your hands dirty and contribute to Dotty, now is a good time to get involved! +Head to our [Getting Started page for new contributors](https://dotty.epfl.ch/docs/contributing/getting-started.html), +and have a look at some of the [good first issues](https://github.com/lampepfl/dotty/issues?q=is%3Aissue+is%3Aopen+label%3Aexp%3Anovice). +They make perfect entry points into hacking on the compiler. + +We are looking forward to having you join the team of contributors. + +## Library authors: Join our community build + +Dotty now has a set of widely-used community libraries that are built against every nightly Dotty +snapshot. Currently this includes ScalaPB, algebra, scalatest, scopt and squants. +Join our [community build](https://github.com/lampepfl/dotty-community-build) +to make sure that our regression suite includes your library. + +[Scastie]: https://scastie.scala-lang.org/?target=dotty + +[@odersky]: https://github.com/odersky +[@DarkDimius]: https://github.com/DarkDimius +[@smarter]: https://github.com/smarter +[@felixmulder]: https://github.com/felixmulder +[@nicolasstucki]: https://github.com/nicolasstucki +[@liufengyun]: https://github.com/liufengyun +[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain +[@biboudis]: https://github.com/biboudis +[@allanrenucci]: https://github.com/allanrenucci +[@Blaisorblade]: https://github.com/Blaisorblade +[@Duhemm]: https://github.com/Duhemm diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2018-11-30-11th-dotty-milestone-release.md b/scala3doc/dotty-docs/docs/blog/_posts/2018-11-30-11th-dotty-milestone-release.md new file mode 100644 index 000000000000..db3108b12eee --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2018-11-30-11th-dotty-milestone-release.md @@ -0,0 +1,228 @@ +--- +layout: blog-page +title: Announcing Dotty 0.11.0-RC1 +author: Allan Renucci +authorImg: images/allan.jpg +date: 2018-11-30 +--- + +Today we are excited to release Dotty version 0.11.0-RC1. +This release serves as a technology preview that demonstrates new language features and the +compiler supporting them. + +Dotty is the project name for technologies that are considered for inclusion in Scala 3. Scala has +pioneered the fusion of object-oriented and functional programming in a typed setting. Scala 3 will +be a big step towards realising the full potential of these ideas. 
Its main objectives are to + +- become more opinionated by promoting programming idioms we found to work well, +- simplify where possible, +- eliminate inconsistencies and surprising behaviours, +- build on strong foundations to ensure the design hangs well together, +- consolidate language constructs to improve the language’s consistency, safety, ergonomics, and + performance. + +You can learn more about Dotty on our [website](https://dotty.epfl.ch). + + + +This is our 11th scheduled release according to our +[6-week release schedule](https://dotty.epfl.ch/docs/usage/version-numbers.html). + +## What’s new in the 0.11.0-RC1 technology preview? + +### Opaque Type Aliases + +Opaque types aliases provide type abstraction without any overhead. Example: + +```scala +opaque type Duration = Long +``` + +This introduces `Duration` as a new type, which is implemented as a `Long` but is different from +it. The fact that `Duration` is the same as `Long` is only known in the companion object of +`Duration`. Here is a possible companion object: + +```scala +object Duration { + + // These are the ways to lift to the Duration type + def fromNanos(duration: Long): Duration = duration + def fromSeconds(duration: Long): Duration = duration * 1000000000 + + // This is the first way to unlift the Duration type + def toNanos(l: Duration): Long = l + + // Extension methods define opaque types' public APIs + implicit class DurationOps(self: Duration) extends AnyVal { + // This is the second way to unlift the Duration type + def toSeconds: Long = self / 1000000000 + def + (that: Duration): Duration = self + that + } +} +``` + +The companion object contains the `fromNanos` and `fromSeconds` methods that convert from longs to +`Duration` values. It also adds a `toNanos` function and a decorator that implements `+` on +duration values, as well as a conversion `toSeconds`. All of this is possible because within object +`Duration`, the type `Duration` is just an alias of `Long`. + +Outside the companion object, `Duration` is treated as a new abstract type. So the following +operations would be valid because they use functionality implemented in the `Duration` +object. + +```scala +val d1 = Duration.fromNanos(1000L) +val d2 = Duration.fromSeconds(2L) +val d3 = d1 + d2 +``` + +But the following operations would lead to type errors: + +```scala +val l: Long = d1 // error: found: Duration, required: Long +val d: Duration = 3L // error: found: Long(3L), required: Duration +d1 + 2L // error: found: Long(2L), required: Duration +d1 - d2 // error: `-` is not a member of Duration +``` + +### Worksheet Mode Support in Visual Studio Code + +Dotty IDE can now be used in Worksheet mode. A worksheet is a Scala file that is evaluated on save, +and the result of each expression is displayed in a column on the right of your program. Worksheets +are like a REPL session on steroids, and enjoy 1st class editor support: completions, hyperlinking, +interactive errors-as-you-type, etc. + +![]({{ site.baseurl }}/images/worksheets/worksheet-demo.gif "Run worksheet") + +For more information about the worksheets, see [Worksheet mode with Dotty +IDE](https://dotty.epfl.ch/docs/usage/worksheet-mode.html) + +### Various IDE improvements + +#### Help with method signatures + +When writing a method call, Dotty IDE will now show contextual information that helps filling in the +arguments of the method. 
+ +![]({{ site.baseurl }}/images/dotty-ide/signature-help.png "Signature help") + +#### Improved display of documentation in Dotty IDE + +In this release, we reworked how we show documentation inside the IDE. We now extract useful +information from the Scaladoc comment, then format it before we display it in the IDE. + +![]({{ site.baseurl }}/images/dotty-ide/documentation-hover.png "Documentation hover") + +### And much more! + +Please read our [release notes](https://github.com/lampepfl/dotty/releases/tag/0.11.0-RC1) +for more details! + +## Trying out Dotty + +### sbt + +You can set up a new sbt project with Dotty as the compiler by running: + +```shell +sbt new lampepfl/dotty.g8 +``` + +For more details on using Dotty with sbt, see the +[example project](https://github.com/lampepfl/dotty-example-project). + +### [Mill](http://www.lihaoyi.com/mill/) + +The Mill build tool version 0.2.6 introduced experimental support for Dotty. For more details on +using Dotty with Mill, see the +[example project](https://github.com/lampepfl/dotty-example-project/tree/mill). + +### IDE support + +Start using the Dotty IDE in any Dotty project by following +the [IDE guide](https://dotty.epfl.ch/docs/usage/ide-support.html). + +### Standalone installation + +Releases are available for download on the _Releases_ +section of the Dotty repository: +[https://github.com/lampepfl/dotty/releases](https://github.com/lampepfl/dotty/releases) + +For macOS users, we also provide a [homebrew](https://brew.sh/) package that can be installed by +running: + +```shell +brew install lampepfl/brew/dotty +``` + +In case you have already installed Dotty via `brew`, you should instead update it: + +```shell +brew upgrade dotty +``` + +## Let us know what you think! + +If you have questions or any sort of feedback, feel free to send us a message on our +[Gitter channel](https://gitter.im/lampepfl/dotty). If you encounter a bug, please +[open an issue on GitHub](https://github.com/lampepfl/dotty/issues/new). + +## Contributing + +Thank you to all the contributors who made this release possible! + +According to `git shortlog -sn --no-merges 0.10.0..0.11.0-RC1` these are: + +``` + 143 Martin Duhem + 104 Nicolas Stucki + 82 Martin Odersky + 60 Guillaume Martres + 35 Allan Renucci + 21 poechsel + 12 Olivier Blanvillain + 10 Liu Fengyun + 8 Aleksander Boruch-Gruszecki + 6 Tobias Bordenca + 5 Sébastien Doeraene + 4 Stéphane Micheloud + 3 João Pedro Evangelista + 3 Miles Sabin + 3 Neeraj Jaiswal + 3 Abel Nieto + 2 Ólafur Páll Geirsson + 2 Fengyun Liu + 2 veera venky + 1 mikhail + 1 Glavo + 1 0xflotus + 1 Paolo G. Giarrusso +``` + +If you want to get your hands dirty and contribute to Dotty, now is a good time to get involved! +Head to our [Getting Started page for new contributors](https://dotty.epfl.ch/docs/contributing/getting-started.html), +and have a look at some of the [good first issues](https://github.com/lampepfl/dotty/issues?q=is%3Aissue+is%3Aopen+label%3Aexp%3Anovice). +They make perfect entry points into hacking on the compiler. + +We are looking forward to having you join the team of contributors. + +## Library authors: Join our community build + +Dotty now has a set of widely-used community libraries that are built against every nightly Dotty +snapshot. Currently this includes ScalaPB, algebra, scalatest, scopt and squants. +Join our [community build](https://github.com/lampepfl/dotty-community-build) +to make sure that our regression suite includes your library. 
+ +[Scastie]: https://scastie.scala-lang.org/?target=dotty + +[@odersky]: https://github.com/odersky +[@DarkDimius]: https://github.com/DarkDimius +[@smarter]: https://github.com/smarter +[@felixmulder]: https://github.com/felixmulder +[@nicolasstucki]: https://github.com/nicolasstucki +[@liufengyun]: https://github.com/liufengyun +[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain +[@biboudis]: https://github.com/biboudis +[@allanrenucci]: https://github.com/allanrenucci +[@Blaisorblade]: https://github.com/Blaisorblade +[@Duhemm]: https://github.com/Duhemm diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2019-01-21-12th-dotty-milestone-release.md b/scala3doc/dotty-docs/docs/blog/_posts/2019-01-21-12th-dotty-milestone-release.md new file mode 100644 index 000000000000..1b24c359a2f6 --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2019-01-21-12th-dotty-milestone-release.md @@ -0,0 +1,224 @@ +--- +layout: blog-page +title: Announcing Dotty 0.12.0-RC1 +author: Aggelos Biboudis +authorImg: images/aggelos.jpg +date: 2019-01-21 +--- + +Happy New Year to all with the first release of Dotty for 2019! ✨🎊🎉 + +Today we are excited to release the version 0.12.0-RC1 of the Dotty compiler. +This release serves as a technology preview that demonstrates new language features and the +compiler supporting them. + +Dotty is the project name for technologies that are considered for inclusion in Scala 3. Scala has +pioneered the fusion of object-oriented and functional programming in a typed setting. Scala 3 will +be a big step towards realising the full potential of these ideas. Its main objectives are to + +- become more opinionated by promoting programming idioms we found to work well, +- simplify where possible, +- eliminate inconsistencies and surprising behaviours, +- build on strong foundations to ensure the design hangs well together, +- consolidate language constructs to improve the language’s consistency, safety, ergonomics, and + performance. + +You can learn more about Dotty on our [website](https://dotty.epfl.ch). + + + +This is our 12th scheduled release according to our +[6-week release schedule](https://dotty.epfl.ch/docs/usage/version-numbers.html). + +## What’s new in the 0.12.0-RC1 technology preview? + +### Extension Methods + +We are excited to announce that extension methods are now offered through dedicated language support! +Extension methods allow one to add methods to a type after the type is defined. +This is done by writing a method with a parameter for the type to be extended +_on the left-hand side_ of the method name: + +```scala +case class Circle(x: Double, y: Double, radius: Double) + +def (c: Circle) circumference: Double = c.radius * math.Pi * 2 +``` + +Extension methods are enabled when they are syntactically in scope (as above), +or when their enclosing instance is present in the implicit scope of the type that they extend, +as we exemplify below. + +Extension methods were previously encoded in a rather roundabout way via the implicit class pattern. +Such encoding required a lot of boilerplate, especially when defining type classes. +In Dotty, this is no longer the case, +and type classes with infix syntax become very straightforward to define! 
+For example, consider: + +```scala +trait Semigroup[T] { + def (x: T) combine (y: T): T +} +implicit val IntSemigroup: Semigroup[Int] = new { + def (x: Int) combine (y: Int): Int = x + y +} +implicit def ListSemigroup[T]: Semigroup[List[T]] = new { + def (x: List[T]) combine (y: List[T]): List[T] = x ::: y +} +1.combine(2) // == 3 +List(1,2).combine(List(3,4)) // == List(1,2,3,4) +``` + +This works because the `combine` extension methods of `IntSemigroup` and `ListSemigroup` are available +from the relevant implicit scopes. + +Read the [full documentation](https://dotty.epfl.ch/docs/reference/contextual/extension-methods.html) about generic extension methods, higher-kinded extension methods, and more. + +### TASTy Reflect goodies + +We implement a new decompiler for TASTy files and we also offer a new VS Code Extension. +The decompiler allows to view both decompiled scala source code and the pretty printed TASTy tree when opening a .tasty file. +The feature is similar to opening .class files in IntelliJ. + +![]({{ site.baseurl }}/images/dotty-ide/decompiler.png "Decompiler") + +The decompiler can be invoked with the corresponding flag: `dotc -decompile xyz.tasty`. + +On the programmatic side of TASTy we are rolling out changes according to our plan discussed at [Macros: The Plan for Scala 3](https://www.scala-lang.org/blog/2018/04/30/in-a-nutshell.html). +In this release, we make progress following the _Next Steps_ of the aforementioned blogpost by offering constructors that work directly with reflect trees. +Consequently, TASTy extractors meet their dual, TASTy constructors! +We also connect the new lower-level reflection layer to the existing principled macro system based on quotes and splices offering, two new expression methods for `Expr[T]`: + +- `unseal` that unseals an `Expr[T]` (non traversable code) into a `Term` and +- `seal` that seals back a `Term` into an `Expr[T]`. + +Read the [relevant documentation](https://dotty.epfl.ch/docs/reference/metaprogramming/tasty-reflect.html) to learn how to go from quotes and splices to TASTys Reflect trees and back . + +### Alignments with the Scala Improvement Process + +In this version we improve the implementation of by-name implicits making it compliant with the [By-name Implicits](https://docs.scala-lang.org/sips/byname-implicits.html) SIP and we implement the `ValueOf` operator which yields the unique value of types with a single inhabitant, effectively syncing it up with the [Literal-Based Singleton Types](https://docs.scala-lang.org/sips/42.type.html) SIP. + +### Improvements to GADT support + +In this release, we're also rolling out the first batch of improvements to GADT support - namely, variable unification. +To keep it short, from knowing that `A <: B` and `B <: A`, we can now deduce that `A = B`, and from `A = B` and `B <: C` we deduce that `A <: C`. +This kind of reasoning is necessary for many advanced GADT usages! + +### And much more! + +Please read our [release notes](https://github.com/lampepfl/dotty/releases/tag/0.12.0-RC1) +for more details! + +## Trying out Dotty + +### sbt + +You can set up a new sbt project with Dotty as the compiler by running: + +```shell +sbt new lampepfl/dotty.g8 +``` + +For more details on using Dotty with sbt, see the +[example project](https://github.com/lampepfl/dotty-example-project). + +### [Mill](http://www.lihaoyi.com/mill/) + +The Mill build tool version 0.2.6 introduced experimental support for Dotty. 
For more details on +using Dotty with Mill, see the +[example project](https://github.com/lampepfl/dotty-example-project/tree/mill). + +### IDE support + +Start using the Dotty IDE in any Dotty project by following +the [IDE guide](https://dotty.epfl.ch/docs/usage/ide-support.html). + +### Standalone installation + +Releases are available for download on the _Releases_ +section of the Dotty repository: +[https://github.com/lampepfl/dotty/releases](https://github.com/lampepfl/dotty/releases) + +For macOS users, we also provide a [homebrew](https://brew.sh/) package that can be installed by +running: + +```shell +brew install lampepfl/brew/dotty +``` + +In case you have already installed Dotty via `brew`, you should instead update it: + +```shell +brew upgrade dotty +``` + +## Let us know what you think! + +If you have questions or any sort of feedback, feel free to send us a message on our +[Gitter channel](https://gitter.im/lampepfl/dotty). If you encounter a bug, please +[open an issue on GitHub](https://github.com/lampepfl/dotty/issues/new). + +## Contributing + +Thank you to all the contributors who made this release possible! + +According to `git shortlog -sn --no-merges 0.11.0-RC1..0.12.0-RC1` these are: + +``` + 109 Martin Odersky + 64 Nicolas Stucki + 34 Martin Duhem + 25 Allan Renucci + 16 Guillaume Martres + 12 Aleksander Boruch-Gruszecki + 11 Tobias Bordenca + 10 Miles Sabin + 10 Liu Fengyun + 7 Aggelos Biboudis + 7 Jaemin Hong + 5 Paolo G. Giarrusso + 3 duanebester + 3 Dotty CI + 2 Sébastien Doeraene + 2 Saurabh Rawat + 2 Vlastimil Dort + 1 tOverney + 1 Denis Buzdalov + 1 Hermes Espínola González + 1 Ivan Youroff + 1 João Pedro de Carvalho + 1 Neeraj Jaiswal + 1 Olivier Blanvillain + 1 poechsel + 1 Abel Nieto +``` + +If you want to get your hands dirty and contribute to Dotty, now is a good time to get involved! +Head to our [Getting Started page for new contributors](https://dotty.epfl.ch/docs/contributing/getting-started.html), +and have a look at some of the [good first issues](https://github.com/lampepfl/dotty/issues?q=is%3Aissue+is%3Aopen+label%3Aexp%3Anovice). +They make perfect entry points into hacking on the compiler. + +We are looking forward to having you join the team of contributors. + +## Library authors: Join our community build + +Dotty now has a set of widely-used community libraries that are built against every nightly Dotty +snapshot. Currently this includes ScalaPB, algebra, scalatest, scopt and squants. +Join our [community build](https://github.com/lampepfl/dotty-community-build) +to make sure that our regression suite includes your library. 
+ +[Scastie]: https://scastie.scala-lang.org/?target=dotty + +[@odersky]: https://github.com/odersky +[@DarkDimius]: https://github.com/DarkDimius +[@smarter]: https://github.com/smarter +[@felixmulder]: https://github.com/felixmulder +[@nicolasstucki]: https://github.com/nicolasstucki +[@liufengyun]: https://github.com/liufengyun +[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain +[@biboudis]: https://github.com/biboudis +[@allanrenucci]: https://github.com/allanrenucci +[@Blaisorblade]: https://github.com/Blaisorblade +[@Duhemm]: https://github.com/Duhemm +[@AleksanderBG]: https://github.com/AleksanderBG +[@milessabin]: https://github.com/milessabin \ No newline at end of file diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2019-03-05-13th-dotty-milestone-release.md b/scala3doc/dotty-docs/docs/blog/_posts/2019-03-05-13th-dotty-milestone-release.md new file mode 100644 index 000000000000..6ddf49c99ad6 --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2019-03-05-13th-dotty-milestone-release.md @@ -0,0 +1,446 @@ +--- +layout: blog-page +title: Announcing Dotty 0.13.0-RC1 with Spark support, top level definitions and redesigned implicits +author: Aggelos Biboudis +authorImg: images/aggelos.jpg +date: 2019-03-05 +--- + +Hello hello! This is the second release for 2019. Spark, top level definitions +and redesigned implicits ✨🎊🎉 are the most important inclusions in this release +and you will understand why we are super excited, in a bit! + +Without further ado, today we release the version 0.13.0-RC1 of the Dotty +compiler. This release serves as a technology preview that demonstrates new +language features and the compiler supporting them. + +Dotty is the project name for technologies that are being considered for +inclusion in Scala 3. Scala has pioneered the fusion of object-oriented and +functional programming in a typed setting. Scala 3 will be a big step towards +realising the full potential of these ideas. Its main objectives are to + +- become more opinionated by promoting programming idioms we found to work well, +- simplify where possible, +- eliminate inconsistencies and surprising behaviours, +- build on strong foundations to ensure the design hangs together well, +- consolidate language constructs to improve the language’s consistency, safety, ergonomics, and + performance. + +You can learn more about Dotty on our [website](https://dotty.epfl.ch). + + + +This is our 13th scheduled release according to our +[6-week release schedule](https://dotty.epfl.ch/docs/usage/version-numbers.html). + +# What’s new in the 0.13.0-RC1 technology preview? + +## Experimental support for Spark + +Dotty projects have always been able to [depend on Scala 2 +libraries](https://github.com/lampepfl/dotty-example-project#getting-your-project-to-compile-with-dotty), +and this usually works fine (as long as the Dotty code does not call a Scala 2 +macro directly). However, [Spark](http://spark.apache.org/) was known to not work +correctly as it heavily relies on Java serialization which we were not fully +supporting. + +Meanwhile, at EPFL, we've started updating our Scala courses to use Dotty +instead of Scala 2, the *Functional Programming* course given last semester went +smoothly, but the *Parallelism and Concurrency* course given in the +Spring semester teaches Spark, which means we needed to support it in Dotty! 
+
+Luckily, this turned out to be mostly straightforward: we adopted the [object
+serialization scheme](https://github.com/lampepfl/dotty/pull/5775) and [lambda
+serialization scheme](https://github.com/lampepfl/dotty/pull/5837) pioneered by
+Scala 2, and that was enough to make our Spark assignments run correctly! This
+doesn't mean that our support is perfect, however, so don't hesitate to [open an
+issue](http://github.com/lampepfl/dotty/issues) if something is amiss.
+
+## Introducing top level definitions
+
+_Top level_ definitions are now supported. This means that package objects are
+now redundant and will be phased out; all kinds of definitions can now be
+written at the top level.
+
+```scala
+package p
+
+type Labelled[T] = (String, T)
+
+val a: Labelled[Int] = ("count", 1)
+def b = a._2
+```
+
+You can read about [dropping package
+objects](https://dotty.epfl.ch/docs/reference/dropped-features/package-objects.html)
+at the documentation linked or at the relevant PR
+[#5754](https://github.com/lampepfl/dotty/pull/5754).
+
+## All things impl... implied
+
+Scala's implicits are its most distinguished feature. They are _the_ fundamental
+way to abstract over context. They represent a single concept with an extremely
+varied number of use cases, among them: implementing type classes, establishing
+context, dependency injection, expressing capabilities, computing new types and
+proving relationships between them.
+
+However, with great power comes great responsibility. The current design of
+implicits has shown some limitations, which we have been trying to identify and
+address to make Scala a clearer and more pleasant language. First of all, we
+found that the syntactic similarity was too great between implicit _conversions_
+and implicit _values_ that depend on other implicit values. Both of them appear
+in the snippet below:
+
+```scala
+implicit def i1(implicit x: T): C[T] = ... // 1: conditional implicit value
+implicit def i2(x: T): C[T] = ...          // 2: implicit conversion
+```
+
+Some users used implicit conversions in an unprincipled manner, mostly as a way
+to declutter code. However, while implicit conversions can be useful to remove
+clutter, their abuse makes it harder for people to reason about the code.
+
+The `implicit` keyword is used for both implicit conversions and conditional
+implicit values, and we identified that their semantic differences must be
+communicated more clearly syntactically. Furthermore, the `implicit` keyword is
+ascribed too many overloaded meanings in the language (implicit vals, defs,
+objects, parameters). For instance, a newcomer can easily confuse the two
+examples above, although they demonstrate completely different things: a
+type class instance is an implicit object or val if unconditional, and an
+implicit def with implicit parameters if conditional; yet all of them look
+surprisingly similar syntactically. Another consideration is that the
+`implicit` keyword annotates a whole parameter section instead of a single
+parameter, and passing an argument to an implicit parameter looks like a regular
+application. This is problematic because it can create confusion regarding what
+parameter gets passed in a call. Last but not least, sometimes implicit
+parameters are merely propagated in nested function calls and not used at all,
+so giving names to implicit parameters is often redundant and only adds noise to
+a function signature.
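+
+To make the point about explicit arguments concrete, here is a minimal Scala 2 style sketch
+(the `Session`/`User` names are invented for this example, they are not part of any API):
+
+```scala
+case class User(name: String)
+case class Session(user: User)
+
+// The context is carried by an implicit parameter
+def currentUser(implicit session: Session): User = session.user
+
+implicit val defaultSession: Session = Session(User("guest"))
+val adminSession = Session(User("admin"))
+
+currentUser               // implicit argument filled in from scope: User(guest)
+currentUser(adminSession) // explicitly supplying the implicit argument looks like a regular call
+```
+
+The new design makes such call sites explicit instead (see the `max(2, 3) given IntOrd` example below).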
+ +Consequently, we introduce two new language features: + +1. _implied instance definitions_ designated syntactically by the scheme `implied ... for` and, +2. _inferable parameters_ designated by the keyword `given`. + +In the code below we demonstrate both of them. This code defines a trait `Ord` +and two `implied` instance definitions. `IntOrd` defines an `implied` instance +for the type `Ord[Int]` whereas `ListOrd[T]` defines implied instances of +`Ord[List[T]]` for all types `T` that come with an implied `Ord[T]` instance +themselves. The `given` clause in `ListOrd` defines an _inferable parameter_. + +```scala +trait Ord[T] { + def compare(x: T, y: T): Int + def (x: T) < (y: T) = compare(x, y) < 0 + def (x: T) > (y: T) = compare(x, y) > 0 +} + +implied IntOrd for Ord[Int] { + def compare(x: Int, y: Int) = + if (x < y) -1 else if (x > y) +1 else 0 +} + +implied ListOrd[T] given (ord: Ord[T]) for Ord[List[T]] { + def compare(xs: List[T], ys: List[T]): Int = (xs, ys) match { + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = ord.compare(x, y) + if (fst != 0) fst else xs1.compareTo(ys1) + } +} +``` + +A `given` clause can also designate an inferable parameter for functions: + +```scala +def max[T](x: T, y: T) given (ord: Ord[T]): T = + if (ord.compare(x, y) < 1) y else x +``` + +With this scheme all invocations of the `max` function below are equally valid: +```scala +max(2, 3) given IntOrd +max(List(1, 2, 3), Nil) +max(2, 3) +``` + +We introduce _Anonymous Implied Instances_ which are used when we do not need a name for an implied +instance: + +```scala +implied for Ord[Int] { ... } +``` + +For convenience, we also introduce _Implied Alias Instances_. They offer aliases +for implied instances. For example, the line below offers an alias with the name +`ctx` (could also be anonymous if name can be omitted). Each time an implied +instance of `ExecutionContext` is demanded the right-hand side is returned. + +```scala +implied ctx for ExecutionContext = currentThreadPool().context +``` + +We have also added a synonym to `implicitly`, which is often more natural to +spell out in user code. Functions like `the` that have only _inferable +parameters_ are also called _context queries_ from now on. Consequently, to +summon an implied instance of `Ord[List[Int]]` we write: + +```scala +the[Ord[List[Int]]] +``` + +How do we import implied instances in scope? From now on, normal import clauses +fetch all definitions *except* implied instance into scope whereas _Implied +Imports_ bring only implied instances in scope. + +```scala +object A { + class TC + implied tc for TC + def f given TC = ??? +} +object B { + import A._ // normal import clause + import implied A._ // implied import clause +} +``` + +**You can read more about** [implied +imports](https://dotty.epfl.ch/docs/reference/contextual/import-delegate.html) +from the docs or the relevant PR +[#5868](https://github.com/lampepfl/dotty/pull/5868). + +As we mentioned above, *context queries* are functions with (only) inferable +parameters. Here is an example of such a function: + +```scala +type Contextual[T] = given Context => T +``` + +Context queries--previously named implicit function types (IFTs)--are now also +expressed with `given`, providing types for first-class context queries. This is +merely an alignment of IFTs into the new scheme. 
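+
+As a rough sketch of how such a type can be used with the syntax described in this post (the names below are made up purely for illustration):
+
+```scala
+class TransactionContext(val id: Int)
+
+type Transactional[T] = given TransactionContext => T
+
+// inside the body, the inferable TransactionContext is in scope
+// and can be summoned with `the`
+def log(msg: String): Transactional[Unit] =
+  println(s"[tx ${the[TransactionContext].id}] $msg")
+
+implied ctx for TransactionContext = new TransactionContext(42)
+
+def report(): Unit = log("writing rows") // prints: [tx 42] writing rows
+```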
+ +**You can read more about** the alternative to implicits through the *Contextual +Abstractions* section of our documentation or for a deep dive from the relevant +PR chain that originated from +[#5458](https://github.com/lampepfl/dotty/pull/5458). The syntax changes for new +implicits are summarized in +[#5825](https://github.com/lampepfl/dotty/pull/5825). + +This release offers the support for _type class derivation_ as a language +feature. Type class derivation is a way to generate instances of certain type +classes automatically or with minimal code hints, and is now supported natively +with *dedicated language support*. A type class in this sense is any trait or +class with a type parameter that describes the type being operated on. Commonly +used examples are `Ordering`, `Show`, or `Pickling`. We introduce a new +`derives` clause that generates implied instances of the `Eql`, `Ordering`, and +`Pickling` traits in the companion object `Tree`. Take a look at the example +below: + +```scala +enum Tree[T] derives Eql, Ordering, Pickling { + case Branch(left: Tree[T], right: Tree[T]) + case Leaf(elem: T) +} +``` + +where the generated implied instances are the ones below: +```scala +implied [T: Eql] for Eql[Tree[T]] = Eql.derived +implied [T: Ordering] for Ordering[Tree[T]] = Ordering.derived +implied [T: Pickling] for Pickling[Tree[T]] = Pickling.derived +``` + +Note, the new syntax: + +```scala +A extends B, C { ... } +``` + +which replaces: + +```scala +A extends B with C { ... } +``` + +With type class derivation we can also derive types. A trait or class can appear +in a derives clause if its companion object defines a method named `derived`. +The type and implementation of a `derived` method are arbitrary, but typically +it has a definition like this: + +```scala +def derived[T] given Generic[T] = ... +``` + +**You can read more about** [Type class +Derivation](https://dotty.epfl.ch/docs/reference/contextual/derivation.html) or +have a deep dive at the relevant PRs: +[#5540](https://github.com/lampepfl/dotty/pull/5540) and +[#5839](https://github.com/lampepfl/dotty/pull/5839). + +_Multiversal equality_ is now supported through the `Eql` marker trait (renamed +from `Eq` to differentiate it from Cats' `Eq`). For example, in order to be able +to compare integers with strings now, instead of a custom implicit we can +provide a derived implicit instance: + +```scala +implied for Eql[Int, String] = Eql.derived +``` + +**You can read more about** how we based multiversal equality on type class derivation through +the relevant PR [#5843](https://github.com/lampepfl/dotty/pull/5843). + +_Implicit conversions_ are now defined by implied instances of the +`scala.Conversion` class. For example: + +```scala +implied for Conversion[String, Token] { + def apply(str: String): Token = new KeyWord(str) +} +``` + +**Note:** that these release notes contain only a brief summary of the new +features, for more details please read our documentation page under the new +section named [*Contextual Abstractions*](https://dotty.epfl.ch/docs/). Equally +important with the documentation of each feature, please consult the +[Relationship with Scala 2 Implicits](https://dotty.epfl.ch/docs/reference/contextual/relationship-implicits.html) section as well. + +## Implicit resolution rule changes + +PR [#5887](https://github.com/lampepfl/dotty/pull/5887) applies the following +changes to implicit resolution: + +1. nested implicits always take precedence over outer ones +2. 
no more shadowing checks +3. package prefixes are not considered. + +## SemanticDB generator + +[SemanticDB](https://github.com/scalameta/scalameta/tree/master/semanticdb) is a +data model for semantic information such as symbols and types about programs in +Scala and other languages. SemanticDB decouples production and consumption of +semantic information, establishing documented means for communication between +tools. With PR [#5761](https://github.com/lampepfl/dotty/pull/5761) we add the +first prototype for the generation of SemanticDB information from TASTy. + +## And much more! + +Please read our [release notes](https://github.com/lampepfl/dotty/releases/tag/0.13.0-RC1) +for more details! + +# Trying out Dotty + +## sbt + +You can set up a new sbt project with Dotty as the compiler by running: + +```shell +sbt new lampepfl/dotty.g8 +``` + +For more details on using Dotty with sbt, see the +[example project](https://github.com/lampepfl/dotty-example-project). + + + +## IDE support + +Start using the Dotty IDE in any Dotty project by following +the [IDE guide](https://dotty.epfl.ch/docs/usage/ide-support.html). + +## Standalone installation + +Releases are available for download on the _Releases_ +section of the Dotty repository: +[https://github.com/lampepfl/dotty/releases](https://github.com/lampepfl/dotty/releases) + +For macOS users, we also provide a [homebrew](https://brew.sh/) package that can be installed by +running: + +```shell +brew install lampepfl/brew/dotty +``` + +In case you have already installed Dotty via `brew`, you should instead update it: + +```shell +brew upgrade dotty +``` + +# Let us know what you think! + +If you have questions or any sort of feedback, feel free to send us a message on our +[Gitter channel](https://gitter.im/lampepfl/dotty). If you encounter a bug, please +[open an issue on GitHub](https://github.com/lampepfl/dotty/issues/new). + +## Contributing + +Thank you to all the contributors who made this release possible! + +According to `git shortlog -sn --no-merges 0.12.0-RC1..0.13.0-RC1` these are: + +``` + 309 Martin Odersky + 116 Nicolas Stucki + 52 Guillaume Martres + 42 poechsel + 22 Aggelos Biboudis + 20 Paolo G. Giarrusso + 19 Olivier Blanvillain + 11 Liu Fengyun + 5 Allan Renucci + 4 Miles Sabin + 3 Tobias Bordenca + 3 Lionel Parreaux + 3 Abel Nieto + 2 Lukas Rytz + 1 lpwisniewski + 1 Adriaan Moors + 1 Georg Schmid + 1 Jentsch + 1 Marc Karassev + 1 Daniel Murray + 1 Olivier ROLAND + 1 Raphael Jolly + 1 Stéphane Micheloud + 1 Sébastien Doeraene + 1 Umayah Abdennabi +``` + +If you want to get your hands dirty and contribute to Dotty, now is a good time to get involved! +Head to our [Getting Started page for new contributors](https://dotty.epfl.ch/docs/contributing/getting-started.html), +and have a look at some of the [good first issues](https://github.com/lampepfl/dotty/issues?q=is%3Aissue+is%3Aopen+label%3Aexp%3Anovice). +They make perfect entry points into hacking on the compiler. + +We are looking forward to having you join the team of contributors. + +## Library authors: Join our community build + +Dotty now has a set of widely-used community libraries that are built against every nightly Dotty +snapshot. Currently this includes ScalaPB, algebra, scalatest, scopt and squants. +Join our [community build](https://github.com/lampepfl/dotty-community-build) +to make sure that our regression suite includes your library. 
+ +[Scastie]: https://scastie.scala-lang.org/?target=dotty + +[@odersky]: https://github.com/odersky +[@DarkDimius]: https://github.com/DarkDimius +[@smarter]: https://github.com/smarter +[@felixmulder]: https://github.com/felixmulder +[@nicolasstucki]: https://github.com/nicolasstucki +[@liufengyun]: https://github.com/liufengyun +[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain +[@biboudis]: https://github.com/biboudis +[@allanrenucci]: https://github.com/allanrenucci +[@Blaisorblade]: https://github.com/Blaisorblade +[@Duhemm]: https://github.com/Duhemm +[@AleksanderBG]: https://github.com/AleksanderBG +[@milessabin]: https://github.com/milessabin diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2019-04-15-14th-dotty-milestone-release.md b/scala3doc/dotty-docs/docs/blog/_posts/2019-04-15-14th-dotty-milestone-release.md new file mode 100644 index 000000000000..492de82590d4 --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2019-04-15-14th-dotty-milestone-release.md @@ -0,0 +1,200 @@ +--- +layout: blog-page +title: Announcing Dotty 0.14.0-RC1 with export, immutable arrays, creator applications and more +author: Anatolii Kmetiuk +authorImg: images/anatolii.png +date: 2019-04-15 +--- + +Hello! This is the 14th release of Dotty. Some of the most interesting changes in this release include the new `export`, the dual of `import`, feature, an immutable array type and the creator applications syntax. + +This release serves as a technology preview that demonstrates new +language features and the compiler supporting them. + +Dotty is the project name for technologies that are being considered for +inclusion in Scala 3. Scala has pioneered the fusion of object-oriented and +functional programming in a typed setting. Scala 3 will be a big step towards +realising the full potential of these ideas. Its main objectives are to + +- become more opinionated by promoting programming idioms we found to work well, +- simplify where possible, +- eliminate inconsistencies and surprising behaviours, +- build on strong foundations to ensure the design hangs together well, +- consolidate language constructs to improve the language’s consistency, safety, ergonomics, and + performance. + +You can learn more about Dotty on our [website](https://dotty.epfl.ch). + + + +This is our 14th scheduled release according to our +[6-week release schedule](https://dotty.epfl.ch/docs/usage/version-numbers.html). + +# What’s new in the 0.14.0-RC1 technology preview? + +## Export as a dual of Import + +A new `export` keyword is added to the language that defines aliases for selected members of an object. Consider the following example: + +```scala +class BitMap +class InkJet +class Printer { + type PrinterType + def print(bits: BitMap): Unit = ??? + def status: List[String] = ??? +} +class Scanner { + def scan(): BitMap = ??? + def status: List[String] = ??? +} +class Copier { + private val printUnit = new Printer { type PrinterType = InkJet } + private val scanUnit = new Scanner + export scanUnit.scan + export printUnit.{status => _, _} + def status: List[String] = printUnit.status ++ scanUnit.status +} +``` + +When defined like this, the `Copier` class defines aliases of the `scanner`'s `scan` method and all the methods of `printUnit` except the `status` method. 
You can hence call them on the `Copier` as follows: + +```scala +val copier = new Copier +copier.print(copier.scan()) +``` + +The motivation for this change is to promote composition over inheritance. In OOP languages it is easy to define inheritance but the above example would be tricky if you follow the composition route. One would need to implement proxy methods in the `Copier` to delegate to those of the `Scanner` and the `Printer`. + +With the `export` feature, making the `Copier` behave as the `Printer` and the `Scanner` became much more ergonomic. Also, note the fine-grained control over which methods are exposed in cases of the possible method collision, as shown with the `status` method example. + +For more information, please read more in the [documentation](https://dotty.epfl.ch/docs/reference/other-new-features/export.html). + + +## An immutable array type +A new type, `scala.IArray[T]`, is added, which is an immutable version of the `Array` type. Its implementation deserves a special attention, as it uses the new Dotty features in an elegant way (the below is an abstract from the corresponding [commit](https://github.com/lampepfl/dotty/commit/af2a0e66eb4b1204eac5dcb1d979486b92ef93d7#diff-156dc405d9f228bbc0fe406dfba63f65): + +```scala +opaque type IArray[T] = Array[T] + +object IArray { + + implied arrayOps { + inline def (arr: IArray[T]) apply[T] (n: Int): T = (arr: Array[T]).apply(n) + inline def (arr: IArray[T]) length[T] : Int = (arr: Array[T]).length + } + def apply[T: ClassTag](xs: T*): IArray[T] = Array(xs: _*) + /*...*/ +} +``` + +Essentially, the above defines a wrapper around the ordinary `Array` which exposes only its `apply` (to get an element by index) and `length` methods: + +- `opaque type IArray[T]` defines a type which is known to be an `Array`, but this information is known only in its companion object `IArray`. To the rest of the world, this information is not available. +- The `implied arrayOps` implied instance defines the extension methods that expose the operations available on the `IArray` type. +- The extension methods, `apply` and `length`, delegate to these of the `Array` type. These methods are inlined which means the performance footprint is the same as that of the original `Array` methods. +- Because the methods are `inline` and because the `IArray` type is `opaque`, we need the `(arr: Array[T])` casts. `IArray` is known to be equal to `Array` only inside the `IArray` object and the inlining will cause the extension methods' bodies to appear outside the `IArray` object when these methods are called. + +## Creator Applications +This new language feature is a generalisation of the ability to construct case classes without the `new` keyword: + +```scala +class StringBuilder(s: String) { + def this() = this("") +} +StringBuilder("abc") // same as new StringBuilder("abc") +StringBuilder() // same as new StringBuilder() +``` + +The motivation for the feature is mainly ergonomic. To make it possible, a new interpretation was added to a function call `f(a)`. 
Previously, the rules were as follows: + +Given a function call `f(args)`, + + - if `f` is a method applicable to `args`, typecheck `f(args)` unchanged, + - otherwise, if `f` has an `apply` method applicable to `args` as a member, continue with `f.apply(args)`, + - otherwise, if `f` is of the form `p.m` and there is an implicit conversion `c` applicable to `p` so that `c(p).m` is applicable to `args`, continue with `c(p).m(args)` + + There's now a fourth rule following these rules: + + - otherwise, if `f` is syntactically a stable identifier, and `new f` where `f` is interpreted as a type identifier is applicable to `args`, continue with `new f(args)`. + +For more information, please see the [documentation](https://dotty.epfl.ch/docs/reference/other-new-features/creator-applications.html). + +## Other changes + +Some of the other changes include: + +- `infer` method renamed to `the`, the semantics of which is now the same as that of the `the` method of Shapeless. Namely, the implicits are resolved more precisely – see this [gist](https://gist.github.com/milessabin/8833a1dbf7e8245b30f8) for an example in Shapeless, and the Dotty [documentation](https://dotty.epfl.ch/docs/reference/contextual/given-clauses.html#querying-implied-instances) for more details. +- The syntax of quoting and splicing was changed. Now the quoting is expressed via `'{ ... }` and `'[...]` and splicing – via `${...}` and `$id`. Please see the [documentation](https://dotty.epfl.ch/docs/reference/metaprogramming/macros.html) for more details on these features. + +# Let us know what you think! + +If you have questions or any sort of feedback, feel free to send us a message on our +[Gitter channel](https://gitter.im/lampepfl/dotty). If you encounter a bug, please +[open an issue on GitHub](https://github.com/lampepfl/dotty/issues/new). + +## Contributing + +Thank you to all the contributors who made this release possible! + +According to `git shortlog -sn --no-merges 0.13.0-RC1..0.14.0-RC1` these are: + +``` + 214 Martin Odersky + 151 Nicolas Stucki + 71 Liu Fengyun + 53 Guillaume Martres + 26 Olivier Blanvillain + 10 Aleksander Boruch-Gruszecki + 9 Aggelos Biboudis + 6 Miles Sabin + 4 Allan Renucci + 4 Dale Wijnand + 3 Anatolii Kmetiuk + 2 Fengyun Liu + 2 Alex Zolotko + 1 gnp + 1 tim-zh + 1 Dmitry Petrashko + 1 Dotty CI + 1 Jasper Moeys + 1 Jentsch + 1 Jim Van Horn + 1 Lionel Parreaux + 1 Master-Killer + 1 Olivier ROLAND + 1 Robert Stoll + 1 Seth Tisue + 1 Tomasz Godzik + 1 Victor +``` + +If you want to get your hands dirty and contribute to Dotty, now is a good time to get involved! +Head to our [Getting Started page for new contributors](https://dotty.epfl.ch/docs/contributing/getting-started.html), +and have a look at some of the [good first issues](https://github.com/lampepfl/dotty/issues?q=is%3Aissue+is%3Aopen+label%3Aexp%3Anovice). +They make perfect entry points into hacking on the compiler. + +We are looking forward to having you join the team of contributors. + +## Library authors: Join our community build + +Dotty now has a set of widely-used community libraries that are built against every nightly Dotty +snapshot. Currently this includes ScalaPB, algebra, scalatest, scopt and squants. +Join our [community build](https://github.com/lampepfl/dotty-community-build) +to make sure that our regression suite includes your library. 
+ +[Scastie]: https://scastie.scala-lang.org/?target=dotty + +[@odersky]: https://github.com/odersky +[@DarkDimius]: https://github.com/DarkDimius +[@smarter]: https://github.com/smarter +[@felixmulder]: https://github.com/felixmulder +[@nicolasstucki]: https://github.com/nicolasstucki +[@liufengyun]: https://github.com/liufengyun +[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain +[@biboudis]: https://github.com/biboudis +[@allanrenucci]: https://github.com/allanrenucci +[@Blaisorblade]: https://github.com/Blaisorblade +[@Duhemm]: https://github.com/Duhemm +[@AleksanderBG]: https://github.com/AleksanderBG +[@milessabin]: https://github.com/milessabin diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2019-05-23-15th-dotty-milestone-release.md b/scala3doc/dotty-docs/docs/blog/_posts/2019-05-23-15th-dotty-milestone-release.md new file mode 100644 index 000000000000..68337d78ca8c --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2019-05-23-15th-dotty-milestone-release.md @@ -0,0 +1,235 @@ +--- +layout: blog-page +title: Announcing Dotty 0.15.0-RC1 – the fully bootstrapped compiler +author: Anatolii Kmetiuk +authorImg: images/anatolii.png +date: 2019-05-23 +--- + +Hi! We are very excited to announce the 15th release of Dotty. The most exciting thing in this release is the full bootstrap for Dotty introduced by PR [#5923](https://github.com/lampepfl/dotty/pull/5923)🎉😍. This means that we now always compile Dotty with Dotty itself, hence we can use use all the new features in the compiler code base. + +With this release comes a bunch of new features and improvements, such as the ability to enforce whether an operator is intended to be used in an infix position, the type safe pattern bindings and more. + +This release serves as a technology preview that demonstrates new +language features and the compiler supporting them. + +Dotty is the project name for technologies that are being considered for +inclusion in Scala 3. Scala has pioneered the fusion of object-oriented and +functional programming in a typed setting. Scala 3 will be a big step towards +realising the full potential of these ideas. Its main objectives are to + +- become more opinionated by promoting programming idioms we found to work well, +- simplify where possible, +- eliminate inconsistencies and surprising behaviours, +- build on strong foundations to ensure the design hangs together well, +- consolidate language constructs to improve the language’s consistency, safety, ergonomics, and + performance. + +You can learn more about Dotty on our [website](https://dotty.epfl.ch). + + + +This is our 15th scheduled release according to our +[6-week release schedule](https://dotty.epfl.ch/docs/contributing/procedures/release.html). + +# What’s new in the 0.15.0-RC1 technology preview? +## Full Bootstrap +Bootstrapping Dotty is a big milestone for us and in compiler construction in general. Firstly, we feel more confident that our compiler works as is (even without reusing the new features). Secondly, in the immediate future, we will be able to reuse many of the features that dotty proposes within dotty itself. For example, we have no fewer than 2641 occurrences of the text string (implicit ctx: Context) in the compiler that we can scrap with [Contextual Function types](https://www.scala-lang.org/blog/2016/12/07/implicit-function-types.html). Big milestones have high risk/high gain and we must be attentive. 
That is the reason that we will wait a bit until we start using new features. Consequently, at the moment we cross-compile the build with 2.12 on the CI so that we don't accidentally start using Dotty features in case we need to revise the bootstrap process (we'll start using Dotty features eventually, but let's wait until we're confident that this setup works well enough). + +Check the following for more information [#5923 (comment)](https://github.com/lampepfl/dotty/pull/5923#issuecomment-485421148) and please let us know if you have any incremental compilation issues or anything else! + +## Operator Rules +This change addresses the problem of the regulation of whether an operator is supposed to be used in an infix position. The motivation is for the library authors to be able to enforce whether a method or a type is supposed to be used in an infix position by the users. This ability will help to make code bases more consistent in the way the calls to methods are performed. + +Methods with symbolic names like `+` are allowed to be used in an infix position by default: + +```scala +scala> case class Foo(x: Int) { def +(other: Foo) = x + other.x } +// defined case class Foo + +scala> Foo(1) + Foo(2) +val res0: Int = 3 +``` + +Methods with alphanumeric names are not allowed to be used in an infix position. Breaking this constraint will raise a deprecation warning: + +```scala +scala> case class Foo(x: Int) { def plus(other: Foo) = x + other.x } +// defined case class Foo + +scala> Foo(1) plus Foo(2) +1 |Foo(1) plus Foo(2) + | ^^^^ + |Alphanumeric method plus is not declared @infix; it should not be used as infix operator. + |The operation can be rewritten automatically to `plus` under -deprecation -rewrite. + |Or rewrite to method syntax .plus(...) manually. +val res1: Int = 3 + +scala> Foo(1).plus(Foo(2)) +val res2: Int = 3 +``` + +As the warning says, if you want the users of the method to be able to use it in an infix position, you can do so as follows: + +```scala +scala> import scala.annotation.infix + +scala> case class Foo(x: Int) { @infix def plus(other: Foo) = x + other.x } +// defined case class Foo + +scala> Foo(1) plus Foo(2) +val res3: Int = 3 +``` + +To smoothen the migration, the deprecation warnings will only be emitted if you compile with the `-strict` flag under Scala 3. Alphanumeric methods that are defined without the `@infix` annotation used in an infix position will be deprecated by default starting with Scala 3.1. + +For more information, see the [documentation](https://dotty.epfl.ch/docs/reference/changed-features/operators.html#the-infix-annotation). Note that the `@alpha` annotation also described in the documentation is planned for the future and is not available in this release. + +## `given` clause comes last +In the previous release, you could write something like this: + +```scala +implied for String = "foo" +def f(x: Int) given (y: String) (z: Int) = x + z +f(1)(3) +``` + +Now, however, `given` clauses must come last. The above code will fail with: + +``` +-- Error: ../issues/Playground.scala:3:34 -------------------------------------- +3 | def f(x: Int) given (y: String) (z: Int) = x + z + | ^ + | normal parameters cannot come after `given' clauses +one error found +``` + +The following snippet is the correct way to express the program in question: + +```scala +implied for String = "foo" +def f(x: Int)(z: Int) given (y: String) = x + z +f(1)(3) +``` + +We changed this to reduce confusion when calling functions with mixed explicit and implied parameters. 
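+
+For illustration, here is an indicative call-site sketch reusing `f` and the implied `String` instance from the snippet above; an explicit argument for a `given` clause is supplied with the `given` keyword at the call site:
+
+```scala
+f(1)(3)             // the String argument is inferred from the implied instance
+f(1)(3) given "bar" // an explicitly supplied given argument also comes last
+```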
+ +## Type-safe Pattern Bindings +```scala +val xs: List[Any] = List(1, 2, 3) +val (x: String) :: _ = xs // error: pattern's type String is more specialized + // than the right hand side expression's type Any +``` + +The above code will fail with a compile-time error in Scala 3.1 and in Scala 3 with the `-strict` flag. In contrast, in Scala 2, the above would have compiled fine but failed on runtime with an exception. + +Dotty compiler will allow such a pattern binding only if the pattern is *irrefutable* – that is, if the right-hand side conforms to the pattern's type. E.g. the following is OK: + +```scala +val pair = (1, true) +val (x, y) = pair +``` + +If we want to force the pattern binding if the pattern is not irrefutable, we can do so with an annotation: + +```scala +val xs: List[Any] = List("1", "2", "3") +val (x: String) :: _: @unchecked = xs +``` + +The same is implemented for pattern bindings in `for` expressions: + +```scala + val elems: List[Any] = List((1, 2), "hello", (3, 4)) + for ((x, y) <- elems) yield (y, x) // error: pattern's type (Any, Any) is more specialized + // than the right hand side expression's type Any +``` + +For the migration purposes, the above change will only take effect in Scala 3.1. You can use it in Scala 3 with the `-strict` flag. + +For more information, see the [documentation](https://dotty.epfl.ch/docs/reference/changed-features/pattern-bindings.html). + +## Further improvements to Generalised Algebraic Data Types (GADTs) support +In this release, we've further improved our support for GADTs. Most notably, we now support variant GADTs, thus fixing [#2985](https://github.com/lampepfl/dotty/issues/2985): + +```scala +enum Expr[+T] { + case StrLit(s: String) extends Expr[String] + case Pair[A, B](a: Expr[A], b: Expr[B]) extends Expr[(A, B)] +} + +def eval[T](e: Expr[T]): T = e match { + case Expr.StrLit(s) => s + case Expr.Pair(a, b) => (eval(a), eval(b)) +} +``` + +We've also plugged a few soundness problems (e.g. [#5667](https://github.com/lampepfl/dotty/issues/5667)) caused by inferring too much when matching on abstract, union and intersection types. For more information, see PR [#5736](https://github.com/lampepfl/dotty/pull/5736). + +## Other changes +Some of the other notable changes include the following: + +- Singletons are now allowed in union types. E.g. the following is allowed: `object foo; type X = Int | foo.type`. +- A bunch of improvements was made for the type inference system – see, e.g., PRs [#6454](https://github.com/lampepfl/dotty/pull/6454) and [#6467](https://github.com/lampepfl/dotty/pull/6467). +- Improvements to the Scala 2 code support which, in particular, improves Cats support – see PRs [#6494](https://github.com/lampepfl/dotty/pull/6494) and [#6498](https://github.com/lampepfl/dotty/pull/6498). + +# Let us know what you think! + +If you have questions or any sort of feedback, feel free to send us a message on our +[Gitter channel](https://gitter.im/lampepfl/dotty). If you encounter a bug, please +[open an issue on GitHub](https://github.com/lampepfl/dotty/issues/new). + +## Contributing + +Thank you to all the contributors who made this release possible! 
+ +According to `git shortlog -sn --no-merges 0.14.0-RC1..0.15.0-RC1` these are: + +``` + 191 Martin Odersky + 112 Nicolas Stucki + 29 Guillaume Martres + 25 Olivier Blanvillain + 21 Aleksander Boruch-Gruszecki + 17 Anatolii Kmetiuk + 10 Miles Sabin + 9 Liu Fengyun + 8 Aggelos Biboudis + 8 Jentsch + 5 Sébastien Doeraene + 2 Anatolii + 1 Fengyun Liu + 1 Olivier ROLAND + 1 phderome +``` + +If you want to get your hands dirty and contribute to Dotty, now is a good time to get involved! +Head to our [Getting Started page for new contributors](https://dotty.epfl.ch/docs/contributing/getting-started.html), +and have a look at some of the [good first issues](https://github.com/lampepfl/dotty/issues?q=is%3Aissue+is%3Aopen+label%3Aexp%3Anovice). +They make perfect entry points into hacking on the compiler. + +We are looking forward to having you join the team of contributors. + +## Library authors: Join our community build + +Dotty now has a set of widely-used community libraries that are built against every nightly Dotty +snapshot. Currently this includes ScalaPB, algebra, scalatest, scopt and squants. +Join our [community build](https://github.com/lampepfl/dotty-community-build) +to make sure that our regression suite includes your library. + +[Scastie]: https://scastie.scala-lang.org/?target=dotty + +[@odersky]: https://github.com/odersky +[@DarkDimius]: https://github.com/DarkDimius +[@smarter]: https://github.com/smarter +[@felixmulder]: https://github.com/felixmulder +[@nicolasstucki]: https://github.com/nicolasstucki +[@liufengyun]: https://github.com/liufengyun +[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain +[@biboudis]: https://github.com/biboudis +[@allanrenucci]: https://github.com/allanrenucci +[@Blaisorblade]: https://github.com/Blaisorblade +[@Duhemm]: https://github.com/Duhemm +[@AleksanderBG]: https://github.com/AleksanderBG +[@milessabin]: https://github.com/milessabin +[@anatoliykmetyuk]: https://github.com/anatoliykmetyuk diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2019-06-11-16th-dotty-milestone-release.md b/scala3doc/dotty-docs/docs/blog/_posts/2019-06-11-16th-dotty-milestone-release.md new file mode 100644 index 000000000000..e3d81f6c3df5 --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2019-06-11-16th-dotty-milestone-release.md @@ -0,0 +1,275 @@ +--- +layout: blog-page +title: Announcing Dotty 0.16.0-RC3 – the Scala Days 2019 Release +author: Aggelos Biboudis and Anatolii Kmetiuk +date: 2019-06-11 +--- + +Hello again! Today, we are excited to announce the 16th release of Dotty. The +development of Dotty continues according to our schedule but today, Tuesday June +the 11th, we are electrified as it is the first day of [Scala Days 2019](https://scaladays.org/) +which marks the *10th* anniversary of Scala Days. +With this release we are getting closer to the _envelope_ of the new features +that Dotty plans to offer. + +![]({{ site.baseurl }}/images/others/scala-days-logo.png "Scala Days 2019") + +This release serves as a technology preview that demonstrates new +language features and the compiler supporting them. + +Dotty is the project name for technologies that are being considered for +inclusion in Scala 3. Scala has pioneered the fusion of object-oriented and +functional programming in a typed setting. Scala 3 will be a big step towards +realising the full potential of these ideas. 
Its main objectives are to + +- become more opinionated by promoting programming idioms we found to work well, +- simplify where possible, +- eliminate inconsistencies and surprising behaviours, +- build on strong foundations to ensure the design hangs together well, +- consolidate language constructs to improve the language’s consistency, safety, ergonomics, and + performance. + +You can learn more about Dotty on our [website](https://dotty.epfl.ch). + + + +This is our 16th scheduled release according to our +[6-week release schedule](https://dotty.epfl.ch/docs/contributing/procedures/release.html). + +# What’s new in the 0.16.0-RC3 technology preview? + +## Syntax Change: Type Lambdas + +We reconsider the syntax of type lambdas in an effort to provide an improved +visual cue for two categories of types: types that relate to normal function +types and types that operate on a higher level. The _fat_ arrow `=>` definitely +relates to the first, while we reserve now `->` to mean _pure function_ in the +future. As a result, we disengage `=>` from type lambdas, which are now +represented by `=>>`. As a result a function from types to types is written as +`[X] =>> F[X]`. + +For those who are interested in the discussions, +[#6558](https://github.com/lampepfl/dotty/pull/6558) introduced the new syntax. + +## Syntax Change: Wildcard Arguments in Types + +The syntax of wildcard arguments in types has changed from `_` to `?`. Example: + +```scala +List[?] +Map[? <: AnyRef, ? >: Null] +``` + +Again, in an effort to fine-tune our syntax we put two features, from the world +of terms and types, side-by-side and drew parallels at the syntactic level. +Consequently, as `f(_)` is a shorthand for the lambda `x => f(x)` and as we plan +ahead for making `C[_]` to be a shorthand for the type lambda `[X] =>> C[X]` in +the future we pick `?` as a replacement syntax for wildcard types, since it +aligns with Java's syntax. + +For more information please read our documentation on +[Wildcards](https://dotty.epfl.ch/docs/reference/changed-features/wildcards.html). + +## Syntax Change: Contextual Abstractions + +We reconsider the syntax for contextual abstractions introducing `delegates` +(formerly known as `implied`). `delegate`, in the context of contextual +abstraction means that we declare a _representative of a type_. We use +`delegate` as a noun. Note that this change is solely syntactical/grammatical +and its motivation is to give a clearer meaning to those _canonical_ values of +certain types (like `Ord[Int]`), that serve for synthesizing arguments to +`given` clauses. + +```scala +delegate IntOrd for Ord[Int] { + def compare(x: Int, y: Int) = + if (x < y) -1 else if (x > y) +1 else 0 +} +``` + +```scala +delegate ListOrd[T] for Ord[List[T]] given (ord: Ord[T]) { +``` + +For more information, the documentation has been updated as part of the relevant +PR [#6649](https://github.com/lampepfl/dotty/pull/6649) + +## Polymorphic function types + +We add preliminary support for _polymorphic function types_. Nowadays, when we +want to write a universally quantified function over elements of lists of type +`T` we write e.g., `List[T] => List[(T, T)]` where `T` is bound at an enclosing +definition. With polymorphic function types (PFT hereafter) we can quantify the +parametric type locally. For example: + +```scala +[T <: AnyVal] => List[T] => List[(T, T)] +``` + +As you notice, this gives us the ability to impose restrictions on the type +variable `T` locally. 
Assume, you have an identity function with `type id = T => T`. +By writing it as `type id = [T] => T => T` we abstract further the concept +of a _polymorphic function_ and make it a *true* _family of functions_. + +The code below (correctly) fails to type check because `T` needs to be bounded +in the enclosing class: + +```scala + val id: T => T = t => t + println(s"${id(1)} , ${id(7.0d)}") +``` + +With PFTs we can now achieve what we want: + +```scala + val id = [T] => (t: T) => t + println(s"${id(1)} , ${id(7.0d)}") +``` + +For those who are interested in the discussions and more test cases, +[#4672](https://github.com/lampepfl/dotty/pull/4672/) introduced PFTs. + +## `lazy val`s are now thread-safe by default + +Previously thread-safety was required using `@volatile` but that would not be +consistent with Scala 2. The old behavior of non-volatile lazy vals can be +recovered by using the newly-introduced `@threadUnsafe`. + +For more information please read our documentation on the +[threadUnsafe annotation](https://dotty.epfl.ch/docs/reference/other-new-features/threadUnsafe-annotation.html). + +## Add support for Java-compatible enums + +We add support for Java-compatible enumerations. The users can just extend +`java.lang.Enum[T]`. + +```scala +enum A extends java.lang.Enum[A] { + case MONDAY, TUESDAY, SATURDAY +} + +enum B(val gravity: Double) extends java.lang.Enum[B] { + case EARTH extends B(9.8) + case JUPITER extends B(100) + case MOON extends B(4.3) + case Foo extends B(10) +} +``` + +For more information please check the [test case](https://github.com/lampepfl/dotty/tree/master/tests/run/enum-java) and +also the relevant PRs [#6602](https://github.com/lampepfl/dotty/pull/6602) and +[#6629](https://github.com/lampepfl/dotty/pull/6629). + +In the test, the enums are defined in the `MainScala.scala` file and used from a +Java source, `Test.java`. + +## Introducing `for` clauses for importing delegate imports by type + +Since delegate instances can be anonymous it is not always practical to import +them by their name, and wildcard imports are typically used instead. By-type +imports provide a more specific alternative to wildcard imports, which makes it +clearer what is imported. Example: + + ```scala +import delegate A.{for TC} +``` + +This imports any delegate instance in `A` that has a type which conforms tp `TC`. +There can be several bounding types following a `for` and bounding types can +contain wildcards. +For instance, assuming the object + +```scala +object Delegates { + delegate intOrd for Ordering[Int] + delegate [T: Ordering] listOrd for Ordering[List[T]] + delegate ec for ExecutionContext = ... + delegate im for Monoid[Int] +} +``` +the import +```scala +import delegate Delegates.{for Ordering[_], ExecutionContext} +``` +would import the `intOrd`, `listOrd`, and `ec` instances but leave out the `im` +instance, since it fits none of the specified bounds. + +## New type class derivation scheme + +Summary of measured differences with the old scheme: + +- About 100 lines more compiler code - the rest of the lines changed diff is +tests. +- About 13-15% more code generated for type class instances +- About 3-4% slower to compile type class instances + +Advantages of new scheme: + +- Fewer allocations, since mirrors (`Generic` has been renamed to `Mirror`) are + usually shared instead of being allocated at runtime. +- It works well even if there are no derives clauses. The old scheme would + generate more code in that case. 
+- Complete decoupling between derives clauses and mirror generation. + +For the technical details of these changes please consule the corresponding PR +[#6531](https://github.com/lampepfl/dotty/pull/6531). + +# Let us know what you think! + +If you have questions or any sort of feedback, feel free to send us a message on our +[Gitter channel](https://gitter.im/lampepfl/dotty). If you encounter a bug, please +[open an issue on GitHub](https://github.com/lampepfl/dotty/issues/new). + +## Contributing + +Thank you to all the contributors who made this release possible! + +According to `git shortlog -sn --no-merges 0.15.0-RC1..0.16.0-RC3` these are: + +``` +88 Martin Odersky +51 Anatolii +48 Nicolas Stucki +26 Guillaume Martres +21 Miles Sabin +19 Liu Fengyun +12 Aleksander Boruch-Gruszecki +11 Sébastien Doeraene + 8 Aggelos Biboudis + 4 Olivier Blanvillain + 3 Eugene Yokota + 1 Dale Wijnand + 1 Allan Renucci + 1 Olivier ROLAND +``` + +If you want to get your hands dirty and contribute to Dotty, now is a good time to get involved! +Head to our [Getting Started page for new contributors](https://dotty.epfl.ch/docs/contributing/getting-started.html), +and have a look at some of the [good first issues](https://github.com/lampepfl/dotty/issues?q=is%3Aissue+is%3Aopen+label%3Aexp%3Anovice). +They make perfect entry points into hacking on the compiler. + +We are looking forward to having you join the team of contributors. + +## Library authors: Join our community build + +Dotty now has a set of widely-used community libraries that are built against every nightly Dotty +snapshot. Currently this includes ScalaPB, algebra, scalatest, scopt and squants. +Join our [community build](https://github.com/lampepfl/dotty-community-build) +to make sure that our regression suite includes your library. + +[Scastie]: https://scastie.scala-lang.org/?target=dotty + +[@odersky]: https://github.com/odersky +[@DarkDimius]: https://github.com/DarkDimius +[@smarter]: https://github.com/smarter +[@felixmulder]: https://github.com/felixmulder +[@nicolasstucki]: https://github.com/nicolasstucki +[@liufengyun]: https://github.com/liufengyun +[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain +[@biboudis]: https://github.com/biboudis +[@allanrenucci]: https://github.com/allanrenucci +[@Blaisorblade]: https://github.com/Blaisorblade +[@Duhemm]: https://github.com/Duhemm +[@AleksanderBG]: https://github.com/AleksanderBG +[@milessabin]: https://github.com/milessabin +[@anatoliykmetyuk]: https://github.com/anatoliykmetyuk diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2019-07-25-17th-dotty-milestone-release.md b/scala3doc/dotty-docs/docs/blog/_posts/2019-07-25-17th-dotty-milestone-release.md new file mode 100644 index 000000000000..eea99263def9 --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2019-07-25-17th-dotty-milestone-release.md @@ -0,0 +1,143 @@ +--- +layout: blog-page +title: Announcing Dotty 0.17.0-RC1 – new implicit scoping rules and more +author: Anatolii Kmetiuk +date: 2019-07-25 +--- + +Greetings! With this post, we are proud to announce the 17th release of Dotty. With this release, we are making steady progress on the metaprogramming capabilities of Scala 3. Also, implicit scoping rules have seen a rework and a bunch of organizational changes took place. + +This release serves as a technology preview that demonstrates new +language features and the compiler supporting them. 
+ +Dotty is the project name for technologies that are being considered for +inclusion in Scala 3. Scala has pioneered the fusion of object-oriented and +functional programming in a typed setting. Scala 3 will be a big step towards +realising the full potential of these ideas. Its main objectives are to + +- become more opinionated by promoting programming idioms we found to work well, +- simplify where possible, +- eliminate inconsistencies and surprising behaviours, +- build on strong foundations to ensure the design hangs together well, +- consolidate language constructs to improve the language’s consistency, safety, ergonomics, and + performance. + +You can learn more about Dotty on our [website](https://dotty.epfl.ch). + + + +This is our 17th scheduled release according to our +[6-week release schedule](https://dotty.epfl.ch/docs/contributing/procedures/release.html). + +# What’s new in the 0.17.0-RC1 technology preview? +## New implicit scoping rules +We aim to make the implicit scoping rules clean and intuitive. In this release, the scoping rules were refactored to facilitate this goal. As specified in the [code documentation](https://github.com/lampepfl/dotty/pull/6832/files#diff-584b631c45ba6f2d4bc5d803074b8f12R474): + +The implicit scope of a type `tp` is the smallest set S of object references (i.e. TermRefs +with Module symbol) such that: + +- If `tp` is a class reference, S contains a reference to the companion object of the class, + if it exists, as well as the implicit scopes of all of `tp`'s parent class references. +- If `tp` is an opaque type alias `p.A` of type `tp'`, S contains a reference to an object `A` defined in the + same scope as the opaque type, if it exists, as well as the implicit scope of `tp'`. +- If `tp` is a reference `p.T` to a class or opaque type alias, S also contains all object references + on the prefix path `p`. Under Scala-2 mode, package objects of package references on `p` also + count towards the implicit scope. +- If `tp` is a (non-opaque) alias of `tp'`, S contains the implicit scope of `tp'`. +- If `tp` is a singleton type, S contains the implicit scope of its underlying type. +- If `tp` is some other type, its implicit scope is the union of the implicit scopes of + its parts (parts defined as in the spec). + +You can learn more from PR [#6832](https://github.com/lampepfl/dotty/pull/6832). + +## Metaprogramming +We are making steady progress developing metaprogramming features. The highlights for this release are: + +- Tasty Reflection's `Reflection` object moved inside `QuoteContext` object. This means that if previously to do Tasty Reflection you had to implicitly depend on `Reflection`, now you need to depend on `QuoteContext`. To know more, see [#6723](https://github.com/lampepfl/dotty/pull/6723). +- Progress made on quoted patterns – see [#6504](https://github.com/lampepfl/dotty/pull/6504). +- `code` string interpolator allows to obtain the code a user passes to a macro as a String. See [#6661](https://github.com/lampepfl/dotty/pull/6661). To enable this feature, do the following import: `import scala.compiletime._`. + +## 2.12 build removed from the CI tests +2.12 build is removed from the test suite. The 2.12 build compiled and tested the Dotty compiler with the Scala 2.12 compiler. This means that, even though Dotty is bootstrapped (i.e. capable of compiling itself), we were not able to use any of the new Dotty features in the Dotty codebase since these features would not compile with Scala 2.12. 
The decision to abstain from using the new features was made to give us the time to see if something goes wrong with the bootstrap and the ability to revert to Scala 2.12 if it becomes necessary. + +The removal of 2.12 build marks the stage in Dotty's life when we start to actively use new Dotty features in our code base, making it incompatible with Scala 2. + +## Other changes +There were some organizational and infrastructural changes worth mentioning. + +- [Shapeless 3](https://github.com/milessabin/shapeless/tree/shapeless-3) was added to the community build. This means that all the new Dotty features are now tested for the ability to compile Shapeless 3 with them. +- The process of submitting issues to the Dotty issue tracker is standardized using a GitHub issue template. We have separate templates for bugs, compiler crashes and language feature requests. The latter are now not allowed in the main Dotty repository, and the template for feature requests redirects users to a separate repo meant solely for such requests. +- Dotty Knowledge Collection initiative. To improve the documentation of the compiler, we came up with an idea of a separate repo where we will log the raw, unrefined knowledge about the compiler internals. This e.g. can be a quick catch we learnt while working that we believe is worth saving somewhere. To read more about the idea, see the [dotty-knowledge](https://github.com/lampepfl/dotty-knowledge) repo's README. +- `f`-interpolator was implemented as a macro – see the [Scala 2 documentation](https://docs.scala-lang.org/overviews/core/string-interpolation.html#the-f-interpolator) to learn more about what it is. + +# Let us know what you think! + +If you have questions or any sort of feedback, feel free to send us a message on our +[Gitter channel](https://gitter.im/lampepfl/dotty). If you encounter a bug, please +[open an issue on GitHub](https://github.com/lampepfl/dotty/issues/new). + +## Contributing + +Thank you to all the contributors who made this release possible! + +According to `git shortlog -sn --no-merges 0.16.0-RC3..0.17.0-RC1` these are: + +``` + 119 Martin Odersky + 103 Nicolas Stucki + 38 Liu Fengyun + 33 Guillaume Martres + 33 Sara Alemanno + 17 Anatolii + 10 Aggelos Biboudis + 6 Miles Sabin + 5 Anatolii Kmetiuk + 4 Olivier Blanvillain + 4 Robert Stoll + 3 odersky + 2 Dale Wijnand + 2 Timothée Floure + 2 Rodrigo Fernandes + 2 James Thompson + 2 Steven Heidel + 1 Stéphane MICHELOUD + 1 bishabosha + 1 noti0na1 + 1 Daniel Reigada + 1 Haemin Yoo + 1 Bunyod + 1 Deon Taljaard + 1 Ondra Pelech + 1 Jon Pretty +``` + +If you want to get your hands dirty and contribute to Dotty, now is a good time to get involved! +Head to our [Getting Started page for new contributors](https://dotty.epfl.ch/docs/contributing/getting-started.html), +and have a look at some of the [good first issues](https://github.com/lampepfl/dotty/issues?q=is%3Aissue+is%3Aopen+label%3Aexp%3Anovice). +They make perfect entry points into hacking on the compiler. + +We are looking forward to having you join the team of contributors. + +## Library authors: Join our community build + +Dotty now has a set of widely-used community libraries that are built against every nightly Dotty +snapshot. Currently this includes ScalaPB, algebra, scalatest, scopt and squants. +Join our [community build](https://github.com/lampepfl/dotty-community-build) +to make sure that our regression suite includes your library. 
+ +[Scastie]: https://scastie.scala-lang.org/?target=dotty + +[@odersky]: https://github.com/odersky +[@DarkDimius]: https://github.com/DarkDimius +[@smarter]: https://github.com/smarter +[@felixmulder]: https://github.com/felixmulder +[@nicolasstucki]: https://github.com/nicolasstucki +[@liufengyun]: https://github.com/liufengyun +[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain +[@biboudis]: https://github.com/biboudis +[@allanrenucci]: https://github.com/allanrenucci +[@Blaisorblade]: https://github.com/Blaisorblade +[@Duhemm]: https://github.com/Duhemm +[@AleksanderBG]: https://github.com/AleksanderBG +[@milessabin]: https://github.com/milessabin +[@anatoliykmetyuk]: https://github.com/anatoliykmetyuk diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2019-08-30-18th-dotty-milestone-release.md b/scala3doc/dotty-docs/docs/blog/_posts/2019-08-30-18th-dotty-milestone-release.md new file mode 100644 index 000000000000..103aee123f70 --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2019-08-30-18th-dotty-milestone-release.md @@ -0,0 +1,340 @@ +--- +layout: blog-page +title: Announcing Dotty 0.18.1-RC1 – switch to the 2.13 standard library, indentation-based syntax and other experiments +author: Anatolii Kmetiuk +authorImg: images/anatolii.png +date: 2019-08-30 +--- + +Greetings! With this post, we are proud to announce the 18th release of Dotty. With this release, we have switched to the 2.13 standard library (which is why the patch version of Dotty is now `1`)🎉. We are also conducting more experiments with the language syntax which will hopefully result in a better, cleaner way to write Scala programs. + +This release serves as a technology preview that demonstrates new +language features and the compiler supporting them. + +Dotty is the project name for technologies that are being considered for +inclusion in Scala 3. Scala has pioneered the fusion of object-oriented and +functional programming in a typed setting. Scala 3 will be a big step towards +realising the full potential of these ideas. Its main objectives are to + +- become more opinionated by promoting programming idioms we found to work well, +- simplify where possible, +- eliminate inconsistencies and surprising behaviours, +- build on strong foundations to ensure the design hangs together well, +- consolidate language constructs to improve the language’s consistency, safety, ergonomics, and + performance. + +You can learn more about Dotty on our [website](https://dotty.epfl.ch). + + + +This is our 18th scheduled release according to our +[6-week release schedule](https://dotty.epfl.ch/docs/contributing/procedures/release.html). + +# What’s new in the 0.18.1-RC1 technology preview? +The hottest change of this release is a series of experiments with the language syntax. Some of them are controversial, some of them are almost unanimously considered useful. Regardless, the underlying motivation for all of them is something all of us want, we believe d: to make programming in Scala easier to write and read, drop unnecessary boilerplate, facilitate idiomatic programming. + +We are thrilled to have this unique opportunity to experiment while Scala 3 is still in its inception and malleable. This is the only time we can try out significant language changes, and we are determined to make the most out of it. + +Our view on these trials is that, like with any big change, we need time to see if these are good ideas. 
We don't know if they will work or not. We believe that the only way to find out is to play with them for some time. + +Some of these changes will end up in Scala 3, some of them will be deemed not worth it. One way or another, trying the new look of an old language is an educational and fun experience. + +Keeping that in mind, let us proceed to the nitty-gritty! + +## Switch to Standard Library 2.13 +Dotty is now using the standard library 2.13 instead of the previous 2.12.8. + +## `@main` functions +Bootstrapping a new Scala application is as hard as a new Java application. How do you write a main method? Normally, something like that: + +```scala +object Test { + def main(args: Array[String]): Unit = println(s"Hello World") +} +``` + +You need to define at least two things that serve no real purpose: an object and `(args: Array[String])`. + +Not anymore! Meet the `@main` functions: + + +```scala +@main def test: Unit = println(s"Hello World") +``` + +The above generates the following code at the top-level of the compilation unit (source file): + +```scala +final class test { + def main(args: Array[String]): Unit = + try Main$package.test() catch + { + case + error @ _:scala.util.CommandLineParser.CommandLineParser$ParseError + => scala.util.CommandLineParser.showError(error) + } +} +``` + +So, a `final class` is generated with the same name as the `@main` method and the `def main(args: Array[String])` inside. The body of this method calls the original `test()` method. Since it is a top-level definition, it resides in the synthetic `Main$package` object generated for the `Main.scala` source being compiled. + +An astute reader has probably noticed the mentions of things like `CommandLineParser` in the body of the generated method, which hints to certain features. That's right, we support a basic ability for command-line args parsing: + +```scala +@main def sayHello(name: String, age: Int): Unit = + println(s"Hello $name, you are $age years old") +``` + +If you run the above with command line arguments "Jack 25", the output will be "Hello Jack, you are 25 years old". And here is how you can define a custom parser for your own class: + +```scala +case class Address(city: String, street: String) + +given scala.util.FromString[Address] { + /** Can throw java.lang.IllegalArgumentException */ + def fromString(s: String): T = + s.split(",").toList match { + case city :: street :: Nil => Address(city, street) + case _ => throw new IllegalArgumentException(s"Please specify address in the format 'city, street'") + } +} + +@main def sayHello(addr: Address): Unit = + println(s"You are living at $addr") +``` + +The motivation for the `@main` functions is to make Scala scripting friendly. So far we do not plan to support something more complex than the above – we believe if a user needs a complex command line parsing capability, they can always fall back to the conventional `def main(args: Array[String])` syntax plus a dedicated library like [scopt](https://github.com/scopt/scopt). The changes described above, however, are already enough to make script development much less tedious than before. + +To learn more, see the [documentation](https://dotty.epfl.ch/docs/reference/changed-features/main-functions.html). + +## Allow infix operators at the start of the line +A small change yet relevant to many. 
Now, you can write the following code: + +```scala +def isABorC(x: Char) = x == 'A' + || x == 'B' + || x == 'C' +``` + +Prior to this change, it was only possible to express infix operators at the beginning of the line as follows: + +```scala +def isABorC(x: Char) = (x == 'A' + || x == 'B' + || x == 'C') +``` + +## Drop do-while syntax +Remember that obscure `do-while` feature of Scala 2 where you could write: + +```scala +scala> var x = 0 +x: Int = 0 + +scala> val iterator = Iterator.from(10, -1) +iterator: Iterator[Int] = + +scala> do { + | x = iterator.next() + | println(x) + | } while (x > 0) +10 +9 +8 +7 +6 +5 +4 +3 +2 +1 +0 +``` + +Well, it is no more! That is the only place where the `do` token is used in Scala, the feature itself is rarely employed, and it would be nice to reclaim the `do` token for other uses (described in details in the section on the new syntax for control expressions). + +The language does not lose its expressiveness though – you can still write the following to achieve the same functionality: + +```scala +val iterator = Iterator.from(10, -1) + +@main def test = { + var x: Int = 0 + while ({ + x = iterator.next + println(x) + x > 0 + }) () +} +``` + +For more information, see PR [#6994](https://github.com/lampepfl/dotty/pull/6994). + +## Brace-less syntax for control expressions +This is an effort to clean-up the control expressions. Scala 2 has two ways of writing `if` statements – with and without parentheses. Parentheses can be dropped in Scala 2 `if`s inside `match` or `for` statements. We'd like to have a single style of writing all of the control expressions, and the cleaner the better. + +This release, hence, brings the ability to write all of the control expressions without braces. E.g.: + +```scala +@main def testFor = { + val xs = 0 to 10 + val xsFiltered = for x <- xs if x > 1 yield x + for + x <- xsFiltered + y <- xsFiltered + do println(s"$x * $y = ${x * y}") +} + +@main def testIf(day: String) = { + if day == "Sunday" || day == "Saturday" then println("Today is a weekend, hooray!") + else println(s"Today is a workday.") +} + +@main def testWhile(bound: Int) = { + var x = 0 + def incrementX() = { + x += 1 + println(x) + } + while x <= bound do incrementX() +} +``` + +Moreover, the compiler can automatically rewrite your sources from the old syntax to the new syntax and vice versa. To rewrite the sources to the new syntax, run the compiler with the `-rewrite -new-syntax` flags, and to rewrite to the old syntax, use `-rewrite -old-syntax`. So far, both syntaxes are supported. + +For more information and the precise rules, see PR [#7024](https://github.com/lampepfl/dotty/pull/7024). + +## Significant indentation syntax +Significant indentations syntax is here! A logical continuation of the brace-less syntax for control expressions described above, meant as an exploration into a better way to write Scala, it allows writing Scala programs without braces. 
For example: + +```scala +enum Day: + case Monday, Tuesday, Wednesdey, Thursday, Friday, Saturday, Sunday + def isWeekend: Boolean = this match + case Saturday | Sunday => true + case _ => false + +given as scala.util.FromString[Day]: + def fromString(str: String): Day = + try Day.valueOf(str) + catch + case _: IllegalArgumentException => + throw new IllegalArgumentException(s"$str is not a valid day") + +@main def test(day: Day) = + if day.isWeekend then + println("Today is a weekend") + println("I will rest") + else + println("Today is a workday") + println("I will work") +``` + +So far, it is a purely experimental effort. This means there is no final decision yet on whether or not it will be included in Scala 3. However, we treat this feature seriously enough to give it an extended period of trial and see if it is viable as the new look and feel for Scala. + +For more details and the discussion, see PRs [#7083](https://github.com/lampepfl/dotty/pull/7083) and [#7114](https://github.com/lampepfl/dotty/pull/7114). + +## Generic Number Literals +It is now possible to seamlessly integrate with different number formats: that is, to write a number and get it automatically converted to your class of choice. E.g.: + +```scala +import scala.util.FromDigits + +case class Digits(ds: List[Char]) + +given as FromDigits[Digits] = (digits: String) => Digits(digits.toList) + +@main def test = + val x: Digits = 1234 + println(x) // Digits(List('1', '2', '3', '4')) +``` + +If a number is written in place where a non-numeric type is expected and there is an `FromDigits` given in scope, this given will be used to convert the number (presented as `String`) to that type. + +For precise rules, semantics and a larger example of `BigFloat`, see [the documentation](https://dotty.epfl.ch/docs/reference/changed-features/numeric-literals.html). + +## Metaprogramming Progress +We are making steady progress with the language metaprogramming features. The metaprogramming spotlights of this release are as follows: + +- `toExprOfTuple` method which allows converting a `Seq[Expr[Any]]` to `Expr[Tuple]`. The types of the expressions will be preserved in the tuple. See [#7037](https://github.com/lampepfl/dotty/pull/7037) and [#7076](https://github.com/lampepfl/dotty/pull/7076) for the details. +- `toExprOfTuple` method that converts a tuple of expressions to an expression of tuple – see [#7047](https://github.com/lampepfl/dotty/pull/7047). +- `toExprOfSeq` which converts an `Seq[Expr[A]]` to `Expr[Seq[A]]` – see [#6935](https://github.com/lampepfl/dotty/pull/6935). +- More `Liftable` instances – for Tuples of arity greater than 22, `BigInt` and `BigDecimal` – see [#6947](https://github.com/lampepfl/dotty/pull/6947) and [#6944](https://github.com/lampepfl/dotty/pull/6944). +- Leverage implicit lambdas to simplify `Liftable.toExpr` method – see [#6924](https://github.com/lampepfl/dotty/pull/6924) to learn how it is done. +- Runtime staging `run` moved to `scala.quoted.staging` in [#7077](https://github.com/lampepfl/dotty/pull/7077). +- Runtime staging factored out to a separate library in [#7080](https://github.com/lampepfl/dotty/pull/7080). + +## Type class Derivation +Type class derivation has received a major rework and an [updated documentation](https://dotty.epfl.ch/docs/reference/contextual/derivation.html). We have dropped the usage of the `Shape` type to describe the shape of a type. 
Instead, all the relevant information is now encoded in the `Mirror` type and its subtypes as tuples. + +For more information, see the [documentation](https://dotty.epfl.ch/docs/reference/contextual/derivation.html). + +## Other +- This release also features the new version of the SBT Dotty Plugin – 0.3.4. It contains some bug fixes – see [#7120](https://github.com/lampepfl/dotty/pull/7120) for details. +- Scala Days 2019 talks related to Dotty are now [mentioned](https://dotty.epfl.ch/docs/resources/talks.html) at our website – this allows to systematize the knowledge about the next generation of Scala in one place – see [#6984](https://github.com/lampepfl/dotty/pull/6984). +- ScalaJS needs your help! We would like to have robust support for ScalaJS in Dotty, which unfortunately is not the case so far. If you are interested in contributing, please see [the getting started tutorial](https://gist.github.com/sjrd/e0823a5bddbcef43999cdaa032b1220c) and [the discussion](https://github.com/lampepfl/dotty/issues/7113). + +# Let us know what you think! + +If you have questions or any sort of feedback, feel free to send us a message on our +[Gitter channel](https://gitter.im/lampepfl/dotty). If you encounter a bug, please +[open an issue on GitHub](https://github.com/lampepfl/dotty/issues/new). + +## Contributing + +Thank you to all the contributors who made this release possible! + +According to `git shortlog -sn --no-merges 0.17.0-RC1..0.18.1-RC1` these are: + +``` + 106 Nicolas Stucki + 84 Martin Odersky + 68 Guillaume Martres + 26 Liu Fengyun + 24 Jamie Thompson + 23 Miles Sabin + 16 Anatolii + 8 Sébastien Doeraene + 7 bishabosha + 4 Aggelos Biboudis + 4 Michał Gutowski + 2 odersky + 2 Nikolay + 1 Master-Killer + 1 Ashwin Bhaskar + 1 Carlos Quiroz + 1 = + 1 Olivier Blanvillain + 1 SrTobi +``` + +If you want to get your hands dirty and contribute to Dotty, now is a good time to get involved! +Head to our [Getting Started page for new contributors](https://dotty.epfl.ch/docs/contributing/getting-started.html), +and have a look at some of the [good first issues](https://github.com/lampepfl/dotty/issues?q=is%3Aissue+is%3Aopen+label%3Aexp%3Anovice). +They make perfect entry points into hacking on the compiler. + +We are looking forward to having you join the team of contributors. + +## Library authors: Join our community build + +Dotty now has a set of widely-used community libraries that are built against every nightly Dotty +snapshot. Currently, this includes ScalaPB, algebra, scalatest, scopt and squants. +Join our [community build](https://github.com/lampepfl/dotty-community-build) +to make sure that our regression suite includes your library. 
+ +[Scastie]: https://scastie.scala-lang.org/?target=dotty + +[@odersky]: https://github.com/odersky +[@DarkDimius]: https://github.com/DarkDimius +[@smarter]: https://github.com/smarter +[@felixmulder]: https://github.com/felixmulder +[@nicolasstucki]: https://github.com/nicolasstucki +[@liufengyun]: https://github.com/liufengyun +[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain +[@biboudis]: https://github.com/biboudis +[@allanrenucci]: https://github.com/allanrenucci +[@Blaisorblade]: https://github.com/Blaisorblade +[@Duhemm]: https://github.com/Duhemm +[@AleksanderBG]: https://github.com/AleksanderBG +[@milessabin]: https://github.com/milessabin +[@anatoliykmetyuk]: https://github.com/anatoliykmetyuk diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2019-09-23-19th-dotty-milestone-release.md b/scala3doc/dotty-docs/docs/blog/_posts/2019-09-23-19th-dotty-milestone-release.md new file mode 100644 index 000000000000..0de8d87b92bb --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2019-09-23-19th-dotty-milestone-release.md @@ -0,0 +1,236 @@ +--- +layout: blog-page +title: Announcing Dotty 0.19.0-RC1 – further refinements of the syntax and the migration to 2.13.1 standard library +author: Anatolii Kmetiuk +authorImg: images/anatolii.png +date: 2019-09-23 +--- + +Greetings! With this post, we are proud to announce the 19th release of Dotty. This release features further changes to the syntax following the feedback from the community and further discussion. Another important change is the migration to the 2.13.1 standard library. + +This release serves as a technology preview that demonstrates new +language features and the compiler supporting them. + +Dotty is the project name for technologies that are being considered for +inclusion in Scala 3. Scala has pioneered the fusion of object-oriented and +functional programming in a typed setting. Scala 3 will be a big step towards +realising the full potential of these ideas. Its main objectives are to + +- become more opinionated by promoting programming idioms we found to work well, +- simplify where possible, +- eliminate inconsistencies and surprising behaviours, +- build on strong foundations to ensure the design hangs together well, +- consolidate language constructs to improve the language’s consistency, safety, ergonomics, and + performance. + +You can learn more about Dotty on our [website](https://dotty.epfl.ch). + + + +# What’s new in the 0.19.0-RC1 technology preview? +## Given syntax reworked +`the` method (a better version of `implicitly` in Scala 3) was renamed to `summon`. + +`given` definitions now closely resemble ordinary definitions: + +```scala +given Int = 10 // Anonymous +given x: String = "foo" // Named +given f(given x: Int): Option[Int] = Some(x * x) // With given parameters +given [T](given opt: Option[T]): List[T] = opt.toList // Anonymous with type parameters + +@main def Test = println(summon[List[Int]]) +``` + +Note that `as` was dropped and `given` must now go inside the parentheses as opposed of being used in an infix style. + +All of the experimental syntax related to givens – such as `delegate for`, `given as`, infix-style `given` – is now dropped. + +## Colons dropped from class or object definitions +Now you can define an object as follows: + +```scala +object Bar + val x = 10 + +@main def Test = println(Bar.x) // 10 +``` + +In `0.18.1-RC1`, you would have needed to put a colon after `Bar`. 
The colon was also dropped for traits, classes and enums. + +## Allow `given` in pattern bindings +Consider you have the following monadic flow: + +```scala +for + x <- bar + res <- foo(given x) +yield res +``` + +Writing entire programs in a monadic flow is not uncommon in functional programming. When working in this style, a situation may arise as shown above: one statement of the monadic flow implicitly depends on the result of another one. It was impossible to declare a pattern variable as a given, which necessitated passing it around explicitly. Not anymore! Now, you can write the above code as follows: + +```scala +for + given x: Int <- bar // Int, or whatever type you are extracting + res <- foo +yield res +``` + +Note that the type of the given variable must be specified explicitly. + +Full example: + +```scala +def foo(given x: Int): Option[Int] = Some(x * x) +def bar = Some(10) + +@main def Test = + for + given x: Int <- bar + res <- foo + yield println(res) +``` + +This syntax is allowed anywhere where a pattern is allowed. So you can write: + +```scala +user match + case User(_, Some(given email: Email)) => sendEmail +``` + +Full example: + +```scala +opaque type Email = String +object Email + def apply(value: String): Email = value + +def sendEmail(given m: Email): Unit = + println(s"Sent an email to $m") + +case class User(name: String, email: Option[Email]) + +@main def Test = + val user = User("Tom", Some(Email("tom@gmail.com"))) + user match + case User(_, Some(given email: Email)) => sendEmail +``` + +## Replace given matches by a library method +Given matches was a feature that allowed to query the implicit scope and execute different logic based on what was found there. We have replaced this feature with a library method called `summonFrom`. You can use it as follows: + +```scala +import compiletime.summonFrom + +given Int = 10 + +@main inline def Test = summonFrom { + case str: String => println(s"String $str") + case int: Int => println(s"Int $int") // Int 10 +} +``` + +The above code will print "Int 10" since an integer with the value 10 was present in the implicit scope but no String was present. + +Notice that we had to define the `Test` method as `inline` since `summonFrom` can only be used from an inline method. + +## Wildcard types written with `?` +You can now use both `_` and `?` to express wildcard types. For example: + +```scala +@main def Test = + val xs: List[Int] = (1 to 10).toList + xs match + case xss: List[?] => println(s"It is a list") +``` + +This is the first step in a multi-step process to disallow `_` as wildcards so that we can use underscores for both terms and type parameters instead. This will make the language more regular. + +## Lambda parameters must be enclosed in parentheses +Lambda parameters with type ascriptions are now required to be enclosed in parentheses. E.g. `x: Int => x * x` is no longer legal, it must be written as `(x: Int) => x * x`. However, you can still write `x => x * x`, that is, if `x` does not have an explicit type ascription. + +## Dottydoc redesign +The output of [Dottydoc](https://dotty.epfl.ch/docs/usage/dottydoc.html) has been redesigned. It is now fully responsive: every element, including API docs and search, is adapted to both small and big screens. + +The most visible changes are the toolbar and the sidebar. They now have a common darker background, which makes them more readable and helps separating navigation from content. 
Also, the sidebar is collapsible and has been optimized so that it doesn't glitch when the page loads. + +The toolbar's logo can now be set with the `-project-logo` option. +For instance, `-project-logo dotty-logo.svg` will make `/images/dotty-logo.svg` appear in the toolbar. + +[The front page](https://dotty.epfl.ch) has been redesigned too, with a new responsive menu and improved contrast. + +Overall, every page has been updated with consistent settings of fonts and colors. A more detailed comparison between the new and the old design can be found [here](https://github.com/lampepfl/dotty/pull/7153). + +## Metaprogramming Progress +We're making steady progress on the Dotty metaprogramming capability. In our previous work, we've implemented a bunch of functions for working with expressions. For example, we have a capability to convert a list of expressions into an expression of list, or a tuple of expressions into an expression of tuple. + +In this release, we have collected this family of functions in one place – the companion object `scala.quoted.Expr`. Currently, the following methods are available in that object for working with expressions: + +- nullExpr – an expression of `null`. +- unitExpr – an expression of `unit`. +- block – given a list of statements and a final expression, concatenates them in a block. +- ofSeq, ofList – constructs an expression of collection from a collection of expressions +- ofTuple – constructs an expression of tuple from either a tuple of expressions or a sequence of expressions. + +Also, `x.toExpr` syntax which lifts `x` into an expression is now deprecated. It is replaced with `Expr(x)`. + +# Let us know what you think! + +If you have questions or any sort of feedback, feel free to send us a message on our +[Gitter channel](https://gitter.im/lampepfl/dotty). If you encounter a bug, please +[open an issue on GitHub](https://github.com/lampepfl/dotty/issues/new). + +## Contributing + +Thank you to all the contributors who made this release possible! + +According to `git shortlog -sn --no-merges 0.18.1-RC1..0.19.0-RC1` these are: + +``` + 87 Martin Odersky + 50 Nicolas Stucki + 42 Guillaume R + 33 Nikita Eshkeev + 20 Guillaume Martres + 9 Liu Fengyun + 8 Anatolii + 5 Robert Stoll + 3 Miles Sabin + 1 Sam Desborough + 1 Anatolii Kmetiuk + 1 Jon Pretty + 1 Oron Port + 1 Aggelos Biboudis +``` + +If you want to get your hands dirty and contribute to Dotty, now is a good time to get involved! +Head to our [Getting Started page for new contributors](https://dotty.epfl.ch/docs/contributing/getting-started.html), +and have a look at some of the [good first issues](https://github.com/lampepfl/dotty/issues?q=is%3Aissue+is%3Aopen+label%3Aexp%3Anovice). +They make perfect entry points into hacking on the compiler. + +We are looking forward to having you join the team of contributors. + +## Library authors: Join our community build + +Dotty now has a set of widely-used community libraries that are built against every nightly Dotty +snapshot. Currently, this includes ScalaPB, algebra, scalatest, scopt and squants. +Join our [community build](https://github.com/lampepfl/dotty-community-build) +to make sure that our regression suite includes your library. 
+ +[Scastie]: https://scastie.scala-lang.org/?target=dotty + +[@odersky]: https://github.com/odersky +[@DarkDimius]: https://github.com/DarkDimius +[@smarter]: https://github.com/smarter +[@felixmulder]: https://github.com/felixmulder +[@nicolasstucki]: https://github.com/nicolasstucki +[@liufengyun]: https://github.com/liufengyun +[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain +[@biboudis]: https://github.com/biboudis +[@allanrenucci]: https://github.com/allanrenucci +[@Blaisorblade]: https://github.com/Blaisorblade +[@Duhemm]: https://github.com/Duhemm +[@AleksanderBG]: https://github.com/AleksanderBG +[@milessabin]: https://github.com/milessabin +[@anatoliykmetyuk]: https://github.com/anatoliykmetyuk diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2019-11-04-20th-dotty-milestone-release.md b/scala3doc/dotty-docs/docs/blog/_posts/2019-11-04-20th-dotty-milestone-release.md new file mode 100644 index 000000000000..78cbe171ca11 --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2019-11-04-20th-dotty-milestone-release.md @@ -0,0 +1,203 @@ +--- +layout: blog-page +title: Announcing Dotty 0.20.0-RC1 – `with` starting indentation blocks, inline given specializations and more +author: Anatolii Kmetiuk +authorImg: images/anatolii.png +date: 2019-11-04 +--- + +Greetings! We are excited to announce the 20th release of Dotty. This release brings a bunch of improvements to the language, such as `with` keyword starting an indentation block, normal parameters after given parameters, inline givens specialization and more. + +This release serves as a technology preview that demonstrates new +language features and the compiler supporting them. + +Dotty is the project name for technologies that are being considered for +inclusion in Scala 3. Scala has pioneered the fusion of object-oriented and +functional programming in a typed setting. Scala 3 will be a big step towards +realising the full potential of these ideas. Its main objectives are to + +- become more opinionated by promoting programming idioms we found to work well, +- simplify where possible, +- eliminate inconsistencies and surprising behaviours, +- build on strong foundations to ensure the design hangs together well, +- consolidate language constructs to improve the language’s consistency, safety, ergonomics, and + performance. + +You can learn more about Dotty on our [website](https://dotty.epfl.ch). + + + +# What’s new in the 0.20.0-RC1 technology preview? +## Syntax change for type parameters of extension methods +When writing extension methods with type parameters, the type parameters must come first, e.g.: + +```scala +def [T](xs: List[T]) append (ys: List[T]): List[T] = ... +``` + +Previously, the same would have been written as: + +```scala +def (xs: List[T]) append [T] (ys: List[T]): List[T] = ... +``` + +An argument for the old syntax is that it aligns the definition and call syntax. On the other hand, the new syntax maintains the general rule that parameter introductions always come before parameter uses. The decisive argument to switch is to be consistent with the new collective parameter syntax, where `append` would be written like this: + +```scala +given [T](xs: List[T]) + def append (ys: List[T]): List[T] = ... 
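+  // In this collective-parameter form, [T] and (xs: List[T]) are introduced
+  // once, up front, and are shared by the extension methods (here, append).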
+``` + +To avoid misalignment of type parameters between definition and call syntax, we considered disallowing explicit type parameters for extension methods altogether, and to require that the method is called as a normal method instead. But that would not work for anonymous givens as in the last example above. + +## Infer `private[this]` +We now infer the `private[this]` modifier for variables if all the accesses to a variable are via this. Explicit `private[this]` and `protected[this]` in code are deprecated under the `-strict` flag. + +The main reasons for dropping `private[this]` are: + +- It is syntactically an irregular case. A pair of brackets usually encloses a type, but `this` is a value. +- Its effect over `private` is purely local and can be easily inferred. +- It leads to bike shedding: should I use `private` or `private[this]`? One is shorter but the other might be more efficient. + +`protected[this]` by now influences compiler decisions in no way at all. Hence it is reasonable to drop it. + +## `with` keyword's new role +`with` keyword can now optionally precede the class body. So that you can write your classes as follows: + +```scala +trait A with { + def f: Int +} +class C(x: Int) extends A with { + def f = x +} +type T = A with { + def f: Int +} +``` + +Or, equivalently: + +```scala +trait A with + def f: Int +class C(x: Int) extends A with + def f = x +type T = A with + def f: Int +``` + +The problem this change solves is that it is very easy to accidentally outdent a class member – and it will end up outside the class. The benefit of the new `with` is that starts an indentation block. Since the compiler knows for sure an indentation block must follow, it will emit an error if you forget to indent your statement. + +## Inline `given` specialization +It is now possible to specialize `inline given`s with the help of `<:` as follows: + +```scala +trait A +class B extends A + +inline given tc <: A = B() + +val x: B = summon[A] +``` + +This change brings `given`s even with the ordinary `inline def`s. + +## Normal parameters can follow `given` parameters +Previously normal parameters after `given` parameter was disallowed mainly because they looked awkward with the old syntax. With the syntax being improved, this restriction is now lifted and you can write, e.g., the following program: + +```scala +class C(val x: Int) +def f(x: Int)(given c: C)(y: Int) = x + c.x + y +``` + +## `then` is optional at line end +So the following is now legal: + +```scala +val y1 = + if x > 0 + 1 + else + 2 +``` + +It is easy to forget to put `then` at the end of the line if nothing else follows it, but also easy to infer that it must be inserted there. + +## Metaprogramming Progress +We are making a steady progress developing and improving the metaprogramming features of Dotty. Here are metaprogramming highlights of this release: + +- Fix #7189: Do not try to load contents if file does not exist [#7476](https://github.com/lampepfl/dotty/pull/7476) +- Add customizable names for definitions in quotes [#7346](https://github.com/lampepfl/dotty/pull/7346) +- Rename scala.quoted.matching.{Bind => Sym} [#7332](https://github.com/lampepfl/dotty/pull/7332) +- Replace AsFunction implicit class with Expr.reduce [#7299](https://github.com/lampepfl/dotty/pull/7299) + +# Let us know what you think! + +If you have questions or any sort of feedback, feel free to send us a message on our +[Gitter channel](https://gitter.im/lampepfl/dotty). 
If you encounter a bug, please +[open an issue on GitHub](https://github.com/lampepfl/dotty/issues/new). + +## Contributing + +Thank you to all the contributors who made this release possible! + +According to `git shortlog -sn --no-merges 0.19.0-RC1..0.20.0-RC1` these are: + +``` + 99 Martin Odersky + 64 Nicolas Stucki + 16 Nikita Eshkeev + 15 Guillaume Martres + 9 Robert Stoll + 8 Anatolii + 5 Liu Fengyun + 5 Olivier Blanvillain + 3 Miles Sabin + 2 Aggelos Biboudis + 2 Jamie Thompson + 2 Antoine Brunner + 2 Ben Elliott + 2 Guillaume R + 1 noti0na1 + 1 Ashwin Bhaskar + 1 Batanick + 1 Bojan Dunaj + 1 Harpreet Singh + 1 Lucas + 1 Lucas Jenß + 1 Martijn Hoekstra + 1 bishabosha + 1 brunnerant +``` + +If you want to get your hands dirty and contribute to Dotty, now is a good time to get involved! +Head to our [Getting Started page for new contributors](https://dotty.epfl.ch/docs/contributing/getting-started.html), +and have a look at some of the [good first issues](https://github.com/lampepfl/dotty/issues?q=is%3Aissue+is%3Aopen+label%3Aexp%3Anovice). +They make perfect entry points into hacking on the compiler. + +We are looking forward to having you join the team of contributors. + +## Library authors: Join our community build + +Dotty now has a set of widely-used community libraries that are built against every nightly Dotty +snapshot. Currently, this includes ScalaPB, algebra, scalatest, scopt and squants. +Join our [community build](https://github.com/lampepfl/dotty-community-build) +to make sure that our regression suite includes your library. + +[Scastie]: https://scastie.scala-lang.org/?target=dotty + +[@odersky]: https://github.com/odersky +[@DarkDimius]: https://github.com/DarkDimius +[@smarter]: https://github.com/smarter +[@felixmulder]: https://github.com/felixmulder +[@nicolasstucki]: https://github.com/nicolasstucki +[@liufengyun]: https://github.com/liufengyun +[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain +[@biboudis]: https://github.com/biboudis +[@allanrenucci]: https://github.com/allanrenucci +[@Blaisorblade]: https://github.com/Blaisorblade +[@Duhemm]: https://github.com/Duhemm +[@AleksanderBG]: https://github.com/AleksanderBG +[@milessabin]: https://github.com/milessabin +[@anatoliykmetyuk]: https://github.com/anatoliykmetyuk diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2019-12-20-21th-dotty-milestone-release.md b/scala3doc/dotty-docs/docs/blog/_posts/2019-12-20-21th-dotty-milestone-release.md new file mode 100644 index 000000000000..6b5d28c35254 --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2019-12-20-21th-dotty-milestone-release.md @@ -0,0 +1,408 @@ +--- +layout: blog-page +title: Announcing Dotty 0.21.0-RC1 - explicit nulls, new syntax for `match` and conditional givens, and more +author: Aggelos Biboudis +authorImg: images/aggelos.jpg +date: 2019-12-20 +--- + +Greetings and we wish you Merry Christmas 🎄! We are excited to announce +0.21.0-RC1 of Dotty. In this version we add support for non-nullable reference +types, supported by flow-sensitive analysis. We present new syntax for given +extensions, matches and pattern matching over quotes. We are also happy to +announce that SemanticDB generation is now supported within the compiler, this +will eventually enable Metals to support Dotty! And last but not least ... we are +happy to announce that we are now feature complete! + +# Feature Complete! 
+ +This release is a HUGE milestone for us, for Dotty, for Scala 3, for our community. Since that +[initial commit](https://github.com/lampepfl/dotty/commit/90962407e72d88f8f3249ade0f6bd60ff15af5ce) +on the 6th December of 2012 when the only feature was the basic structure of a +compiler based on the DOT calculus, we have come a long way. + +7 years and 20k commits later we are happy to announce that we are now _feature-complete_ for Scala 3. +This means that with this release we stop adding new features and we focus on: + +- bug fixing and general quality assurance towards the final release 🐛 +- performance engineering 🏎️ +- documentation improvements 📕 +- education 👨‍🏫 + +Being feature complete does not mean that every detail of Scala 3 is cast in +stone yet. Some details can still change, or be enabled conditionally, or even +be dropped entirely. That will depend on the additional experience we gain over +the next months, in particular the feedback we receive from the community and +the SIP committee. But the envelope of what will (most likely) be in Scala 3 is +now defined and implemented. + +For an overview of the feature envelope that Scala 3 carries you can read our +[Overview](https://dotty.epfl.ch/docs/reference/overview.html) page. +For a more detailed discussion on the transition to Scala 3 you can read the +[Scala 2 roadmap update: The road to Scala 3](https://www.scala-lang.org/2019/12/18/road-to-scala-3.html). + +### Community build + +Being feature complete doesn't mean that development slows down. On the contrary! +It means that we can now put the Scala 3 compiler under heavy load, getting it +ready for industrial strength applications. At the moment we have 23 +projects on our community projects and we expect this number to go up! + +> https://github.com/lampepfl/dotty/tree/master/community-build/community-projects + +This project contains tests to build and test a corpus of open sources Scala 2.x +projects against Scala 3. + +To run the community-build on a local machine from the main Dotty repository, first fetch all the git +submodules with `git submodule update --init` and run `sbt community-build/test` +from the root of the dotty repo. + +For more details also follow the [Migrating the Ecosystem](https://www.scala-lang.org/2019/12/18/road-to-scala-3.html#migrating-the-ecosystem) on the Road to Scala 3 blogpost. + +### New Issues + +Firstly thank you for all the hard work in issue reporting! Being feature complete means that our +issue tracker will now be more important than ever. We encourage you to stress +the compiler and report self-contained test-cases! Bug minimization is hard and +an art form! Help us unearth those nasty bugs! ✊ + +Last but not least we restate the mission of Scala 3. Scala has pioneered the +fusion of object-oriented and functional programming in a typed setting and Scala 3 +will be a big step towards realising the full potential of these ideas. Its main +objectives are to: + +- become more opinionated by promoting programming idioms we found to work well, +- simplify where possible, +- eliminate inconsistencies and surprising behaviours, +- build on strong foundations to ensure the design hangs together well, +- consolidate language constructs to improve the language’s consistency, safety, ergonomics, and + performance. + +You can learn more about Dotty on our [website](https://dotty.epfl.ch). + + + +# What’s new in the 0.21.0-RC1 technology preview? 
+ +## Dotty with explicit nulls and flow typing + +We add support for non-nullable reference types under the compiler option +`-Yexplicit-nulls`. Nullability needs then to be expressed explicitly via unions +(e.g. `String|Null`). + +This means the following code will no longer typecheck: + +```scala +val x: String = null // error: found `Null`, but required `String` +``` + +Instead, to mark a type as nullable we use a type union: + +```scala +val x: String|Null = null // ok +``` + +This change affects two parts of the compiler. Firstly we have a new type +hierarchy for `Null` and a _translation layer_ from Java types to Scala types, +which balances soundness and usability. + +With this release we also introduce a flow-sensitive analysis that refines the +type of an expression based on control-flow. In the example below `s` is +`String|Null`. The `if` branch validates the value of `s` against `Null` so `s` +can be safely considered `String` in that scope. + +```scala +val s: String|Null = ??? + +if (s != null) { + // s: String +} +else { + // s: String|Null +} +``` + +Note, that more complex tests are also supported like: + +```scala +val s: String|Null = ??? +val s2: String|Null = ??? + +if (s != null && s2 != null) // s: String and s2: String +``` + +but also in a short-circuiting manner: + +```scala +val s: String|Null = ??? + +if (s != null && s.length > 0) // s: String in `s.length > 0` +``` + +To support Java Interop under explicit nulls we provide an alias for `Null` +called `UncheckedNull`. The compiler can load Java classes in two ways: from +source or from bytecode. In either case, when a Java class is loaded, we "patch" +the type of its members to reflect that Java types remain implicitly nullable. + +An additional value of `UncheckedNull` (on the Scala side) is that we +effectively support method chaining on Java-returned values. e.g., + +```scala +val s2: String = someJavaMethod().trim().substring(2).toLowerCase() +``` + +as opposed to: + +```scala +val ret = someJavaMethod() +val s2 = if (ret != null) { + val tmp = ret.trim() + if (tmp != null) { + val tmp2 = tmp.substring(2) + if (tmp2 != null) { + tmp2.toLowerCase() + } + } +} +// Additionally, we need to handle the `else` branches. +``` + +This feature is the result of a successful collaboration between LAMP/EPFL, Abel +Nieto, Yaoyu Zhao and Ondřej Lhoták from the University of Waterloo. For more +info refer to the docs on [Explicit Nulls](https://dotty.epfl.ch/docs/reference/other-new-features/explicit-nulls.html). + +## New syntax for given instances defining extension methods + +To make code navigation easier in the case of `given` extension methods we +change the syntax in the following two manners. Hereafter, we write: + +```scala +given listOps: extension [T](xs: List[T]) { ... } + +given extension (s: String) { ... } +``` +or +```scala +given listOps: [T](xs: List[T]) extended with { ... } + +given (s: String) extended with { ... } +``` + +instead of: + +```scala +given listOps: [T](xs: List[T]) { ... } + +given (s: String) { ... } +``` + +After experimenting with both, one will be settled upon. +The rationale is to communicate in a clean way that the parameters go on the +extension method and not the wrapper (e.g., `listOps`) . 
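+
+For illustration, here is a minimal usage sketch of the first form above (assuming the experimental syntax of this release; the `second` method is invented for the example):
+
+```scala
+// Hedged sketch, assuming the experimental 0.21.0-RC1 given-extension syntax
+// shown above; `second` is an invented extension method for illustration.
+given listOps: extension [T](xs: List[T]) {
+  def second: T = xs.tail.head
+}
+
+@main def Test =
+  // With listOps in scope, its extension methods apply directly to lists:
+  println(List(1, 2, 3).second) // 2
+```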
+ +To learn more about extension methods and given instances for extension methods in particular follow the docs on [Given Instances for Extension Methods](https://dotty.epfl.ch/docs/reference/contextual/extension-methods.html#given-instances-for-extension-methods) + +## New syntax for conditional givens + +We are experimenting with a new way to write given instances that are conditionally provided given other instances. + +Using present given syntax, it can seem awkward to define parameterised instances: +```scala +given listOrd[T](given Ord[T]): Ord[List[T]] ... +``` +it's particularly unfortunate for anonymous given instances: +```scala +given [T](given Ord[T]): Ord[List[T]] ... +``` +and worst in the monomorphic case: +```scala +given (given outer: Context): Context = ... +``` + +With the new syntax, the above definitions become +```scala +given listOrd[T]: Ord[T] => Ord[List[T]] ... + +given [T]: Ord[T] => Ord[List[T]] ... + +given (outer: Context) => Context = ... +``` +where the `=>` is read as `implies`, e.g. "a given `Ord[T]` implies a given `Ord[List[T]]`." + +This syntax is intentionally similar to function types. Indeed, multiple given parameter lists are provided as such: +```scala +given [T]: (ctx: Context) => (ctx.Type[T]) => Zero[ctx.Expr[T]] +``` +where the above reads as "a given `(ctx: Context)` and a given `ctx.Type[T]` implies a given `Zero[ctx.Expr[T]]`." + +As a worked example, we define a parameterised given instance for `Show[(A,B)]`: + +```scala +trait Show[-A] with + def (a: A) show: String + +given Show[String] = x => x +given Show[Int] = _.toString + +given [A,B]: (Show[A], Show[B]) => Show[(A,B)] = + (a,b) => s"(${a.show}, ${b.show})" + +@main def ShowPair = + println((1 -> "one").show) // (1, "one") +``` + +After a period of experimentation, either the new or old way will remain. + +## New match syntax + +We introduce an improved treatment of `match`. We reintroduce `match` as an +alphanumeric, left-associative, infix operator that can support chained matches: + +```scala +xs match { + case Nil => "empty" + case x :: xs1 => "nonempty" +} match { + case "empty" => 0 + case "nonempty" => 1 +} +``` + +By using the new treatment we can now offer `match` as a method: + +```scala +xs.match { + case Nil => false + case _ => true +} +``` + +You can read more in our docs [Match Expressions](https://dotty.epfl.ch/docs/reference/changed-features/match-syntax.html) and on the interesting discussions in [contributors](https://contributors.scala-lang.org/t/pre-sip-demote-match-keyword-to-a-method/2137/2). + +## Metaprogramming: New quoted pattern matching + +We introduce a high-level API to deconstruct or extract values out of `Expr` +using pattern matching. It consists of high-level extractors for getting static +information out of exprs and, of quoted patterns that allows to deconstruct +complex code that contains a precise structure, types or methods. +Patterns `'{ ... }` can be placed in any location where Scala expects a pattern. + +The new extractors are summarized below: + +* `scala.quoted.matching.Const`: matches an expression a literal value and returns the value. +* `scala.quoted.matching.ExprSeq`: matches an explicit sequence of expresions and returns them. These sequences are useful to get individual `Expr[T]` out of a varargs expression of type `Expr[Seq[T]]`. +* `scala.quoted.matching.ConstSeq`: matches an explicit sequence of literal values and returns them. 
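+
+As a minimal sketch of the first extractor above (assuming the `scala.quoted.matching` API of this release; the macro itself is invented for illustration), `Const` lets a macro implementation recover a literal argument:
+
+```scala
+// Hedged sketch only, not taken from the release notes.
+import scala.quoted._
+import scala.quoted.matching.Const
+
+inline def describe(n: => Int): String = ${ describeImpl('n) }
+
+def describeImpl(n: Expr[Int])(given QuoteContext): Expr[String] =
+  n match {
+    case Const(value) => Expr(s"literal $value") // argument was a compile-time literal
+    case _            => '{ "not a literal" }    // arbitrary expression: decide at runtime
+  }
+```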
+ +The following snippet demonstrates the new _quoted patterns_ implementing a +simple, 1-level, non-recursive rewriter macro for exponents. `rewrite` is a an +inline method definition designating a macro as usual. To inspect an `expr` +value with friendly syntax we can now use the quoted syntax as patterns inside a +match expression. Notice that quotes designate patterns and `$`, the familiar +syntax for splices is used to _extract_ (capture) information out of a pattern. + +```scala +inline def rewrite(expr: => Double): Double = ${rewrite('expr)} + +def rewrite(expr: Expr[Double])(given QuoteContext): Expr[Double] = { + val res = expr match { + // product rule + case '{ power2($a, $x) * power2($b, $y)} if a.matches(b) => '{ power2($a, $x + $y) } + // rules of 1 + case '{ power2($a, 1)} => a + case '{ power2(1, $a)} => '{ 1.0 } + // rule of 0 + case '{ power2($a, 0)} => '{ 1.0 } + // power rule + case '{ power2(power2($a, $x), $y)} => '{ power2($a, $x * $y ) } + case _ => expr + } + res +} +``` + +To learn more read our docs on [pattern matching over quotes](https://dotty.epfl.ch/docs/reference/metaprogramming/macros.html#pattern-matching-on-quoted-expressions). + +## Added support for SemanticDB file generation + +As part of ongoing efforts to support Dotty in Metals, our latest release now offers support for generation +of SemanticDB files, enabled with the `-Ysemanticdb` compiler flag. +Providing `-semanticdb-target` allows the user to select a separate target destination for the `META-INF` directory (the +root for `.semanticdb` files) and `-sourceroot` to calculate a relative path for SemanticDB files within `META-INF`. + +# Let us know what you think! + +If you have questions or any sort of feedback, feel free to send us a message on our +[Gitter channel](https://gitter.im/lampepfl/dotty). If you encounter a bug, please +[open an issue on GitHub](https://github.com/lampepfl/dotty/issues/new). + +## Contributing + +Thank you to all the contributors who made this release possible! + +According to `git shortlog -sn --no-merges 0.20.0-RC1..0.21.0-RC1` these are: + +``` + 176 Martin Odersky + 171 Bryan Abate + 88 Nicolas Stucki + 81 Jamie Thompson + 44 noti0na1 + 29 Anatolii + 28 bishabosha + 23 Antoine Brunner + 19 Guillaume Martres + 10 Aleksander Boruch-Gruszecki + 8 Guillaume Raffin + 6 Olafur Pall Geirsson + 5 Sébastien Doeraene + 4 Aggelos Biboudis + 4 Liu Fengyun + 4 Paolo G. Giarrusso + 3 Andrea Mocci + 3 Martijn Hoekstra + 2 Ben Elliott + 2 Patrik Mada + 2 Rafal Piotrowski + 2 odersky + 1 Markus Kahl + 1 Richard Beddington + 1 Vlastimil Dort + 1 Anatolii Kmetiuk + 1 Raphael Jolly + 1 Lucas + 1 Nikita Eshkeev + 1 Brian Wignall + 1 Olivier Blanvillain + 1 张志豪 +``` + +If you want to get your hands dirty and contribute to Dotty, now is a good time to get involved! +Head to our [Getting Started page for new contributors](https://dotty.epfl.ch/docs/contributing/getting-started.html), +and have a look at some of the [good first issues](https://github.com/lampepfl/dotty/issues?q=is%3Aissue+is%3Aopen+label%3Aexp%3Anovice). +They make perfect entry points into hacking on the compiler. + +We are looking forward to having you join the team of contributors. + +## Library authors: Join our community build + +Dotty now has a set of widely-used community libraries that are built against every nightly Dotty +snapshot. Currently, this includes ScalaPB, algebra, scalatest, scopt and squants. 
+Join our [community build](https://github.com/lampepfl/dotty-community-build) +to make sure that our regression suite includes your library. + +[Scastie]: https://scastie.scala-lang.org/?target=dotty + +[@odersky]: https://github.com/odersky +[@DarkDimius]: https://github.com/DarkDimius +[@smarter]: https://github.com/smarter +[@felixmulder]: https://github.com/felixmulder +[@nicolasstucki]: https://github.com/nicolasstucki +[@liufengyun]: https://github.com/liufengyun +[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain +[@biboudis]: https://github.com/biboudis +[@allanrenucci]: https://github.com/allanrenucci +[@Blaisorblade]: https://github.com/Blaisorblade +[@Duhemm]: https://github.com/Duhemm +[@AleksanderBG]: https://github.com/AleksanderBG +[@milessabin]: https://github.com/milessabin +[@anatoliykmetyuk]: https://github.com/anatoliykmetyuk diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2020-02-05-22nd-dotty-milestone-release.md b/scala3doc/dotty-docs/docs/blog/_posts/2020-02-05-22nd-dotty-milestone-release.md new file mode 100644 index 000000000000..a901e83130d8 --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2020-02-05-22nd-dotty-milestone-release.md @@ -0,0 +1,297 @@ +--- +layout: blog-page +title: Announcing Dotty 0.22.0-RC1 - syntactic enhancements, type-level arithmetic and more +author: Anatolii Kmetiuk +authorImg: /images/anatolii.png +date: 2020-02-05 +--- + +Hello! We are excited to announce 0.22.0-RC1 of Dotty. This version brings syntactic enhancements for extension methods and context parameters, as well as the kind projector syntax. Other notable changes include type-level arithmetic, changes to the `inline` parameters semantics and suggestions on missing context parameters. + +You can try out this version right now, from the comfort of your SBT, by visiting the [home page](https://dotty.epfl.ch/) and scrolling down to the "Create a Dotty Project" section. + +Alternatively, you can try this version of Scala online via [Scastie](https://scastie.scala-lang.org/). Once you're there, click "Build Settings" and set "Target" to "Dotty". + +Enjoy the ride🚀! + + +# New syntax for collective extension methods +Extension methods have been present in Dotty for a while. They present an idiomatic way to extend types with methods after these types are defined. For example: + +```scala +def (x: Int) toPower (n: Int): Int = + @annotation.tailrec def loop(accum: Int, power: Int): Int = + if power == 0 then accum + else if power > 0 then loop(accum * x, power - 1) + else throw IllegalArgumentException("This operation only supports positive powers") + loop(1, n) + println(s"3^3 = ${3.toPower(3)}") // 3^3 = 27 +``` + +However, when one wants to define multiple extension methods for a type, a lot of boilerplate manifests: + +```scala +def (x: Int) toPower (n: Int): Int = ??? +def (x: Int) squared = ??? +def (x: Int) asBinaryString = ??? +``` + +The type in question and the name of its parameter, `(x: Int)`, repeat. + +This boilerplate was the motivation to introduce collective extension methods. For a while, we were experimenting with looking at these through the lens of the `given` mechanism. We have tried out an idea of making these methods belong to an object visible in the `given` scope and, if such an object is present in the `given` scope, its extension methods are also automatically usable. 
+ +However, `given` instances are about *types* and the collective extension methods describe *parameters* of extension methods. Hence, in this release we introduce a new syntax for the collective extension methods: + +```scala +extension listOps on [T](xs: List[T]) { + def second = xs.tail.head + def third: T = xs.tail.tail.head +} + +val list = List(1, 2, 3) +println(s"Second: ${list.second}") // 2 +println(s"Third: ${list.third}") // 3 +``` + +This syntax is a completely separate one from the `given` syntax and hence is aimed to bring more clarity and disentangle the two different concepts. + +For the discussion, see [PR #7917](https://github.com/lampepfl/dotty/pull/7917). For more information on how to use extension methods in general and collective extension methods in particular, see the [documentation](https://dotty.epfl.ch/docs/reference/contextual/extension-methods.html). + +# Kind projector syntax support +[Kind projector](https://github.com/typelevel/kind-projector) is a popular compiler plugin for Scala 2. It is especially useful in the context of purely functional programming and type class derivation – everywhere where you need to work extensively with types. + +As of this release, a subset of the kind projector syntax is now supported in Dotty. Credits for this contribution go to [Travis Brown](https://github.com/travisbrown). + +To enable it, you need to run the compiler with the `-Ykind-projector` flag. You can e.g. write the following: + +```scala +// Fix #7139: Implement kind-projector compatibility #7775 +// With -Ykind-projector + +trait Functor[F[_]] + def map[A, B](fa: F[A], f: A => B): F[B] + +object eitherFunctor extends Functor[Either[Int, *]] + def map[A, B](fa: Either[Int, A], f: A => B): Either[Int, B] = fa match + case Right(x) => Right(f(x)) + case Left(x) => Left(x) + +object functionFunctor extends Functor[Int => *] + def map[A, B](fa: Int => A, f: A => B): Int => B = + fa andThen f + +object tupleFunctor extends Functor[λ[x => (x, x)]] + def map[A, B](fa: (A, A), f: A => B): (B, B) = fa match + case (a1, a2) => (f(a1), f(a2)) + +@main def Test = + val tpl = (1, 2) + val squared = tupleFunctor.map(tpl, a => a * a) + println(squared) // (1,4) +``` + +For the discussion, see [PR #7775](https://github.com/lampepfl/dotty/pull/7775). Also see the GitHub [repository](https://github.com/typelevel/kind-projector) of the kind projector Scala 2 plugin for more context. + +# Further improvements to the context parameters syntax +Scala 3 context parameters are successors of Scala 2 implicits. In Scala 2, they proved useful for a wide range of applications including purely functional programming, dependency injection, type class derivation, type-level programming. Because their apparent value, one of the priorities in Scala 3 for us is to improve the conceptual framework behind them. + +The state of context parameters before this release heavily employed the `given` keyword. For example: + +```scala +// OLD SYNTAX BELOW +given String = "10" +given (given str: String) : Int = str.toInt +def f(x: Int)(given y: Int) = x * y +``` + +The above is a suboptimal solution, however. The feedback we received from the community suggested that many people felt like the `given` keyword was overused, similarly to the `implict` keyword in Scala 2. This overuse is one of the things we'd like to avoid in Scala 3. It leads, for example, to situations like `given (given ...)` which are not nice to read. 
+ +For this release, we have changed the syntax for the context parameters. The keyword for the context argument group is now `using` instead of `given`. The above snippet now becomes: + +```scala +given String = "10" +given (using str: String) as Int = str.toInt +def f(x: Int)(using y: Int) = x * y +``` + +On the call site, the syntax for explicitly specifying the context parameters is now: + +```scala +f(2)(using 20) +``` + +As opposed to the previous: + +```scala +// OLD SYNTAX BELOW +f(2)(given 20) +``` + +For the time being, the change is experimental and the old syntax is also supported. For the discussion, see [PR #8162](https://github.com/lampepfl/dotty/pull/8162). You can browse the documentation concerning the new syntax [here](https://dotty.epfl.ch/docs/reference/contextual/motivation-new.html). + +# Semantics of inline parameters changed +Inline parameters is a metaprogramming feature of Dotty which allows to splice the body of the parameter on its call site. Previously, inline parameters to methods were required to be known on compile time. With this release, this constraint has been relaxed. The following: + +```scala +inline def sumTwice(a: Int, b: =>Int, inline c: Int) = a + a + b + b + c + c +sumTwice(f(), g(), h()) +``` + +Translates to: + +```scala + val a = f() + def b = g() + a + a + b + b + h() + h() +``` + +Notice how the value of the by-name parameter `b` is not inlined but is bound to `def b`. This is an important change that affects all the macros that accepted by-name parameters and analyzed the AST of the underlying code. With this release, such macros will stop working correctly because the AST of the code in question will be the identifier of the by-name parameter, `b` in this case, and not the AST of the code passed under that parameter's name. The workaround is to change all the by-name parameters in your macros to inline parameters. + +So, if previously you had a macro `inline def operationOnCode(code: => Unit) = ${ mcrImpl('code) }` which did something on the AST of the passed `code`, with this release you need to change it to `inline def operationOnCode(inline code: Unit) = ${ mcrImpl('code) }`. + +This change was introduced by [PR #8060](https://github.com/lampepfl/dotty/pull/8060/). + +Another change in the semantics of the inline parameters involves the fact that the can no longer be passed as constants to macro implementations. Previously, the following was possible: + +```scala +// OLD SEMANTICS +inline def power(x: Double, inline n: Int) = ${ powerCode('x, n) } +private def powerCode(x: Expr[Double], n: Int)(given +QuoteContext): Expr[Double] = ??? +``` + +It was possible to pass `n` directly to the spliced `powerCode` and it would have been treated as a constant in that macro implementation. + +Now, the inline parameters must be quoted when passed to a macro: + +```scala +inline def power(x: Double, inline n: Int) = ${ powerCode('x, 'n) } +private def powerCode(x: Expr[Double], n: Expr[Int])(given QuoteContext): Expr[Double] = ??? +``` + +You can obtain the constant value of `n` from within the macro implementation by calling `n.getValue` on it which returns an `Option`. This change was introduced by [PR #8061](https://github.com/lampepfl/dotty/pull/8061). + +For more information about the inline capability of Dotty, see [documentation](https://dotty.epfl.ch/docs/reference/metaprogramming/inline.html). 
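+
+For illustration, here is a minimal sketch of one possible body for `powerCode` (not the actual implementation from the PRs above) that branches on whether the quoted inline argument is a known constant via `getValue`:
+
+```scala
+// Hedged sketch, assuming the 0.22.0-RC1 quoted API described above.
+import scala.quoted._
+
+private def powerCode(x: Expr[Double], n: Expr[Int])(given QuoteContext): Expr[Double] =
+  n.getValue match {
+    case Some(k) =>
+      // The inline argument was a literal, e.g. power(x, 3): unroll the product.
+      (1 to k).foldLeft('{ 1.0 }: Expr[Double])((acc, _) => '{ $acc * $x })
+    case None =>
+      // Not statically known: fall back to a runtime computation.
+      '{ math.pow($x, $n.toDouble) }
+  }
+```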
+ +# Primitive compiletime operations on singleton types +Contributed by [Maxime Kjaer](https://github.com/MaximeKjaer), this release brings along type-level arithmetic: + +```scala +import scala.compiletime.ops.int._ + +val x: 2 + 3 = 5 // OK +val y: 3 * 4 + 1 = 12 // error +``` + +The compile-time error above will say: + +```scala +4 |val y: 3 * 4 + 1 = 12 + | ^^ + | Found: (12 : Int) + | Required: (13 : Int) +``` + +This feature is particularly useful for data science applications. In data science, it is very easy to make a linear algebra mistake, multiply matrices of wrong dimensions and get a runtime error – sometimes after a few hours of running the model. Hence compile-time verification of the models has a great potential for saving time. With such a type-level arithmetic, Scala becomes well-positioned to implement such type-safe data science frameworks. + +For the discussion, see [PR #7628](https://github.com/lampepfl/dotty/pull/7628). The documentation is available [here](https://dotty.epfl.ch/docs/reference/metaprogramming/inline.html#the-scalacompiletimeops-package). + +# Suggestions on missing context parameters +If there's a compile-time error due to a missing context parameter and this error can be fixed with an import, the compiler will attempt to suggest such an import in the error message. Here is an example of how this error looks like: + +``` +-- Error: tests/neg/missing-implicit1.scala:17:4 ----------------------------------------------------------------------- +17 | ff // error + | ^ + |no implicit argument of type testObjectInstance.Zip[Option] was found for parameter xs of method ff in object testObjectInstance + | + |The following import might fix the problem: + | + | import testObjectInstance.instances.zipOption +``` + +One area where these suggestions will make life easier is purely functional programming with type-classes, with libraries like [cats](https://typelevel.org/cats/). Having the fix for a missing type class in the error message itself is a big time-saver. + +For the discussion, see [PR #7862](https://github.com/lampepfl/dotty/pull/7862). + +# TASTy Inspector library +TASTy Consumer was renamed to TASTy Inspector as of this release. It was also published in a library of its own. For more information, see the [documentation](https://dotty.epfl.ch/docs/reference/metaprogramming/tasty-inspect.html) on this library. + +# Let us know what you think! + +If you have questions or any sort of feedback, feel free to send us a message on our +[Gitter channel](https://gitter.im/lampepfl/dotty). If you encounter a bug, please +[open an issue on GitHub](https://github.com/lampepfl/dotty/issues/new). + +## Contributing + +Thank you to all the contributors who made this release possible! 
+ +According to `git shortlog -sn --no-merges 0.21.0-RC1..0.22.0-RC1` these are: + +``` + 192 Martin Odersky + 85 Nicolas Stucki + 57 Antoine Brunner + 42 Liu Fengyun + 29 Guillaume Martres + 23 Aggelos Biboudis + 17 Maxime Kjaer + 10 Anatolii + 7 Jamie Thompson + 4 Minghao Liu + 3 Travis Brown + 3 Andrew Valencik + 2 fhackett + 2 Dvir Faivel + 2 Nadezhda Balashova + 2 Ruslan Shevchenko + 2 Lan, Jian + 2 Anatolii Kmetiuk + 2 Yevgen Nerush + 1 Dale Wijnand + 1 odersky + 1 Dmitrii Naumenko + 1 Eric K Richardson + 1 Eric Loots + 1 Jaap van der Plas + 1 Keith Pinson + 1 Miles Sabin + 1 Alexander Shamukov + 1 Som Snytt + 1 Taisuke Oe + 1 Timothée Floure + 1 bishabosha + 1 gzoller +``` + +If you want to get your hands dirty and contribute to Dotty, now is a good time to get involved! +Head to our [Getting Started page for new contributors](https://dotty.epfl.ch/docs/contributing/getting-started.html), +and have a look at some of the [good first issues](https://github.com/lampepfl/dotty/issues?q=is%3Aissue+is%3Aopen+label%3Aexp%3Anovice). +They make perfect entry points into hacking on the compiler. + +We are looking forward to having you join the team of contributors. + +## Library authors: Join our community build + +Dotty now has a set of widely-used community libraries that are built against every nightly Dotty +snapshot. Currently, this includes ScalaPB, algebra, scalatest, scopt and squants. +Join our [community build](https://github.com/lampepfl/dotty-community-build) +to make sure that our regression suite includes your library. + +[Scastie]: https://scastie.scala-lang.org/?target=dotty + +[@odersky]: https://github.com/odersky +[@DarkDimius]: https://github.com/DarkDimius +[@smarter]: https://github.com/smarter +[@felixmulder]: https://github.com/felixmulder +[@nicolasstucki]: https://github.com/nicolasstucki +[@liufengyun]: https://github.com/liufengyun +[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain +[@biboudis]: https://github.com/biboudis +[@allanrenucci]: https://github.com/allanrenucci +[@Blaisorblade]: https://github.com/Blaisorblade +[@Duhemm]: https://github.com/Duhemm +[@AleksanderBG]: https://github.com/AleksanderBG +[@milessabin]: https://github.com/milessabin +[@anatoliykmetyuk]: https://github.com/anatoliykmetyuk diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2020-03-18-23rd-dotty-milestone-release.md b/scala3doc/dotty-docs/docs/blog/_posts/2020-03-18-23rd-dotty-milestone-release.md new file mode 100644 index 000000000000..9ee9b3bfe902 --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2020-03-18-23rd-dotty-milestone-release.md @@ -0,0 +1,324 @@ +--- +layout: blog-page +title: Announcing Dotty 0.23.0-RC1 - safe initialization checks, type-level bitwise operations and more +author: Anatolii Kmetiuk +authorImg: /images/anatolii.png +date: 2020-03-18 +--- + +Hello! We are excited to announce 0.23.0-RC1 of Dotty. This version brings safe initialization checks, minor syntactic changes related to the context parameters, type-level bitwise operations and improvements of the metaprogramming capabilities. + +You can try out this version right now, from the comfort of your SBT, by visiting the [home page](https://dotty.epfl.ch/) and scrolling down to the "Create a Dotty Project" section. + +Alternatively, you can try this version of Scala online via [Scastie](https://scastie.scala-lang.org/). Once you're there, click "Build Settings" and set "Target" to "Dotty". 
Enjoy the ride🚀!


# Cool new features
## Safe initialization checks
When a class is instantiated, the fields in the class body are initialized by field initializers, which can be arbitrary Scala code. This versatile language feature gives the programmer flexibility in defining how objects are initialized. However, such flexibility also makes it harder to ensure that we never accidentally use a field before it is initialized. Initialization errors can be difficult to spot in the presence of complex language features such as inheritance, traits, inner classes, and aliasing. Such errors, sometimes simple and sometimes subtle, take effort to debug and fix, and have been a [pain point for Scala programmers](https://contributors.scala-lang.org/t/improve-forward-reference-handling/3616) for a long time.

Most programming languages, such as C++, Java and Kotlin, do not statically check initialization safety.
Others, like Swift, check initialization safety but overly restrict how objects may be initialized.
Now, Scala 3 has the best of both worlds: flexible initialization patterns and a static check for safety.

Consider the following program:

```scala
abstract class AbstractFile {
  def name: String
  val extension: String = name.reverse.dropWhile(_ != '.').reverse
}

class RemoteFile(url: String) extends AbstractFile {
  val localFile: String = url.hashCode + ".tmp"
  def name: String = localFile
}
```

Above, the `extension` value is initialized before `localFile` because the fields of a class's parents are initialized before the fields of the class itself. However, `extension` uses `localFile` during its initialization, since it accesses that field via the `name` method. This scenario leads to a `NullPointerException` at runtime when the uninitialized `localFile` is accessed.


In this release, we have added an aid for the programmer to detect such mistakes automatically. If you compile the above program with the `-Ycheck-init` flag, you will get the following compile-time error:

```scala
-- Error: /Users/kmetiuk/Projects/scala3/pg/release/snip_4.scala:8:7 -----------
8 |  val localFile: String = url.hashCode + ".tmp"
  |      ^
  |Access non-initialized field localFile. Calling trace:
  | -> val extension: String = name.reverse.dropWhile(_ != '.').reverse    [ snip_4.scala:4 ]
  |  -> def name: String = localFile    [ snip_4.scala:9 ]
1 error found
```

You can learn more about the feature from the [documentation](https://dotty.epfl.ch/0.23.0-RC1/docs/reference/other-new-features/safe-initialization.html). For the discussion, see PR [#7789](https://github.com/lampepfl/dotty/pull/7789).

## Bitwise Int compiletime operations
In the previous release, Dotty [received](https://dotty.epfl.ch/blog/2020/02/05/22nd-dotty-milestone-release.html#primitive-compiletime-operations-on-singleton-types) support for type-level arithmetic operations on integers. In this release, we are extending this support by adding bitwise operations.
For example:

```scala
import scala.compiletime.ops.int._

@main def Test =
  val t1: 1 << 1 = 2
  val t2: 1 << 2 = 4
  val t3: 1 << 3 = 8
  val t4: 1 << 4 = 0 // error
```

Above, `t4` fails to compile with the following error:

    -- [E007] Type Mismatch Error: /Users/kmetiuk/Projects/scala3/pg/release/snip_3.scala:7:20
    7 |  val t4: 1 << 4 = 0 // error
      |                   ^
      |                   Found:    (0 : Int)
      |                   Required: (16 : Int)

You can find the list of all the supported operations in the `scala.compiletime.ops` [package](https://github.com/bishabosha/dotty/blob/e2b0de0bf70bbde5a9a92dc7fa91b36537b02a87/library/src/scala/compiletime/ops/package.scala).

# Syntactic Changes
## Context function syntax improved
In this release, we have done some work to improve the syntax of context functions. Their syntax is now closer to the syntax for context parameters of methods.

Previously, a context function was written as follows:

```scala
// OLD SYNTAX
val ctxFunOld = (x: String) ?=> x.toInt
```

Now, it is written as follows:

```scala
val ctxFunNew = (using x: String) => x.toInt
```

We hope that this change will improve the readability of context functions for anyone who already knows the syntax for context parameters of ordinary methods.

## Drop `given` parameter syntax
As part of our experimentation with the syntax of the language, we are now dropping the old syntax for context parameters.

The old syntax for context parameters was as follows:

```scala
// OLD SYNTAX, NO LONGER SUPPORTED
def f(given x: Int) = x * x
```

In the previous release, it was [replaced](https://dotty.epfl.ch/blog/2020/02/05/22nd-dotty-milestone-release.html#further-improvements-to-the-context-parameters-syntax) by the new `using` syntax:

```scala
def f(using x: Int) = x * x
```

However, both syntaxes were supported in that release for experimental purposes. Now, we are dropping support for the old syntax in favor of the new one, which we see as a clear win.

# Metaprogramming
## Inline version of `summon`
Inside an inline method, we often want to summon a value without declaring it as a context parameter of the method:

```scala
inline def lookup[X] =
  val x = summon[X] // error
  // -- Error: /Users/kmetiuk/Projects/scala3/pg/release/snip_5.scala:6:19 ----------
  // 6 |  val x = summon[X]
  //   |                   ^
  //   |no implicit argument of type X was found for parameter x of method summon in object DottyPredef
  // 1 error found
  println(s"x = $x")
```

The above program gives a compile-time error because the compiler cannot find a context parameter of type `X`. This is because `summon` is not inline, hence the compiler needs a context parameter of type `X` to exist at the call site of `summon`, which happens to be the body of `lookup`. Since `X` is unknown in that body, the compiler cannot find the context parameter and reports an error.

We have now added an inline version of `summon`:

```scala
import scala.compiletime.summonInline

inline def lookup[X] =
  val x = summonInline[X]
  println(s"x = $x")

@main def Test =
  given Int = 10
  lookup[Int]
```

`summonInline` is an inline version of `summon`. It is defined as follows:

```scala
inline def summonInline[T] <: T = summonFrom {
  case t: T => t
}
```

Since it is inline, the context parameter is resolved at the expansion site, not at the call site. The expansion site is wherever the `lookup` method is inlined, and there the type `X` is bound to a concrete type.
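To make the difference concrete, here is a minimal sketch (the `maxOf` and `Demo` names are hypothetical, used only for illustration): an inline method can rely on `summonInline` to pick up a standard `Ordering` at each expansion site without declaring a context parameter.

```scala
import scala.compiletime.summonInline

// Hypothetical example: no `(using ord: Ordering[T])` parameter is declared.
// The Ordering is summoned where each call is inlined, once T is concrete.
inline def maxOf[T](a: T, b: T): T =
  val ord = summonInline[Ordering[T]]
  if ord.gt(a, b) then a else b

@main def Demo =
  println(maxOf(3, 5))     // Ordering[Int] is found at the expansion site; prints 5
  println(maxOf("a", "z")) // Ordering[String] likewise; prints z
```

Had we used `summon` instead of `summonInline`, the lookup for `Ordering[T]` would have failed inside the body of `maxOf`, just as in the `lookup` example above.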
## `ValueOfExpr` renamed to `Unlifted`
The `Unlifted` extractor allows you to obtain the value captured in an expression via pattern matching:

Macro.scala

```scala
import scala.quoted._

inline def square(inline x: Int): Int = ${ squareImpl('x) }
def squareImpl(x: Expr[Int])(using QuoteContext): Expr[Int] =
  x match
    case Unlifted(value: Int) => Expr(value * value)
```

Test.scala

```scala
@main def Test =
  println(square(10)) // println(100)
```

## Extractors for quotes moved under `scala.quoted` package
The metaprogramming capabilities are undergoing simplifications in this release. In particular, fewer imports are now needed.

Previously, to access the extractors for expressions you had to import `scala.quoted.matching._`. Now, the extractors from there have been moved to `scala.quoted`. For example, you can write the following program:

Macro.scala:

```scala
import scala.quoted._

inline def square(inline x: Int): Int = ${ squareImpl('x) }
def squareImpl(xExpr: Expr[Int])(using QuoteContext): Expr[Int] =
  xExpr match
    case Const(x) => Expr(x * x)
```

Test.scala:

```scala
@main def Test =
  println(square(2)) // println(4)
```

Above, `Const` is an extractor that matches constants. Notice how we do not need to import anything other than `scala.quoted._` to use it.

## TASTy Reflect imports simplified
Previously, to access the TASTy Reflect features of Dotty, you had to include an import as follows:

```scala
// OLD CODE
import qctx.tasty.{ _, given }
```

Above, `qctx` is a `QuoteContext`, which is available in all macro implementations. The `given` keyword imports all the context instances; in this particular case, it was needed to bring the extension methods for ASTs into scope.

With this release, the `given` part is no longer needed, and the extension methods are in scope after merely writing `import qctx.tasty._`.

Consider the following example:

Macro.scala

```scala
import scala.quoted._

inline def showTree(inline x: Any): String = ${ showTreeImpl('x) }
def showTreeImpl(x: Expr[Any])(using qctx: QuoteContext): Expr[String] =
  import qctx.tasty._
  x.unseal match
    case Inlined(_, _, app: Apply) =>
      val fun: Term = app.fun
      val args: List[Term] = app.args
      val res = s"Function: $fun\nApplied to: $args"
      Expr(res)
```

Test.scala

```scala
@main def Test =
  def f(x: Int) = x * x
  val x = 10
  println(showTree(f(x)))
```

Notice how, above, we call `app.fun` and `app.args`: `fun` and `args` are extension methods on the `Apply` tree node. Previously, they would not have been available unless we did `import qctx.tasty.given`. As of this release, however, the above program compiles without errors.

# Let us know what you think!

If you have questions or any sort of feedback, feel free to send us a message on our
[Gitter channel](https://gitter.im/lampepfl/dotty). If you encounter a bug, please
[open an issue on GitHub](https://github.com/lampepfl/dotty/issues/new).

## Contributing

Thank you to all the contributors who made this release possible!
+ +According to `git shortlog -sn --no-merges 0.22.0-RC1..0.23.0-RC1` these are: + +``` + 165 Martin Odersky + 124 Nicolas Stucki + 121 Liu Fengyun + 45 Robert Stoll + 15 Guillaume Martres + 15 Anatolii + 10 gzoller + 8 Som Snytt + 8 Stéphane Micheloud + 5 Ausmarton Zarino Fernandes + 5 Oron Port + 3 Adam Fraser + 3 Gabriele Petronella + 3 Uko + 3 Anatolii Kmetiuk + 2 ybasket + 2 Dale Wijnand + 2 Dani Rey + 2 Jamie Thompson + 2 Olivier Blanvillain + 2 Tomasz Godzik + 2 Travis Brown + 2 Vlastimil Dort + 1 tanaka takaya + 1 Miles Sabin + 1 Andrew Valencik + 1 bishabosha + 1 fhackett + 1 Lionel Parreaux + 1 kenji yoshida + 1 manojo + 1 odersky + 1 Raj Parekh + 1 Sébastien Doeraene + 1 xuwei-k +``` + +If you want to get your hands dirty and contribute to Dotty, now is a good time to get involved! +Head to our [Getting Started page for new contributors](https://dotty.epfl.ch/docs/contributing/getting-started.html), +and have a look at some of the [good first issues](https://github.com/lampepfl/dotty/issues?q=is%3Aissue+is%3Aopen+label%3Aexp%3Anovice). +They make perfect entry points into hacking on the compiler. + +We are looking forward to having you join the team of contributors. + +## Library authors: Join our community build + +Dotty now has a set of widely-used community libraries that are built against every nightly Dotty +snapshot. Currently, this includes ScalaPB, algebra, scalatest, scopt and squants. +Join our [community build](https://github.com/lampepfl/dotty-community-build) +to make sure that our regression suite includes your library. + +[Scastie]: https://scastie.scala-lang.org/?target=dotty + +[@odersky]: https://github.com/odersky +[@DarkDimius]: https://github.com/DarkDimius +[@smarter]: https://github.com/smarter +[@felixmulder]: https://github.com/felixmulder +[@nicolasstucki]: https://github.com/nicolasstucki +[@liufengyun]: https://github.com/liufengyun +[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain +[@biboudis]: https://github.com/biboudis +[@allanrenucci]: https://github.com/allanrenucci +[@Blaisorblade]: https://github.com/Blaisorblade +[@Duhemm]: https://github.com/Duhemm +[@AleksanderBG]: https://github.com/AleksanderBG +[@milessabin]: https://github.com/milessabin +[@anatoliykmetyuk]: https://github.com/anatoliykmetyuk diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2020-04-29-24th-dotty-milestone-release.md b/scala3doc/dotty-docs/docs/blog/_posts/2020-04-29-24th-dotty-milestone-release.md new file mode 100644 index 000000000000..d4f34446e5c4 --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2020-04-29-24th-dotty-milestone-release.md @@ -0,0 +1,155 @@ +--- +layout: blog-page +title: Announcing Dotty 0.24.0-RC1 - 2.13.2 standard library, better error messages and more +author: Anatolii Kmetiuk +authorImg: /images/anatolii.png +date: 2020-04-29 +--- + +Hello! We are excited to announce 0.24.0-RC1 of Dotty. In this version, we have updated the standard library to 2.13.2. Also, we have made some work to make error messages more user-friendly and a bunch of other polishings to the language. + +You can try out this version right now, from the comfort of your SBT, by visiting the [home page](https://dotty.epfl.ch/) and scrolling down to the "Create a Dotty Project" section. + +Alternatively, you can try this version of Scala online via [Scastie](https://scastie.scala-lang.org/). Once you're there, click "Build Settings" and set "Target" to "Dotty". + +Enjoy the ride🚀! 
# REPL works with indented code
The REPL now supports indented code. Consider the following snippet:

```scala
scala> if true then
     |   print(1)
     |   print(2)
     |
```

Previously, the REPL would have stopped after `print(1)`. Now, it waits either for an `else` block or for an extra newline indicating the end of the expression. The above example outputs `12` as expected.

# Better error message for ifs that miss an else branch
The error messages are now more beginner-friendly. Consider the following:

```scala
def f: Int = if ??? then 1
```

Above, the `if` expression returns `Unit` since an `else` clause is missing. Previously, the user would have gotten the following error:

```
-- [E007] Type Mismatch Error: ...
12 |def f: Int = if ??? then 1
   |             ^^^^^^^^^^^^^
   |             Found:    Unit
   |             Required: Int
```

Now, the above error message also contains the following sentence:

```
   |             Maybe you are missing an else part for the conditional?
```

We hope this change will make the language more intuitive for new users.

# Inline overrides
Inline overrides are now supported. For example, consider the following code:

```scala
abstract class A:
  def f(x: Int) = s"Foo $x"

class B extends A:
  inline override def f(x: Int) = s"Bar $x"

@main def Test =
  val b = B()
  println(b.f(22))
  val a: A = b
  println(a.f(22))
```

The output of the above program is:

```
Bar 22
Bar 22
```

This new change, however, comes with rather intricate rules: if you are interested in learning about them in detail, see the [documentation](https://dotty.epfl.ch/docs/reference/metaprogramming/inline.html#rules-for-overriding) on inlines and PR [#8543](https://github.com/lampepfl/dotty/pull/8543/files), which introduced the change.

# Let us know what you think!

If you have questions or any sort of feedback, feel free to send us a message on our
[Gitter channel](https://gitter.im/lampepfl/dotty). If you encounter a bug, please
[open an issue on GitHub](https://github.com/lampepfl/dotty/issues/new).

## Contributing

Thank you to all the contributors who made this release possible 🎉

According to `git shortlog -sn --no-merges 0.23.0-RC1..0.24.0-RC1` these are:

```
 136 Martin Odersky
  74 Nicolas Stucki
  37 Guillaume Martres
  33 Robert Stoll
  22 Liu Fengyun
  19 Anatolii Kmetiuk
  16 Arnaud ESTEVE
  15 Olivier Blanvillain
  10 Arnaud Esteve
   9 Martijn Hoekstra
   6 Anatolii
   4 Som Snytt
   4 bishabosha
   4 Aleksander Boruch-Gruszecki
   3 Miles Sabin
   2 odersky
   2 Fengyun Liu
   2 Julien Richard-Foy
   1 Ara Adkins
   1 Maxime Kjaer
   1 Philippus
   1 Rike-Benjamin Schuppner
   1 Julien Jean Paul Sirocchi
   1 Dani Rey
   1 Sébastien Doeraene
   1 aesteve
   1 Dale Wijnand
   1 fhackett
   1 gzoller
   1 Michael Pilquist
```

If you want to get your hands dirty and contribute to Dotty, now is a good time to get involved!
Head to our [Getting Started page for new contributors](https://dotty.epfl.ch/docs/contributing/getting-started.html),
and have a look at some of the [good first issues](https://github.com/lampepfl/dotty/issues?q=is%3Aissue+is%3Aopen+label%3Aexp%3Anovice).
They make perfect entry points into hacking on the compiler.

We are looking forward to having you join the team of contributors.

## Library authors: Join our community build

Dotty now has a set of widely-used community libraries that are built against every nightly Dotty
snapshot. Currently, this includes shapeless, ScalaPB, algebra, scalatest, scopt and squants.
+Join our [community build](https://github.com/lampepfl/dotty/tree/master/community-build) +to make sure that our regression suite includes your library. + +[Scastie]: https://scastie.scala-lang.org/?target=dotty + +[@odersky]: https://github.com/odersky +[@DarkDimius]: https://github.com/DarkDimius +[@smarter]: https://github.com/smarter +[@felixmulder]: https://github.com/felixmulder +[@nicolasstucki]: https://github.com/nicolasstucki +[@liufengyun]: https://github.com/liufengyun +[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain +[@biboudis]: https://github.com/biboudis +[@allanrenucci]: https://github.com/allanrenucci +[@Blaisorblade]: https://github.com/Blaisorblade +[@Duhemm]: https://github.com/Duhemm +[@AleksanderBG]: https://github.com/AleksanderBG +[@milessabin]: https://github.com/milessabin +[@anatoliykmetyuk]: https://github.com/anatoliykmetyuk diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2020-06-22-25th-dotty-milestone-release.md b/scala3doc/dotty-docs/docs/blog/_posts/2020-06-22-25th-dotty-milestone-release.md new file mode 100644 index 000000000000..e80e42bd6a6b --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2020-06-22-25th-dotty-milestone-release.md @@ -0,0 +1,119 @@ +--- +layout: blog-page +title: Announcing Dotty 0.25.0-RC2 - speed-up of givens and change in the tuple API +author: Anatolii Kmetiuk +authorImg: /images/anatolii.png +date: 2020-06-22 +--- + +Hello! We are excited to announce 0.25.0-RC2 of Dotty. In this version, following feedback of the community, we have improved compilation speeds when programming with givens. We have also made some improvements to the tuple API. + +You can try out this version right now, from the comfort of your SBT, by visiting the [home page](https://dotty.epfl.ch/) and scrolling down to the "Create a Dotty Project" section. + +Alternatively, you can try this version of Scala online via [Scastie](https://scastie.scala-lang.org/). Once you're there, click "Build Settings" and set "Target" to "Dotty". + +Enjoy the ride🚀! + + +# Dedicated type for empty tuples +We have added a type `EmptyTuple` to represent empty tuples. Previously empty tuples were represented by `Unit`. This change was done so that all the tuples are a subtype of `Product`. Now, tuples have the following type hierarchy: + +```scala +Product -- Tuple -+- EmptyTuple + | + +- NonEmptyTuple -- *:[Head, Tail <: Tuple] +``` + +This change impacts existing typeclass derivation codebases in that they now should use `EmptyTuple` instead of `Unit` in the tuple context. + +# Avoid excessive slowdowns when suggesting missing imports in error messages +Dotty brings to the user an enhanced error reporting when it comes to programming with givens. This better error reporting, however, proved to be a trade-off. Sometimes it takes an unreasonable amount of time and space to compute a quality suggestion to the end user. + +A number of users reported considerable slowdowns when it comes to programming with implicits. To address this issue, we have modified the logic for given imports suggestion. We introduced a per-run budget of 10 seconds, so it is guaranteed that it won't take longer than that time to compute all the given imports. + +This default budget is configurable via a compiler flag `-Ximport-suggestion-timeout`. + +This change should speed up the compiler when it comes to programming with givens. 
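If the default budget does not suit your project, you can adjust it through the flag mentioned above. Below is a hedged sketch of an sbt setting; it assumes the value is passed as the argument following the flag, so check the PR referenced below for the exact form.

```scala
// build.sbt -- illustrative only; verify the exact argument form of
// -Ximport-suggestion-timeout against `scalac -help` or the PR below.
scalacOptions ++= Seq(
  "-Ximport-suggestion-timeout", "20000" // allow up to 20 seconds per run for suggestions
)
```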
+ +For more information, see PR [#9167](https://github.com/lampepfl/dotty/pull/9167). + +# Let us know what you think! + +If you have questions or any sort of feedback, feel free to send us a message on our +[Gitter channel](https://gitter.im/lampepfl/dotty). If you encounter a bug, please +[open an issue on GitHub](https://github.com/lampepfl/dotty/issues/new). + +## Contributing +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 0.24.0-RC1..0.25.0-RC2` these are: + +``` + 190 Nicolas Stucki + 175 Martin Odersky + 67 Liu Fengyun + 32 Guillaume Martres + 17 Robert Stoll + 17 bishabosha + 9 Anatolii Kmetiuk + 7 yu-croco + 6 Reto Hablützel + 5 Akhtiam Sakaev + 5 odersky + 4 Raphael Jolly + 4 Ruslan Shevchenko + 4 Olivier Blanvillain + 3 Jamie Thompson + 3 Chris Birchall + 2 Radosław Waśko + 2 Aleksander Boruch-Gruszecki + 2 Eric Loots + 2 Jens Kat + 2 Miles Sabin + 2 noti0na1 + 1 Krzysztof Bochenek + 1 Seth Tisue + 1 Tobias Kahlert + 1 Yilin Wei + 1 ansvonwa + 1 FabioPinheiro + 1 december32 + 1 yytyd + 1 Ara Adkins + 1 squid314 + 1 typeness + 1 xuwei-k + 1 Alex Zolotko + 1 Julien Richard-Foy +``` + +If you want to get your hands dirty and contribute to Dotty, now is a good time to get involved! +Head to our [Getting Started page for new contributors](https://dotty.epfl.ch/docs/contributing/getting-started.html), +and have a look at some of the [good first issues](https://github.com/lampepfl/dotty/issues?q=is%3Aissue+is%3Aopen+label%3Aexp%3Anovice). +They make perfect entry points into hacking on the compiler. + +We are looking forward to having you join the team of contributors. + +## Library authors: Join our community build + +Dotty now has a set of widely-used community libraries that are built against every nightly Dotty +snapshot. Currently, this includes shapeless, ScalaPB, algebra, scalatest, scopt and squants. +Join our [community build](https://github.com/lampepfl/dotty/tree/master/community-build) +to make sure that our regression suite includes your library. + +[Scastie]: https://scastie.scala-lang.org/?target=dotty + +[@odersky]: https://github.com/odersky +[@DarkDimius]: https://github.com/DarkDimius +[@smarter]: https://github.com/smarter +[@felixmulder]: https://github.com/felixmulder +[@nicolasstucki]: https://github.com/nicolasstucki +[@liufengyun]: https://github.com/liufengyun +[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain +[@biboudis]: https://github.com/biboudis +[@allanrenucci]: https://github.com/allanrenucci +[@Blaisorblade]: https://github.com/Blaisorblade +[@Duhemm]: https://github.com/Duhemm +[@AleksanderBG]: https://github.com/AleksanderBG +[@milessabin]: https://github.com/milessabin +[@anatoliykmetyuk]: https://github.com/anatoliykmetyuk diff --git a/scala3doc/dotty-docs/docs/blog/_posts/2020-07-27-26th-dotty-milestone-release.md b/scala3doc/dotty-docs/docs/blog/_posts/2020-07-27-26th-dotty-milestone-release.md new file mode 100644 index 000000000000..13297735d94c --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/_posts/2020-07-27-26th-dotty-milestone-release.md @@ -0,0 +1,149 @@ +--- +layout: blog-page +title: Announcing Dotty 0.26.0-RC1 - unified extension methods and more +author: Anatolii Kmetiuk +authorImg: /images/anatolii.png +date: 2020-07-27 +--- + +Hello! We are excited to announce 0.26.0-RC1 of Dotty. 
In this version, we have improved extension methods: their syntax is now more uniform. We have also implemented local selectable instances and made a number of improvements to the compiler and the language API. Otherwise, we are focusing our efforts on reducing the issue count on the issue tracker, boosting performance and improving the stability of the compiler in other ways.

You can try out this version right now, from the comfort of your SBT, by visiting the [home page](https://dotty.epfl.ch/) and scrolling down to the "Create a Dotty Project" section.

Alternatively, you can try this version of Scala online via [Scastie](https://scastie.scala-lang.org/). Once you're there, click "Build Settings" and set "Target" to "Dotty".

Enjoy the ride🚀!


# Unified extension methods
In this release, we have made extension method syntax uniform. Previously, we had three separate syntaxes for single extension methods, collective extension methods and given instances with extension methods. Now, these three cases have been unified into one. The new syntax looks as follows:

```scala
extension (x: String)
  def < (y: String): Boolean = ...
```

Collective extensions look as follows:

```scala
extension (ss: Seq[String]):

  def longestStrings: Seq[String] =
    val maxLength = ss.map(_.length).max
    ss.filter(_.length == maxLength)

  def longestString: String = longestStrings.head
```

You can read more about the new syntax in the [documentation](https://dotty.epfl.ch/docs/reference/contextual/extension-methods.html). For the discussion, see [PR #9255](https://github.com/lampepfl/dotty/pull/9255).

# Local Selectable Instances
Local and anonymous classes that extend `Selectable` get more refined types than other classes. For example:

```scala
trait Vehicle extends reflect.Selectable {
  val wheels: Int
}
val i3 = new Vehicle { // i3: Vehicle { val range: Int }
  val wheels = 4
  val range = 240
}
i3.range
```

Without the `extends reflect.Selectable` clause, the last line would have errored:

```scala
i3.range // error: range is not a member of `Vehicle`
```

The new functionality is similar to `scala.Dynamic` but different since `Selectable` is type-safe. For more about this feature, see the [documentation](https://dotty.epfl.ch/docs/reference/changed-features/structural-types.html#local-selectable-instances).

# Tuple counterparts for `summon` and `constValue`
Two new methods for compile-time programming were added: `summonAll` and `constValueTuple`.

`summonAll[T <: Tuple]` takes a tuple type, summons all of its members and returns them as a tuple. For example:

```scala
given as Int = 10
given as String = "foo"
given as Double = 1.2
println(summonAll[Int *: String *: Double *: EmptyTuple]) // (10,foo,1.2)
```

In the same spirit, `constValueTuple[T <: Tuple]` is a tuple counterpart for `constValue`. For example:

```scala
val result = constValueTuple["foo" *: "bar" *: 10 *: 2.5 *: EmptyTuple]
println(result) // (foo,bar,10,2.5)
```

This feature was introduced by PR [#9209](https://github.com/lampepfl/dotty/pull/9209).

# Per-run time budget for import suggestions
Import suggestions are useful for debugging but potentially taxing for performance. Therefore, we have added the `-Ximport-suggestion-timeout` flag to allow specifying the timeout (in milliseconds) after which the suggestion mechanism should stop the lookup.
The timeout budget is per-run (and not per suggestion) which ensures that the performance does not degrade in case of too many suggestions. + +# Let us know what you think! + +If you have questions or any sort of feedback, feel free to send us a message on our +[Gitter channel](https://gitter.im/lampepfl/dotty). If you encounter a bug, please +[open an issue on GitHub](https://github.com/lampepfl/dotty/issues/new). + +## Contributing + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 0.25.0-RC2..0.26.0-RC1` these are: + +``` + 128 Martin Odersky + 53 Nicolas Stucki + 30 Sébastien Doeraene + 18 Anatolii Kmetiuk + 18 Guillaume Raffin + 17 Lan, Jian + 12 Guillaume Martres + 5 Aleksander Boruch-Gruszecki + 3 Ruslan Shevchenko + 3 odersky + 2 Alden Torres + 2 Robert Stoll + 2 yu-croco + 1 Alex Zolotko + 1 Kevin Dreßler + 1 FabioPinheiro + 1 adpi2 + 1 Matthew Pickering + 1 Liu Fengyun +``` + +If you want to get your hands dirty and contribute to Dotty, now is a good time to get involved! +Head to our [Getting Started page for new contributors](https://dotty.epfl.ch/docs/contributing/getting-started.html), +and have a look at some of the [good first issues](https://github.com/lampepfl/dotty/issues?q=is%3Aissue+is%3Aopen+label%3Aexp%3Anovice). +They make perfect entry points into hacking on the compiler. + +We are looking forward to having you join the team of contributors. + +## Library authors: Join our community build + +Dotty now has a set of widely-used community libraries that are built against every nightly Dotty +snapshot. Currently, this includes shapeless, ScalaPB, algebra, scalatest, scopt and squants. +Join our [community build](https://github.com/lampepfl/dotty/tree/master/community-build) +to make sure that our regression suite includes your library. + +[Scastie]: https://scastie.scala-lang.org/?target=dotty + +[@odersky]: https://github.com/odersky +[@DarkDimius]: https://github.com/DarkDimius +[@smarter]: https://github.com/smarter +[@felixmulder]: https://github.com/felixmulder +[@nicolasstucki]: https://github.com/nicolasstucki +[@liufengyun]: https://github.com/liufengyun +[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain +[@biboudis]: https://github.com/biboudis +[@allanrenucci]: https://github.com/allanrenucci +[@Blaisorblade]: https://github.com/Blaisorblade +[@Duhemm]: https://github.com/Duhemm +[@AleksanderBG]: https://github.com/AleksanderBG +[@milessabin]: https://github.com/milessabin +[@anatoliykmetyuk]: https://github.com/anatoliykmetyuk diff --git a/scala3doc/dotty-docs/docs/blog/index.html b/scala3doc/dotty-docs/docs/blog/index.html new file mode 100644 index 000000000000..6db9ee4ce1c4 --- /dev/null +++ b/scala3doc/dotty-docs/docs/blog/index.html @@ -0,0 +1,32 @@ +--- +layout: main +title: Blog +--- +
      +

      {{ page.title }}

      + +
        + {% for post in site.posts %} +
      • +

        + {{ post.title }} +

        + +
        + {{ post.excerpt }} +
        +
      • + {% endfor %} +
      +
      diff --git a/scala3doc/dotty-docs/docs/css/bootstrap.min.css b/scala3doc/dotty-docs/docs/css/bootstrap.min.css new file mode 100644 index 000000000000..d9bbfab3ab34 --- /dev/null +++ b/scala3doc/dotty-docs/docs/css/bootstrap.min.css @@ -0,0 +1 @@ +@import url("https://fonts.googleapis.com/css?family=Lato:400,700|Fira+Code:400,700&display=fallback");:root{--light:#fafafa;--toolbar:#414551;--toolbar-entry:#fafafa;--toolbar-active:#7c8296;--sidebar:#fafafa;--sidebar-category:#000;--sidebar-page:#414551;--sidebar-active:#ca445e;--pre-bg:#fafafa;--doc-bg:rgba(202,68,94,0.135);--primary:#414551;--secondary:#ca445e;--breakpoint-xs:0;--breakpoint-sm:576px;--breakpoint-md:768px;--breakpoint-lg:992px;--breakpoint-xl:1200px;--font-family-sans-serif:"Lato",sans-serif;--font-family-monospace:"Fira Code","Andale Mono",monospace}*,:after,:before{box-sizing:border-box}html{font-family:sans-serif;line-height:1.15;-webkit-text-size-adjust:100%;-webkit-tap-highlight-color:rgba(0,0,0,0)}article,aside,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}body{margin:0;font-family:Lato,sans-serif;font-size:1rem;font-weight:400;line-height:1.5;color:#212529;text-align:left;background-color:#fff}[tabindex="-1"]:focus{outline:0!important}hr{box-sizing:content-box;height:0;overflow:visible}h1,h2,h3,h4,h5,h6{margin-top:0;margin-bottom:.5rem}p{margin-top:0;margin-bottom:1rem}abbr[data-original-title],abbr[title]{text-decoration:underline;-webkit-text-decoration:underline dotted;text-decoration:underline dotted;cursor:help;border-bottom:0;-webkit-text-decoration-skip-ink:none;text-decoration-skip-ink:none}address{font-style:normal;line-height:inherit}address,dl,ol,ul{margin-bottom:1rem}dl,ol,ul{margin-top:0}ol ol,ol ul,ul ol,ul ul{margin-bottom:0}dt{font-weight:700}dd{margin-bottom:.5rem;margin-left:0}blockquote{margin:0 0 1rem}b,strong{font-weight:bolder}small{font-size:80%}sub,sup{position:relative;font-size:75%;line-height:0;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}a{color:#007bff;text-decoration:none;background-color:transparent}a:hover{color:#0056b3;text-decoration:underline}a:not([href]):not([tabindex]),a:not([href]):not([tabindex]):focus,a:not([href]):not([tabindex]):hover{color:inherit;text-decoration:none}a:not([href]):not([tabindex]):focus{outline:0}code,kbd,pre,samp{font-family:Fira Code,Andale Mono,monospace;font-size:1em}pre{margin-top:0;margin-bottom:1rem;overflow:auto}figure{margin:0 0 1rem}img{border-style:none}img,svg{vertical-align:middle}svg{overflow:hidden}table{border-collapse:collapse}caption{padding-top:.75rem;padding-bottom:.75rem;color:#6c757d;text-align:left;caption-side:bottom}th{text-align:inherit}label{display:inline-block;margin-bottom:.5rem}button{border-radius:0}button:focus{outline:1px dotted;outline:5px auto 
-webkit-focus-ring-color}button,input,optgroup,select,textarea{margin:0;font-family:inherit;font-size:inherit;line-height:inherit}button,input{overflow:visible}button,select{text-transform:none}select{word-wrap:normal}[type=button],[type=reset],[type=submit],button{-webkit-appearance:button}[type=button]:not(:disabled),[type=reset]:not(:disabled),[type=submit]:not(:disabled),button:not(:disabled){cursor:pointer}[type=button]::-moz-focus-inner,[type=reset]::-moz-focus-inner,[type=submit]::-moz-focus-inner,button::-moz-focus-inner{padding:0;border-style:none}input[type=checkbox],input[type=radio]{box-sizing:border-box;padding:0}input[type=date],input[type=datetime-local],input[type=month],input[type=time]{-webkit-appearance:listbox}textarea{overflow:auto;resize:vertical}fieldset{min-width:0;padding:0;margin:0;border:0}legend{display:block;width:100%;max-width:100%;padding:0;margin-bottom:.5rem;font-size:1.5rem;line-height:inherit;color:inherit;white-space:normal}progress{vertical-align:baseline}[type=number]::-webkit-inner-spin-button,[type=number]::-webkit-outer-spin-button{height:auto}[type=search]{outline-offset:-2px;-webkit-appearance:none}[type=search]::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{font:inherit;-webkit-appearance:button}output{display:inline-block}summary{display:list-item;cursor:pointer}template{display:none}[hidden]{display:none!important}.h1,.h2,.h3,.h4,.h5,.h6,h1,h2,h3,h4,h5,h6{margin-bottom:.5rem;font-family:Lato,sans-serif;font-weight:400;line-height:1.2}.h1,h1{font-size:2.5rem}.h2,h2{font-size:2rem}.h3,h3{font-size:1.75rem}.h4,h4{font-size:1.5rem}.h5,h5{font-size:1.25rem}.h6,h6{font-size:1rem}.lead{font-size:1.25rem;font-weight:300}.display-1{font-size:6rem}.display-1,.display-2{font-weight:300;line-height:1.2}.display-2{font-size:5.5rem}.display-3{font-size:4.5rem}.display-3,.display-4{font-weight:300;line-height:1.2}.display-4{font-size:3.5rem}hr{margin-top:1rem;margin-bottom:1rem;border:0;border-top:1px solid rgba(0,0,0,.1)}.small,small{font-size:80%;font-weight:400}.mark,mark{padding:.2em;background-color:#fcf8e3}.list-inline,.list-unstyled{padding-left:0;list-style:none}.list-inline-item{display:inline-block}.list-inline-item:not(:last-child){margin-right:.5rem}.initialism{font-size:90%;text-transform:uppercase}.blockquote{margin-bottom:1rem;font-size:1.25rem}.blockquote-footer{display:block;font-size:80%;color:#6c757d}.blockquote-footer:before{content:"\2014\00A0"}.img-fluid,.img-thumbnail{max-width:100%;height:auto}.img-thumbnail{padding:.25rem;background-color:#fff;border:1px solid #dee2e6;border-radius:.25rem}.figure{display:inline-block}.figure-img{margin-bottom:.5rem;line-height:1}.figure-caption{font-size:90%;color:#6c757d}code{font-size:87.5%;color:#ca445e;word-break:break-word}a>code{color:inherit}kbd{padding:.2rem .4rem;font-size:87.5%;color:#fff;background-color:#212529;border-radius:.2rem}kbd kbd{padding:0;font-size:100%;font-weight:700}pre{display:block;font-size:87.5%;color:#212529}pre code{font-size:inherit;color:inherit;word-break:normal}.pre-scrollable{max-height:340px;overflow-y:scroll}.container{width:100%;padding-right:15px;padding-left:15px;margin-right:auto;margin-left:auto}@media (min-width:768px){.container{max-width:720px}}@media (min-width:992px){.container{max-width:960px}}@media 
(min-width:1200px){.container{max-width:960px}}.container-fluid{width:100%;padding-right:15px;padding-left:15px;margin-right:auto;margin-left:auto}.row{display:flex;flex-wrap:wrap;margin-right:-15px;margin-left:-15px}.no-gutters{margin-right:0;margin-left:0}.no-gutters>.col,.no-gutters>[class*=col-]{padding-right:0;padding-left:0}.col,.col-1,.col-2,.col-3,.col-4,.col-5,.col-6,.col-7,.col-8,.col-9,.col-10,.col-11,.col-12,.col-auto,.col-lg,.col-lg-1,.col-lg-2,.col-lg-3,.col-lg-4,.col-lg-5,.col-lg-6,.col-lg-7,.col-lg-8,.col-lg-9,.col-lg-10,.col-lg-11,.col-lg-12,.col-lg-auto,.col-md,.col-md-1,.col-md-2,.col-md-3,.col-md-4,.col-md-5,.col-md-6,.col-md-7,.col-md-8,.col-md-9,.col-md-10,.col-md-11,.col-md-12,.col-md-auto,.col-sm,.col-sm-1,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-sm-10,.col-sm-11,.col-sm-12,.col-sm-auto,.col-xl,.col-xl-1,.col-xl-2,.col-xl-3,.col-xl-4,.col-xl-5,.col-xl-6,.col-xl-7,.col-xl-8,.col-xl-9,.col-xl-10,.col-xl-11,.col-xl-12,.col-xl-auto{position:relative;width:100%;padding-right:15px;padding-left:15px}.col{flex-basis:0;flex-grow:1;max-width:100%}.col-auto{flex:0 0 auto;width:auto;max-width:100%}.col-1{flex:0 0 8.333333%;max-width:8.333333%}.col-2{flex:0 0 16.666667%;max-width:16.666667%}.col-3{flex:0 0 25%;max-width:25%}.col-4{flex:0 0 33.333333%;max-width:33.333333%}.col-5{flex:0 0 41.666667%;max-width:41.666667%}.col-6{flex:0 0 50%;max-width:50%}.col-7{flex:0 0 58.333333%;max-width:58.333333%}.col-8{flex:0 0 66.666667%;max-width:66.666667%}.col-9{flex:0 0 75%;max-width:75%}.col-10{flex:0 0 83.333333%;max-width:83.333333%}.col-11{flex:0 0 91.666667%;max-width:91.666667%}.col-12{flex:0 0 100%;max-width:100%}.order-first{order:-1}.order-last{order:13}.order-0{order:0}.order-1{order:1}.order-2{order:2}.order-3{order:3}.order-4{order:4}.order-5{order:5}.order-6{order:6}.order-7{order:7}.order-8{order:8}.order-9{order:9}.order-10{order:10}.order-11{order:11}.order-12{order:12}.offset-1{margin-left:8.333333%}.offset-2{margin-left:16.666667%}.offset-3{margin-left:25%}.offset-4{margin-left:33.333333%}.offset-5{margin-left:41.666667%}.offset-6{margin-left:50%}.offset-7{margin-left:58.333333%}.offset-8{margin-left:66.666667%}.offset-9{margin-left:75%}.offset-10{margin-left:83.333333%}.offset-11{margin-left:91.666667%}@media (min-width:576px){.col-sm{flex-basis:0;flex-grow:1;max-width:100%}.col-sm-auto{flex:0 0 auto;width:auto;max-width:100%}.col-sm-1{flex:0 0 8.333333%;max-width:8.333333%}.col-sm-2{flex:0 0 16.666667%;max-width:16.666667%}.col-sm-3{flex:0 0 25%;max-width:25%}.col-sm-4{flex:0 0 33.333333%;max-width:33.333333%}.col-sm-5{flex:0 0 41.666667%;max-width:41.666667%}.col-sm-6{flex:0 0 50%;max-width:50%}.col-sm-7{flex:0 0 58.333333%;max-width:58.333333%}.col-sm-8{flex:0 0 66.666667%;max-width:66.666667%}.col-sm-9{flex:0 0 75%;max-width:75%}.col-sm-10{flex:0 0 83.333333%;max-width:83.333333%}.col-sm-11{flex:0 0 91.666667%;max-width:91.666667%}.col-sm-12{flex:0 0 
100%;max-width:100%}.order-sm-first{order:-1}.order-sm-last{order:13}.order-sm-0{order:0}.order-sm-1{order:1}.order-sm-2{order:2}.order-sm-3{order:3}.order-sm-4{order:4}.order-sm-5{order:5}.order-sm-6{order:6}.order-sm-7{order:7}.order-sm-8{order:8}.order-sm-9{order:9}.order-sm-10{order:10}.order-sm-11{order:11}.order-sm-12{order:12}.offset-sm-0{margin-left:0}.offset-sm-1{margin-left:8.333333%}.offset-sm-2{margin-left:16.666667%}.offset-sm-3{margin-left:25%}.offset-sm-4{margin-left:33.333333%}.offset-sm-5{margin-left:41.666667%}.offset-sm-6{margin-left:50%}.offset-sm-7{margin-left:58.333333%}.offset-sm-8{margin-left:66.666667%}.offset-sm-9{margin-left:75%}.offset-sm-10{margin-left:83.333333%}.offset-sm-11{margin-left:91.666667%}}@media (min-width:768px){.col-md{flex-basis:0;flex-grow:1;max-width:100%}.col-md-auto{flex:0 0 auto;width:auto;max-width:100%}.col-md-1{flex:0 0 8.333333%;max-width:8.333333%}.col-md-2{flex:0 0 16.666667%;max-width:16.666667%}.col-md-3{flex:0 0 25%;max-width:25%}.col-md-4{flex:0 0 33.333333%;max-width:33.333333%}.col-md-5{flex:0 0 41.666667%;max-width:41.666667%}.col-md-6{flex:0 0 50%;max-width:50%}.col-md-7{flex:0 0 58.333333%;max-width:58.333333%}.col-md-8{flex:0 0 66.666667%;max-width:66.666667%}.col-md-9{flex:0 0 75%;max-width:75%}.col-md-10{flex:0 0 83.333333%;max-width:83.333333%}.col-md-11{flex:0 0 91.666667%;max-width:91.666667%}.col-md-12{flex:0 0 100%;max-width:100%}.order-md-first{order:-1}.order-md-last{order:13}.order-md-0{order:0}.order-md-1{order:1}.order-md-2{order:2}.order-md-3{order:3}.order-md-4{order:4}.order-md-5{order:5}.order-md-6{order:6}.order-md-7{order:7}.order-md-8{order:8}.order-md-9{order:9}.order-md-10{order:10}.order-md-11{order:11}.order-md-12{order:12}.offset-md-0{margin-left:0}.offset-md-1{margin-left:8.333333%}.offset-md-2{margin-left:16.666667%}.offset-md-3{margin-left:25%}.offset-md-4{margin-left:33.333333%}.offset-md-5{margin-left:41.666667%}.offset-md-6{margin-left:50%}.offset-md-7{margin-left:58.333333%}.offset-md-8{margin-left:66.666667%}.offset-md-9{margin-left:75%}.offset-md-10{margin-left:83.333333%}.offset-md-11{margin-left:91.666667%}}@media (min-width:992px){.col-lg{flex-basis:0;flex-grow:1;max-width:100%}.col-lg-auto{flex:0 0 auto;width:auto;max-width:100%}.col-lg-1{flex:0 0 8.333333%;max-width:8.333333%}.col-lg-2{flex:0 0 16.666667%;max-width:16.666667%}.col-lg-3{flex:0 0 25%;max-width:25%}.col-lg-4{flex:0 0 33.333333%;max-width:33.333333%}.col-lg-5{flex:0 0 41.666667%;max-width:41.666667%}.col-lg-6{flex:0 0 50%;max-width:50%}.col-lg-7{flex:0 0 58.333333%;max-width:58.333333%}.col-lg-8{flex:0 0 66.666667%;max-width:66.666667%}.col-lg-9{flex:0 0 75%;max-width:75%}.col-lg-10{flex:0 0 83.333333%;max-width:83.333333%}.col-lg-11{flex:0 0 91.666667%;max-width:91.666667%}.col-lg-12{flex:0 0 100%;max-width:100%}.order-lg-first{order:-1}.order-lg-last{order:13}.order-lg-0{order:0}.order-lg-1{order:1}.order-lg-2{order:2}.order-lg-3{order:3}.order-lg-4{order:4}.order-lg-5{order:5}.order-lg-6{order:6}.order-lg-7{order:7}.order-lg-8{order:8}.order-lg-9{order:9}.order-lg-10{order:10}.order-lg-11{order:11}.order-lg-12{order:12}.offset-lg-0{margin-left:0}.offset-lg-1{margin-left:8.333333%}.offset-lg-2{margin-left:16.666667%}.offset-lg-3{margin-left:25%}.offset-lg-4{margin-left:33.333333%}.offset-lg-5{margin-left:41.666667%}.offset-lg-6{margin-left:50%}.offset-lg-7{margin-left:58.333333%}.offset-lg-8{margin-left:66.666667%}.offset-lg-9{margin-left:75%}.offset-lg-10{margin-left:83.333333%}.offset-lg-11{margin-left:91.666667%}}@media 
(min-width:1200px){.col-xl{flex-basis:0;flex-grow:1;max-width:100%}.col-xl-auto{flex:0 0 auto;width:auto;max-width:100%}.col-xl-1{flex:0 0 8.333333%;max-width:8.333333%}.col-xl-2{flex:0 0 16.666667%;max-width:16.666667%}.col-xl-3{flex:0 0 25%;max-width:25%}.col-xl-4{flex:0 0 33.333333%;max-width:33.333333%}.col-xl-5{flex:0 0 41.666667%;max-width:41.666667%}.col-xl-6{flex:0 0 50%;max-width:50%}.col-xl-7{flex:0 0 58.333333%;max-width:58.333333%}.col-xl-8{flex:0 0 66.666667%;max-width:66.666667%}.col-xl-9{flex:0 0 75%;max-width:75%}.col-xl-10{flex:0 0 83.333333%;max-width:83.333333%}.col-xl-11{flex:0 0 91.666667%;max-width:91.666667%}.col-xl-12{flex:0 0 100%;max-width:100%}.order-xl-first{order:-1}.order-xl-last{order:13}.order-xl-0{order:0}.order-xl-1{order:1}.order-xl-2{order:2}.order-xl-3{order:3}.order-xl-4{order:4}.order-xl-5{order:5}.order-xl-6{order:6}.order-xl-7{order:7}.order-xl-8{order:8}.order-xl-9{order:9}.order-xl-10{order:10}.order-xl-11{order:11}.order-xl-12{order:12}.offset-xl-0{margin-left:0}.offset-xl-1{margin-left:8.333333%}.offset-xl-2{margin-left:16.666667%}.offset-xl-3{margin-left:25%}.offset-xl-4{margin-left:33.333333%}.offset-xl-5{margin-left:41.666667%}.offset-xl-6{margin-left:50%}.offset-xl-7{margin-left:58.333333%}.offset-xl-8{margin-left:66.666667%}.offset-xl-9{margin-left:75%}.offset-xl-10{margin-left:83.333333%}.offset-xl-11{margin-left:91.666667%}}.table{width:100%;margin-bottom:1rem;color:#212529}.table td,.table th{padding:.75rem;vertical-align:top;border-top:1px solid #dee2e6}.table thead th{vertical-align:bottom;border-bottom:2px solid #dee2e6}.table tbody+tbody{border-top:2px solid #dee2e6}.table-sm td,.table-sm th{padding:.3rem}.table-bordered,.table-bordered td,.table-bordered th{border:1px solid #dee2e6}.table-bordered thead td,.table-bordered thead th{border-bottom-width:2px}.table-borderless tbody+tbody,.table-borderless td,.table-borderless th,.table-borderless thead th{border:0}.table-striped tbody tr:nth-of-type(odd){background-color:rgba(0,0,0,.05)}.table-hover tbody tr:hover{color:#212529;background-color:rgba(0,0,0,.075)}.table-primary,.table-primary>td,.table-primary>th{background-color:#cacbce}.table-primary tbody+tbody,.table-primary td,.table-primary th,.table-primary thead th{border-color:#9c9ea5}.table-hover .table-primary:hover,.table-hover .table-primary:hover>td,.table-hover .table-primary:hover>th{background-color:#bdbec2}.table-secondary,.table-secondary>td,.table-secondary>th{background-color:#f0cbd2}.table-secondary tbody+tbody,.table-secondary td,.table-secondary th,.table-secondary thead th{border-color:#e39eab}.table-hover .table-secondary:hover,.table-hover .table-secondary:hover>td,.table-hover .table-secondary:hover>th{background-color:#eab7c1}.table-active,.table-active>td,.table-active>th,.table-hover .table-active:hover,.table-hover .table-active:hover>td,.table-hover .table-active:hover>th{background-color:rgba(0,0,0,.075)}.table .thead-dark th{color:#fff;background-color:#343a40;border-color:#454d55}.table .thead-light th{color:#495057;background-color:#e9ecef;border-color:#dee2e6}.table-dark{color:#fff;background-color:#343a40}.table-dark td,.table-dark th,.table-dark thead th{border-color:#454d55}.table-dark.table-bordered{border:0}.table-dark.table-striped tbody tr:nth-of-type(odd){background-color:hsla(0,0%,100%,.05)}.table-dark.table-hover tbody tr:hover{color:#fff;background-color:hsla(0,0%,100%,.075)}@media 
(max-width:575.98px){.table-responsive-sm{display:block;width:100%;overflow-x:auto;-webkit-overflow-scrolling:touch}.table-responsive-sm>.table-bordered{border:0}}@media (max-width:767.98px){.table-responsive-md{display:block;width:100%;overflow-x:auto;-webkit-overflow-scrolling:touch}.table-responsive-md>.table-bordered{border:0}}@media (max-width:991.98px){.table-responsive-lg{display:block;width:100%;overflow-x:auto;-webkit-overflow-scrolling:touch}.table-responsive-lg>.table-bordered{border:0}}@media (max-width:1199.98px){.table-responsive-xl{display:block;width:100%;overflow-x:auto;-webkit-overflow-scrolling:touch}.table-responsive-xl>.table-bordered{border:0}}.table-responsive{display:block;width:100%;overflow-x:auto;-webkit-overflow-scrolling:touch}.table-responsive>.table-bordered{border:0}.btn{display:inline-block;font-weight:400;color:#212529;text-align:center;vertical-align:middle;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;background-color:transparent;border:1px solid transparent;padding:.375rem .75rem;font-size:1rem;line-height:1.5;border-radius:.25rem;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media (prefers-reduced-motion:reduce){.btn{transition:none}}.btn:hover{color:#212529;text-decoration:none}.btn.focus,.btn:focus{outline:0;box-shadow:0 0 0 .2rem rgba(0,123,255,.25)}.btn.disabled,.btn:disabled{opacity:.65}a.btn.disabled,fieldset:disabled a.btn{pointer-events:none}.btn-primary{color:#fff;background-color:#414551;border-color:#414551}.btn-primary:hover{color:#fff;background-color:#30333c;border-color:#2a2d35}.btn-primary.focus,.btn-primary:focus{box-shadow:0 0 0 .2rem rgba(94,97,107,.5)}.btn-primary.disabled,.btn-primary:disabled{color:#fff;background-color:#414551;border-color:#414551}.btn-primary:not(:disabled):not(.disabled).active,.btn-primary:not(:disabled):not(.disabled):active,.show>.btn-primary.dropdown-toggle{color:#fff;background-color:#2a2d35;border-color:#25272e}.btn-primary:not(:disabled):not(.disabled).active:focus,.btn-primary:not(:disabled):not(.disabled):active:focus,.show>.btn-primary.dropdown-toggle:focus{box-shadow:0 0 0 .2rem rgba(94,97,107,.5)}.btn-secondary{color:#fff;background-color:#ca445e;border-color:#ca445e}.btn-secondary:hover{color:#fff;background-color:#b5334c;border-color:#ab3048}.btn-secondary.focus,.btn-secondary:focus{box-shadow:0 0 0 .2rem rgba(210,96,118,.5)}.btn-secondary.disabled,.btn-secondary:disabled{color:#fff;background-color:#ca445e;border-color:#ca445e}.btn-secondary:not(:disabled):not(.disabled).active,.btn-secondary:not(:disabled):not(.disabled):active,.show>.btn-secondary.dropdown-toggle{color:#fff;background-color:#ab3048;border-color:#a12e44}.btn-secondary:not(:disabled):not(.disabled).active:focus,.btn-secondary:not(:disabled):not(.disabled):active:focus,.show>.btn-secondary.dropdown-toggle:focus{box-shadow:0 0 0 .2rem rgba(210,96,118,.5)}.btn-outline-primary{color:#414551;border-color:#414551}.btn-outline-primary:hover{color:#fff;background-color:#414551;border-color:#414551}.btn-outline-primary.focus,.btn-outline-primary:focus{box-shadow:0 0 0 .2rem 
rgba(65,69,81,.5)}.btn-outline-primary.disabled,.btn-outline-primary:disabled{color:#414551;background-color:transparent}.btn-outline-primary:not(:disabled):not(.disabled).active,.btn-outline-primary:not(:disabled):not(.disabled):active,.show>.btn-outline-primary.dropdown-toggle{color:#fff;background-color:#414551;border-color:#414551}.btn-outline-primary:not(:disabled):not(.disabled).active:focus,.btn-outline-primary:not(:disabled):not(.disabled):active:focus,.show>.btn-outline-primary.dropdown-toggle:focus{box-shadow:0 0 0 .2rem rgba(65,69,81,.5)}.btn-outline-secondary{color:#ca445e;border-color:#ca445e}.btn-outline-secondary:hover{color:#fff;background-color:#ca445e;border-color:#ca445e}.btn-outline-secondary.focus,.btn-outline-secondary:focus{box-shadow:0 0 0 .2rem rgba(202,68,94,.5)}.btn-outline-secondary.disabled,.btn-outline-secondary:disabled{color:#ca445e;background-color:transparent}.btn-outline-secondary:not(:disabled):not(.disabled).active,.btn-outline-secondary:not(:disabled):not(.disabled):active,.show>.btn-outline-secondary.dropdown-toggle{color:#fff;background-color:#ca445e;border-color:#ca445e}.btn-outline-secondary:not(:disabled):not(.disabled).active:focus,.btn-outline-secondary:not(:disabled):not(.disabled):active:focus,.show>.btn-outline-secondary.dropdown-toggle:focus{box-shadow:0 0 0 .2rem rgba(202,68,94,.5)}.btn-link{font-weight:400;color:#007bff;text-decoration:none}.btn-link:hover{color:#0056b3;text-decoration:underline}.btn-link.focus,.btn-link:focus{text-decoration:underline;box-shadow:none}.btn-link.disabled,.btn-link:disabled{color:#6c757d;pointer-events:none}.btn-lg{padding:.5rem 1rem;font-size:1.25rem;line-height:1.5;border-radius:.3rem}.btn-sm{padding:.25rem .5rem;font-size:.875rem;line-height:1.5;border-radius:.2rem}.btn-block{display:block;width:100%}.btn-block+.btn-block{margin-top:.5rem}input[type=button].btn-block,input[type=reset].btn-block,input[type=submit].btn-block{width:100%}.fade{transition:opacity .15s linear}@media (prefers-reduced-motion:reduce){.fade{transition:none}}.fade:not(.show){opacity:0}.collapse:not(.show){display:none}.collapsing{position:relative;height:0;overflow:hidden;transition:height .35s ease}@media (prefers-reduced-motion:reduce){.collapsing{transition:none}}.nav{display:flex;flex-wrap:wrap;padding-left:0;margin-bottom:0;list-style:none}.nav-link{display:block;padding:.5rem 1rem}.nav-link:focus,.nav-link:hover{text-decoration:none}.nav-link.disabled{color:#6c757d;pointer-events:none;cursor:default}.nav-tabs{border-bottom:1px solid #dee2e6}.nav-tabs .nav-item{margin-bottom:-1px}.nav-tabs .nav-link{border:1px solid transparent;border-top-left-radius:.25rem;border-top-right-radius:.25rem}.nav-tabs .nav-link:focus,.nav-tabs .nav-link:hover{border-color:#e9ecef #e9ecef #dee2e6}.nav-tabs .nav-link.disabled{color:#6c757d;background-color:transparent;border-color:transparent}.nav-tabs .nav-item.show .nav-link,.nav-tabs .nav-link.active{color:#495057;background-color:#fff;border-color:#dee2e6 #dee2e6 #fff}.nav-tabs .dropdown-menu{margin-top:-1px;border-top-left-radius:0;border-top-right-radius:0}.nav-pills .nav-link{border-radius:.25rem}.nav-pills .nav-link.active,.nav-pills .show>.nav-link{color:#fff;background-color:#007bff}.nav-fill .nav-item{flex:1 1 auto;text-align:center}.nav-justified .nav-item{flex-basis:0;flex-grow:1;text-align:center}.tab-content>.tab-pane{display:none}.tab-content>.active{display:block}.navbar{position:relative;padding:.5rem 
1rem}.navbar,.navbar>.container,.navbar>.container-fluid{display:flex;flex-wrap:wrap;align-items:center;justify-content:space-between}.navbar-brand{display:inline-block;padding-top:.3125rem;padding-bottom:.3125rem;margin-right:1rem;font-size:1.25rem;line-height:inherit;white-space:nowrap}.navbar-brand:focus,.navbar-brand:hover{text-decoration:none}.navbar-nav{display:flex;flex-direction:column;padding-left:0;margin-bottom:0;list-style:none}.navbar-nav .nav-link{padding-right:0;padding-left:0}.navbar-nav .dropdown-menu{position:static;float:none}.navbar-text{display:inline-block;padding-top:.5rem;padding-bottom:.5rem}.navbar-collapse{flex-basis:100%;flex-grow:1;align-items:center}.navbar-toggler{padding:.25rem .75rem;font-size:1.25rem;line-height:1;background-color:transparent;border:1px solid transparent;border-radius:.25rem}.navbar-toggler:focus,.navbar-toggler:hover{text-decoration:none}.navbar-toggler-icon{display:inline-block;width:1.5em;height:1.5em;vertical-align:middle;content:"";background:no-repeat 50%;background-size:100% 100%}@media (max-width:575.98px){.navbar-expand-sm>.container,.navbar-expand-sm>.container-fluid{padding-right:0;padding-left:0}}@media (min-width:576px){.navbar-expand-sm{flex-flow:row nowrap;justify-content:flex-start}.navbar-expand-sm .navbar-nav{flex-direction:row}.navbar-expand-sm .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-sm .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-sm>.container,.navbar-expand-sm>.container-fluid{flex-wrap:nowrap}.navbar-expand-sm .navbar-collapse{display:flex!important;flex-basis:auto}.navbar-expand-sm .navbar-toggler{display:none}}@media (max-width:767.98px){.navbar-expand-md>.container,.navbar-expand-md>.container-fluid{padding-right:0;padding-left:0}}@media (min-width:768px){.navbar-expand-md{flex-flow:row nowrap;justify-content:flex-start}.navbar-expand-md .navbar-nav{flex-direction:row}.navbar-expand-md .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-md .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-md>.container,.navbar-expand-md>.container-fluid{flex-wrap:nowrap}.navbar-expand-md .navbar-collapse{display:flex!important;flex-basis:auto}.navbar-expand-md .navbar-toggler{display:none}}@media (max-width:991.98px){.navbar-expand-lg>.container,.navbar-expand-lg>.container-fluid{padding-right:0;padding-left:0}}@media (min-width:992px){.navbar-expand-lg{flex-flow:row nowrap;justify-content:flex-start}.navbar-expand-lg .navbar-nav{flex-direction:row}.navbar-expand-lg .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-lg .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-lg>.container,.navbar-expand-lg>.container-fluid{flex-wrap:nowrap}.navbar-expand-lg .navbar-collapse{display:flex!important;flex-basis:auto}.navbar-expand-lg .navbar-toggler{display:none}}@media (max-width:1199.98px){.navbar-expand-xl>.container,.navbar-expand-xl>.container-fluid{padding-right:0;padding-left:0}}@media (min-width:1200px){.navbar-expand-xl{flex-flow:row nowrap;justify-content:flex-start}.navbar-expand-xl .navbar-nav{flex-direction:row}.navbar-expand-xl .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-xl .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-xl>.container,.navbar-expand-xl>.container-fluid{flex-wrap:nowrap}.navbar-expand-xl .navbar-collapse{display:flex!important;flex-basis:auto}.navbar-expand-xl .navbar-toggler{display:none}}.navbar-expand{flex-flow:row 
nowrap;justify-content:flex-start}.navbar-expand>.container,.navbar-expand>.container-fluid{padding-right:0;padding-left:0}.navbar-expand .navbar-nav{flex-direction:row}.navbar-expand .navbar-nav .dropdown-menu{position:absolute}.navbar-expand .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand>.container,.navbar-expand>.container-fluid{flex-wrap:nowrap}.navbar-expand .navbar-collapse{display:flex!important;flex-basis:auto}.navbar-expand .navbar-toggler{display:none}.navbar-light .navbar-brand,.navbar-light .navbar-brand:focus,.navbar-light .navbar-brand:hover{color:rgba(0,0,0,.9)}.navbar-light .navbar-nav .nav-link{color:rgba(0,0,0,.5)}.navbar-light .navbar-nav .nav-link:focus,.navbar-light .navbar-nav .nav-link:hover{color:rgba(0,0,0,.7)}.navbar-light .navbar-nav .nav-link.disabled{color:rgba(0,0,0,.3)}.navbar-light .navbar-nav .active>.nav-link,.navbar-light .navbar-nav .nav-link.active,.navbar-light .navbar-nav .nav-link.show,.navbar-light .navbar-nav .show>.nav-link{color:rgba(0,0,0,.9)}.navbar-light .navbar-toggler{color:rgba(0,0,0,.5);border-color:rgba(0,0,0,.1)}.navbar-light .navbar-toggler-icon{background-image:url("data:image/svg+xml;charset=utf-8,%3Csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3E%3Cpath stroke='rgba(0, 0, 0, 0.5)' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3E%3C/svg%3E")}.navbar-light .navbar-text{color:rgba(0,0,0,.5)}.navbar-light .navbar-text a,.navbar-light .navbar-text a:focus,.navbar-light .navbar-text a:hover{color:rgba(0,0,0,.9)}.navbar-dark .navbar-brand,.navbar-dark .navbar-brand:focus,.navbar-dark .navbar-brand:hover{color:#fff}.navbar-dark .navbar-nav .nav-link{color:hsla(0,0%,100%,.5)}.navbar-dark .navbar-nav .nav-link:focus,.navbar-dark .navbar-nav .nav-link:hover{color:hsla(0,0%,100%,.75)}.navbar-dark .navbar-nav .nav-link.disabled{color:hsla(0,0%,100%,.25)}.navbar-dark .navbar-nav .active>.nav-link,.navbar-dark .navbar-nav .nav-link.active,.navbar-dark .navbar-nav .nav-link.show,.navbar-dark .navbar-nav .show>.nav-link{color:#fff}.navbar-dark .navbar-toggler{color:hsla(0,0%,100%,.5);border-color:hsla(0,0%,100%,.1)}.navbar-dark .navbar-toggler-icon{background-image:url("data:image/svg+xml;charset=utf-8,%3Csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3E%3Cpath stroke='rgba(255, 255, 255, 0.5)' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3E%3C/svg%3E")}.navbar-dark .navbar-text{color:hsla(0,0%,100%,.5)}.navbar-dark .navbar-text a,.navbar-dark .navbar-text a:focus,.navbar-dark .navbar-text a:hover{color:#fff}.breadcrumb{display:flex;flex-wrap:wrap;padding:.75rem 1rem;margin-bottom:1rem;list-style:none;background-color:#e9ecef;border-radius:.25rem}.breadcrumb-item+.breadcrumb-item{padding-left:.5rem}.breadcrumb-item+.breadcrumb-item:before{display:inline-block;padding-right:.5rem;color:#6c757d;content:"/"}.breadcrumb-item+.breadcrumb-item:hover:before{text-decoration:underline;text-decoration:none}.breadcrumb-item.active{color:#6c757d}.badge{display:inline-block;padding:.25em .4em;font-size:75%;font-weight:700;line-height:1;text-align:center;white-space:nowrap;vertical-align:baseline;border-radius:.25rem;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media (prefers-reduced-motion:reduce){.badge{transition:none}}a.badge:focus,a.badge:hover{text-decoration:none}.badge:empty{display:none}.btn 
.badge{position:relative;top:-1px}.badge-pill{padding-right:.6em;padding-left:.6em;border-radius:10rem}.badge-primary{color:#fff;background-color:#414551}a.badge-primary:focus,a.badge-primary:hover{color:#fff;background-color:#2a2d35}a.badge-primary.focus,a.badge-primary:focus{outline:0;box-shadow:0 0 0 .2rem rgba(65,69,81,.5)}.badge-secondary{color:#fff;background-color:#ca445e}a.badge-secondary:focus,a.badge-secondary:hover{color:#fff;background-color:#ab3048}a.badge-secondary.focus,a.badge-secondary:focus{outline:0;box-shadow:0 0 0 .2rem rgba(202,68,94,.5)}@-webkit-keyframes spinner-border{to{transform:rotate(1turn)}}@keyframes spinner-border{to{transform:rotate(1turn)}}.spinner-border{display:inline-block;width:2rem;height:2rem;vertical-align:text-bottom;border:.25em solid;border-right:.25em solid transparent;border-radius:50%;-webkit-animation:spinner-border .75s linear infinite;animation:spinner-border .75s linear infinite}.spinner-border-sm{width:1rem;height:1rem;border-width:.2em}@-webkit-keyframes spinner-grow{0%{transform:scale(0)}50%{opacity:1}}@keyframes spinner-grow{0%{transform:scale(0)}50%{opacity:1}}.spinner-grow{display:inline-block;width:2rem;height:2rem;vertical-align:text-bottom;background-color:currentColor;border-radius:50%;opacity:0;-webkit-animation:spinner-grow .75s linear infinite;animation:spinner-grow .75s linear infinite}.spinner-grow-sm{width:1rem;height:1rem}.align-baseline{vertical-align:baseline!important}.align-top{vertical-align:top!important}.align-middle{vertical-align:middle!important}.align-bottom{vertical-align:bottom!important}.align-text-bottom{vertical-align:text-bottom!important}.align-text-top{vertical-align:text-top!important}.bg-primary{background-color:#414551!important}a.bg-primary:focus,a.bg-primary:hover,button.bg-primary:focus,button.bg-primary:hover{background-color:#2a2d35!important}.bg-secondary{background-color:#ca445e!important}a.bg-secondary:focus,a.bg-secondary:hover,button.bg-secondary:focus,button.bg-secondary:hover{background-color:#ab3048!important}.bg-white{background-color:#fff!important}.bg-transparent{background-color:transparent!important}.border{border:1px solid #dee2e6!important}.border-top{border-top:1px solid #dee2e6!important}.border-right{border-right:1px solid #dee2e6!important}.border-bottom{border-bottom:1px solid #dee2e6!important}.border-left{border-left:1px solid 
#dee2e6!important}.border-0{border:0!important}.border-top-0{border-top:0!important}.border-right-0{border-right:0!important}.border-bottom-0{border-bottom:0!important}.border-left-0{border-left:0!important}.border-primary{border-color:#414551!important}.border-secondary{border-color:#ca445e!important}.border-white{border-color:#fff!important}.rounded-sm{border-radius:.2rem!important}.rounded{border-radius:.25rem!important}.rounded-top{border-top-left-radius:.25rem!important}.rounded-right,.rounded-top{border-top-right-radius:.25rem!important}.rounded-bottom,.rounded-right{border-bottom-right-radius:.25rem!important}.rounded-bottom,.rounded-left{border-bottom-left-radius:.25rem!important}.rounded-left{border-top-left-radius:.25rem!important}.rounded-lg{border-radius:.3rem!important}.rounded-circle{border-radius:50%!important}.rounded-pill{border-radius:50rem!important}.rounded-0{border-radius:0!important}.clearfix:after{display:block;clear:both;content:""}.d-none{display:none!important}.d-inline{display:inline!important}.d-inline-block{display:inline-block!important}.d-block{display:block!important}.d-table{display:table!important}.d-table-row{display:table-row!important}.d-table-cell{display:table-cell!important}.d-flex{display:flex!important}.d-inline-flex{display:inline-flex!important}@media (min-width:576px){.d-sm-none{display:none!important}.d-sm-inline{display:inline!important}.d-sm-inline-block{display:inline-block!important}.d-sm-block{display:block!important}.d-sm-table{display:table!important}.d-sm-table-row{display:table-row!important}.d-sm-table-cell{display:table-cell!important}.d-sm-flex{display:flex!important}.d-sm-inline-flex{display:inline-flex!important}}@media (min-width:768px){.d-md-none{display:none!important}.d-md-inline{display:inline!important}.d-md-inline-block{display:inline-block!important}.d-md-block{display:block!important}.d-md-table{display:table!important}.d-md-table-row{display:table-row!important}.d-md-table-cell{display:table-cell!important}.d-md-flex{display:flex!important}.d-md-inline-flex{display:inline-flex!important}}@media (min-width:992px){.d-lg-none{display:none!important}.d-lg-inline{display:inline!important}.d-lg-inline-block{display:inline-block!important}.d-lg-block{display:block!important}.d-lg-table{display:table!important}.d-lg-table-row{display:table-row!important}.d-lg-table-cell{display:table-cell!important}.d-lg-flex{display:flex!important}.d-lg-inline-flex{display:inline-flex!important}}@media (min-width:1200px){.d-xl-none{display:none!important}.d-xl-inline{display:inline!important}.d-xl-inline-block{display:inline-block!important}.d-xl-block{display:block!important}.d-xl-table{display:table!important}.d-xl-table-row{display:table-row!important}.d-xl-table-cell{display:table-cell!important}.d-xl-flex{display:flex!important}.d-xl-inline-flex{display:inline-flex!important}}@media print{.d-print-none{display:none!important}.d-print-inline{display:inline!important}.d-print-inline-block{display:inline-block!important}.d-print-block{display:block!important}.d-print-table{display:table!important}.d-print-table-row{display:table-row!important}.d-print-table-cell{display:table-cell!important}.d-print-flex{display:flex!important}.d-print-inline-flex{display:inline-flex!important}}.embed-responsive{position:relative;display:block;width:100%;padding:0;overflow:hidden}.embed-responsive:before{display:block;content:""}.embed-responsive .embed-responsive-item,.embed-responsive embed,.embed-responsive iframe,.embed-responsive object,.embed-responsive 
video{position:absolute;top:0;bottom:0;left:0;width:100%;height:100%;border:0}.embed-responsive-21by9:before{padding-top:42.857143%}.embed-responsive-16by9:before{padding-top:56.25%}.embed-responsive-4by3:before{padding-top:75%}.embed-responsive-1by1:before{padding-top:100%}.flex-row{flex-direction:row!important}.flex-column{flex-direction:column!important}.flex-row-reverse{flex-direction:row-reverse!important}.flex-column-reverse{flex-direction:column-reverse!important}.flex-wrap{flex-wrap:wrap!important}.flex-nowrap{flex-wrap:nowrap!important}.flex-wrap-reverse{flex-wrap:wrap-reverse!important}.flex-fill{flex:1 1 auto!important}.flex-grow-0{flex-grow:0!important}.flex-grow-1{flex-grow:1!important}.flex-shrink-0{flex-shrink:0!important}.flex-shrink-1{flex-shrink:1!important}.justify-content-start{justify-content:flex-start!important}.justify-content-end{justify-content:flex-end!important}.justify-content-center{justify-content:center!important}.justify-content-between{justify-content:space-between!important}.justify-content-around{justify-content:space-around!important}.align-items-start{align-items:flex-start!important}.align-items-end{align-items:flex-end!important}.align-items-center{align-items:center!important}.align-items-baseline{align-items:baseline!important}.align-items-stretch{align-items:stretch!important}.align-content-start{align-content:flex-start!important}.align-content-end{align-content:flex-end!important}.align-content-center{align-content:center!important}.align-content-between{align-content:space-between!important}.align-content-around{align-content:space-around!important}.align-content-stretch{align-content:stretch!important}.align-self-auto{align-self:auto!important}.align-self-start{align-self:flex-start!important}.align-self-end{align-self:flex-end!important}.align-self-center{align-self:center!important}.align-self-baseline{align-self:baseline!important}.align-self-stretch{align-self:stretch!important}@media (min-width:576px){.flex-sm-row{flex-direction:row!important}.flex-sm-column{flex-direction:column!important}.flex-sm-row-reverse{flex-direction:row-reverse!important}.flex-sm-column-reverse{flex-direction:column-reverse!important}.flex-sm-wrap{flex-wrap:wrap!important}.flex-sm-nowrap{flex-wrap:nowrap!important}.flex-sm-wrap-reverse{flex-wrap:wrap-reverse!important}.flex-sm-fill{flex:1 1 
auto!important}.flex-sm-grow-0{flex-grow:0!important}.flex-sm-grow-1{flex-grow:1!important}.flex-sm-shrink-0{flex-shrink:0!important}.flex-sm-shrink-1{flex-shrink:1!important}.justify-content-sm-start{justify-content:flex-start!important}.justify-content-sm-end{justify-content:flex-end!important}.justify-content-sm-center{justify-content:center!important}.justify-content-sm-between{justify-content:space-between!important}.justify-content-sm-around{justify-content:space-around!important}.align-items-sm-start{align-items:flex-start!important}.align-items-sm-end{align-items:flex-end!important}.align-items-sm-center{align-items:center!important}.align-items-sm-baseline{align-items:baseline!important}.align-items-sm-stretch{align-items:stretch!important}.align-content-sm-start{align-content:flex-start!important}.align-content-sm-end{align-content:flex-end!important}.align-content-sm-center{align-content:center!important}.align-content-sm-between{align-content:space-between!important}.align-content-sm-around{align-content:space-around!important}.align-content-sm-stretch{align-content:stretch!important}.align-self-sm-auto{align-self:auto!important}.align-self-sm-start{align-self:flex-start!important}.align-self-sm-end{align-self:flex-end!important}.align-self-sm-center{align-self:center!important}.align-self-sm-baseline{align-self:baseline!important}.align-self-sm-stretch{align-self:stretch!important}}@media (min-width:768px){.flex-md-row{flex-direction:row!important}.flex-md-column{flex-direction:column!important}.flex-md-row-reverse{flex-direction:row-reverse!important}.flex-md-column-reverse{flex-direction:column-reverse!important}.flex-md-wrap{flex-wrap:wrap!important}.flex-md-nowrap{flex-wrap:nowrap!important}.flex-md-wrap-reverse{flex-wrap:wrap-reverse!important}.flex-md-fill{flex:1 1 auto!important}.flex-md-grow-0{flex-grow:0!important}.flex-md-grow-1{flex-grow:1!important}.flex-md-shrink-0{flex-shrink:0!important}.flex-md-shrink-1{flex-shrink:1!important}.justify-content-md-start{justify-content:flex-start!important}.justify-content-md-end{justify-content:flex-end!important}.justify-content-md-center{justify-content:center!important}.justify-content-md-between{justify-content:space-between!important}.justify-content-md-around{justify-content:space-around!important}.align-items-md-start{align-items:flex-start!important}.align-items-md-end{align-items:flex-end!important}.align-items-md-center{align-items:center!important}.align-items-md-baseline{align-items:baseline!important}.align-items-md-stretch{align-items:stretch!important}.align-content-md-start{align-content:flex-start!important}.align-content-md-end{align-content:flex-end!important}.align-content-md-center{align-content:center!important}.align-content-md-between{align-content:space-between!important}.align-content-md-around{align-content:space-around!important}.align-content-md-stretch{align-content:stretch!important}.align-self-md-auto{align-self:auto!important}.align-self-md-start{align-self:flex-start!important}.align-self-md-end{align-self:flex-end!important}.align-self-md-center{align-self:center!important}.align-self-md-baseline{align-self:baseline!important}.align-self-md-stretch{align-self:stretch!important}}@media 
(min-width:992px){.flex-lg-row{flex-direction:row!important}.flex-lg-column{flex-direction:column!important}.flex-lg-row-reverse{flex-direction:row-reverse!important}.flex-lg-column-reverse{flex-direction:column-reverse!important}.flex-lg-wrap{flex-wrap:wrap!important}.flex-lg-nowrap{flex-wrap:nowrap!important}.flex-lg-wrap-reverse{flex-wrap:wrap-reverse!important}.flex-lg-fill{flex:1 1 auto!important}.flex-lg-grow-0{flex-grow:0!important}.flex-lg-grow-1{flex-grow:1!important}.flex-lg-shrink-0{flex-shrink:0!important}.flex-lg-shrink-1{flex-shrink:1!important}.justify-content-lg-start{justify-content:flex-start!important}.justify-content-lg-end{justify-content:flex-end!important}.justify-content-lg-center{justify-content:center!important}.justify-content-lg-between{justify-content:space-between!important}.justify-content-lg-around{justify-content:space-around!important}.align-items-lg-start{align-items:flex-start!important}.align-items-lg-end{align-items:flex-end!important}.align-items-lg-center{align-items:center!important}.align-items-lg-baseline{align-items:baseline!important}.align-items-lg-stretch{align-items:stretch!important}.align-content-lg-start{align-content:flex-start!important}.align-content-lg-end{align-content:flex-end!important}.align-content-lg-center{align-content:center!important}.align-content-lg-between{align-content:space-between!important}.align-content-lg-around{align-content:space-around!important}.align-content-lg-stretch{align-content:stretch!important}.align-self-lg-auto{align-self:auto!important}.align-self-lg-start{align-self:flex-start!important}.align-self-lg-end{align-self:flex-end!important}.align-self-lg-center{align-self:center!important}.align-self-lg-baseline{align-self:baseline!important}.align-self-lg-stretch{align-self:stretch!important}}@media (min-width:1200px){.flex-xl-row{flex-direction:row!important}.flex-xl-column{flex-direction:column!important}.flex-xl-row-reverse{flex-direction:row-reverse!important}.flex-xl-column-reverse{flex-direction:column-reverse!important}.flex-xl-wrap{flex-wrap:wrap!important}.flex-xl-nowrap{flex-wrap:nowrap!important}.flex-xl-wrap-reverse{flex-wrap:wrap-reverse!important}.flex-xl-fill{flex:1 1 
auto!important}.flex-xl-grow-0{flex-grow:0!important}.flex-xl-grow-1{flex-grow:1!important}.flex-xl-shrink-0{flex-shrink:0!important}.flex-xl-shrink-1{flex-shrink:1!important}.justify-content-xl-start{justify-content:flex-start!important}.justify-content-xl-end{justify-content:flex-end!important}.justify-content-xl-center{justify-content:center!important}.justify-content-xl-between{justify-content:space-between!important}.justify-content-xl-around{justify-content:space-around!important}.align-items-xl-start{align-items:flex-start!important}.align-items-xl-end{align-items:flex-end!important}.align-items-xl-center{align-items:center!important}.align-items-xl-baseline{align-items:baseline!important}.align-items-xl-stretch{align-items:stretch!important}.align-content-xl-start{align-content:flex-start!important}.align-content-xl-end{align-content:flex-end!important}.align-content-xl-center{align-content:center!important}.align-content-xl-between{align-content:space-between!important}.align-content-xl-around{align-content:space-around!important}.align-content-xl-stretch{align-content:stretch!important}.align-self-xl-auto{align-self:auto!important}.align-self-xl-start{align-self:flex-start!important}.align-self-xl-end{align-self:flex-end!important}.align-self-xl-center{align-self:center!important}.align-self-xl-baseline{align-self:baseline!important}.align-self-xl-stretch{align-self:stretch!important}}.float-left{float:left!important}.float-right{float:right!important}.float-none{float:none!important}@media (min-width:576px){.float-sm-left{float:left!important}.float-sm-right{float:right!important}.float-sm-none{float:none!important}}@media (min-width:768px){.float-md-left{float:left!important}.float-md-right{float:right!important}.float-md-none{float:none!important}}@media (min-width:992px){.float-lg-left{float:left!important}.float-lg-right{float:right!important}.float-lg-none{float:none!important}}@media (min-width:1200px){.float-xl-left{float:left!important}.float-xl-right{float:right!important}.float-xl-none{float:none!important}}.overflow-auto{overflow:auto!important}.overflow-hidden{overflow:hidden!important}.position-static{position:static!important}.position-relative{position:relative!important}.position-absolute{position:absolute!important}.position-fixed{position:fixed!important}.position-sticky{position:-webkit-sticky!important;position:sticky!important}.fixed-top{top:0}.fixed-bottom,.fixed-top{position:fixed;right:0;left:0;z-index:1030}.fixed-bottom{bottom:0}@supports ((position:-webkit-sticky) or (position:sticky)){.sticky-top{position:-webkit-sticky;position:sticky;top:0;z-index:1020}}.sr-only{position:absolute;width:1px;height:1px;padding:0;overflow:hidden;clip:rect(0,0,0,0);white-space:nowrap;border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;overflow:visible;clip:auto;white-space:normal}.shadow-sm{box-shadow:0 .125rem .25rem rgba(0,0,0,.075)!important}.shadow{box-shadow:0 .5rem 1rem rgba(0,0,0,.15)!important}.shadow-lg{box-shadow:0 1rem 3rem 
rgba(0,0,0,.175)!important}.shadow-none{box-shadow:none!important}.w-25{width:25%!important}.w-50{width:50%!important}.w-75{width:75%!important}.w-100{width:100%!important}.w-auto{width:auto!important}.h-25{height:25%!important}.h-50{height:50%!important}.h-75{height:75%!important}.h-100{height:100%!important}.h-auto{height:auto!important}.mw-100{max-width:100%!important}.mh-100{max-height:100%!important}.min-vw-100{min-width:100vw!important}.min-vh-100{min-height:100vh!important}.vw-100{width:100vw!important}.vh-100{height:100vh!important}.stretched-link:after{position:absolute;top:0;right:0;bottom:0;left:0;z-index:1;pointer-events:auto;content:"";background-color:transparent}.m-0{margin:0!important}.mt-0,.my-0{margin-top:0!important}.mr-0,.mx-0{margin-right:0!important}.mb-0,.my-0{margin-bottom:0!important}.ml-0,.mx-0{margin-left:0!important}.m-1{margin:.25rem!important}.mt-1,.my-1{margin-top:.25rem!important}.mr-1,.mx-1{margin-right:.25rem!important}.mb-1,.my-1{margin-bottom:.25rem!important}.ml-1,.mx-1{margin-left:.25rem!important}.m-2{margin:.5rem!important}.mt-2,.my-2{margin-top:.5rem!important}.mr-2,.mx-2{margin-right:.5rem!important}.mb-2,.my-2{margin-bottom:.5rem!important}.ml-2,.mx-2{margin-left:.5rem!important}.m-3{margin:1rem!important}.mt-3,.my-3{margin-top:1rem!important}.mr-3,.mx-3{margin-right:1rem!important}.mb-3,.my-3{margin-bottom:1rem!important}.ml-3,.mx-3{margin-left:1rem!important}.m-4{margin:1.5rem!important}.mt-4,.my-4{margin-top:1.5rem!important}.mr-4,.mx-4{margin-right:1.5rem!important}.mb-4,.my-4{margin-bottom:1.5rem!important}.ml-4,.mx-4{margin-left:1.5rem!important}.m-5{margin:3rem!important}.mt-5,.my-5{margin-top:3rem!important}.mr-5,.mx-5{margin-right:3rem!important}.mb-5,.my-5{margin-bottom:3rem!important}.ml-5,.mx-5{margin-left:3rem!important}.p-0{padding:0!important}.pt-0,.py-0{padding-top:0!important}.pr-0,.px-0{padding-right:0!important}.pb-0,.py-0{padding-bottom:0!important}.pl-0,.px-0{padding-left:0!important}.p-1{padding:.25rem!important}.pt-1,.py-1{padding-top:.25rem!important}.pr-1,.px-1{padding-right:.25rem!important}.pb-1,.py-1{padding-bottom:.25rem!important}.pl-1,.px-1{padding-left:.25rem!important}.p-2{padding:.5rem!important}.pt-2,.py-2{padding-top:.5rem!important}.pr-2,.px-2{padding-right:.5rem!important}.pb-2,.py-2{padding-bottom:.5rem!important}.pl-2,.px-2{padding-left:.5rem!important}.p-3{padding:1rem!important}.pt-3,.py-3{padding-top:1rem!important}.pr-3,.px-3{padding-right:1rem!important}.pb-3,.py-3{padding-bottom:1rem!important}.pl-3,.px-3{padding-left:1rem!important}.p-4{padding:1.5rem!important}.pt-4,.py-4{padding-top:1.5rem!important}.pr-4,.px-4{padding-right:1.5rem!important}.pb-4,.py-4{padding-bottom:1.5rem!important}.pl-4,.px-4{padding-left:1.5rem!important}.p-5{padding:3rem!important}.pt-5,.py-5{padding-top:3rem!important}.pr-5,.px-5{padding-right:3rem!important}.pb-5,.py-5{padding-bottom:3rem!important}.pl-5,.px-5{padding-left:3rem!important}.m-n1{margin:-.25rem!important}.mt-n1,.my-n1{margin-top:-.25rem!important}.mr-n1,.mx-n1{margin-right:-.25rem!important}.mb-n1,.my-n1{margin-bottom:-.25rem!important}.ml-n1,.mx-n1{margin-left:-.25rem!important}.m-n2{margin:-.5rem!important}.mt-n2,.my-n2{margin-top:-.5rem!important}.mr-n2,.mx-n2{margin-right:-.5rem!important}.mb-n2,.my-n2{margin-bottom:-.5rem!important}.ml-n2,.mx-n2{margin-left:-.5rem!important}.m-n3{margin:-1rem!important}.mt-n3,.my-n3{margin-top:-1rem!important}.mr-n3,.mx-n3{margin-right:-1rem!important}.mb-n3,.my-n3{margin-bottom:-1rem!important}.ml-n3,.mx-n3{margin-left:-1r
em!important}.m-n4{margin:-1.5rem!important}.mt-n4,.my-n4{margin-top:-1.5rem!important}.mr-n4,.mx-n4{margin-right:-1.5rem!important}.mb-n4,.my-n4{margin-bottom:-1.5rem!important}.ml-n4,.mx-n4{margin-left:-1.5rem!important}.m-n5{margin:-3rem!important}.mt-n5,.my-n5{margin-top:-3rem!important}.mr-n5,.mx-n5{margin-right:-3rem!important}.mb-n5,.my-n5{margin-bottom:-3rem!important}.ml-n5,.mx-n5{margin-left:-3rem!important}.m-auto{margin:auto!important}.mt-auto,.my-auto{margin-top:auto!important}.mr-auto,.mx-auto{margin-right:auto!important}.mb-auto,.my-auto{margin-bottom:auto!important}.ml-auto,.mx-auto{margin-left:auto!important}@media (min-width:576px){.m-sm-0{margin:0!important}.mt-sm-0,.my-sm-0{margin-top:0!important}.mr-sm-0,.mx-sm-0{margin-right:0!important}.mb-sm-0,.my-sm-0{margin-bottom:0!important}.ml-sm-0,.mx-sm-0{margin-left:0!important}.m-sm-1{margin:.25rem!important}.mt-sm-1,.my-sm-1{margin-top:.25rem!important}.mr-sm-1,.mx-sm-1{margin-right:.25rem!important}.mb-sm-1,.my-sm-1{margin-bottom:.25rem!important}.ml-sm-1,.mx-sm-1{margin-left:.25rem!important}.m-sm-2{margin:.5rem!important}.mt-sm-2,.my-sm-2{margin-top:.5rem!important}.mr-sm-2,.mx-sm-2{margin-right:.5rem!important}.mb-sm-2,.my-sm-2{margin-bottom:.5rem!important}.ml-sm-2,.mx-sm-2{margin-left:.5rem!important}.m-sm-3{margin:1rem!important}.mt-sm-3,.my-sm-3{margin-top:1rem!important}.mr-sm-3,.mx-sm-3{margin-right:1rem!important}.mb-sm-3,.my-sm-3{margin-bottom:1rem!important}.ml-sm-3,.mx-sm-3{margin-left:1rem!important}.m-sm-4{margin:1.5rem!important}.mt-sm-4,.my-sm-4{margin-top:1.5rem!important}.mr-sm-4,.mx-sm-4{margin-right:1.5rem!important}.mb-sm-4,.my-sm-4{margin-bottom:1.5rem!important}.ml-sm-4,.mx-sm-4{margin-left:1.5rem!important}.m-sm-5{margin:3rem!important}.mt-sm-5,.my-sm-5{margin-top:3rem!important}.mr-sm-5,.mx-sm-5{margin-right:3rem!important}.mb-sm-5,.my-sm-5{margin-bottom:3rem!important}.ml-sm-5,.mx-sm-5{margin-left:3rem!important}.p-sm-0{padding:0!important}.pt-sm-0,.py-sm-0{padding-top:0!important}.pr-sm-0,.px-sm-0{padding-right:0!important}.pb-sm-0,.py-sm-0{padding-bottom:0!important}.pl-sm-0,.px-sm-0{padding-left:0!important}.p-sm-1{padding:.25rem!important}.pt-sm-1,.py-sm-1{padding-top:.25rem!important}.pr-sm-1,.px-sm-1{padding-right:.25rem!important}.pb-sm-1,.py-sm-1{padding-bottom:.25rem!important}.pl-sm-1,.px-sm-1{padding-left:.25rem!important}.p-sm-2{padding:.5rem!important}.pt-sm-2,.py-sm-2{padding-top:.5rem!important}.pr-sm-2,.px-sm-2{padding-right:.5rem!important}.pb-sm-2,.py-sm-2{padding-bottom:.5rem!important}.pl-sm-2,.px-sm-2{padding-left:.5rem!important}.p-sm-3{padding:1rem!important}.pt-sm-3,.py-sm-3{padding-top:1rem!important}.pr-sm-3,.px-sm-3{padding-right:1rem!important}.pb-sm-3,.py-sm-3{padding-bottom:1rem!important}.pl-sm-3,.px-sm-3{padding-left:1rem!important}.p-sm-4{padding:1.5rem!important}.pt-sm-4,.py-sm-4{padding-top:1.5rem!important}.pr-sm-4,.px-sm-4{padding-right:1.5rem!important}.pb-sm-4,.py-sm-4{padding-bottom:1.5rem!important}.pl-sm-4,.px-sm-4{padding-left:1.5rem!important}.p-sm-5{padding:3rem!important}.pt-sm-5,.py-sm-5{padding-top:3rem!important}.pr-sm-5,.px-sm-5{padding-right:3rem!important}.pb-sm-5,.py-sm-5{padding-bottom:3rem!important}.pl-sm-5,.px-sm-5{padding-left:3rem!important}.m-sm-n1{margin:-.25rem!important}.mt-sm-n1,.my-sm-n1{margin-top:-.25rem!important}.mr-sm-n1,.mx-sm-n1{margin-right:-.25rem!important}.mb-sm-n1,.my-sm-n1{margin-bottom:-.25rem!important}.ml-sm-n1,.mx-sm-n1{margin-left:-.25rem!important}.m-sm-n2{margin:-.5rem!important}.mt-sm-n2,.my-sm-n2{margin-top:-.5
rem!important}.mr-sm-n2,.mx-sm-n2{margin-right:-.5rem!important}.mb-sm-n2,.my-sm-n2{margin-bottom:-.5rem!important}.ml-sm-n2,.mx-sm-n2{margin-left:-.5rem!important}.m-sm-n3{margin:-1rem!important}.mt-sm-n3,.my-sm-n3{margin-top:-1rem!important}.mr-sm-n3,.mx-sm-n3{margin-right:-1rem!important}.mb-sm-n3,.my-sm-n3{margin-bottom:-1rem!important}.ml-sm-n3,.mx-sm-n3{margin-left:-1rem!important}.m-sm-n4{margin:-1.5rem!important}.mt-sm-n4,.my-sm-n4{margin-top:-1.5rem!important}.mr-sm-n4,.mx-sm-n4{margin-right:-1.5rem!important}.mb-sm-n4,.my-sm-n4{margin-bottom:-1.5rem!important}.ml-sm-n4,.mx-sm-n4{margin-left:-1.5rem!important}.m-sm-n5{margin:-3rem!important}.mt-sm-n5,.my-sm-n5{margin-top:-3rem!important}.mr-sm-n5,.mx-sm-n5{margin-right:-3rem!important}.mb-sm-n5,.my-sm-n5{margin-bottom:-3rem!important}.ml-sm-n5,.mx-sm-n5{margin-left:-3rem!important}.m-sm-auto{margin:auto!important}.mt-sm-auto,.my-sm-auto{margin-top:auto!important}.mr-sm-auto,.mx-sm-auto{margin-right:auto!important}.mb-sm-auto,.my-sm-auto{margin-bottom:auto!important}.ml-sm-auto,.mx-sm-auto{margin-left:auto!important}}@media (min-width:768px){.m-md-0{margin:0!important}.mt-md-0,.my-md-0{margin-top:0!important}.mr-md-0,.mx-md-0{margin-right:0!important}.mb-md-0,.my-md-0{margin-bottom:0!important}.ml-md-0,.mx-md-0{margin-left:0!important}.m-md-1{margin:.25rem!important}.mt-md-1,.my-md-1{margin-top:.25rem!important}.mr-md-1,.mx-md-1{margin-right:.25rem!important}.mb-md-1,.my-md-1{margin-bottom:.25rem!important}.ml-md-1,.mx-md-1{margin-left:.25rem!important}.m-md-2{margin:.5rem!important}.mt-md-2,.my-md-2{margin-top:.5rem!important}.mr-md-2,.mx-md-2{margin-right:.5rem!important}.mb-md-2,.my-md-2{margin-bottom:.5rem!important}.ml-md-2,.mx-md-2{margin-left:.5rem!important}.m-md-3{margin:1rem!important}.mt-md-3,.my-md-3{margin-top:1rem!important}.mr-md-3,.mx-md-3{margin-right:1rem!important}.mb-md-3,.my-md-3{margin-bottom:1rem!important}.ml-md-3,.mx-md-3{margin-left:1rem!important}.m-md-4{margin:1.5rem!important}.mt-md-4,.my-md-4{margin-top:1.5rem!important}.mr-md-4,.mx-md-4{margin-right:1.5rem!important}.mb-md-4,.my-md-4{margin-bottom:1.5rem!important}.ml-md-4,.mx-md-4{margin-left:1.5rem!important}.m-md-5{margin:3rem!important}.mt-md-5,.my-md-5{margin-top:3rem!important}.mr-md-5,.mx-md-5{margin-right:3rem!important}.mb-md-5,.my-md-5{margin-bottom:3rem!important}.ml-md-5,.mx-md-5{margin-left:3rem!important}.p-md-0{padding:0!important}.pt-md-0,.py-md-0{padding-top:0!important}.pr-md-0,.px-md-0{padding-right:0!important}.pb-md-0,.py-md-0{padding-bottom:0!important}.pl-md-0,.px-md-0{padding-left:0!important}.p-md-1{padding:.25rem!important}.pt-md-1,.py-md-1{padding-top:.25rem!important}.pr-md-1,.px-md-1{padding-right:.25rem!important}.pb-md-1,.py-md-1{padding-bottom:.25rem!important}.pl-md-1,.px-md-1{padding-left:.25rem!important}.p-md-2{padding:.5rem!important}.pt-md-2,.py-md-2{padding-top:.5rem!important}.pr-md-2,.px-md-2{padding-right:.5rem!important}.pb-md-2,.py-md-2{padding-bottom:.5rem!important}.pl-md-2,.px-md-2{padding-left:.5rem!important}.p-md-3{padding:1rem!important}.pt-md-3,.py-md-3{padding-top:1rem!important}.pr-md-3,.px-md-3{padding-right:1rem!important}.pb-md-3,.py-md-3{padding-bottom:1rem!important}.pl-md-3,.px-md-3{padding-left:1rem!important}.p-md-4{padding:1.5rem!important}.pt-md-4,.py-md-4{padding-top:1.5rem!important}.pr-md-4,.px-md-4{padding-right:1.5rem!important}.pb-md-4,.py-md-4{padding-bottom:1.5rem!important}.pl-md-4,.px-md-4{padding-left:1.5rem!important}.p-md-5{padding:3rem!important}.pt-md-5,.py-md-5{padding-top:
3rem!important}.pr-md-5,.px-md-5{padding-right:3rem!important}.pb-md-5,.py-md-5{padding-bottom:3rem!important}.pl-md-5,.px-md-5{padding-left:3rem!important}.m-md-n1{margin:-.25rem!important}.mt-md-n1,.my-md-n1{margin-top:-.25rem!important}.mr-md-n1,.mx-md-n1{margin-right:-.25rem!important}.mb-md-n1,.my-md-n1{margin-bottom:-.25rem!important}.ml-md-n1,.mx-md-n1{margin-left:-.25rem!important}.m-md-n2{margin:-.5rem!important}.mt-md-n2,.my-md-n2{margin-top:-.5rem!important}.mr-md-n2,.mx-md-n2{margin-right:-.5rem!important}.mb-md-n2,.my-md-n2{margin-bottom:-.5rem!important}.ml-md-n2,.mx-md-n2{margin-left:-.5rem!important}.m-md-n3{margin:-1rem!important}.mt-md-n3,.my-md-n3{margin-top:-1rem!important}.mr-md-n3,.mx-md-n3{margin-right:-1rem!important}.mb-md-n3,.my-md-n3{margin-bottom:-1rem!important}.ml-md-n3,.mx-md-n3{margin-left:-1rem!important}.m-md-n4{margin:-1.5rem!important}.mt-md-n4,.my-md-n4{margin-top:-1.5rem!important}.mr-md-n4,.mx-md-n4{margin-right:-1.5rem!important}.mb-md-n4,.my-md-n4{margin-bottom:-1.5rem!important}.ml-md-n4,.mx-md-n4{margin-left:-1.5rem!important}.m-md-n5{margin:-3rem!important}.mt-md-n5,.my-md-n5{margin-top:-3rem!important}.mr-md-n5,.mx-md-n5{margin-right:-3rem!important}.mb-md-n5,.my-md-n5{margin-bottom:-3rem!important}.ml-md-n5,.mx-md-n5{margin-left:-3rem!important}.m-md-auto{margin:auto!important}.mt-md-auto,.my-md-auto{margin-top:auto!important}.mr-md-auto,.mx-md-auto{margin-right:auto!important}.mb-md-auto,.my-md-auto{margin-bottom:auto!important}.ml-md-auto,.mx-md-auto{margin-left:auto!important}}@media (min-width:992px){.m-lg-0{margin:0!important}.mt-lg-0,.my-lg-0{margin-top:0!important}.mr-lg-0,.mx-lg-0{margin-right:0!important}.mb-lg-0,.my-lg-0{margin-bottom:0!important}.ml-lg-0,.mx-lg-0{margin-left:0!important}.m-lg-1{margin:.25rem!important}.mt-lg-1,.my-lg-1{margin-top:.25rem!important}.mr-lg-1,.mx-lg-1{margin-right:.25rem!important}.mb-lg-1,.my-lg-1{margin-bottom:.25rem!important}.ml-lg-1,.mx-lg-1{margin-left:.25rem!important}.m-lg-2{margin:.5rem!important}.mt-lg-2,.my-lg-2{margin-top:.5rem!important}.mr-lg-2,.mx-lg-2{margin-right:.5rem!important}.mb-lg-2,.my-lg-2{margin-bottom:.5rem!important}.ml-lg-2,.mx-lg-2{margin-left:.5rem!important}.m-lg-3{margin:1rem!important}.mt-lg-3,.my-lg-3{margin-top:1rem!important}.mr-lg-3,.mx-lg-3{margin-right:1rem!important}.mb-lg-3,.my-lg-3{margin-bottom:1rem!important}.ml-lg-3,.mx-lg-3{margin-left:1rem!important}.m-lg-4{margin:1.5rem!important}.mt-lg-4,.my-lg-4{margin-top:1.5rem!important}.mr-lg-4,.mx-lg-4{margin-right:1.5rem!important}.mb-lg-4,.my-lg-4{margin-bottom:1.5rem!important}.ml-lg-4,.mx-lg-4{margin-left:1.5rem!important}.m-lg-5{margin:3rem!important}.mt-lg-5,.my-lg-5{margin-top:3rem!important}.mr-lg-5,.mx-lg-5{margin-right:3rem!important}.mb-lg-5,.my-lg-5{margin-bottom:3rem!important}.ml-lg-5,.mx-lg-5{margin-left:3rem!important}.p-lg-0{padding:0!important}.pt-lg-0,.py-lg-0{padding-top:0!important}.pr-lg-0,.px-lg-0{padding-right:0!important}.pb-lg-0,.py-lg-0{padding-bottom:0!important}.pl-lg-0,.px-lg-0{padding-left:0!important}.p-lg-1{padding:.25rem!important}.pt-lg-1,.py-lg-1{padding-top:.25rem!important}.pr-lg-1,.px-lg-1{padding-right:.25rem!important}.pb-lg-1,.py-lg-1{padding-bottom:.25rem!important}.pl-lg-1,.px-lg-1{padding-left:.25rem!important}.p-lg-2{padding:.5rem!important}.pt-lg-2,.py-lg-2{padding-top:.5rem!important}.pr-lg-2,.px-lg-2{padding-right:.5rem!important}.pb-lg-2,.py-lg-2{padding-bottom:.5rem!important}.pl-lg-2,.px-lg-2{padding-left:.5rem!important}.p-lg-3{padding:1rem!important}.pt-lg-3,.py-lg
-3{padding-top:1rem!important}.pr-lg-3,.px-lg-3{padding-right:1rem!important}.pb-lg-3,.py-lg-3{padding-bottom:1rem!important}.pl-lg-3,.px-lg-3{padding-left:1rem!important}.p-lg-4{padding:1.5rem!important}.pt-lg-4,.py-lg-4{padding-top:1.5rem!important}.pr-lg-4,.px-lg-4{padding-right:1.5rem!important}.pb-lg-4,.py-lg-4{padding-bottom:1.5rem!important}.pl-lg-4,.px-lg-4{padding-left:1.5rem!important}.p-lg-5{padding:3rem!important}.pt-lg-5,.py-lg-5{padding-top:3rem!important}.pr-lg-5,.px-lg-5{padding-right:3rem!important}.pb-lg-5,.py-lg-5{padding-bottom:3rem!important}.pl-lg-5,.px-lg-5{padding-left:3rem!important}.m-lg-n1{margin:-.25rem!important}.mt-lg-n1,.my-lg-n1{margin-top:-.25rem!important}.mr-lg-n1,.mx-lg-n1{margin-right:-.25rem!important}.mb-lg-n1,.my-lg-n1{margin-bottom:-.25rem!important}.ml-lg-n1,.mx-lg-n1{margin-left:-.25rem!important}.m-lg-n2{margin:-.5rem!important}.mt-lg-n2,.my-lg-n2{margin-top:-.5rem!important}.mr-lg-n2,.mx-lg-n2{margin-right:-.5rem!important}.mb-lg-n2,.my-lg-n2{margin-bottom:-.5rem!important}.ml-lg-n2,.mx-lg-n2{margin-left:-.5rem!important}.m-lg-n3{margin:-1rem!important}.mt-lg-n3,.my-lg-n3{margin-top:-1rem!important}.mr-lg-n3,.mx-lg-n3{margin-right:-1rem!important}.mb-lg-n3,.my-lg-n3{margin-bottom:-1rem!important}.ml-lg-n3,.mx-lg-n3{margin-left:-1rem!important}.m-lg-n4{margin:-1.5rem!important}.mt-lg-n4,.my-lg-n4{margin-top:-1.5rem!important}.mr-lg-n4,.mx-lg-n4{margin-right:-1.5rem!important}.mb-lg-n4,.my-lg-n4{margin-bottom:-1.5rem!important}.ml-lg-n4,.mx-lg-n4{margin-left:-1.5rem!important}.m-lg-n5{margin:-3rem!important}.mt-lg-n5,.my-lg-n5{margin-top:-3rem!important}.mr-lg-n5,.mx-lg-n5{margin-right:-3rem!important}.mb-lg-n5,.my-lg-n5{margin-bottom:-3rem!important}.ml-lg-n5,.mx-lg-n5{margin-left:-3rem!important}.m-lg-auto{margin:auto!important}.mt-lg-auto,.my-lg-auto{margin-top:auto!important}.mr-lg-auto,.mx-lg-auto{margin-right:auto!important}.mb-lg-auto,.my-lg-auto{margin-bottom:auto!important}.ml-lg-auto,.mx-lg-auto{margin-left:auto!important}}@media 
(min-width:1200px){.m-xl-0{margin:0!important}.mt-xl-0,.my-xl-0{margin-top:0!important}.mr-xl-0,.mx-xl-0{margin-right:0!important}.mb-xl-0,.my-xl-0{margin-bottom:0!important}.ml-xl-0,.mx-xl-0{margin-left:0!important}.m-xl-1{margin:.25rem!important}.mt-xl-1,.my-xl-1{margin-top:.25rem!important}.mr-xl-1,.mx-xl-1{margin-right:.25rem!important}.mb-xl-1,.my-xl-1{margin-bottom:.25rem!important}.ml-xl-1,.mx-xl-1{margin-left:.25rem!important}.m-xl-2{margin:.5rem!important}.mt-xl-2,.my-xl-2{margin-top:.5rem!important}.mr-xl-2,.mx-xl-2{margin-right:.5rem!important}.mb-xl-2,.my-xl-2{margin-bottom:.5rem!important}.ml-xl-2,.mx-xl-2{margin-left:.5rem!important}.m-xl-3{margin:1rem!important}.mt-xl-3,.my-xl-3{margin-top:1rem!important}.mr-xl-3,.mx-xl-3{margin-right:1rem!important}.mb-xl-3,.my-xl-3{margin-bottom:1rem!important}.ml-xl-3,.mx-xl-3{margin-left:1rem!important}.m-xl-4{margin:1.5rem!important}.mt-xl-4,.my-xl-4{margin-top:1.5rem!important}.mr-xl-4,.mx-xl-4{margin-right:1.5rem!important}.mb-xl-4,.my-xl-4{margin-bottom:1.5rem!important}.ml-xl-4,.mx-xl-4{margin-left:1.5rem!important}.m-xl-5{margin:3rem!important}.mt-xl-5,.my-xl-5{margin-top:3rem!important}.mr-xl-5,.mx-xl-5{margin-right:3rem!important}.mb-xl-5,.my-xl-5{margin-bottom:3rem!important}.ml-xl-5,.mx-xl-5{margin-left:3rem!important}.p-xl-0{padding:0!important}.pt-xl-0,.py-xl-0{padding-top:0!important}.pr-xl-0,.px-xl-0{padding-right:0!important}.pb-xl-0,.py-xl-0{padding-bottom:0!important}.pl-xl-0,.px-xl-0{padding-left:0!important}.p-xl-1{padding:.25rem!important}.pt-xl-1,.py-xl-1{padding-top:.25rem!important}.pr-xl-1,.px-xl-1{padding-right:.25rem!important}.pb-xl-1,.py-xl-1{padding-bottom:.25rem!important}.pl-xl-1,.px-xl-1{padding-left:.25rem!important}.p-xl-2{padding:.5rem!important}.pt-xl-2,.py-xl-2{padding-top:.5rem!important}.pr-xl-2,.px-xl-2{padding-right:.5rem!important}.pb-xl-2,.py-xl-2{padding-bottom:.5rem!important}.pl-xl-2,.px-xl-2{padding-left:.5rem!important}.p-xl-3{padding:1rem!important}.pt-xl-3,.py-xl-3{padding-top:1rem!important}.pr-xl-3,.px-xl-3{padding-right:1rem!important}.pb-xl-3,.py-xl-3{padding-bottom:1rem!important}.pl-xl-3,.px-xl-3{padding-left:1rem!important}.p-xl-4{padding:1.5rem!important}.pt-xl-4,.py-xl-4{padding-top:1.5rem!important}.pr-xl-4,.px-xl-4{padding-right:1.5rem!important}.pb-xl-4,.py-xl-4{padding-bottom:1.5rem!important}.pl-xl-4,.px-xl-4{padding-left:1.5rem!important}.p-xl-5{padding:3rem!important}.pt-xl-5,.py-xl-5{padding-top:3rem!important}.pr-xl-5,.px-xl-5{padding-right:3rem!important}.pb-xl-5,.py-xl-5{padding-bottom:3rem!important}.pl-xl-5,.px-xl-5{padding-left:3rem!important}.m-xl-n1{margin:-.25rem!important}.mt-xl-n1,.my-xl-n1{margin-top:-.25rem!important}.mr-xl-n1,.mx-xl-n1{margin-right:-.25rem!important}.mb-xl-n1,.my-xl-n1{margin-bottom:-.25rem!important}.ml-xl-n1,.mx-xl-n1{margin-left:-.25rem!important}.m-xl-n2{margin:-.5rem!important}.mt-xl-n2,.my-xl-n2{margin-top:-.5rem!important}.mr-xl-n2,.mx-xl-n2{margin-right:-.5rem!important}.mb-xl-n2,.my-xl-n2{margin-bottom:-.5rem!important}.ml-xl-n2,.mx-xl-n2{margin-left:-.5rem!important}.m-xl-n3{margin:-1rem!important}.mt-xl-n3,.my-xl-n3{margin-top:-1rem!important}.mr-xl-n3,.mx-xl-n3{margin-right:-1rem!important}.mb-xl-n3,.my-xl-n3{margin-bottom:-1rem!important}.ml-xl-n3,.mx-xl-n3{margin-left:-1rem!important}.m-xl-n4{margin:-1.5rem!important}.mt-xl-n4,.my-xl-n4{margin-top:-1.5rem!important}.mr-xl-n4,.mx-xl-n4{margin-right:-1.5rem!important}.mb-xl-n4,.my-xl-n4{margin-bottom:-1.5rem!important}.ml-xl-n4,.mx-xl-n4{margin-left:-1.5rem!important}.m-xl-n5{marg
in:-3rem!important}.mt-xl-n5,.my-xl-n5{margin-top:-3rem!important}.mr-xl-n5,.mx-xl-n5{margin-right:-3rem!important}.mb-xl-n5,.my-xl-n5{margin-bottom:-3rem!important}.ml-xl-n5,.mx-xl-n5{margin-left:-3rem!important}.m-xl-auto{margin:auto!important}.mt-xl-auto,.my-xl-auto{margin-top:auto!important}.mr-xl-auto,.mx-xl-auto{margin-right:auto!important}.mb-xl-auto,.my-xl-auto{margin-bottom:auto!important}.ml-xl-auto,.mx-xl-auto{margin-left:auto!important}}.text-monospace{font-family:Fira Code,Andale Mono,monospace!important}.text-justify{text-align:justify!important}.text-wrap{white-space:normal!important}.text-nowrap{white-space:nowrap!important}.text-truncate{overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.text-left{text-align:left!important}.text-right{text-align:right!important}.text-center{text-align:center!important}@media (min-width:576px){.text-sm-left{text-align:left!important}.text-sm-right{text-align:right!important}.text-sm-center{text-align:center!important}}@media (min-width:768px){.text-md-left{text-align:left!important}.text-md-right{text-align:right!important}.text-md-center{text-align:center!important}}@media (min-width:992px){.text-lg-left{text-align:left!important}.text-lg-right{text-align:right!important}.text-lg-center{text-align:center!important}}@media (min-width:1200px){.text-xl-left{text-align:left!important}.text-xl-right{text-align:right!important}.text-xl-center{text-align:center!important}}.text-lowercase{text-transform:lowercase!important}.text-uppercase{text-transform:uppercase!important}.text-capitalize{text-transform:capitalize!important}.font-weight-light{font-weight:300!important}.font-weight-lighter{font-weight:lighter!important}.font-weight-normal{font-weight:400!important}.font-weight-bold{font-weight:700!important}.font-weight-bolder{font-weight:bolder!important}.font-italic{font-style:italic!important}.text-white{color:#fff!important}.text-primary{color:#414551!important}a.text-primary:focus,a.text-primary:hover{color:#1f2127!important}.text-secondary{color:#ca445e!important}a.text-secondary:focus,a.text-secondary:hover{color:#972b40!important}.text-body{color:#212529!important}.text-muted{color:#6c757d!important}.text-black-50{color:rgba(0,0,0,.5)!important}.text-white-50{color:hsla(0,0%,100%,.5)!important}.text-hide{font:0/0 a;color:transparent;text-shadow:none;background-color:transparent;border:0}.text-decoration-none{text-decoration:none!important}.text-break{word-break:break-word!important;overflow-wrap:break-word!important}.text-reset{color:inherit!important}.visible{visibility:visible!important}.invisible{visibility:hidden!important}:root{--font-family-toolbar-main:"Lato",sans-serif;--font-weight-toolbar-main:400;--font-family-toolbar-details:"Lato",sans-serif;--font-weight-toolbar-details:400;--font-family-sidebar:"Lato",sans-serif;--font-size-sidebar-category:18px;--font-weight-sidebar-category:400;--font-size-sidebar-page:1rem;--font-weight-sidebar-page:400} \ No newline at end of file diff --git a/scala3doc/dotty-docs/docs/css/color-brewer.css b/scala3doc/dotty-docs/docs/css/color-brewer.css new file mode 100644 index 000000000000..9c3972d0219d --- /dev/null +++ b/scala3doc/dotty-docs/docs/css/color-brewer.css @@ -0,0 +1,66 @@ +/* + +Colorbrewer theme +Original: https://github.com/mbostock/colorbrewer-theme (c) Mike Bostock +Ported by Fabrício Tavares de Oliveira + +*/ + +.hljs { + background: transparent; +} + +.hljs, +.hljs-subst { + color: #000; +} + +.hljs-string, +.hljs-meta, +.hljs-symbol, +.hljs-template-tag, 
+.hljs-template-variable, +.hljs-addition { + color: #756bb1; +} + +.hljs-comment, +.hljs-quote { + color: #636363; +} + +.hljs-number, +.hljs-regexp, +.hljs-literal, +.hljs-bullet, +.hljs-link { + color: #31a354; +} + +.hljs-deletion, +.hljs-variable { + color: #88f; +} + +.hljs-keyword, +.hljs-selector-tag, +.hljs-title, +.hljs-section, +.hljs-built_in, +.hljs-doctag, +.hljs-type, +.hljs-tag, +.hljs-name, +.hljs-selector-id, +.hljs-selector-class, +.hljs-strong { + color: #3182bd; +} + +.hljs-emphasis { + font-style: italic; +} + +.hljs-attribute { + color: #e6550d; +} diff --git a/scala3doc/dotty-docs/docs/css/dottydoc.css b/scala3doc/dotty-docs/docs/css/dottydoc.css new file mode 100644 index 000000000000..f20833bf98db --- /dev/null +++ b/scala3doc/dotty-docs/docs/css/dottydoc.css @@ -0,0 +1,273 @@ +html, body { + font-weight: 300; + height: 100%; +} + +main.container { + min-height: 100vh; + padding: 15px 15px; + padding-bottom: 45px; /* prevents the content to be hidden by the gitter sidecar */ +} + +.container img { + width: 100%; + height: auto; +} + +/* headers */ +main header { + border-bottom: 1px solid rgba(0,0,0,.1); + margin-bottom: 16px; + padding-bottom: 16px; +} + +main > h1 { + margin-bottom: 20px; +} + +.byline, .byline a { + color: grey; +} +.byline .author { + display: block; +} + +/* indexes */ +ul.post-list { + list-style: none; + padding-left: 0; +} +.post-list h2 { + margin-bottom: 0; +} + +/* headings anchors */ +a.anchor { + color: white; + margin-left: -23px; + padding-right: 3px; + transition: color .4s ease-out; +} + +a.anchor::before { + content: "\f0c1"; + font-family: "Font Awesome 5 Free"; + font-weight: 900; + font-size: 20px; +} + +h1:hover a.anchor, +h2:hover a.anchor, +h3:hover a.anchor, +h4:hover a.anchor, +h5:hover a.anchor { + color: lightgrey; + text-decoration: none; +} + +h1:hover a.anchor:hover, +h2:hover a.anchor:hover, +h3:hover a.anchor:hover, +h4:hover a.anchor:hover, +h5:hover a.anchor:hover { + color: var(--secondary); +} + + +/* footer */ +footer { + color: grey; +} +footer img#author-img { + width: auto; + height: auto; + max-width:100px; + max-height:100px; + border-radius: 50%; +} + +/* api docs */ +.api span.letter-anchor { + float: left; + width: 50px; + height: 50px; + border-radius: 50px; + color: white; + margin-top: 6px; + margin-right: 8px; + line-height: 50px; + text-align: center; + text-decoration: none; + font-size: 43px; + font-family: var(--font-family-sans-serif); +} +.api span.letter-anchor.object { + line-height: 48px; +} +.api span.letter-anchor.class { + line-height: 48px; + padding-right: 3px; +} +.letter-anchor.object { + background: #2c6c8d; +} +.letter-anchor.class { + background: #44ad7d; +} +.letter-anchor.trait { + background: #19aacf; +} +.letter-anchor.enum { + background: #7803fc; +} +.letter-anchor.package { + background: #2c6c8d; +} + +.api header { + font-family: var(--font-family-sans-serif); +} +.api header .name-prefix { + display: block; +} +.api header .name-suffix { + display: inline-block; +} + +.api header h1 { + margin: -13px 8px 0 0; + display: inline-block; +} +.api h2 { + margin-top: 1rem; +} +.api h3 { + display: inline; + margin: 0; + font: inherit; + font-weight: bold; +} + +/* improved display and wrapping of parameters */ +.api .params, .api .type-params { + display: inline-flex; + flex-flow: wrap; +} + +/* api layout */ +.wide-table { + display: table; + width: 100%; +} +.api .member:hover { + background: var(--doc-bg); + cursor: pointer; +} +.api .left-column { + white-space: nowrap; + 
padding-left: 1em; + border-left: 3px solid transparent;/* table rows cannot have borders*/ + font-family: var(--font-family-monospace); + text-align: right; + width: 1px; +} +.api .member:hover .left-column { + border-left: 3px solid var(--secondary); +} +.api .right-column { + display: inline; + text-align: right; + font-family: var(--font-family-monospace); +} + +/* code */ +pre, code { + font-variant-ligatures: none; +} +pre { + padding: 0; + font-size: 13px; + background: var(--pre-bg); + border-radius: 2px; + border: 1px solid rgba(0, 0, 0, 0.1); +} + +pre > code { + display: block; + padding: 0.5rem; + overflow-x: auto; + background: transparent; +} + +/* admonitions */ +blockquote { + padding: 0 1em; + color: #777; + border-left: 0.25em solid #ddd; +} + +aside { + padding: 15px; + margin: 10px 0; +} + +aside.warning { + border-left: 3px solid #d62c2c; + background-color: #ffe4e4; +} + +aside.notice { + border-left: 3px solid #4c97e4; + background-color: #e4ebff; +} + +aside.success { + border-left: 3px solid #36bf1d; + background-color: #ebfddd; +} + +/* gitter chat */ +.gitter-open-chat-button { + background-color: grey; +} +.gitter-open-chat-button:focus, .gitter-open-chat-button:hover { + background-color: var(--primary); +} +.gitter-open-chat-button:focus { + box-shadow: 0 0 8px var(--primary); +} +.gitter-chat-embed { + top: 40px; /* 50px (navbar) - 10px (aside's margin) */ + bottom: -10px; +} + +/* media queries for bigger screens (dottydoc is mobile-first) */ +@media (min-width: 576px) { + .byline .author { + display: inline; + margin-left: 1em; + } + main.container { + padding: 15px 30px; + } +} +@media (min-width: 768px) { + .api .member { + display: table-row; + } + .api .left-column { + display: table-cell; + } + .api .right-column { + display: flex; + flex-flow: wrap; + } + main.container { + padding: 15px 45px; + } +} + +header { + position: static !important; + width: 100% !important; +} \ No newline at end of file diff --git a/scala3doc/dotty-docs/docs/css/frontpage.css b/scala3doc/dotty-docs/docs/css/frontpage.css new file mode 100644 index 000000000000..ae9107f9a5c0 --- /dev/null +++ b/scala3doc/dotty-docs/docs/css/frontpage.css @@ -0,0 +1,169 @@ + @import url('https://fonts.googleapis.com/css?family=Lobster&display=swap'); + +body { + height: 100%; + width: 100%; + margin: 0; + padding: 0; + font-family: var(--font-family-sans-serif); + font-weight: 300; + scroll-behavior: smooth; +} + +h1, h2, h3 { + font-family: "Lobster", serif; + font-weight: 500; + text-align: center; + padding-top: 20px; +} + +h1 { + font-size: 64px; +} + +h1#main { + font-size: 15vh; +} + +/* navigation */ +header { + font-size: 24px; +} + +header .nav-item i { + font-size: 1.5em; +} + +.navbar-dark .navbar-nav .nav-link, .navbar-dark .navbar-toggler { + color: white; +} + +.navbar-dark .navbar-toggler { + border: none; + padding: 0; + font-size: 1.5em; +} + +.navbar #navbarContent { + /* above the logo when opened on small screens */ + background: #ca445e; + z-index: 100; + box-shadow: 0px 10px 40px #ca445e; +} + +.nav-item { + margin-left: .5em; +} + +/* layout */ +section.page { + min-height: 100vh; + width: 100%; + padding: 0; +} + +section .container { + max-width: 750px; +} + +section p { + margin-bottom: 0px; + padding-bottom: 15px; + line-height: 3vh; +} + +section p ~ pre { + margin-top: -8px; + margin-bottom: 25px; +} + +.centered-table table { + width: 100%; + margin-left: auto; + margin-right: auto; +} + +/* background colors */ +.bg-red { + background-color: #ca445e; +} 
+.bg-blue { + background-color: #002B36; +} +.bg-teal { + background-color: #224951; +} + +/* text colors */ +.bg-dark { + color: white; +} +.bg-dark a { + color: #f26527; +} +.bg-dark a:hover { + color: #da4323; +} + +/* code */ +pre, code { + background-color: #efefef; + border-radius: 4px; + padding: 3px 8px; + line-height: 1.75em; +} + +/* gitter chat */ +.gitter-chat-embed { + top: 0; + bottom: 0; +} + +aside { + margin: 0; + padding: 0; +} + +/* logo animation */ +@keyframes fadeInLogo { + from { + transform: translateY(0) scale(0.8); + opacity: 0; + } + + to { + transform: translateY(0) scale(1); + opacity: 1; + } +} + +.scala-logo-container { + height: calc(100vh - 80px); + margin: auto; + position: absolute; + top: 80px; + right: 0; + bottom: 0; + left: 0; + animation: 2s fadeInLogo; + animation-timing-function: cubic-bezier(0.6, 0.2, 0.1, 1) 0.1s; +} + +.scala-logo-container img { + height: 60%; + display: block; + width: 100%; +} + +.scala-logo-container .subtitle { + bottom: 0; + position: absolute; + width: 100%; + font-size: 1.5em; + line-height: 3vh; +} + +.scala-logo-container .subtitle h1 { + font-size: 100px; + font-size: 15vh; +} diff --git a/scala3doc/dotty-docs/docs/css/search.css b/scala3doc/dotty-docs/docs/css/search.css new file mode 100644 index 000000000000..cb1c868ab875 --- /dev/null +++ b/scala3doc/dotty-docs/docs/css/search.css @@ -0,0 +1,36 @@ +ul { + list-style: none; + padding: 0; +} + +ul li { + margin-bottom: 5px; + font-family: var(--font-family-sans-serif); +} + +h3 { + padding-top: .5rem; +} +h4 { + display: inline-block; + font-size: 1.25rem; +} + +.package-name a { + color: inherit; +} + +.entity-kinds { + display: inline; +} + +.member-result { + padding-left: 1em; + font-family: var(--font-family-monospace); +} + +@media(min-width: 768px) { + .tab-content > .tab-pane { + display: block; + } +} diff --git a/scala3doc/dotty-docs/docs/css/sidebar.css b/scala3doc/dotty-docs/docs/css/sidebar.css new file mode 100644 index 000000000000..5f96c0889deb --- /dev/null +++ b/scala3doc/dotty-docs/docs/css/sidebar.css @@ -0,0 +1,149 @@ +.sidebar { + position: fixed; + top: 50px; + left: 0; + z-index: 1000; + width: 275px; + height: 100%; + overflow-x: hidden; /* Safari and some others don't support overflow: x y */ + overflow-y: auto; + -webkit-overflow-scrolling: touch; /* nicer scrolling on touch screens */ + font-family: var(--font-family-sidebar); + background: var(--sidebar); + margin-left: -275px; /* invisible by default, shown by toggle */ + transition: margin .25s ease-out; +} + +.sidebar.toggled { + margin-left: 0; + box-shadow: -2px 0 8px var(--primary); +} + +.sidebar ul.toc { + padding-bottom: 60px; /* avoids unreachable elements at the end of toc */ + padding-left: 1em; + padding-top: 1em; + margin-bottom: 0; +} + +.sidebar ul { + list-style-type: none; + padding-left: 0; +} + +.sidebar li { + margin-top: .5em; +} + +.sidebar li.section ul { + padding-left: 1em; + display: none; +} +.sidebar li.section ul.toggled { + display: block; +} + +.sidebar li.section.index-entities ul { + padding-left: 0; +} + +.sidebar .index-entity.entity-package { + margin-left: 5px; + margin-top: .25em; +} + +.sidebar .toc > li:not(.index-entities) > ul { + border-left: 2px solid var(--sidebar-active); + padding-left: 1em; + margin-left: 5px; +} + +.sidebar li a { + font-size: var(--font-size-sidebar-page); + font-weight: var(--font-weight-sidebar-page); +} + +.sidebar > ul > li.leaf > a, .sidebar li.section > a { + font-size: var(--font-size-sidebar-category); + 
font-weight: var(--font-weight-sidebar-category); + text-transform: capitalize; + cursor: pointer; + color: var(--sidebar-category); +} + +.sidebar .entity-package > .entity-name { + text-transform: none; +} + +.sidebar a { + width: 100%; + color: var(--sidebar-page); + transition: color .2s ease-out; +} +.sidebar a:hover, .sidebar a.toggled { + color: var(--sidebar-active) !important; +} + +/* API Documentation */ +.package-toggle i.fas { + font-size: 15px; + margin-right: 1px; + color: var(--sidebar-active); +} + +.entity-kinds { + display: inline-flex; + /* so that it is aligned with the text AND allows to select the type + and its companion if any. */ +} + +.entity-kinds > a.letter-anchor { + float: left; + width: 1.5em; + height: 1.5em; + color: white; + text-align: center; + text-decoration: none; + margin-right: 5px; + border-radius: 1em; +} + +.entity-kinds a.object { + background: #2c6c8d; +} +.entity-kinds a.class { + background: #44ad7d; + padding-right: 1px; +} +.entity-kinds a.trait { + background: #19aacf; + padding-right: 1px; +} + +.with-companion .entity-kinds:not(:hover) a.object { + display:none; +} +.with-companion .entity-kinds:not(:hover) a.class { + background: linear-gradient(45deg, #2c6c8d 49%, #44ad7d 51%); +} +.with-companion .entity-kinds:not(:hover) a.trait { + background: linear-gradient(45deg, #2c6c8d 49%, #19aacf 51%); +} + +@media (min-width: 768px) { + .sidebar { /* visible by default, hidden by toggle */ + margin-left: 0; + box-shadow: -2px 0 8px var(--primary); + } + .sidebar.toggled { + margin-left: -275px; + box-shadow: none; + } + #content-wrapper { + margin-left: 275px !important; + transition: margin .25s ease-out; + } + .sidebar.toggled ~ #content-wrapper { + margin-left: 0 !important; + } +} diff --git a/scala3doc/dotty-docs/docs/css/toolbar.css b/scala3doc/dotty-docs/docs/css/toolbar.css new file mode 100644 index 000000000000..faf051e5dde8 --- /dev/null +++ b/scala3doc/dotty-docs/docs/css/toolbar.css @@ -0,0 +1,98 @@ +body { + margin-top: 50px; +} + +nav.navbar { + height: 50px; + line-height: 1; + font-size: 24px; + font-family: var(--font-family-toolbar-details); + background-color: var(--toolbar); + box-shadow: 0 0 6px; +} +nav.navbar-dark * { + color: var(--toolbar-entry); + transition: color .2s ease-out; +} + +nav.navbar-dark a:hover *, nav.navbar-dark a:focus * { + color: var(--toolbar-active); +} +nav.navbar a:hover { + text-decoration: none; +} +nav.navbar a { + cursor: pointer; +} + +.navbar-brand { + margin-right: auto; + margin-left: auto; + font-size: inherit; + display: flex; + align-items: center; +} + +.navbar-brand .project-logo { + display: none; + height: 40px; + margin-right: 7px; +} + +.navbar-brand .project-details * { + margin: 0; + line-height: inherit; +} +.navbar-brand .project-details h1 { + font-size: 1em; + font-family: var(--font-family-toolbar-main); + font-weight: var(--font-weight-toolbar-main); +} +.navbar-brand .project-details h2 { + font-size: 0.5em; + margin-top: 2px; + font-weight: var(--font-weight-toolbar-details); +} + +#search-api-input { + color: initial; + width: 10em; +} +#searchbar { + display: none; +} +#searchbar.shown { + display: initial; + margin-left: auto; + margin-right: auto; +} +#searchbar.shown + .navbar-brand { + display: none; +} + +#search-icon { + margin-left: .5em; +} + +@media (min-width: 420px) { + .navbar-brand .project-logo { + display: inline; + } +} + +@media (min-width: 768px) { + #search-icon, #searchbar.shown { + margin-left: 1em; + margin-right: initial; + } + 
#searchbar.shown + .navbar-brand { + display: flex; + } +} + +@media (min-width: 1200px) { + #searchbar.shown { + position: absolute; + margin-left: 4em; + } +} diff --git a/scala3doc/dotty-docs/docs/docs/contributing/checklist.sh b/scala3doc/dotty-docs/docs/docs/contributing/checklist.sh new file mode 100755 index 000000000000..3ef73d58e6e0 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/contributing/checklist.sh @@ -0,0 +1,59 @@ +# #!/usr/bin/env bash +stable=$1 + +rc="$(($stable+1))" +next="$(($rc+1))" + +stable_version="0.$stable.0" +rc_version="0.$rc.0-RC1" +next_version="0.$next.0" +stable_branch="0.$stable.x" +rc_branch="0.$rc.x" + +LIST='- [ ] Publish artifacts to Maven via CI + - [ ] On branch ``, set `baseVersion` to `` and `git tag` it as ``. This will publish artefacts to Sonatype and GitHub Release + - [ ] Merge branch `` into `master` to guarantee that all of the `` commits are propagated to `master` + - [ ] Look at the milestone of the RC version being released. Move all the open issues from it to the next milestone. + - [ ] Create branch `` from `master` + - [ ] On ``, set `baseVersion` to `` and `git tag` it as ``. This will publish artefacts to Sonatype and GitHub Release. + - [ ] On `master`, set `baseVersion` to `` +- [ ] Update `scalaVersion` (and, if applicable, the `sbt-dotty` version) in the Dotty ecosystem projects + - [ ] https://github.com/lampepfl/dotty-example-project [![Build Status](https://travis-ci.org/lampepfl/dotty-example-project.svg?branch=master)](https://travis-ci.org/lampepfl/dotty-example-project) + - [ ] Committed to `master` + - [ ] https://github.com/lampepfl/dotty-example-project/tree/mill + - [ ] https://github.com/lampepfl/dotty.g8 [![Build Status](https://travis-ci.org/lampepfl/dotty.g8.svg?branch=master)](https://travis-ci.org/lampepfl/dotty.g8/) + - [ ] Committed to `master` + - [ ] https://github.com/lampepfl/dotty-cross.g8 [![Build Status](https://travis-ci.org/lampepfl/dotty-cross.g8.svg?branch=master)](https://travis-ci.org/lampepfl/dotty-cross.g8/) + - [ ] Committed to `master` + - [ ] https://github.com/lampepfl/homebrew-brew [![Build Status](https://travis-ci.org/lampepfl/homebrew-brew.svg?branch=master)](https://travis-ci.org/lampepfl/homebrew-brew) + - [ ] Committed to `master` + - SHA256 sum for the artifact: `wget -q -O- https://github.com/lampepfl/dotty/releases/download//sha256sum.txt | grep ".tar.gz"` + - [ ] https://github.com/lampepfl/packtest [![Build Status](https://travis-ci.org/lampepfl/packtest.svg?branch=master)](https://travis-ci.org/lampepfl/packtest) + - [ ] Committed to `master` + - [ ] https://github.com/lampepfl/xml-interpolator [![Build Status](https://travis-ci.org/lampepfl/xml-interpolator.svg?branch=master)](https://travis-ci.org/lampepfl/xml-interpolator) + - [ ] PR submitted + - [ ] PR merged + - [ ] https://github.com/scalacenter/scastie + - [ ] PR submitted + - [ ] PR merged + - [ ] https://scastie.scala-lang.org/ -> Build Settings -> Dotty mentions `` + - [ ] Dotty reference compiler [![Build Status](http://dotty-ci.epfl.ch/api/badges/lampepfl/dotty/status.svg)](http://dotty-ci.epfl.ch/lampepfl/dotty) + - [ ] PR submitted + - [ ] PR merged + - [ ] Scalac [![Build Status](https://travis-ci.org/scala/scala.svg?branch=2.13.x)](https://travis-ci.org/scala/scala) + - [ ] PR submitted + - [ ] PR merged +- [ ] Announce the release + - [ ] Publish releases for the RC and stable versions on GitHub Releases + - [ ] Publish Blog Post on dotty.epfl.ch + - [ ] Make an 
announcement thread on https://contributors.scala-lang.org + - [ ] Tweet the announcement blog post on https://twitter.com/scala_lang + +[Instructions on how to release](https://dotty.epfl.ch/docs/contributing/release.html)' + +echo "$LIST" |\ + sed "s//$stable_version/g" |\ + sed "s//$rc_version/g" |\ + sed "s//$next_version/g" |\ + sed "s//$stable_branch/g" |\ + sed "s//$rc_branch/g" diff --git a/scala3doc/dotty-docs/docs/docs/contributing/contribute-knowledge.md b/scala3doc/dotty-docs/docs/docs/contributing/contribute-knowledge.md new file mode 100644 index 000000000000..7164774ac1df --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/contributing/contribute-knowledge.md @@ -0,0 +1,12 @@ +--- +layout: doc-page +title: Contributing Knowledge +--- + +# Contribute Internals-related Knowledge +If you know anything useful at all about Dotty, feel free to log this knowledge: + +- [📜Log the Knowledge](https://github.com/lampepfl/dotty-knowledge/issues/new) +- [🎓More about Logging the Knowledge](https://github.com/lampepfl/dotty-knowledge/blob/master/README.md) + +In short, no need to make it pretty, particularly human-readable or give it a particular structure. Just dump the knowledge you have and we'll take it from there. \ No newline at end of file diff --git a/scala3doc/dotty-docs/docs/docs/contributing/debug-tests.md b/scala3doc/dotty-docs/docs/docs/contributing/debug-tests.md new file mode 100644 index 000000000000..89a220e9c0dd --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/contributing/debug-tests.md @@ -0,0 +1,124 @@ +--- +layout: doc-page +title: Tests for Debuggability +--- + +## Tools Required + +- JDB +- expect + +Both are usually pre-installed on macOS and Linux distributions. + +## Debug Manually with JDB + +First, compile the file `tests/debug/while.scala`: + +```shell +$ dotc tests/debug/while.scala +``` + +Second, run the compiled class with debugging enabled (suppose the main class is `Test`): + +```shell +$ dotr -d Test +``` + +Third, start JDB: + +```shell +$ jdb -attach 5005 -sourcepath tests/debug/ +``` + +You can run `help` for the commands supported by JDB. + +## Debug Automatically with Expect + +### 1. Annotate the source code with debug information. + +The following file (`tests/debug/while.scala`) is an example of annotated source code: + +```scala +object Test { + + def main(args: Array[String]): Unit = { + var a = 1 + 2 + a = a + 3 + a = 4 + 5 // [break] [step: while] + + while (a * 8 < 100) { // [step: a += 1] + a += 1 // [step: while] [cont: print] + } + + print(a) // [break] [cont] + } +} +``` + +The debugging information is annotated as comments to the code in brackets: + +```scala +val x = f(3) // [break] [next: line=5] +val y = 5 +``` + +1. A JDB command must be wrapped in brackets, like `[step]`. All JDB commands can be used. +2. To check output of JDB for a command, use `[cmd: expect]`. +3. If `expect` is wrapped in double quotes, regex is supported. +4. Break commands are collected and set globally. +5. Other commands will be sent to JDB in the order they appear in the source file. + +Note that JDB line numbers start from 1. + +### 2. Generate Expect File + +Now we can run the following command to generate an expect file: + +```shell +compiler/test/debug/Gen tests/debug/while.scala > robot +``` + +### 3.
Run the Test + +First, compile the file `tests/debug/while.scala`: + +```shell +$ dotc tests/debug/while.scala +``` + +Second, run the compiled class with debugging enabled: + +```shell +$ dotr -d Test +``` + +Finally, run the expect script: + +```shell +expect robot +``` + +## Other Tips + +### Adding a New Test + +Just put the annotated source file under `tests/debug/`, it will be automatically +run by the test infrastructure. + +### Run All Debug Tests + +```shell +./compiler/test/debug/test +``` + +### Debug a Debug Test + +If there is any problem with a debug test, first check if the problematic +test work correctly with JDB without automation. + +Then, uncomment the following line in the generated expect file to check the +output of expect: + +``` +# exp_internal 1 +``` diff --git a/scala3doc/dotty-docs/docs/docs/contributing/debugging.md b/scala3doc/dotty-docs/docs/docs/contributing/debugging.md new file mode 100644 index 000000000000..6b52a88b2b95 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/contributing/debugging.md @@ -0,0 +1,366 @@ +--- +layout: doc-page +title: Debugging Techniques +--- + +# Debugging Techniques +- [Setting up the playground](#setting-up-the-playground) +- [Show for human readable output](#show-for-human-readable-output) +- [How to disable color](#how-to-disable-color) +- [Reporting as a non-intrusive println](#reporting-as-a-non-intrusive-println) +- [Printing out trees after phases](#printing-out-trees-after-phases) +- [Printing out stack traces of compile time errors](#printing-out-stack-traces-of-compile-time-errors) +- [Configuring the printer output](#configuring-the-printer-output) +- [Figuring out an object creation site](#figuring-out-an-object-creation-site) + * [Via ID](#via-id) + * [Via tracer](#via-tracer) +- [Built-in Logging Architecture](#built-in-logging-architecture) + * [Printers](#printers) + * [Tracing](#tracing) + * [Reporter](#reporter) + +Table of contents generated with markdown-toc + +## Setting up the playground +Consider the `../issues/Playground.scala` (relative to the Dotty directory) file is: + +```scala +object Playground { + def main(args: Array[String]) = { + println("Hello World") + } +} +``` + +Then, you can debug Dotty by compiling this file via `dotc ../issues/Playground.scala` (from the SBT console) and collecting various debug output in process. This section documents techniques you can use to collect the debug info. + +[This](https://github.com/lampepfl/dotty/blob/10526a7d0aa8910729b6036ee51942e05b71abf6/compiler/src/dotty/tools/dotc/typer/Typer.scala#L2231) is the entry point to the Typer. The job of the Typer is to take an untyped tree, compute its type and turn it into a typed tree by attaching the type information to that tree. We will use this entry point to practice debugging techniques. E.g.: + +```scala + def typed(tree: untpd.Tree, pt: Type, locked: TypeVars)(implicit ctx: Context): Tree = + trace(i"typing $tree", typr, show = true) { + println("Hello Debug!") + /*...*/ +``` + +Then: + +```shell +dotc ../issues/Playground.scala +``` + +The techniques discussed below can be tried out in place of `println("Hello Debug")` in that location. They are of course applicable throughout the codebase. + +## Show for human readable output +Many objects in the compiler have a `show` method available on them via implicit rich wrapper: + +```scala +println(tree.show) +``` + +This will output every single tree passing through the typer (or wherever else you inject it) in a human readable form. 
Try calling `show` on anything you want to be human-readable, and chances are it will be possible to do so. + +## How to disable color +Note that the `show` command above outputs the code in color. This is achieved by injecting special characters into the strings which terminals interpret as commands to change color of the output. This however may not be what you want, e.g. if you want to zero-in on a particular tree: + +```scala +if (tree.show == """println("Hello World")""") + println(s"${tree.show}\n${pt.show}\n${tree.uniqueId}\n===\n") +``` + +The intention above is to output an extended debug info on a tree that matches a particular human-readable representation. However, because of the color characters, the comparison will fail. + +To disable color output from `show`, run `dotc` as follows: + +`dotc -color:never ../issues/Playground.scala` + +## Reporting as a non-intrusive println +Consider you want to debug the `tree` that goes into `assertPositioned(tree)` in the `typed` method. You can do: + +```scala +println(tree.show) +assertPositioned(tree) +``` + +But you can also do: + +```scala +assertPositioned(tree.reporting(s"Tree is: $result")) +``` + +`extension (a: A) def reporting(f: WrappedResult[T] ?=> String, p: Printer = Printers.default): A` is defined on all types. The function `f` can be written without the argument since it is a context function`. The `result` variable is a part of the `WrapperResult` – a tiny framework powering the `reporting` function. Basically, whenever you are using `reporting` on an object `A`, you can use the `result: A` variable from this function and it will be equal to the object you are calling `reporting` on. + +## Printing out trees after phases +To print out the trees you are compiling after the FrontEnd (scanner, parser, namer, typer) phases: + +```shell +dotc -Xprint:typer ../issues/Playground.scala +``` + +To print out the trees after Frontend and CollectSuperCalls phases: + +```shell +dotc -Xprint:typer,collectSuperCalls ../issues/Playground.scala +``` + +To print out the trees after all phases: + +```shell +dotc -Xprint:all ../issues/Playground.scala +``` + +To find out the list of all the phases and their names, check out [this](https://github.com/lampepfl/dotty/blob/10526a7d0aa8910729b6036ee51942e05b71abf6/compiler/src/dotty/tools/dotc/Compiler.scala#L34) line in `Compiler.scala`. Each `Phase` object has `phaseName` defined on it, this is the phase name. + +## Printing out stack traces of compile time errors +You can use the flag `-Ydebug-error` to get the stack trace of all the compile-time errors. Consider the following file: + +```scala +object Foo +object Foo +``` + +Clearly we cannot define an object `Foo` twice. Now compile it as follows: `dotc -Ydebug-error ../issues/Playground.scala` (use whatever path you saved it under). The result will be as follows: + +```scala +-- Error: ../issues/Playground.scala:2:0 --------------------------------------- +2 |object Foo + |^ + |object Foo has already been compiled once during this run +java.lang.Thread.getStackTrace(Thread.java:1552) +dotty.tools.dotc.reporting.Reporting.error(Reporter.scala:139) +dotty.tools.dotc.core.Contexts$Context.error(Contexts.scala:71) +dotty.tools.dotc.typer.Namer.errorName$2(Namer.scala:300) +dotty.tools.dotc.typer.Namer.checkNoConflict$1(Namer.scala:306) +dotty.tools.dotc.typer.Namer.createSymbol(Namer.scala:353) +dotty.tools.dotc.typer.Namer.recur$1(Namer.scala:490) +dotty.tools.dotc.typer.Namer.recur$3$$anonfun$2(Namer.scala:495) +... 
+``` + +So, the error happened in the Namer's `checkNoConflict` method (after which all the stack frames represent the mechanics of issuing an error, not an intent that produced the error in the first place). + +## Configuring the printer output +Printing from the `show` and `-Xprint` is done from the Printers framework (discussed in more details below). The following settings influence the output of the printers: + +```scala +val printLines = BooleanSetting("-print-lines" , "Show source code line numbers.") withAbbreviation "--print-lines" +val uniqid = BooleanSetting("-uniqid" , "Uniquely tag all identifiers in debugging output.") withAbbreviation "--unique-id" +val XprintInline = BooleanSetting("-Xprint-inline" , "Show where inlined code comes from") +val XprintTypes = BooleanSetting("-Xprint-types" , "Print tree types (debugging option).") +val Ydebug = BooleanSetting("-Ydebug" , "Increase the quantity of debugging output.") +val YdebugFlags = BooleanSetting("-Ydebug-flags" , "Print all flags of definitions") +val YdebugMissingRefs = BooleanSetting("-Ydebug-missing-refs", "Print a stacktrace when a required symbol is missing") +val YdebugNames = BooleanSetting("-Ydebug-names" , "Show internal representation of names") +val YdebugPos = BooleanSetting("-Ydebug-pos" , "Show full source positions including spans") +val YdebugTrace = BooleanSetting("-Ydebug-trace" , "Trace core operations") +val YdebugTreeWithId = IntSetting ("-Ydebug-tree-with-id", "Print the stack trace when the tree with the given id is created", Int.MinValue) +val YprintDebug = BooleanSetting("-Yprint-debug" , "when printing trees, print some extra information useful for debugging.") +val YprintDebugOwners = BooleanSetting("-Yprint-debug-owners", "when printing trees, print owners of definitions.") +val YprintPos = BooleanSetting("-Yprint-pos" , "show tree positions.") +val YprintPosSyms = BooleanSetting("-Yprint-pos-syms" , "show symbol definitions positions.") +val YprintSyms = BooleanSetting("-Yprint-syms" , "when printing trees print info in symbols instead of corresponding info in trees.") +val YshowTreeIds = BooleanSetting("-Yshow-tree-ids" , "Uniquely tag all tree nodes in debugging output.") +val YshowVarBounds = BooleanSetting("-Yshow-var-bounds" , "Print type variables with their bounds") +val YtestPickler = BooleanSetting("-Ytest-pickler" , "self-test for pickling functionality; should be used with -Ystop-after:pickler") +``` + +They are defined in [ScalaSettings.scala](https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala). E.g. `YprintPos` is defined as: + +```scala +val YprintPos: Setting[Boolean] = BooleanSetting("-Yprint-pos", "show tree positions.") +``` + +And is to be used as: + +```scala +dotc -Yprint-pos ../issues/Playground.scala +``` + +If used, all the trees output with `show` or via `-Xprint:typer` will also have positions attached to them, e.g.: + +```scala +package @ { + module object Playground { + def main( + args: + Array@[String@]@< + Playground.scala:2 + > + @ + ) = + { + println@("Hello World"@)@< + Playground.scala:3 + > + }@ + @ + }@ +}@ +@ +``` + +## Figuring out an object creation site +### Via ID +Every [Positioned](https://github.com/lampepfl/dotty/blob/10526a7d0aa8910729b6036ee51942e05b71abf6/compiler/src/dotty/tools/dotc/ast/Positioned.scala) (a parent class of `Tree`) object has a `uniqueId` field. It is an integer that is unique for that tree and doesn't change from compile run to compile run. 
You can output these IDs from any printer (such as the ones used by `.show` and `-Xprint`) via `-Yshow-tree-ids` flag, e.g.: + +```shell +dotc -Xprint:typer -Yshow-tree-ids ../issues/Playground.scala +``` + +Gives: + +```scala +package #1047 { + final lazy module val Playground: Playground$#1049 = + new Playground$#1049#1050#1051()#1052 + #1053 + final module class Playground$() extends Object#1090#1091#1092()#1093, _root_# + 1061 + .scala#1062.Serializable#1063 { this: Playground#1054.type#1055 => + def main(args: Array#1028[String#1033]#1034#1038): Unit#1039 = + { + println#1094("Hello World"#1041)#1095 + }#1096 + #1097 + }#1099 +}#1100 +``` + +You can then use these IDs to locate the creation site of a given tree using that ID via `-Ydebug-tree-with-id`, e.g.: + +```shell +dotc -Ydebug-tree-with-id 1049 ../issues/Playground.scala +``` + +When the tree with the correspond id is allocated, the following prompt will appear: + +``` +Debug tree (id=1049) creation +Ident(Playground$) + + +a)bort, s)tack, r)esume +``` + +If you input `s`, you will get a stack trace like this: + +``` +java.lang.Throwable + at dotty.tools.dotc.reporting.Reporter$.loop$1(Reporter.scala:55) + at dotty.tools.dotc.reporting.Reporter$.displayPrompt(Reporter.scala:63) + at dotty.tools.dotc.ast.Positioned.printTrace$1(Positioned.scala:32) + at dotty.tools.dotc.ast.Positioned.uniqueId_$eq(Positioned.scala:34) + at dotty.tools.dotc.ast.Positioned.(Positioned.scala:45) + at dotty.tools.dotc.ast.Trees$Tree.(Trees.scala:53) + at dotty.tools.dotc.ast.Trees$DenotingTree.(Trees.scala:266) + at dotty.tools.dotc.ast.Trees$NameTree.(Trees.scala:292) + at dotty.tools.dotc.ast.Trees$RefTree.(Trees.scala:298) + at dotty.tools.dotc.ast.Trees$Ident.(Trees.scala:375) + at dotty.tools.dotc.ast.untpd$.Ident(untpd.scala:301) + at dotty.tools.dotc.ast.desugar$.moduleDef(Desugar.scala:804) + at dotty.tools.dotc.ast.desugar$.defTree(Desugar.scala:1038) + at dotty.tools.dotc.typer.Namer.expand(Namer.scala:441) + at dotty.tools.dotc.typer.Namer.index$$anonfun$1(Namer.scala:722) + at dotty.runtime.function.JProcedure1.apply(JProcedure1.java:15) + at dotty.runtime.function.JProcedure1.apply(JProcedure1.java:10) + at scala.collection.immutable.List.foreach(List.scala:392) + at dotty.tools.dotc.typer.Namer.index(Namer.scala:722) + at dotty.tools.dotc.typer.Namer.recur$1(Namer.scala:484) + at dotty.tools.dotc.typer.Namer.indexExpanded(Namer.scala:501) + at dotty.tools.dotc.typer.Namer.index(Namer.scala:474) + at dotty.tools.dotc.typer.FrontEnd.enterSyms$$anonfun$1(FrontEnd.scala:69) + at dotty.runtime.function.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:12) + at dotty.tools.dotc.typer.FrontEnd.monitor(FrontEnd.scala:41) + at dotty.tools.dotc.typer.FrontEnd.enterSyms(FrontEnd.scala:71) + at dotty.tools.dotc.typer.FrontEnd.runOn(FrontEnd.scala:100) + at dotty.tools.dotc.Run.runPhases$4$$anonfun$4(Run.scala:158) + at dotty.runtime.function.JProcedure1.apply(JProcedure1.java:15) + at dotty.runtime.function.JProcedure1.apply(JProcedure1.java:10) + at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36) + at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33) + at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198) + at dotty.tools.dotc.Run.runPhases$5(Run.scala:170) + at dotty.tools.dotc.Run.compileUnits$$anonfun$1(Run.scala:178) + at dotty.runtime.function.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:12) + at dotty.tools.dotc.util.Stats$.maybeMonitored(Stats.scala:102) + at 
dotty.tools.dotc.Run.compileUnits(Run.scala:185) + at dotty.tools.dotc.Run.compileSources(Run.scala:120) + at dotty.tools.dotc.Run.compile(Run.scala:104) + at dotty.tools.dotc.Driver.doCompile(Driver.scala:34) + at dotty.tools.dotc.Driver.process(Driver.scala:172) + at dotty.tools.dotc.Driver.process(Driver.scala:141) + at dotty.tools.dotc.Driver.process(Driver.scala:153) + at dotty.tools.dotc.Driver.main(Driver.scala:180) + at dotty.tools.dotc.Main.main(Main.scala) +``` + +So that tree was created at: + +``` + at dotty.tools.dotc.ast.desugar$.moduleDef(Desugar.scala:804) +``` + +This is because all the stack frames above it are technical frames executing the tree creation command, while the frame in question is the location where the intent to create the tree was expressed. + +### Via tracer +Some objects may not be `Positioned` and hence their creation site is not debuggable via the technique in the section above. Say you target a tree at `Typer`'s `typed` method as follows: + +```scala +if (tree.show == """println("Hello World")""") { + val creationSite = "" + println(creationSite) +} +``` + +In other words, you have a reference to the object and want to know where it was created. To do so, go to the class definition of that object. In our case, `tree` is a [`Tree`](https://github.com/lampepfl/dotty/blob/10526a7d0aa8910729b6036ee51942e05b71abf6/compiler/src/dotty/tools/dotc/ast/Trees.scala#L52). Now, create a new `val` member of that type: + +```scala +val tracer = Thread.currentThread.getStackTrace.mkString("\n") +``` + +Then, from the `typed` method (or wherever you want to access the trace): + +```scala +if (tree.show == """println("Hello World")""") { + val creationSite = tree.tracer + println(creationSite) +} +``` + +## Built-in Logging Architecture +Dotty has a lot of debug calls scattered throughout the code, most of which are disabled by default. At least three (possibly intertwined) architectures for logging are used for that: + +- Printer +- Tracing +- Reporter + +These do not follow any particular system and so it will probably be easier to go with `println` most of the time instead. + +### Printers +Defined in [Printers.scala](https://github.com/lampepfl/dotty/blob/10526a7d0aa8910729b6036ee51942e05b71abf6/compiler/src/dotty/tools/dotc/config/Printers.scala) as a set of variables, each responsible for its own domain. To enable them, replace `noPrinter` with `default`. [Example](https://github.com/lampepfl/dotty/blob/10526a7d0aa8910729b6036ee51942e05b71abf6/compiler/src/dotty/tools/dotc/typer/Typer.scala#L2226) from the code: + +```scala +typr.println(i"make contextual function $tree / $pt ---> $ifun") +``` + +`typr` is a printer. + +### Tracing +Defined in [trace.scala](https://github.com/lampepfl/dotty/blob/10526a7d0aa8910729b6036ee51942e05b71abf6/compiler/src/dotty/tools/dotc/reporting/trace.scala). [Example](https://github.com/lampepfl/dotty/blob/10526a7d0aa8910729b6036ee51942e05b71abf6/compiler/src/dotty/tools/dotc/typer/Typer.scala#L2232) from the code: + +```scala +trace(i"typing $tree", typr, show = true) { // ... +``` + +To enable globally, change [tracingEnabled](https://github.com/lampepfl/dotty/blob/10526a7d0aa8910729b6036ee51942e05b71abf6/compiler/src/dotty/tools/dotc/config/Config.scala#L164) to `true` (will recompile a lot of code). + +You also need to set the printer referenced in the call (in the example, `typr`) to `default` as explained in the section on printers.
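+For instance, turning on the `typr` printer used in the example above amounts to a one-line change in `compiler/src/dotty/tools/dotc/config/Printers.scala`. The snippet below is only a sketch of that change; the exact declaration in your checkout may differ slightly:
+
+```scala
+// compiler/src/dotty/tools/dotc/config/Printers.scala (sketch)
+// Before: the printer is silenced, so its output is dropped.
+// val typr = noPrinter
+// After: route its output to the default printer so the traces become visible.
+val typr = default
+```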
+ +To enable for a single trace, do the following: + +```scala +trace.force(i"typing $tree", typr, show = true) { // ... +``` + +### Reporter +Defined in [Reporter.scala](https://github.com/lampepfl/dotty/blob/10526a7d0aa8910729b6036ee51942e05b71abf6/compiler/src/dotty/tools/dotc/reporting/Reporter.scala). Enables calls such as `report.log`. To enable, run dotc with `-Ylog:typer` option. diff --git a/scala3doc/dotty-docs/docs/docs/contributing/getting-started.md b/scala3doc/dotty-docs/docs/docs/contributing/getting-started.md new file mode 100644 index 000000000000..142c9282d64f --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/contributing/getting-started.md @@ -0,0 +1,78 @@ +--- +layout: doc-page +title: Getting Started +--- + + + +Requirements +------------ +Make sure that you are using macOS or Linux (or WSL on Windows) with Java 8 or newer. You can determine which version of the JDK is the +default by typing `java -version` in a Terminal window. + +Compiling and Running +--------------------- +Start by cloning the repository: + +```bash +$ git clone https://github.com/lampepfl/dotty.git +$ cd dotty +$ sbt managedSources # Needed for IDE import to succeed +``` + +Dotty provides a standard sbt build: compiling, running and starting a repl can +all be done from within sbt: + +```bash +$ sbt +> dotc tests/pos/HelloWorld.scala +> dotr HelloWorld +hello world +``` + +There are also bash scripts that can be used in the same way. Assuming that you have cloned the Dotty repo locally, append +the following line on your `.bash_profile`: + +```shell +$ export PATH=$HOME/dotty/bin:$PATH +``` + +and you will be able to run the corresponding commands directly from your console: + +```shell +# Compile code using Dotty +$ dotc tests/pos/HelloWorld.scala + +# Run it with the proper classpath +$ dotr HelloWorld +``` + + +Starting a REPL +--------------- +```bash +$ sbt +> repl +Welcome to Scala.next (pre-alpha) (Java HotSpot(TM) 64-Bit Server VM, Java 1.8.0_101). +Type in expressions to have them evaluated. +Type :help for more information. +scala> +``` + +or via bash: + +```bash +$ dotr +``` + + +Generating Documentation +------------------------- +To generate this page and other static page docs, run +```bash +$ sbt +> genDocs +``` + +Before contributing to Dotty, we invite you to consult the +[Dotty Developer Guidelines](https://github.com/lampepfl/dotty/blob/master/CONTRIBUTING.md). diff --git a/scala3doc/dotty-docs/docs/docs/contributing/procedures/release.md b/scala3doc/dotty-docs/docs/docs/contributing/procedures/release.md new file mode 100644 index 000000000000..14386e152238 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/contributing/procedures/release.md @@ -0,0 +1,155 @@ +--- +layout: doc-page +title: Release Procedure +--- + +# Model +The easiest way to produce a release of a GitHub-based open-source software is to tag the most recent commit on the `master` with the version number at regular intervals of time or once a previously agreed milestone is reached. However, this approach to releasing would rest on the assumption that each commit at the `master` branch can potentially be made into a release. We cannot provide the release-grade quality guarantees for each of the `master` commits, though. + +Consequently, in Dotty, we are using the method above – releasing-by-tagging – to mark release candidates (RC’s) and not the stable releases. The stable releases are also marked by a tag, but we have a procedure to assure their quality. 
+ +An RC is promoted to a stable release in one release cycle after its creation. The idea is that this way, we have one release cycle's time to examine the release candidate and find critical issues which cannot be allowed into a stable release. + +If such issues are found, their fixes end up on a separate branch dedicated to that release. In one release cycle after the RC creation, the RC, along with all its subsequent fixes, is promoted to a stable release by means of tagging it. + +# Example +Say we want to release the 0.14.0 version. In this section we describe the process to do so (at a glance). + +## At the Dotty Repo +1. Tag the latest `master` commit as `0.14.0-RC1`. This commit is the release candidate for the `0.14.0` version. +2. Create a branch from that commit called `0.14.x`. This branch is intended to host the subsequent fixes to the RC for the issues that cannot be allowed in the `0.14.0` stable release. +3. Up until the next release date, if we find some issues with `0.14.0-RC1` that cannot end up in the release, we push the fixes to the `0.14.x` branch. +4. At the next release date, we release `0.14.0` from the branch `0.14.x`. We do so by tagging the latest commit at the `0.14.x` branch as `0.14.0`. Some things to note here: + 1. At this point, `0.14.x` (the branch) and `0.14.0-RC1` (the tag at which `0.14.x` branched from `master`) may differ, and the `0.14.x` branch is a more mature version of the `0.14.0-RC1` tag. + 2. `0.14.0` is not the same as the `master`. Only the commits critical for the `0.14.0` release end up at `0.14.x` branch. Not all of the commits made to the `master` during the release cycle are critical to `0.14.0`. However, all of the commits from `0.14.x` must end up on the `master` branch, so we merge `0.14.x` into `master`. +5. After the `0.14.0` version is released, we start the process for releasing `0.15.0` – repeat this algorithm from the beginning with the version set to `0.15.0-RC1` at step (1). + +## At the CI +CI is set to automatically detect the tags of the format discussed above and perform the required release operations. Precisely, it will do two things for the release tags: + +- Publish the release jars to Maven +- Create the drafts at the GitHub [release](https://github.com/lampepfl/dotty/releases) page of the repository with the artefacts of the release. + +The CI operation is entirely automatic provided you have tagged the release correctly. No need to do anything here. + +### Canceling CI builds +**The below guidelines are needed only to speed up things. It is no mistake if you skip this section. However, if you do things wrong here, there may be trouble. So do it only if you feel yourself confident with the release cycle and the workings of the CI.** + +Note that after the first stage of the release cycle (see "Publishing artifacts to Maven via CI" section of the checklist below) only three test runs are required to be run at the CI: + +- `master` branch's latest *commit* with the updated `baseVersion` +- `` *tag* of the stable version being released +- `` *tag* of the RC version being released + +However, you may end up with as many as 6 tasks being run. The auxiliary tasks may include: + +- *commit* tests of the *tags* specified above. You may have two of these, corresponding to the two tags. You should see them appearing to have the same commit hash in the CI, but one of them will have the tag next to it and the other one will not. The *tag* one must remain, as the CI tasks on tags publish to maven. 
CI tasks on commits do not. So it is safe to cancel the task running on the commit, if the commit hash is the same as that of the tag's task commit. +- Older commit from the `master` branch. Look for all the tasks run on the `master` branch in the CI and see if there are more than one of these. Then, find the one testing the most recent commit of the branch. The others can safely be canceled. + +## Documentation +### Release Procedure Checklist +Before we start the release procedure, we create an issue with a release checklist. As we go through the release, we update the checklist. To generate the checklist, run the following command: + +`bash <(curl -sL https://raw.githubusercontent.com/lampepfl/dotty/master/docs/docs/contributing/checklist.sh) ` + +Above, `` is the stable version being released. For example, if you are releasing `0.14.0` and `0.15.0-RC1`, this variable is `14` and the command is as follows: + +`bash <(curl -sL https://raw.githubusercontent.com/lampepfl/dotty/master/docs/docs/contributing/checklist.sh) 14` + +Copy and paste the output into the release issue. + +The ecosystem update section for some projects also mentions a set of criteria upon which the project is to be marked in the checklist. When the Travis build status is specified next to the project's name, it is to be understood that this build must pass after all of the other criteria of that project are checked. Note that due to caching, the label image next to the link may not reflect the real status of the build. Therefore, to verify the status, click on the link and make sure that your recent commit passes. + +When no criteria is specified, common sense is to be used. + +### GitHub Releases and Blog Post +After the release is done, we document it as follows: + +- On the GitHub release page, modify the release drafts created by CI. The RC draft should include notable changes introduced since the previous RC. E.g. for `0.14.0-RC1` these are generated by `gren changelog -G --override -D prs --tags=0.13.0-RC1..0.14.0-RC1`. `gren` is available [here](https://github.com/github-tools/github-release-notes), and before running the above command, please make sure that (1) the `origin` branch points to the `lampepfl/dotty` repository and (2) the two tags mentioned in the command are pushed to the `master` branch of that repo. Otherwise, the command won't pick up the tags. +- Create a blog article documenting the most important changes done by the release. 
+ +## Ecosystem +After releasing a new version of Dotty, we need to make sure to update the following related projects: + +- [Example Project](https://github.com/lampepfl/dotty-example-project) + - To deploy locally: `git clone https://github.com/lampepfl/dotty-example-project.git && cd dotty-example-project/` + - To test locally: `sbt run` + - [Commit](https://github.com/lampepfl/dotty-example-project/commit/76bf0b4d708206b1901fa7f291f07cd470506e79) updating the Dotty version (only `README` and `build.sbt` files) +- [Example Project with Mill](https://github.com/lampepfl/dotty-example-project/tree/mill) + - Deploy: `git clone https://github.com/lampepfl/dotty-example-project && cd dotty-example-project && git checkout mill` + - Test: `mill root.run` + - [Commit 1](https://github.com/lampepfl/dotty-example-project/commit/e1ad1905ef38d07943e0c176333ba24e306a2078) – `build.sc` only; [Commit 2](https://github.com/lampepfl/dotty-example-project/commit/23bc5df89e72b782ab8e19157d6bbcb67eef30cd) +- [Dotty G8 template](https://github.com/lampepfl/dotty.g8) + - Deploy: `git clone https://github.com/lampepfl/dotty.g8.git` + - Test (note the relative path as argument to `sbt new`, hence this command should be run after Deploy in the same directory as Deploy): `sbt new file://./dotty.g8 --name=foo --description=bar && cd foo && sbt run` + - [Commit](https://github.com/lampepfl/dotty.g8/commit/0cde8fa843e15e916f07f22a196f35a5988b26af) +- [Dotty G8 template with cross build support](https://github.com/lampepfl/dotty-cross.g8) + - Deploy: `git clone https://github.com/lampepfl/dotty-cross.g8.git` + - Test: `sbt new file://./dotty-cross.g8 --name=foo --description=bar && cd foo && sbt run` + - [Commit](https://github.com/lampepfl/dotty-cross.g8/commit/0e3ea2ae8ba8d001e63e5098ff60d728036d358f) +- [Dotty Homebrew Formula](https://github.com/lampepfl/homebrew-brew) + - Deploy: `git clone https://github.com/lampepfl/homebrew-brew.git && cd homebrew-brew` + - [Commit](https://github.com/lampepfl/homebrew-brew/commit/04f7284564387754a360a354159f2f8d6156a6c7). SHA256 sum comes from the issue checklist computed for the release as specified above. The file with checksums is available at [GitHub Releases](https://github.com/lampepfl/dotty/releases) -> release in question -> assets -> `sha256sum.txt` -> `*.tar.gz` file sum. +- [Dotty test under various OSs](https://github.com/lampepfl/packtest) + - Deploy: `git clone https://github.com/lampepfl/packtest.git && cd packtest` + - [Commit](https://github.com/lampepfl/packtest/commit/6d3edf7333e0e788af7c4f4ab976b56905ddf9ed) +- [Scastie](https://github.com/scalacenter/scastie/) + - Deploy (copy-paste the command and append the release id, e.g. `15` for `0.15.0-RC1`): `git clone https://github.com/scalacenter/scastie.git && cd scastie && git remote add staging https://github.com/dotty-staging/scastie && git checkout -b dotty-release-` + - [PR](https://github.com/scalacenter/scastie/pull/433) – push your changes to `staging` repo (as defined in "Deploy" above) with `git push -u staging`, then submit the PR from there. 
+- [Dotty](https://github.com/lampepfl/dotty/) + - If you are reading this, Dotty should already be deployed on your machine :) + - [PR](https://github.com/lampepfl/dotty/pull/6557) +- [Scalac](https://github.com/scala/scala) + - Deploy: `git clone https://github.com/scala/scala.git && cd scala && git remote add staging https://github.com/dotty-staging/scala && git checkout -b dotty-release-` + - [PR](https://github.com/scala/scala/pull/7993) + +For each need to do the following: + +- Update Dotty version to the latest RC +- Update the sbt-dotty SBT plugin version to the latest published one +- Update the projects' source code to follow the Dotty developments if necessary + +# Procedure in Bash Scripts +The below procedure is compiled from [this](https://github.com/lampepfl/dotty/issues/5907#issue-409313505) and [this](https://github.com/lampepfl/dotty/issues/6235#issue-429265748) checklists. It assumes we want to publish the `0.14.0` given the `0.14.0-RC1` release candidate. + +Note that at the same time we will also publish the `0.15.0-RC1` release. We publish two releases at the same time as per the logic outlined at the [Example/At the Dotty Repo](#at-the-dotty-repo) and the [Model](#model) sections above: the step (5) in the algorithm outlined in the [Example](#at-the-dotty-repo) for the release cycle of `0.14.0` is the step (1) in the release cycle of `0.15.0`. + +The following commands assume a remote tracking repository named `origin` pointing to the main Dotty repository: `https://github.com/lampepfl/dotty.git`. + + +```bash + +######## Publish the 0.14.0 stable version – end the release cycle for 0.14.0 ######## +git checkout 0.14.x + +# Change `val baseVersion = "0.14.0-RC1"` to `val baseVersion = "0.14.0"` in project/Build.scala + +git commit -am 'Release Dotty 0.14.0' +git tag 0.14.0 +git push origin 0.14.0 + +git checkout master +git merge 0.14.x + +# Make sure the merge doesn't break anything. In doubt, create a PR to run the CL +git push origin master + +######## Publish the 0.15.0-RC1 unstable version – begin the release cycle for 0.15.0 ######## +# Move all the unfinished tasks from Milestone 15 to Milestone 16 on GitHub – see https://github.com/lampepfl/dotty/milestones + +git checkout -b 0.15.x + +# Change val baseVersion = "0.15.0" to val baseVersion = "0.15.0-RC1" + +git commit -am 'Release Dotty 0.15.0-RC1' +git tag 0.15.0-RC1 +git push origin 0.15.x +git push origin 0.15.0-RC1 + +git checkout master + +# Change val baseVersion = "0.15.0" to val baseVersion = "0.16.0" - this will be the next version after `0.15.0-RC1` is promoted to `0.15.0`. + +git commit -am 'Set baseVersion to 0.16.0' +git push origin master +``` \ No newline at end of file diff --git a/scala3doc/dotty-docs/docs/docs/contributing/procedures/vulpix.md b/scala3doc/dotty-docs/docs/docs/contributing/procedures/vulpix.md new file mode 100644 index 000000000000..5e8a2eab425b --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/contributing/procedures/vulpix.md @@ -0,0 +1,35 @@ +--- +layout: doc-page +title: Test Vulpix Framework +--- + +# Test Vulpix Framework +If you are modifying the Vulpix framework and need a playground with dummy tests to try out your modifications, do the following. 
+ +Create the directory structure for the playground: + +```bash +mkdir -pv tests/playground/run tests/playground/neg +echo "stuff" > tests/playground/neg/Sample.scala +echo 'object Test { def main(args: Array[String]): Unit = {println("Hi")} }' > tests/playground/run/Sample.scala +``` + +In `CompilationTests.scala`: + +```scala + @Test def exampleNeg: Unit = { + implicit val testGroup: TestGroup = TestGroup("exampleNeg") + compileFilesInDir("tests/playground/neg", defaultOptions).checkExpectedErrors() + } + + @Test def exampleRun: Unit = { + implicit val testGroup: TestGroup = TestGroup("exampleRun") + compileFilesInDir("tests/playground/run", defaultOptions).checkRuns() + } +``` + +SBT: + +```scala +testOnly dotty.tools.dotc.CompilationTests -- *example* +``` diff --git a/scala3doc/dotty-docs/docs/docs/contributing/scala2-vs-scala3.md b/scala3doc/dotty-docs/docs/docs/contributing/scala2-vs-scala3.md new file mode 100644 index 000000000000..19e12ff6e687 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/contributing/scala2-vs-scala3.md @@ -0,0 +1,43 @@ +--- +layout: doc-page +title: Divergences between Scala 2 and Dotty +--- + +# Divergences between Scala 2 and Dotty +The following issues were encountered when compiling Scala 2 code as-is under Dotty: + +## Scalafix candidates +- If a method is defined `toSet()`, it cannot be called `toSet`. +- “result type of implicit definition needs to be given explicitly” +- There are no `'Symbol`s in Scala 3, you must construct symbols via `new Symbol("foo")` instead of old `'foo` + +## Trivial +- Scala 2.13 libraries cannot be used from Dotty because the dotty-library is compiled against the 2.12 standard library which is not binary-compatible with the 2.13 one. We can't be compatible with both at the same time. +- To use Scala 2.12 dependencies from SBT with Dotty, use `withDottyCompat` as documented [here](https://github.com/lampepfl/dotty-example-project#getting-your-project-to-compile-with-dotty). +- Feature warnings about implicits `scala.language.implicitConversions` are output by default, unlike in Scala 2. This creates noise. It is unclear how to turn it off. + +Implicit conversions must be applied explicitly: + +```scala +implicit def IterablePath[T](s: Iterable[T])(implicit conv: T => RelPath): RelPath = { + s.foldLeft(rel){_ / conv(_)} +} +``` + +Stronger compile time guarantees on variance. Scala 2 does not assert variance on default parameters to parameters of the function value type. E.g. in geny: + +```scala +# Dotty +def count(f: A => Boolean = (a: A) => true): Int = +| ^^^^^^^^^^^^^^ +|covariant type A occurs in contravariant position in type => A => Boolean of method count$default$1 +``` + +Fix: +```scala +# Dotty +def count[B >: A](f: B => Boolean = (_: B) => true): Int = +``` + +## Tricky +- Scala 3 macros are completely different from Scala 2 ones and require a migration strategy of their own diff --git a/scala3doc/dotty-docs/docs/docs/contributing/testing.md b/scala3doc/dotty-docs/docs/docs/contributing/testing.md new file mode 100644 index 000000000000..cd2b6431a8de --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/contributing/testing.md @@ -0,0 +1,161 @@ +--- +layout: doc-page +title: Testing in Dotty +--- + +Running all tests in Dotty is as simple as: + +```bash +$ sbt test +``` + +Specifically, `sbt test` runs all tests that do _not_ require a bootstrapped +compiler. In practice, this means that it runs all compilation tests meeting +this criterion, as well as all non-compiler tests.
+ +The entire suite of tests can be run using the bootstrapped compiler as follows: + +```bash +$ sbt +> dotty-bootstrapped/test +``` + +There are currently several forms of tests in Dotty. These can be split into +two categories: + +## Unit tests +These tests can be found in `/test` and are used to check +functionality of specific parts of the codebase in isolation e.g: parsing, +scanning and message errors. + +To run all tests in e.g., for the compiler test-suite you can write: + +```bash +$ sbt +> dotty-compiler/test +``` + +To run a single test class you use `testOnly` and the fully qualified class name. +For example: + +```bash +> testOnly dotty.tools.dotc.transform.TreeTransformerTest +``` + +The test command follows a regular expression-based syntax `testOnly * -- *`. +The right-hand side picks a range of names for methods and the left-hand side picks a range of class names and their +fully-qualified paths. + +Consequently, you can restrict the aforementioned executed test to a subset of methods by appending ``-- *method_name``. +The example below picks up all methods with the name `canOverwrite`: + +```bash +> testOnly dotty.tools.dotc.transform.TreeTransformerTest -- *canOverwrite +``` + +Additionally, you can run all tests named `method_name`, in any class, without providing a class name: + +```bash +> testOnly -- *canOverwrite +``` + +You can also run all paths of classes of a certain name: + +```bash +> testOnly *.TreeTransformerTest +``` + +### Testing with checkfiles +Some tests support checking the output of the run or the compilation against a checkfile. A checkfile is a file in which the expected output of the compilation or run is defined. A test against a checkfile fails if the actual output mismatches the expected output. + +Currently, the `run` and `neg` (compilation must fail for the test to succeed) tests support the checkfiles. `run`'s checkfiles contain an expected run output of the successfully compiled program. `neg`'s checkfiles contain an expected error output during compilation. + +Absence of a checkfile is **not** a condition for the test failure. E.g. if a `neg` test fails with the expected number of errors and there is no checkfile for it, the test still passes. + +Checkfiles are located in the same directories as the tests they check, have the same name as these tests with the extension `*.check`. E.g. if you have a test named `tests/neg/foo.scala`, you can create a checkfile for it named `tests/neg/foo.check`. And if you have a test composed of several files in a single directory, e.g. `tests/neg/manyScalaFiles`, the checkfile will be `tests/neg/manyScalaFiles.check`. + +If the actual output mismatches the expected output, the test framework will dump the actual output in the file `*.check.out` and fail the test suite. It will also output the instructions to quickly replace the expected output with the actual output, in the following format: + +``` +Test output dumped in: tests/playground/neg/Sample.check.out + See diff of the checkfile + > diff tests/playground/neg/Sample.check tests/playground/neg/Sample.check.out + Replace checkfile with current output + > mv tests/playground/neg/Sample.check.out tests/playground/neg/Sample.check +``` + +To create a checkfile for a test, you can do one of the following: + +- Create a dummy checkfile with a random content, run the test, and, when it fails, use the `mv` command reported by the test to replace the dummy checkfile with the actual output. 
+- Manually compile the file you are testing with `dotc` and copy-paste whatever console output the compiler produces to the checkfile. + +## Integration tests +These tests are Scala source files expected to compile with Dotty (pos tests), +along with their expected output (run tests) or errors (neg tests). + +All of these tests are contained in the `./tests/*` directories and can be run with the `testCompilation` command. Tests in folders named `with-compiler` are an exception, see next section. + +Currently to run these tests you need to invoke from sbt: + +```bash +$ sbt +> testCompilation +``` + +(which is effectively the same with `testOnly dotty.tools.dotc.CompilationTests`) + +It is also possible to run tests filtered, again from sbt: + +```bash +$ sbt +> testCompilation companions +``` + +This will run both the test `./tests/pos/companions.scala` and +`./tests/neg/companions.scala` since both of these match the given string. +This also means that you could run `testCompilation` with no arguments to run all integration tests. + +When complex checkfiles must be updated, `testCompilation` can run in a mode where it overrides the checkfiles with the test outputs. +```bash +$ sbt +> testCompilation --update-checkfiles +``` + +Use `--help` to see all the options +```bash +$ sbt +> testCompilation --help +``` + +### Bootstrapped-only tests + +To run `testCompilation` on a bootstrapped Dotty compiler, use +`dotty-compiler-bootstrapped/testCompilation` (with the same syntax as above). +Some tests can only be run in bootstrapped compilers; that includes all tests +with `with-compiler` in their name. + +### From TASTy tests + +`testCompilation` has an additional mode to run tests that compile code from a `.tasty` file. + Modify blacklist and whitelists in `compiler/test/dotc` to enable or disable tests from `.tasty` files. + + ```bash + $ sbt + > testCompilation --from-tasty + ``` + + This mode can be run under `dotty-compiler-bootstrapped/testCompilation` to test on a bootstrapped Dotty compiler. + +### SemanticDB tests + +The output of the `extractSemanticDB` phase, enabled with `-Ysemanticdb` is tested with the bootstrapped JUnit test +`dotty.tools.dotc.semanticdb.SemanticdbTests`. It uses source files in `tests/semanticdb/expect` to generate +two kinds of output file that are compared with "expect files": placement of semanticdb symbol occurrences inline in +sourcecode (`*.expect.scala`), for human verification by inspection; and secondly metap formatted output which outputs +all information stored in semanticdb (`metac.expect`). +Expect files are used as regression tests to detect changes in the compiler. + +The test suite will create a new file if it detects any difference, which can be compared with the +original expect file, or if the user wants to globally replace all expect files for semanticdb they can use +`dotty-compiler-bootstrapped/test:runMain dotty.tools.dotc.semanticdb.updateExpect`, and compare the changes via version +control. 
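+
+As a small, self-contained illustration of the checkfile convention described earlier, consider a hypothetical `run` test (the file names below are invented for the example):
+
+```scala
+// tests/run/hello.scala (hypothetical test file)
+object Test {
+  def main(args: Array[String]): Unit =
+    println("hello, checkfile")
+}
+```
+
+Its checkfile, `tests/run/hello.check`, would then simply contain the expected output of the run:
+
+```
+hello, checkfile
+```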
diff --git a/scala3doc/dotty-docs/docs/docs/contributing/tools/mill.md b/scala3doc/dotty-docs/docs/docs/contributing/tools/mill.md new file mode 100644 index 000000000000..7d372835395f --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/contributing/tools/mill.md @@ -0,0 +1,21 @@ +--- +layout: doc-page +title: Basic Operations with Mill +--- + +Here's an example of how to test a project that uses mill: + +```bash +mill utest.jvm[2.12.8].test +``` + +- `utest.jvm` - the name of the compiled module (obtain from `build.sc`) +- `2.12.8` – Scala cross-compile version +- `test` – task to run on the module specified with the specified Scala version + +To get mill of the most recent version, first, find it in https://github.com/lihaoyi/mill/releases (e.g. `0.4.2-1-020e28`). Copy the download link and substitute it in the following command instead of `https://github.com/lihaoyi/mill/releases/download/0.4.1/0.4.1`: + +```bash +# From http://www.lihaoyi.com/mill/ +sudo sh -c '(echo "#!/usr/bin/env sh" && curl -L https://github.com/lihaoyi/mill/releases/download/0.4.1/0.4.1) > /usr/local/bin/mill && chmod +x /usr/local/bin/mill' +``` diff --git a/scala3doc/dotty-docs/docs/docs/contributing/tools/scalafix.md b/scala3doc/dotty-docs/docs/docs/contributing/tools/scalafix.md new file mode 100644 index 000000000000..58c7d0eb7b3a --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/contributing/tools/scalafix.md @@ -0,0 +1,20 @@ +--- +layout: doc-page +title: Working with Scalafix +--- + +# Working with Scalafix + +First, create a new rule as follows (command from https://scalacenter.github.io/scalafix/docs/developers/setup.html): + +```bash +sbt new scalacenter/scalafix.g8 --repo="Repository Name" +``` + +To run the rule against some codebase: + +```bash +scalafix -r file:scalafix/rules/src/main/scala/fix/YourRule.scala your/code/base/ +``` + +Where `YourRule.scala` is the rule you developed and `your/code/base` is the code base you are running the rule against. diff --git a/scala3doc/dotty-docs/docs/docs/contributing/workflow.md b/scala3doc/dotty-docs/docs/docs/contributing/workflow.md new file mode 100644 index 000000000000..f847cb708be0 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/contributing/workflow.md @@ -0,0 +1,77 @@ +--- +layout: doc-page +title: Workflow +--- + +Check [Getting Started](getting-started.md) for instructions on how to obtain the source code of dotty. +This document details common workflow patterns when working with Dotty. + +## Compiling files with dotc ## + +As we have seen you can compile a test file either from sbt: + +```bash +$ sbt +> dotc +``` + +or from terminal: + +```bash +$ dotc +``` + +Here are some useful debugging ``: + +* `-Xprint:PHASE1,PHASE2,...` or `-Xprint:all`: prints the `AST` after each + specified phase. Phase names can be found by examining the + `dotty.tools.dotc.transform.*` classes for their `phaseName` field e.g., `-Xprint:erasure`. + You can discover all phases in the `dotty.tools.dotc.Compiler` class +* `-Ylog:PHASE1,PHASE2,...` or `-Ylog:all`: enables `ctx.log("")` logging for + the specified phase. +* `-Ycheck:all` verifies the consistency of `AST` nodes between phases, in + particular checks that types do not change. Some phases currently can't be + `Ycheck`ed, therefore in the tests we run: + `-Ycheck:tailrec,resolveSuper,mixin,restoreScopes,labelDef`. +* the last frontier of debugging (before actual debugging) is the range of logging capabilities that +can be enabled through the `dotty.tools.dotc.config.Printers` object. 
Change any of the desired printer from `noPrinter` to +`default` and this will give you the full logging capability of the compiler. + +## Inspecting Trees with Type Stealer ## + +There is no power mode for the REPL yet, but you can inspect types with the +type stealer: + +```bash +$ sbt +> repl +scala> import dotty.tools.DottyTypeStealer._; import dotty.tools.dotc.core._; import Contexts._,Types._ +``` + +Now, you can define types and access their representation. For example: + +```scala +scala> val s = stealType("class O { type X }", "O#X") +scala> implicit val ctx: Context = s._1 +scala> val t = s._2(0) +t: dotty.tools.dotc.core.Types.Type = TypeRef(TypeRef(ThisType(TypeRef(NoPrefix,)),O),X) +scala> val u = t.asInstanceOf[TypeRef].underlying +u: dotty.tools.dotc.core.Types.Type = TypeBounds(TypeRef(ThisType(TypeRef(NoPrefix,scala)),Nothing), TypeRef(ThisType(TypeRef(NoPrefix,scala)),Any)) +``` + +## Pretty-printing ## +Many objects in the dotc compiler implement a `Showable` trait (e.g. `Tree`, +`Symbol`, `Type`). These objects may be prettyprinted using the `.show` +method + +## SBT Commands Cheat Sheet ## +The basics of working with Dotty codebase are documented [here](https://dotty.epfl.ch/docs/contributing/getting-started.html) and [here](https://dotty.epfl.ch/docs/contributing/workflow.html). Below is a cheat sheet of some frequently used commands (to be used from SBT console – `sbt`). + + +| Command | Description | +|------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------| +| `dotc ../issues/Playground.scala` | Compile the given file – path relative to the Dotty directory. Output the compiled class files to the Dotty directory itself. | +| `dotr Playground` | Run the compiled class `Playground`. Dotty directory is on classpath by default. | +| `repl` | Start REPL | +| `testOnly dotty.tools.dotc.CompilationTests -- *pos` | Run test (method) `pos` from `CompilationTests` suite. | +| `testCompilation sample` | In all test suites, run test files containing the word `sample` in their title. | diff --git a/scala3doc/dotty-docs/docs/docs/index.md b/scala3doc/dotty-docs/docs/docs/index.md new file mode 100644 index 000000000000..b83d4419e4c6 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/index.md @@ -0,0 +1,22 @@ +--- +layout: doc-page +title: "Dotty Documentation" +--- + +Dotty is the project name for technologies that are considered for inclusion in Scala 3. Scala has +pioneered the fusion of object-oriented and functional programming in a typed setting. Scala 3 will +be a big step towards realizing the full potential of these ideas. Its main objectives are to + +- become more opinionated by promoting programming idioms we found to work well, +- simplify where possible, +- eliminate inconsistencies and surprising behaviors, +- build on strong foundations to ensure the design hangs well together, +- consolidate language constructs to improve the language’s consistency, safety, ergonomics, and performance. + +In this documentation you will find information on how to use the Dotty compiler on your machine, +navigate through the code, setup Dotty with your favorite IDE and more! 
+ +Table of Contents +================= +{% assign titles = sidebar.titles %} +{% include "table-of-contents" %} diff --git a/scala3doc/dotty-docs/docs/docs/internals/backend.md b/scala3doc/dotty-docs/docs/docs/internals/backend.md new file mode 100644 index 000000000000..5642931395fa --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/internals/backend.md @@ -0,0 +1,126 @@ +--- +layout: doc-page +title: "Backend Internals" +--- + +The code for the backend is split up by functionality and assembled in the +object `GenBCode`. + +``` +object GenBCode --- [defines] --> PlainClassBuilder GenBCode also defines class BCodePhase, the compiler phase + | | + [extends] [extends] + | | +BCodeSyncAndTry ----------------> SyncAndTryBuilder + | | +BCodeBodyBuilder ----------------> PlainBodyBuilder + | | +BCodeSkelBuilder ----------------> PlainSkelBuilder + | / | \ + BCodeHelpers ----------------> BCClassGen BCAnnotGen ... (more components) + | \ \ + | \ \-------------> helper methods + | \ \------------> JMirrorBuilder, JBeanInfoBuilder (uses some components, e.g. BCInnerClassGen) + | \ + | BytecodeWriters ---------> methods and classes to write byte code files + | + BCodeTypes ----------------> maps and fields for common BTypes, class Tracked, methods to collect information on classes, tests for BTypes (conforms), ... + | +BCodeIdiomatic ----------------> utilities for code generation, e.g. genPrimitiveArithmetic + | + BCodeGlue ----------------> BType class, predefined BTypes +``` + +### Data Flow ### +Compiler creates a `BCodePhase`, calls `runOn(compilationUnits)`. + +* initializes fields of `GenBCode` defined in `BCodeTypes` (BType maps, + common BTypes like `StringReference`) +* initialize `primitives` map defined in `scalaPrimitives` (maps primitive + members, like `int.+`, to bytecode instructions) +* creates `BytecodeWriter`, `JMirrorBuilder` and `JBeanInfoBuilder` instances + (on each compiler run) +* `buildAndSendToDisk(units)`: uses work queues, see below. + - `BCodePhase.addToQ1` adds class trees to `q1` + - `Worker1.visit` creates ASM `ClassNodes`, adds to `q2`. It creates one + `PlainClassBuilder` for each compilation unit. + - `Worker2.addToQ3` adds byte arrays (one for each class) to `q3` + - `BCodePhase.drainQ3` writes byte arrays to disk + + +### Architecture ### +The architecture of `GenBCode` is the same as in Scalac. It can be partitioned +into weakly coupled components (called "subsystems" below): + + +#### (a) The queue subsystem #### +Queues mediate between processors, queues don't know what each processor does. + +The first queue contains AST trees for compilation units, the second queue +contains ASM ClassNodes, and finally the third queue contains byte arrays, +ready for serialization to disk. + +Currently the queue subsystem is all sequential, but as can be seen in +http://magarciaepfl.github.io/scala/ the above design enables overlapping (a.1) +building of `ClassNodes`, (a.2) intra-method optimizations, and (a.3) +serialization to disk. + +This subsystem is described in detail in `GenBCode.scala` + +#### (b) Bytecode-level types, BType #### +The previous bytecode emitter goes to great lengths to reason about +bytecode-level types in terms of Symbols. + +`GenBCode` uses `BType` as a more direct representation. A `BType` is immutable, and +a value class (once the rest of GenBCode is merged from +http://magarciaepfl.github.io/scala/ ). + +Whether value class or not, its API is the same. That API doesn't reach into +the type checker. 
Instead, each method on a `BType` answers a question that can +be answered based on the `BType` itself. Sounds too simple to be good? It's a +good building block, that's what it is. + +The internal representation of a `BType` is based on what the JVM uses: internal +names (e.g. `Ljava/lang/String;` ) and method descriptors; as defined in the JVM +spec (that's why they aren't documented in `GenBCode`, just read the [JVM 8 spec](https://docs.oracle.com/javase/specs/jvms/se8/html/)). + +All things `BType` can be found in `BCodeGlue.scala` + +#### (c) Utilities offering a more "high-level" API to bytecode emission #### +Bytecode can be emitted one opcode at a time, but there are recurring patterns +that call for a simpler API. + +For example, when emitting a load-constant, a dedicated instruction exists for +emitting load-zero. Similarly, emitting a switch can be done according to one +of two strategies. + +All these utilities are encapsulated in file `BCodeIdiomatic.scala`. They know +nothing about the type checker (because, just between us, they don't need to). + +#### (d) Mapping between type-checker types and BTypes #### +So that (c) can remain oblivious to what AST trees contain, some bookkeepers +are needed: + + - Tracked: for a bytecode class (BType), its superclass, directly declared + interfaces, and inner classes. + +To understand how it's built, see: + +```scala +final def exemplar(csym0: Symbol): Tracked = { ... } +``` + +Details in `BCodeTypes.scala` + +#### (e) More "high-level" utilities for bytecode emission #### +In the spirit of `BCodeIdiomatic`, utilities are added in `BCodeHelpers` for +emitting: + +- bean info class +- mirror class and their forwarders +- android-specific creator classes +- annotations + + +#### (f) Building an ASM ClassNode given an AST TypeDef #### +It's done by `PlainClassBuilder`(see `GenBCode.scala`). 
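
For readers less familiar with ASM, the sketch below shows what "building a `ClassNode` and serializing it to a byte array" looks like at the plain ASM level. This is not dotc's `PlainClassBuilder`; the class name `demo/Empty` is made up, and dotc derives names, members and instructions from the typed AST rather than hard-coding them as done here.

```scala
import org.objectweb.asm.{ClassWriter, Opcodes}
import org.objectweb.asm.tree.{ClassNode, InsnNode, MethodInsnNode, MethodNode, VarInsnNode}

object AsmSketch {
  /** Assemble a ClassNode for an empty class `demo.Empty` and serialize it to bytecode. */
  def emptyClassBytes(): Array[Byte] = {
    val cn = new ClassNode()
    cn.version = Opcodes.V1_8
    cn.access = Opcodes.ACC_PUBLIC
    cn.name = "demo/Empty"             // JVM internal name
    cn.superName = "java/lang/Object"

    // A no-argument constructor that just calls super().
    val init = new MethodNode(Opcodes.ACC_PUBLIC, "<init>", "()V", null, null)
    init.instructions.add(new VarInsnNode(Opcodes.ALOAD, 0))
    init.instructions.add(new MethodInsnNode(Opcodes.INVOKESPECIAL, "java/lang/Object", "<init>", "()V", false))
    init.instructions.add(new InsnNode(Opcodes.RETURN))
    init.maxStack = 1
    init.maxLocals = 1
    cn.methods.add(init)

    // Building the ClassNode roughly corresponds to Worker1's job; turning it into a byte
    // array to Worker2's; writing the bytes to disk is what BCodePhase.drainQ3 does.
    val cw = new ClassWriter(ClassWriter.COMPUTE_FRAMES)
    cn.accept(cw)
    cw.toByteArray
  }
}
```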
diff --git a/scala3doc/dotty-docs/docs/docs/internals/classpaths.md b/scala3doc/dotty-docs/docs/docs/internals/classpaths.md new file mode 100644 index 000000000000..4e504674cb5f --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/internals/classpaths.md @@ -0,0 +1,47 @@ +--- +layout: doc-page +title: Classpaths +--- + +When ran from the `dotty` script, this is the classloader stack: + +``` +===================================================== +class sun.misc.Launcher$AppClassLoader <= corresponds to java.class.path +sun.misc.Launcher$AppClassLoader@591ce4fe +file:/mnt/data-local/Work/Workspace/dev-2.11/dotty/target/scala-2.11.0-M7/dotty_2.11.0-M7-0.1-SNAPSHOT.jar:file:/home/sun/.ivy2/cache/org.scala-lang/scala-library/jars/scala-library-2.11.0-M7.jar:file:/home/sun/.ivy2/cache/org.scala-lang/scala-reflect/jars/scala-reflect-2.11.0-M7.jar +===================================================== +class sun.misc.Launcher$ExtClassLoader <= corresponds to sun.boot.class.path +sun.misc.Launcher$ExtClassLoader@77fe0d66 +file:/usr/lib/jvm/java-7-oracle/jre/lib/ext/sunpkcs11.jar:file:/usr/lib/jvm/java-7-oracle/jre/lib/ext/localedata.jar:file:/usr/lib/jvm/java-7-oracle/jre/lib/ext/zipfs.jar:file:/usr/lib/jvm/java-7-oracle/jre/lib/ext/sunec.jar:file:/usr/lib/jvm/java-7-oracle/jre/lib/ext/sunjce_provider.jar:file:/usr/lib/jvm/java-7-oracle/jre/lib/ext/dnsns.jar +===================================================== +``` + +When running from sbt or Eclipse, the classloader stack is: + +``` +===================================================== +class sbt.classpath.ClasspathUtilities$$anon$1 +sbt.classpath.ClasspathUtilities$$anon$1@22a29f97 +file:/mnt/data-local/Work/Workspace/dev-2.11/dotty/target/scala-2.11.0-M7/classes/:file:/home/sun/.ivy2/cache/org.scala-lang/scala-library/jars/scala-library-2.11.0-M7.jar:file:/home/sun/.ivy2/cache/org.scala-lang/scala-reflect/jars/scala-reflect-2.11.0-M7.jar:file:/home/sun/.ivy2/cache/org.scala-lang.modules/scala-xml_2.11.0-M7/bundles/scala-xml_2.11.0-M7-1.0.0-RC7.jar +===================================================== +class java.net.URLClassLoader +java.net.URLClassLoader@2167c879 +file:/home/sun/.ivy2/cache/org.scala-lang/scala-library/jars/scala-library-2.11.0-M7.jar:file:/home/sun/.ivy2/cache/org.scala-lang/scala-compiler/jars/scala-compiler-2.11.0-M7.jar:file:/home/sun/.ivy2/cache/org.scala-lang/scala-reflect/jars/scala-reflect-2.11.0-M7.jar:file:/home/sun/.ivy2/cache/org.scala-lang.modules/scala-xml_2.11.0-M6/bundles/scala-xml_2.11.0-M6-1.0.0-RC6.jar:file:/home/sun/.ivy2/cache/org.scala-lang.modules/scala-parser-combinators_2.11.0-M6/bundles/scala-parser-combinators_2.11.0-M6-1.0.0-RC4.jar:file:/home/sun/.ivy2/cache/jline/jline/jars/jline-2.11.jar +===================================================== +class xsbt.boot.BootFilteredLoader +xsbt.boot.BootFilteredLoader@73c74402 +not a URL classloader +===================================================== +class sun.misc.Launcher$AppClassLoader <= corresponds to java.class.path +sun.misc.Launcher$AppClassLoader@612dcb8c +file:/home/sun/.sbt/.lib/0.13.0/sbt-launch.jar +===================================================== +class sun.misc.Launcher$ExtClassLoader <= corresponds to sun.boot.class.path +sun.misc.Launcher$ExtClassLoader@58e862c 
+file:/usr/lib/jvm/java-7-oracle/jre/lib/ext/sunpkcs11.jar:file:/usr/lib/jvm/java-7-oracle/jre/lib/ext/localedata.jar:file:/usr/lib/jvm/java-7-oracle/jre/lib/ext/zipfs.jar:file:/usr/lib/jvm/java-7-oracle/jre/lib/ext/sunec.jar:file:/usr/lib/jvm/java-7-oracle/jre/lib/ext/sunjce_provider.jar:file:/usr/lib/jvm/java-7-oracle/jre/lib/ext/dnsns.jar +===================================================== +``` +Since scala/dotty only pick up `java.class.path` and `sun.boot.class.path`, +it's clear why Dotty crashes in sbt and Eclipse unless we set the boot +classpath explicitly. diff --git a/scala3doc/dotty-docs/docs/docs/internals/contexts.md b/scala3doc/dotty-docs/docs/docs/internals/contexts.md new file mode 100644 index 000000000000..3674f03e2e51 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/internals/contexts.md @@ -0,0 +1,54 @@ +--- +layout: doc-page +title: Contexts +--- + +The `Context` contains the state of the compiler, for example + + * `settings` + * `freshNames` (`FreshNameCreator`) + * `period` (run and phase id) + * `compilationUnit` + * `phase` + * `tree` (current tree) + * `typer` (current typer) + * `mode` (type checking mode) + * `typerState` (for example undetermined type variables) + * ... + +### Contexts in the typer ### +The type checker passes contexts through all methods and adapts fields where +necessary, e.g. + +```scala +case tree: untpd.Block => typedBlock(desugar.block(tree), pt)(ctx.fresh.withNewScope) +``` + +A number of fields in the context are typer-specific (`mode`, `typerState`). + +### In other phases ### +Other phases need a context for many things, for example to access the +denotation of a symbols (depends on the period). However they typically don't +need to modify / extend the context while traversing the AST. For these phases +the context can be simply an implicit class parameter that is then available in +all members. + +**Careful**: beware of memory leaks. Don't hold on to contexts in long lived +objects. + +### Using contexts ### +Nested contexts should be named `ctx` to enable implicit shadowing: + +```scala +scala> class A + +scala> def foo(implicit a: A) { def bar(implicit b: A) { println(implicitly[A]) } } +:8: error: ambiguous implicit values: + both value a of type A + and value b of type A + match expected type A + def foo(implicit a: A) { def bar(implicit b: A) { println(implicitly[A]) } } + +scala> def foo(implicit a: A) { def bar(implicit a: A) { println(implicitly[A]) } } +foo: (implicit a: A)Unit +``` diff --git a/scala3doc/dotty-docs/docs/docs/internals/core-data-structures.md b/scala3doc/dotty-docs/docs/docs/internals/core-data-structures.md new file mode 100644 index 000000000000..623114aa3270 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/internals/core-data-structures.md @@ -0,0 +1,117 @@ +--- +layout: doc-page +title: Core Data Structures +--- + +(The following is work in progress) + +## Symbols and SymDenotations + + - why symbols are not enough: their contents change all the time + - they change themselvesSo a `Symbol` + - reference: string + sig + + +Dotc is different from most other compilers in that it is centered around the idea of +maintaining views of various artifacts associated with code. These views are indexed +by tne + +A symbol refers to a definition in a source program. Traditionally, + compilers store context-dependent data in a _symbol table_. The + symbol then is the central reference to address context-dependent + data. 
But for `dotc`'s requirements it turns out that symbols are + both too little and too much for this task. + +Too little: The attributes of a symbol depend on the phase. Examples: +Types are gradually simplified by several phases. Owners are changed +in phases `LambdaLift` (when methods are lifted out to an enclosing +class) and Flatten (when all classes are moved to top level). Names +are changed when private members need to be accessed from outside +their class (for instance from a nested class or a class implementing +a trait). So a functional compiler, a `Symbol` by itself met mean +much. Instead we are more interested in the attributes of a symbol at +a given phase. + +`dotc` has a concept for "attributes of a symbol at + +Too much: If a symbol is used to refer to a definition in another +compilation unit, we get problems for incremental recompilation. The +unit containing the symbol might be changed and recompiled, which +might mean that the definition referred to by the symbol is deleted or +changed. This leads to the problem of stale symbols that refer to +definitions that no longer exist in this form. `scalac` tried to +address this problem by _rebinding_ symbols appearing in certain cross +module references, but it turned out to be too difficult to do this +reliably for all kinds of references. `dotc` attacks the problem at +the root instead. The fundamental problem is that symbols are too +specific to serve as a cross-module reference in a system with +incremental compilation. They refer to a particular definition, but +that definition may not persist unchanged after an edit. + +`dotc` uses instead a different approach: A cross module reference is +always type, either a `TermRef` or ` TypeRef`. A reference type contains +a prefix type and a name. The definition the type refers to is established +dynamically based on these fields. + + +a system where sources can be recompiled at any instance, + + the concept of a `Denotation`. + + Since definitions are transformed by phases, + + +The [Dotty project](https://github.com/lampepfl/dotty) +is a platform to develop new technology for Scala +tooling and to try out concepts of future Scala language versions. +Its compiler is a new design intended to reflect the +lessons we learned from work with the Scala compiler. A clean redesign +today will let us iterate faster with new ideas in the future. + +Today we reached an important milestone: The Dotty compiler can +compile itself, and the compiled compiler can act as a drop-in for the +original one. This is what one calls a *bootstrap*. + +## Why is this important? + +The main reason is that this gives us a some validation of the +*trustworthiness* of the compiler itself. Compilers are complex beasts, +and many things can go wrong. By far the worst things that can go +wrong are bugs where incorrect code is produced. It's not fun debugging code that looks perfectly +fine, yet gets translated to something subtly wrong by the compiler. + +Having the compiler compile itself is a good test to demonstrate that +the generated code has reached a certain level of quality. Not only is +a compiler a large program (44k lines in the case of dotty), it is +also one that exercises a large part of the language in quite +intricate ways. Moreover, bugs in the code of a compiler don't tend to +go unnoticed, precisely because every part of a compiler feeds into +other parts and all together are necessary to produce a correct +translation. + +## Are We Done Yet? + +Far from it! 
The compiler is still very rough. A lot more work is +needed to + + - make it more robust, in particular when analyzing incorrect programs, + - improve error messages and warnings, + - improve the efficiency of some of the generated code, + - embed it in external tools such as sbt, REPL, IDEs, + - remove restrictions on what Scala code can be compiled, + - help in migrating Scala code that will have to be changed. + +## What Are the Next Steps? + +Over the coming weeks and months, we plan to work on the following topics: + + - Make snapshot releases. + - Get the Scala standard library to compile. + - Work on SBT integration of the compiler. + - Work on IDE support. + - Investigate the best way to obtaining a REPL. + - Work on the build infrastructure. + +If you want to get your hands dirty with any of this, now is a good moment to get involved! +To get started: . + diff --git a/scala3doc/dotty-docs/docs/docs/internals/debug-macros.md b/scala3doc/dotty-docs/docs/docs/internals/debug-macros.md new file mode 100644 index 000000000000..afe008f14c2f --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/internals/debug-macros.md @@ -0,0 +1,65 @@ +--- +layout: doc-page +title: "Debug Macros" +--- + +Complex macros may break invariants of the compiler, which leads to compiler crashes. +Here we list common compiler crashes and how to deal with them. + +## position not set + +For this problem, here is the log that is usually shown: + +``` +[error] assertion failed: position not set for org.scalactic.anyvals.PosZInt.+$extension3(SizeParam.this.minSize)( +[error] org.scalactic.anyvals.PosZInt.widenToInt(SizeParam.this.sizeRange) +[error] ) # 2326942 of class dotty.tools.dotc.ast.Trees$Apply in library/src-bootstrapped/scala/tasty/reflect/utils/TreeUtils.scala +``` + +To debug why the position is not set, note the tree id `2326942`, and enable +the following compiler option: + +``` +-Ydebug-tree-with-id 2326942 +``` + +With the option above, the compiler will crash when the tree is created. From +the stack trace, we will be able to figure out where the tree is created. + +If the position is in the compiler, then either report a compiler bug or +fix the problem with `.withSpan(tree.span)`. 
The following fix is an example: + +- https://github.com/lampepfl/dotty/pull/6581 + +## unresolved symbols in pickling + +Here is the usually stacktrace for unresolved symbols in pickling: + +``` +[error] java.lang.AssertionError: assertion failed: unresolved symbols: value pos (line 5565) when pickling scalatest/scalatest-test.dotty/target/scala-0.17/src_managed/test/org/scalatest/AssertionsSpec.scala +[error] at dotty.DottyPredef$.assertFail(DottyPredef.scala:16) +[error] at dotty.tools.dotc.core.tasty.TreePickler.pickle(TreePickler.scala:699) +[error] at dotty.tools.dotc.transform.Pickler.run$$anonfun$10$$anonfun$8(Pickler.scala:60) +[error] at dotty.runtime.function.JProcedure1.apply(JProcedure1.java:15) +[error] at dotty.runtime.function.JProcedure1.apply(JProcedure1.java:10) +[error] at scala.collection.immutable.List.foreach(List.scala:392) +[error] at dotty.tools.dotc.transform.Pickler.run$$anonfun$2(Pickler.scala:83) +[error] at dotty.runtime.function.JProcedure1.apply(JProcedure1.java:15) +[error] at dotty.runtime.function.JProcedure1.apply(JProcedure1.java:10) +[error] at scala.collection.immutable.List.foreach(List.scala:392) +[error] at dotty.tools.dotc.transform.Pickler.run(Pickler.scala:83) +[error] at dotty.tools.dotc.core.Phases$Phase.runOn$$anonfun$1(Phases.scala:316) +[error] at scala.collection.immutable.List.map(List.scala:286) +[error] at dotty.tools.dotc.core.Phases$Phase.runOn(Phases.scala:318) +[error] at dotty.tools.dotc.transform.Pickler.runOn(Pickler.scala:87) +``` + +From the stack trace, we know `pos` at line 5565 cannot be resolved. For the +compiler, it means that the name `pos` (usually a local name, but could also be +a class member) is used in the code but its definition cannot be found. + +A possible cause of the problem is that the macro implementation accidentally +dropped the definition of the referenced name. + +If you are confident that the macro implementation is correct, then it might be +a bug of the compiler. Try to minimize the code and report a compiler bug. diff --git a/scala3doc/dotty-docs/docs/docs/internals/dotc-scalac.md b/scala3doc/dotty-docs/docs/docs/internals/dotc-scalac.md new file mode 100644 index 000000000000..8949d9cf1f9a --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/internals/dotc-scalac.md @@ -0,0 +1,98 @@ +--- +layout: doc-page +title: "Differences between Scalac and Dotty" +--- + +Overview explanation how symbols, named types and denotations hang together: +[Denotations1] + +### Denotation ### +Comment with a few details: [Denotations2] + +A `Denotation` is the result of a name lookup during a given period + +* Most properties of symbols are now in the denotation (name, type, owner, + etc.) +* Denotations usually have a reference to the selected symbol +* Denotations may be overloaded (`MultiDenotation`). In this case the symbol + may be `NoSymbol` (the two variants have symbols). +* Non-overloaded denotations have an `info` + +Denotations of methods have a signature ([Signature1]), which +uniquely identifies overloaded methods. + +#### Denotation vs. SymDenotation #### +A `SymDenotation` is an extended denotation that has symbol-specific properties +(that may change over phases) +* `flags` +* `annotations` +* `info` + +`SymDenotation` implements lazy types (similar to scalac). The type completer +assigns the denotation's `info`. 
+ +#### Implicit Conversion #### +There is an implicit conversion: +```scala +core.Symbols.toDenot(sym: Symbol)(implicit ctx: Context): SymDenotation +``` + +Because the class `Symbol` is defined in the object `core.Symbols`, the +implicit conversion does **not** need to be imported, it is part of the +implicit scope of the type `Symbol` (check the Scala spec). However, it can +only be applied if an implicit `Context` is in scope. + +### Symbol ### +* `Symbol` instances have a `SymDenotation` +* Most symbol properties in scalac are now in the denotation (in dotc) + +Most of the `isFooBar` properties in scalac don't exist anymore in dotc. Use +flag tests instead, for example: + +```scala +if (sym.isPackageClass) // scalac +if (sym is Flags.PackageClass) // dotc (*) +``` + +`(*)` Symbols are implicitly converted to their denotation, see above. Each +`SymDenotation` has flags that can be queried using the `is` method. + +### Flags ### +* Flags are instances of the value class `FlagSet`, which encapsulates a + `Long` +* Each flag is either valid for types, terms, or both + +``` +000..0001000..01 + ^ ^^ + flag | \ + | valid for term + valid for type +``` + +* Example: `Module` is valid for both module values and module classes, + `ModuleVal` / `ModuleClass` for either of the two. +* `flags.is(Method | Param)`: true if `flags` has either of the two + +### Tree ### +* Trees don't have symbols + - `tree.symbol` is `tree.denot.symbol` + - `tree.denot` is `tree.tpe.denot` where the `tpe` is a `NamdedType` (see + next point) +* Subclasses of `DenotingTree` (`Template`, `ValDef`, `DefDef`, `Select`, + `Ident`, etc.) have a `NamedType`, which has a `denot` field. The + denotation has a symbol. + - The `denot` of a `NamedType` (prefix + name) for the current period is + obtained from the symbol that the type refers to. This symbol is searched + using `prefix.member(name)`. + + +### Type ### + * `MethodType(paramSyms, resultType)` from scalac => + `mt @ MethodType(paramNames, paramTypes)`. Result type is `mt.resultType` + +`@todo` + +[Denotations1]: https://github.com/lampepfl/dotty/blob/a527f3b1e49c0d48148ccfb2eb52e3302fc4a349/compiler/src/dotty/tools/dotc/core/Denotations.scala#L27-L72 +[Denotations2]: https://github.com/lampepfl/dotty/blob/a527f3b1e49c0d48148ccfb2eb52e3302fc4a349/compiler/src/dotty/tools/dotc/core/Denotations.scala#L77-L103 +[Signature1]: https://github.com/lampepfl/dotty/blob/a527f3b1e49c0d48148ccfb2eb52e3302fc4a349/compiler/src/dotty/tools/dotc/core/Signature.scala#L9-L33 diff --git a/scala3doc/dotty-docs/docs/docs/internals/dotty-internals-1-notes.md b/scala3doc/dotty-docs/docs/docs/internals/dotty-internals-1-notes.md new file mode 100644 index 000000000000..8f3110bd91fb --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/internals/dotty-internals-1-notes.md @@ -0,0 +1,155 @@ +--- +layout: doc-page +title: Dotty Internals 1: Trees & Symbols (Meeting Notes) +--- + +These are meeting notes for the [Dotty Internals 1: Trees & Symbols](https://www.youtube.com/watch?v=yYd-zuDd3S8) talk by [Dmitry Petrashko](http://twitter.com/darkdimius) on Mar 21, 2017. + +# Entry point +`dotc/Compiler.scala` + +The entry point to the compiler contains the list of phases and their order. + +# Phases + +Some phases executed independently, but others (miniphases) are grouped for efficiency. +See the paper "[Miniphases: Compilation using Modular and Efficient Tree Transformation](https://infoscience.epfl.ch/record/228518/files/paper.pdf)" for details. 
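
To give a feeling for how that grouping is expressed, here is a heavily abbreviated sketch of the shape of the phase list in `dotc/Compiler.scala`. It is not a complete or exact excerpt; the authoritative list lives in that file (and is reproduced on the "Dotty Overall Structure" page).

```scala
// Sketch only: each inner List is a group, and the miniphases of a group are
// fused into a single tree traversal.
def phases: List[List[Phase]] =
  List(new FrontEnd) ::                // scanner, parser, namer, typer: its own traversal
  List(new FirstTransform,             // these miniphases ...
       new CheckReentrant,
       new ElimPackagePrefixes) ::     // ... run in one fused traversal
  List(new GenBCode) ::                // bytecode generation
  Nil
```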
+ +# Trees +`dotc/ast/Trees.scala` + +Trees represent code written by the user (e.g. methods, classes, expressions). There are two kinds of trees: untyped and typed. + +Unlike other compilers (but like `scalac`), dotty doesn't use multiple intermediate representations (IRs) during the compilation pipeline. Instead, it uses trees for all phases. + +Dotty trees are immutable, so they can be shared. + +## Untyped trees +`dotc/ast/untpd.scala` + +These are the trees as output by the parser. + +Some trees only exist as untyped: e.g. `WhileDo` and `ForDo`. These are desugared by the typechecker. + +## Typed trees +`dotc/ast/tpd.scala` + +Typed trees contain not only the user-written code, but also semantic information (the types) about the code. + +## Notes on some tree types + + * `RefTree`: trees that refer to something. There are multiple subtypes + - `Ident`: by-name reference + - `Select`: select (e.g. a field) from another tree (e.g. `a.foo` is represented as `Select(Ident(a), foo)`) + * `This`: the this pointer + * `Apply`: function application: e.g. `a.foo(1, 2)(3, 4)` becomes `Apply(Apply(Select(Ident(a), foo), List(1, 2)), List(3, 4))` + * `TypeApply`: type application: `def foo[T](a: T) = ??? foo[Int](1)` becomes `Apply(TypeApply(Ident(foo), List(Int)), List(1))` + * `Literal`: constants (e.g. integer constant 1) + * `Typed`: type ascription (e.g. for widening, as in `(1: Any)`) + * `NamedArg`: named arguments (can appear out-of-order in untyped trees, but will appear in-order in typed ones) + * `Assign`: assignment. The node has a `lhs` and a `rhs`, but the `lhs` can be arbitrarily complicated (e.g. `(new C).f = 0`). + * `If`: the condition in an if-expression can be arbitrarily complex (e.g. it can contain class definitions) + * `Closure`: the free variables are stored in the `env` field, but are only accessible "around" the `LambdaLift` phase. + * `Match` and `CaseDef`: pattern-matching trees. The `pat` field in `CaseDef` (the pattern) is, in turn, populated with a subset of trees like `Bind` and `Unapply`. + * `Return`: return from a method. If the `from` field is empty, then we return from the closest enclosing method. + The `expr` field should have a types that matches the return type of the method, but the `Return` node itself has type bottom. + * `TypeTree`: tree representing a type (e.g. for `TypeApply`). + * `AndType`, `OrType`, etc.: these are other trees that represent types that can be written by the user. These are a strict subset of all types, since + some types *cannot* be written by the user. + * `ValDef`: defines fields or local variables. To differentiate between the two cases, we can look at the denotation. + The `preRhs` field is lazy because sometimes we want to "load" a definition without know what's on the rhs (for example, to look up its type). + * `DefDef`: method definition. + * `TypeDef`: type definition. Both `type A = ???` and `class A {}` are represented with a `TypeDef`. To differentiate between the two, look at the type of the node (better), or in the case of classes there should be a `Template` node in the rhs. + * `Template`: describes the "body" of a class, including inheritance information and constructor. The `constr` field will be populated only after the `Constructors` phase; before that the constructor lives in the `preBody` field. + * `Thicket`: allows us to return multiple trees when a single one is expected. This kind of tree is not user-visible. 
+ For example, `transformDefDef` in `LabelDefs` takes in a `DefDef` and needs to be able to sometimes break up the method into multiple methods, which are then returned as a single tree (via a `Thicket`). If we return a thicket in a location where multiple trees are expected, the compiler will flatten them, but if only one tree is expected (for example, in the constructor field of a class), then the compiler will throw. + +### ThisTree + +Tree classes have a `ThisTree` type field which is used to implement functionality that's common for *all* trees while returning +a specific tree type. See `withType` in the `Tree` base class, for an example. + +Additionally, both `Tree` and `ThisTree` are polymorphic so they can represent both untyped and typed trees. + +For example, `withType` has signature `def withType(tpe: Type)(implicit ctx: Context): ThisTree[Type]`. +This means that `withType` can return the most-specific tree type for the current tree, while at the same time guaranteeing that +the returned tree will be typed. + +## Creating trees + +You should use the creation methods in `untpd.scala` and `tpd.scala` to instantiate tree objects (as opposed to +creating them directly using the case classes in `Trees.scala`). + +## Meaning of trees + +In general, the best way to know what a tree represents is to look at its type or denotation; pattern matching +on the structure of a tree is error-prone. + +## Errors +`dotc/typer/ErrorReporting.scala` + +Sometimes there's an error during compilation, but we want to continue compilling (as opposed to failing outright), to +uncover additional errors. + +In cases where a tree is expected but there's an error, we can use the `errorTree` methods in `ErrorReporting` to create +placeholder trees that explicitly mark the presence of errors. + +Similarly, there exist `ErrorType` and `ErrorSymbol` classes. + +## Assignment + +The closest in Dotty to what a programming language like C calls an "l-value" is a `RefTree` (so an `Ident` or a `Select`). +However, keep in mind that arbitrarily complex expressions can appear in the lhs of an assignment: e.g. +```scala +trait T { + var s = 0 +} +{ + class T2 extends T + while (true) 1 + new Bla +}.s = 10 +``` +Another caveat, before typechecking there can be some trees where the lhs isn't a `RefTree`: e.g. `(a, b) = (3, 4)`. + +# Symbols +`dotc/core/Symbols.scala` + +Symbols are references to definitions (e.g. of variables, fields, classes). Symbols can be used to refer to definitions for which we don't have ASTs (for example, from the Java standard library). + +`NoSymbol` is used to indicate the lack of a symbol. + +Symbols uniquely identify definitions, but they don't say what the definitions *mean*. To understand the meaning of a symbol +we need to look at its *denotation* (spefically for symbols, a `SymDenotation`). + +Symbols can not only represent terms, but also types (hence the `isTerm`/`isType` methods in the `Symbol` class). + +## ClassSymbol + +`ClassSymbol` represents either a `class`, or an `trait`, or an `object`. For example, an object +```scala +object O { + val s = 1 +} +``` +is represented (after `Typer`) as +```scala +class O$ { this: O.type => + val s = 1 +} +val O = new O$ +``` +where we have a type symbol for `class O$` and a term symbol for `val O`. Notice the use of the selftype `O.type` to indicate that `this` has a singleton type. + +## SymDenotation +`dotc/core/SymDenotations.scala` + +Symbols contain `SymDenotation`s. 
The denotation, in turn, refers to: + + * the source symbol (so the linkage is cyclic) + * the "owner" of the symbol: + - if the symbol is a variable, the owner is the enclosing method + - if it's a field, the owner is the enclosing class + - if it's a class, then the owner is the enclosing class + * a set of flags that contain semantic information about the definition (e.g. whether it's a trait or mutable). Flags are defined in `Flags.scala`. + * the type of the definition (through the `info` method) diff --git a/scala3doc/dotty-docs/docs/docs/internals/explicit-nulls.md b/scala3doc/dotty-docs/docs/docs/internals/explicit-nulls.md new file mode 100644 index 000000000000..6164a6d330c1 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/internals/explicit-nulls.md @@ -0,0 +1,141 @@ +--- +layout: doc-page +title: "Explicit Nulls" +--- + +The explicit nulls feature (enabled via a flag) changes the Scala type hierarchy +so that reference types (e.g. `String`) are non-nullable. We can still express nullability +with union types: e.g. `val x: String|Null = null`. + +The implementation of the feature in dotty can be conceptually divided in several parts: + 1. changes to the type hierarchy so that `Null` is only a subtype of `Any` + 2. a "translation layer" for Java interop that exposes the nullability in Java APIs + 3. a "magic" `UncheckedNull` type (an alias for `Null`) that is recognized by the compiler and + allows unsound member selections (trading soundness for usability) + +## Feature Flag + +Explicit nulls are disabled by default. They can be enabled via `-Yexplicit-nulls` defined in +`ScalaSettings.scala`. All of the explicit-nulls-related changes should be gated behind the flag. + +## Type Hierarchy + +We change the type hierarchy so that `Null` is only a subtype of `Any` by: + - modifying the notion of what is a nullable class (`isNullableClass`) in `SymDenotations` + to include _only_ `Null` and `Any` + - changing the parent of `Null` in `Definitions` to point to `Any` and not `AnyRef` + - changing `isBottomType` and `isBottomClass` in `Definitions` + +## Java Interop + +The problem we're trying to solve here is: if we see a Java method `String foo(String)`, +what should that method look like to Scala? + - since we should be able to pass `null` into Java methods, the argument type should be `String|UncheckedNull` + - since Java methods might return `null`, the return type should be `String|UncheckedNull` + +`UncheckedNull` here is a type alias for `Null` with "magic" properties (see below). + +At a high-level: + - we track the loading of Java fields and methods as they're loaded by the compiler + - we do this in two places: `Namer` (for Java sources) and `ClassFileParser` (for bytecode) + - whenever we load a Java member, we "nullify" its argument and return types + +The nullification logic lives in `compiler/src/dotty/tools/dotc/core/JavaNullInterop.scala`. + +The entry point is the function +`def nullifyMember(sym: Symbol, tp: Type, isEnumValueDef: Boolean)(implicit ctx: Context): Type` +which, given a symbol, its "regular" type, and a boolean whether it is a Enum value definition, +produces what the type of the symbol should be in the explicit nulls world. + +1. If the symbol is a Enum value definition or a `TYPE_` field, we don't nullify the type +2. If it is `toString()` method or the constructor, or it has a `@NotNull` annotation, + we nullify the type, without a `UncheckedNull` at the outmost level. +3. Otherwise, we nullify the type in regular way. 
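
To make the rules above concrete, here is a small sketch of the nullified view of a Java member. The Java class `Box` and its method `unwrap` are made up for this illustration; only the shape of the mapping follows from the description above.

```scala
// Hypothetical Java class on the classpath:
//
//   public class Box {
//       public String unwrap(String fallback) { ... }
//   }
//
// None of the special cases apply (no @NotNull, not an enum value, not toString or a
// constructor), so under -Yexplicit-nulls the member is seen from Scala roughly as:
trait NullifiedBoxView {
  // both the parameter and the result are widened with UncheckedNull
  def unwrap(fallback: String | UncheckedNull): String | UncheckedNull
}

def use(b: NullifiedBoxView): Int = {
  val s: String | UncheckedNull = b.unwrap(null) // null can still be passed to Java code
  s.length                                       // allowed thanks to UncheckedNull, may NPE at runtime
}
```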
+ +See `JavaNullMap` in `JavaNullInterop.scala` for more details about how we nullify different types. + +## UncheckedNull + +`UncheckedNull` is just an alias for `Null`, but with magic power. `UncheckedNull`'s magic (anti-)power is that +it's unsound. + +```scala +val s: String|UncheckedNull = "hello" +s.length // allowed, but might throw NPE +``` + +`UncheckedNull` is defined as `UncheckedNullAlias` in `Definitions.scala`. +The logic to allow member selections is defined in `findMember` in `Types.scala`: + - if we're finding a member in a type union + - and the union contains `UncheckedNull` on the r.h.s. after normalization (see below) + - then we can continue with `findMember` on the l.h.s of the union (as opposed to failing) + +## Working with Nullable Unions + +Within `Types.scala`, we defined some extractors to work with nullable unions: +`OrNull` and `OrUncheckedNull`. + +```scala +(tp: Type) match { + case OrNull(tp1) => // if tp is a nullable union: tp1 | Null + case _ => // otherwise +} +``` + +This extractor will call utility methods in `NullOpsDecorator.scala`. All of these +are methods of the `Type` class, so call them with `this` as a receiver: + +- `stripNull` syntactically strips all `Null` types in the union: + e.g. `String|Null => String`. +- `stripUncheckedNull` is like `stripNull` but only removes `UncheckedNull` from the union. + This is needed when we want to "revert" the Java nullification function. +- `stripAllUncheckedNull` collapses all `UncheckedNull` unions within this type, and not just the outermost + ones (as `stripUncheckedNull` does). +- `isNullableUnion` determines whether `this` is a nullable union. +- `isUncheckedNullableUnion` determines whether `this` is syntactically a union of the form + `T|UncheckedNull`. + +## Flow Typing + +As typing happens, we accumulate a set of `NotNullInfo`s in the `Context` (see +`Contexts.scala`). A `NotNullInfo` contains the set of `TermRef`s that are known to +be non-null at the current program point. See `Nullables.scala` for how `NotNullInfo`s +are computed. + +During type-checking, when we type an identity or a select tree (in `typedIdent` and +`typedSelect`), we will call `toNotNullTermRef` on the tree before return the typed tree. +If the tree `x` has nullable type `T|Null` and it is known to be not null according to +the `NotNullInfo` and it is not on the lhs of assignment, then we cast it to `x.type & T` +using `defn.Any_typeCast`. + +The reason for casting to `x.type & T`, as opposed to just `T`, is that it allows us to +support flow typing for paths of length greater than one. + +```scala +abstract class Node { + val x: String + val next: Node | Null +} + +def f = { + val l: Node|Null = ??? + if (l != null && l.next != null) { + val third: l.next.next.type = l.next.next + } +} +``` + +After typing, `f` becomes: + +```scala +def f = { + val l: Node|Null = ??? + if (l != null && l.$asInstanceOf$[l.type & Node].next != null) { + val third: + l.$asInstanceOf$[l.type & Node].next.$asInstanceOf$[(l.type & Node).next.type & Node].next.type = + l.$asInstanceOf$[l.type & Node].next.$asInstanceOf$[(l.type & Node).next.type & Node].next + } +} +``` +Notice that in the example above `(l.type & Node).next.type & Node` is still a stable path, so +we can use it in the type and track it for flow typing. 
diff --git a/scala3doc/dotty-docs/docs/docs/internals/higher-kinded-v2.md b/scala3doc/dotty-docs/docs/docs/internals/higher-kinded-v2.md new file mode 100644 index 000000000000..977c2ce81361 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/internals/higher-kinded-v2.md @@ -0,0 +1,462 @@ +--- +layout: doc-page +title: "Higher-Kinded Types in Dotty" +--- + + + +Higher-Kinded Types in Dotty V2 +=============================== +This note outlines how we intend to represent higher-kinded types in Dotty. +The principal idea is to collapse the four previously disparate features of +refinements, type parameters, existentials and higher-kinded types into just +one: refinements of type members. All other features will be encoded using +these refinements. + +The complexity of type systems tends to grow exponentially with the number of +independent features, because there are an exponential number of possible +feature interactions. Consequently, a reduction from 4 to 1 fundamental +features achieves a dramatic reduction of complexity. It also adds some nice +usablilty improvements, notably in the area of partial type application. + +This is a second version of the scheme which differs in a key aspect from the +first one: Following Adriaan's idea, we use traits with type members to model +type lambdas and type applications. This is both more general and more robust +than the intersections with type constructor traits that we had in the first +version. + +The duality +----------- +The core idea: A parameterized class such as + +```scala +class Map[K, V] +``` + +is treated as equivalent to a type with type members: + +```scala +class Map { type Map$K; type Map$V } +``` + +The type members are name-mangled (i.e. `Map$K`) so that they do not conflict +with other members or parameters named `K` or `V`. + +A type-instance such as `Map[String, Int]` would then be treated as equivalent +to: + +```scala +Map { type Map$K = String; type Map$V = Int } +``` + +Named type parameters +--------------------- +Type parameters can have unmangled names. This is achieved by adding the `type` +keyword to a type parameter declaration, analogous to how `val` indicates a +named field. For instance, + +```scala +class Map[type K, type V] +``` + +is treated as equivalent to + +```scala +class Map { type K; type V } +``` + +The parameters are made visible as fields. + +Wildcards +--------- +A wildcard type such as `Map[_, Int]` is equivalent to: + +```scala +Map { type Map$V = Int } +``` + +I.e. `_`'s omit parameters from being instantiated. Wildcard arguments can have +bounds. E.g. + +```scala +Map[_ <: AnyRef, Int] +``` + +is equivalent to: + +```scala +Map { type Map$K <: AnyRef; type Map$V = Int } +``` + +Type parameters in the encodings +-------------------------------- +The notion of type parameters makes sense even for encoded types, which do not +contain parameter lists in their syntax. Specifically, the type parameters of a +type are a sequence of type fields that correspond to parameters in the +unencoded type. They are determined as follows. + +* The type parameters of a class or trait type are those parameter fields declared in the class + that are not yet instantiated, in the order they are given. Type parameter fields of parents + are not considered. +* The type parameters of an abstract type are the type parameters of its upper bound. +* The type parameters of an alias type are the type parameters of its right hand side. +* The type parameters of every other type is the empty sequence. 
+ +Partial applications +-------------------- +The definition of type parameters in the previous section leads to a simple +model of partial applications. Consider for instance: + +```scala +type Histogram = Map[_, Int] +``` + +`Histogram` is a higher-kinded type that still has one type parameter. +`Histogram[String]` would be a possible type instance, and it would be +equivalent to `Map[String, Int]`. + + +Modelling polymorphic type declarations +--------------------------------------- +The partial application scheme gives us a new -- and quite elegant -- way to do +certain higher-kinded types. But how do we interprete the poymorphic types that +exist in current Scala? + +More concretely, current Scala allows us to write parameterized type +definitions, abstract types, and type parameters. In the new scheme, only +classes (and traits) can have parameters and these are treated as equivalent to +type members. Type aliases and abstract types do not allow the definition of +parameterized types so we have to interprete polymorphic type aliases and +abstract types specially. + +Modelling polymorphic type aliases: simple case +----------------------------------------------- +A polymorphic type alias such as: + +```scala +type Pair[T] = Tuple2[T, T] +``` + +where `Tuple2` is declared as + +```scala +class Tuple2[T1, T2] ... +``` + +is expanded to a monomorphic type alias like this: + +```scala +type Pair = Tuple2 { type Tuple2$T2 = Tuple2$T1 } +``` + +More generally, each type parameter of the left-hand side must appear as a type +member of the right hand side type. Type members must appear in the same order +as their corresponding type parameters. References to the type parameter are +then translated to references to the type member. The type member itself is +left uninstantiated. + +This technique can expand most polymorphic type aliases appearing in Scala +codebases but not all of them. For instance, the following alias cannot be +expanded, because the parameter type `T` is not a type member of the right-hand +side `List[List[T]]`. + +```scala +type List2[T] = List[List[T]] +``` + +We scanned the Scala standard library for occurrences of polymorphic type +aliases and determined that only two occurrences could not be expanded. In +`io/Codec.scala`: + +```scala +type Configure[T] = (T => T, Boolean) +``` + +And in `collection/immutable/HashMap.scala`: + +```scala +private type MergeFunction[A1, B1] = ((A1, B1), (A1, B1)) => (A1, B1) +``` + +For these cases, we use a fall-back scheme that models a parameterized alias as +a `Lambda` type. + +Modelling polymorphic type aliases: general case +------------------------------------------------ +A polymorphic type alias such as: + +```scala +type List2D[T] = List[List[T]] +``` + +is represented as a monomorphic type alias of a type lambda. Here's the +expanded version of the definition above: + +```scala +type List2D = Lambda$I { type Apply = List[List[$hkArg$0]] } +``` + +Here, `Lambda$I` is a standard trait defined as follows: + +```scala +trait Lambda$I[type $hkArg$0] { type +Apply } +``` + +The `I` suffix of the `Lambda` trait indicates that it has one invariant type +parameter (named $hkArg$0). Other suffixes are `P` for covariant type +parameters, and `N` for contravariant type parameters. Lambda traits can have +more than one type parameter. 
For instance, here is a trait with contravariant +and covariant type parameters: + +```scala +trait Lambda$NP[type -$hkArg$0, +$hkArg$1] { type +Apply } extends Lambda$IP with Lambda$NI +``` + +Aside: the `+` prefix in front of `Apply` indicates that `Apply` is a covariant +type field. Dotty admits variance annotations on type members. + +The definition of `Lambda$NP` shows that `Lambda` traits form a subtyping +hierarchy: Traits which have covariant or contravariant type parameters are +subtypes of traits which don't. The supertraits of `Lambda$NP` would themselves +be written as follows. + +```scala +trait Lambda$IP[type $hkArg$0, +$hkArg$1] { type +Apply } extends Lambda$II +trait Lambda$NI[type -$hkArg$0, $hkArg$1] { type +Apply } extends Lambda$II +trait Lambda$II[type $hkArg$0, $hkArg$1] { type +Apply } +``` + +`Lambda` traits are special in that they influence how type applications are +expanded: If the standard type application `T[X1, ..., Xn]` leads to a subtype +`S` of a type instance + +```scala +LambdaXYZ { type Arg1 = T1; ...; type ArgN = Tn; type Apply ... } +``` + +where all argument fields `Arg1, ..., ArgN` are concretely defined and the +definition of the `Apply` field may be either abstract or concrete, then the +application is further expanded to `S # Apply`. + +For instance, the type instance `List2D[String]` would be expanded to + +```scala +Lambda$I { type $hkArg$0 = String; type Apply = List[List[String]] } # Apply +``` + +which in turn simplifies to `List[List[String]]`. + +2nd Example: Consider the two aliases + +```scala +type RMap[K, V] = Map[V, K] +type RRMap[K, V] = RMap[V, K] +``` + +These expand as follows: + +```scala +type RMap = Lambda$II { self1 => type Apply = Map[self1.$hkArg$1, self1.$hkArg$0] } +type RRMap = Lambda$II { self2 => type Apply = RMap[self2.$hkArg$1, self2.$hkArg$0] } +``` + +Substituting the definition of `RMap` and expanding the type application gives: + +```scala +type RRMap = Lambda$II { self2 => type Apply = + Lambda$II { self1 => type Apply = Map[self1.$hkArg$1, self1.$hkArg$0] } + { type $hkArg$0 = self2.$hkArg$1; type $hkArg$1 = self2.$hkArg$0 } # Apply } +``` + +Substituting the definitions for `self1.$hkArg${1,2}` gives: + +```scala +type RRMap = Lambda$II { self2 => type Apply = + Lambda$II { self1 => type Apply = Map[self2.$hkArg$0, self2.$hkArg$1] } + { type $hkArg$0 = self2.$hkArg$1; type $hkArg$1 = self2.$hkArg$0 } # Apply } +``` + +Simplifiying the `# Apply` selection gives: + +```scala +type RRMap = Lambda$II { self2 => type Apply = Map[self2.$hkArg$0, self2.$hkArg$1] } +``` + +This can be regarded as the eta-expanded version of `Map`. It has the same expansion as + +```scala +type IMap[K, V] = Map[K, V] +``` + +Modelling higher-kinded types +----------------------------- +The encoding of higher-kinded types uses again the `Lambda` traits to represent +type constructors. Consider the higher-kinded type declaration + +```scala +type Rep[T] +``` + +We expand this to + +```scala +type Rep <: Lambda$I +``` + +The type parameters of `Rep` are the type parameters of its upper bound, so +`Rep` is a unary type constructor. + +More generally, a higher-kinded type declaration + +```scala +type T[v1 X1 >: S1 <: U1, ..., vn XN >: SN <: UN] >: SR <: UR +``` + +is encoded as + +```scala +type T <: LambdaV1...Vn { self => + type v1 $hkArg$0 >: s(S1) <: s(U1) + ... + type vn $hkArg$N >: s(SN) <: s(UN) + type Apply >: s(SR) <: s(UR) +} +``` + +where `s` is the substitution `[XI := self.$hkArg$I | I = 1,...,N]`. 
+ +If we instantiate `Rep` with a type argument, this is expanded as was explained +before. + +```scala +Rep[String] +``` + +would expand to + +```scala +Rep { type $hkArg$0 = String } # Apply +``` + +If we instantiate the higher-kinded type with a concrete type constructor (i.e. +a parameterized trait or class), we have to do one extra adaptation to make it +work. The parameterized trait or class has to be eta-expanded so that it +comforms to the `Lambda` bound. For instance, + +```scala +type Rep = Set +``` + +would expand to: + +```scala +type Rep = Lambda1 { type Apply = Set[$hkArg$0] } +``` + +Or, + +```scala +type Rep = Map[String, _] +``` + +would expand to + +```scala +type Rep = Lambda1 { type Apply = Map[String, $hkArg$0] } +``` + +Full example +------------ +Consider the higher-kinded `Functor` type class + +```scala +class Functor[F[_]] { + def map[A, B](f: A => B): F[A] => F[B] +} +``` + +This would be represented as follows: + +```scala +class Functor[F <: Lambda1] { + def map[A, B](f: A => B): F { type $hkArg$0 = A } # Apply => F { type $hkArg$0 = B } # Apply +} +``` + +The type `Functor[List]` would be represented as follows + +```scala +Functor { + type F = Lambda1 { type Apply = List[$hkArg$0] } +} +``` + +Now, assume we have a value + +```scala +val ml: Functor[List] +``` + +Then `ml.map` would have type + +```scala +s(F { type $hkArg$0 = A } # Apply => F { type $hkArg$0 = B } # Apply) +``` + +where `s` is the substitution of `[F := Lambda1 { type Apply = List[$hkArg$0] }]`. +This gives: + +```scala +Lambda1 { type Apply = List[$hkArg$0] } { type $hkArg$0 = A } # Apply + => Lambda1 { type Apply = List[$hkArg$0] } { type $hkArg$0 = B } # Apply +``` + +This type simplifies to: + +```scala +List[A] => List[B] +``` + +Status of `#` +------------- +In the scheme above we have silently assumed that `#` "does the right thing", +i.e. that the types are well-formed and we can collapse a type alias with a `#` +projection, thereby giving us a form of beta reduction. + +In Scala 2.x, this would not work, because `T#X` means `x.X forSome { val x: T +}`. Hence, two occurrences of `Rep[Int]` say, would not be recognized to be +equal because the existential would be opened each time afresh. + +In pre-existentials Scala, this would not have worked either. There, `T#X` was +a fundamental type constructor, but was restricted to alias types or classes +for both `T` and `X`. Roughly, `#` was meant to encode Java's inner classes. +In Java, given the classes + +```scala +class Outer { class Inner } +class Sub1 extends Outer +class Sub2 extends Outer +``` + +The types `Outer#Inner`, `Sub1#Inner` and `Sub2#Inner` would all exist and be +regarded as equal to each other. But if `Outer` had abstract type members this +would not work, since an abstract type member could be instantiated differently +in `Sub1` and `Sub2`. Assuming that `Sub1#Inner = Sub2#Inner` could then lead +to a soundness hole. To avoid soundness problems, the types in `X#Y` were +restricted so that `Y` was (an alias of) a class type and `X` was (an alias of) +a class type with no abstract type members. + +I believe we can go back to regarding `T#X` as a fundamental type constructor, +the way it was done in pre-existential Scala, but with the following relaxed +restriction: + +> In a type selection `T#x`, `T` is not allowed to have any abstract members different from `X` + +This would typecheck the higher-kinded types examples, because they only +project with `# Apply` once all `$hkArg$` type members are fully instantiated. 
+ +It would be good to study this rule formally, trying to verify its soundness. diff --git a/scala3doc/dotty-docs/docs/docs/internals/overall-structure.md b/scala3doc/dotty-docs/docs/docs/internals/overall-structure.md new file mode 100644 index 000000000000..6b3765471be1 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/internals/overall-structure.md @@ -0,0 +1,235 @@ +--- +layout: doc-page +title: "Dotty Overall Structure" +--- + +The compiler code is found in package [dotty.tools]. It spans the +following three sub-packages: + +``` +backend Compiler backends (currently for JVM and JS) + dotc The main compiler + io Helper modules for file access and classpath handling. +``` + +The [dotc] package contains some main classes that can be run as separate +programs. The most important one is class [Main]. `Main` inherits from +[Driver] which contains the highest level functions for starting a compiler +and processing some sources. `Driver` in turn is based on two other high-level +classes, [Compiler] and [Run]. + +Package Structure +----------------- +Most functionality of `dotc` is implemented in subpackages of `dotc`. Here's a +list of sub-packages and their focus. + +``` +. +├── ast // Abstract syntax trees +├── config // Compiler configuration, settings, platform specific definitions. +├── core // Core data structures and operations, with specific subpackages for: +│   ├── classfile // Reading of Java classfiles into core data structures +│   ├── tasty // Reading and writing of TASTY files to/from core data structures +│   └── unpickleScala2 // Reading of Scala2 symbol information into core data structures +├── parsing // Scanner and parser +├── printing // Pretty-printing trees, types and other data +├── repl // The interactive REPL +├── reporting // Reporting of error messages, warnings and other info. +├── rewrites // Helpers for rewriting Scala 2's constructs into dotty's. +├── semanticdb // Helpers for exporting semanticdb from trees. +├── transform // Miniphases and helpers for tree transformations. +├── typer // Type-checking and other frontend phases +└── util // General purpose utility classes and modules. +``` + +Contexts +-------- +`dotc` has almost no global state (the only significant bit of global state is +the name table, which is used to hash strings into unique names). Instead, all +essential bits of information that can vary over a compiler run are collected +in a [Context]. Most methods in `dotc` take a `Context` value as an implicit +parameter. + +Contexts give a convenient way to customize values in some part of the +call-graph. To run, e.g. some compiler function `f` at a given phase `phase`, +we invoke `f` with an explicit context parameter, like this + +```scala +f(/*normal args*/)(using ctx.withPhase(phase)) +``` + +This assumes that `f` is defined in the way most compiler functions are: + +```scala +def f(/*normal parameters*/)(implicit ctx: Context) ... +``` + +Compiler code follows the convention that all implicit `Context` parameters are +named `ctx`. This is important to avoid implicit ambiguities in the case where +nested methods contain each a Context parameters. The common name ensures then +that the implicit parameters properly shadow each other. + +Sometimes we want to make sure that implicit contexts are not captured in +closures or other long-lived objects, be it because we want to enforce that +nested methods each get their own implicit context, or because we want to avoid +a space leak in the case where a closure can survive several compiler runs. 
A +typical case is a completer for a symbol representing an external class, which +produces the attributes of the symbol on demand, and which might never be +invoked. In that case we follow the convention that any context parameter is +explicit, not implicit, so we can track where it is used, and that it has a +name different from `ctx`. Commonly used is `ictx` for "initialization +context". + +With these two conventions in place, it has turned out that implicit contexts +work amazingly well as a device for dependency injection and bulk +parameterization. There is of course always the danger that an unexpected +implicit will be passed, but in practice this has not turned out to be much of +a problem. + +Compiler Phases +--------------- +Seen from a temporal perspective, the `dotc` compiler consists of a list of +phases. The current list of phases is specified in class [Compiler] as follows: + +```scala + def phases: List[List[Phase]] = + frontendPhases ::: picklerPhases ::: transformPhases ::: backendPhases + + /** Phases dealing with the frontend up to trees ready for TASTY pickling */ + protected def frontendPhases: List[List[Phase]] = + List(new FrontEnd) :: // Compiler frontend: scanner, parser, namer, typer + List(new YCheckPositions) :: // YCheck positions + List(new Staging) :: // Check PCP, heal quoted types and expand macros + List(new sbt.ExtractDependencies) :: // Sends information on classes' dependencies to sbt via callbacks + List(new semanticdb.ExtractSemanticDB) :: // Extract info into .semanticdb files + List(new PostTyper) :: // Additional checks and cleanups after type checking + List(new sbt.ExtractAPI) :: // Sends a representation of the API of classes to sbt via callbacks + List(new SetRootTree) :: // Set the `rootTreeOrProvider` on class symbols + Nil + + /** Phases dealing with TASTY tree pickling and unpickling */ + protected def picklerPhases: List[List[Phase]] = + List(new Pickler) :: // Generate TASTY info + List(new ReifyQuotes) :: // Turn quoted trees into explicit run-time data structures + Nil + + /** Phases dealing with the transformation from pickled trees to backend trees */ + protected def transformPhases: List[List[Phase]] = + List(new FirstTransform, // Some transformations to put trees into a canonical form + new CheckReentrant, // Internal use only: Check that compiled program has no data races involving global vars + new ElimPackagePrefixes, // Eliminate references to package prefixes in Select nodes + new CookComments, // Cook the comments: expand variables, doc, etc. + new CompleteJavaEnums) :: // Fill in constructors for Java enums + List(new CheckStatic, // Check restrictions that apply to @static members + new ElimRepeated, // Rewrite vararg parameters and arguments + new ExpandSAMs, // Expand single abstract method closures to anonymous classes + new ProtectedAccessors, // Add accessors for protected members + new ExtensionMethods, // Expand methods of value classes with extension methods + new CacheAliasImplicits, // Cache RHS of parameterless alias implicits + new ShortcutImplicits, // Allow implicit functions without creating closures + new ByNameClosures, // Expand arguments to by-name parameters to closures + new HoistSuperArgs, // Hoist complex arguments of supercalls to enclosing scope + new ClassOf, // Expand `Predef.classOf` calls. 
+ new RefChecks) :: // Various checks mostly related to abstract members and overriding + List(new ElimOpaque, // Turn opaque into normal aliases + new TryCatchPatterns, // Compile cases in try/catch + new PatternMatcher, // Compile pattern matches + new ExplicitOuter, // Add accessors to outer classes from nested ones. + new ExplicitSelf, // Make references to non-trivial self types explicit as casts + new StringInterpolatorOpt, // Optimizes raw and s string interpolators by rewriting them to string concatentations + new CrossCastAnd) :: // Normalize selections involving intersection types. + List(new PruneErasedDefs, // Drop erased definitions from scopes and simplify erased expressions + new VCInlineMethods, // Inlines calls to value class methods + new SeqLiterals, // Express vararg arguments as arrays + new InterceptedMethods, // Special handling of `==`, `|=`, `getClass` methods + new Getters, // Replace non-private vals and vars with getter defs (fields are added later) + new ElimByName, // Expand by-name parameter references + new CollectNullableFields, // Collect fields that can be nulled out after use in lazy initialization + new ElimOuterSelect, // Expand outer selections + new AugmentScala2Traits, // Augments Scala2 traits so that super accessors are made non-private + new ResolveSuper, // Implement super accessors + new FunctionXXLForwarders, // Add forwarders for FunctionXXL apply method + new TupleOptimizations, // Optimize generic operations on tuples + new ArrayConstructors) :: // Intercept creation of (non-generic) arrays and intrinsify. + List(new Erasure) :: // Rewrite types to JVM model, erasing all type parameters, abstract types and refinements. + List(new ElimErasedValueType, // Expand erased value types to their underlying implmementation types + new VCElideAllocations, // Peep-hole optimization to eliminate unnecessary value class allocations + new ArrayApply, // Optimize `scala.Array.apply([....])` and `scala.Array.apply(..., [....])` into `[...]` + new ElimPolyFunction, // Rewrite PolyFunction subclasses to FunctionN subclasses + new TailRec, // Rewrite tail recursion to loops + new Mixin, // Expand trait fields and trait initializers + new LazyVals, // Expand lazy vals + new Memoize, // Add private fields to getters and setters + new NonLocalReturns, // Expand non-local returns + new CapturedVars) :: // Represent vars captured by closures as heap objects + List(new Constructors, // Collect initialization code in primary constructors + // Note: constructors changes decls in transformTemplate, no InfoTransformers should be added after it + new FunctionalInterfaces, // Rewrites closures to implement @specialized types of Functions. + new Instrumentation, // Count closure allocations under -Yinstrument-closures + new GetClass, // Rewrites getClass calls on primitive types. + new LiftTry) :: // Put try expressions that might execute on non-empty stacks into their own methods their implementations + List(new LambdaLift, // Lifts out nested functions to class scope, storing free variables in environments + // Note: in this mini-phase block scopes are incorrect. 
No phases that rely on scopes should be here + new ElimStaticThis) :: // Replace `this` references to static objects by global identifiers + List(new Flatten, // Lift all inner classes to package scope + new RenameLifted, // Renames lifted classes to local numbering scheme + new TransformWildcards, // Replace wildcards with default values + new MoveStatics, // Move static methods from companion to the class itself + new ExpandPrivate, // Widen private definitions accessed from nested classes + new RestoreScopes, // Repair scopes rendered invalid by moving definitions in prior phases of the group + new SelectStatic, // get rid of selects that would be compiled into GetStatic + new sjs.JUnitBootstrappers, // Generate JUnit-specific bootstrapper classes for Scala.js (not enabled by default) + new CollectEntryPoints, // Find classes with main methods + new CollectSuperCalls) :: // Find classes that are called with super + Nil + + /** Generate the output of the compilation */ + protected def backendPhases: List[List[Phase]] = + List(new sjs.GenSJSIR) :: // Generate .sjsir files for Scala.js (not enabled by default) + List(new GenBCode) :: // Generate JVM bytecode + Nil +``` + +Note that phases are grouped, so the `phases` method is of type +`List[List[Phase]]`. The idea is that all phases in a group are *fused* into a +single tree traversal. That way, phases can be kept small (most phases perform +a single function) without requiring an excessive number of tree traversals +(which are costly, because they have generally bad cache locality). + +Phases fall into four categories: + +* Frontend phases: `Frontend`, `PostTyper` and `Pickler`. `FrontEnd` parses the + source programs and generates untyped abstract syntax trees, which are then + typechecked and transformed into typed abstract syntax trees. `PostTyper` + performs checks and cleanups that require a fully typed program. In + particular, it + + - creates super accessors representing `super` calls in traits + - creates implementations of synthetic (compiler-implemented) methods + - avoids storing parameters passed unchanged from subclass to superclass in + duplicate fields. + + Finally `Pickler` serializes the typed syntax trees produced by the frontend + as TASTY data structures. + +* High-level transformations: All phases from `FirstTransform` to `Erasure`. + Most of these phases transform syntax trees, expanding high-level constructs + to more primitive ones. The last phase in the group, `Erasure` translates all + types into types supported directly by the JVM. To do this, it performs + another type checking pass, but using the rules of the JVM's type system + instead of Scala's. + +* Low-level transformations: All phases from `ElimErasedValueType` to + `CollectSuperCalls`. These further transform trees until they are essentially a + structured version of Java bytecode. + +* Code generators: These map the transformed trees to Java classfiles or + .sjsir files. 
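+
+As a quick way to see how the phases are grouped, one can inspect the
+`phases` method directly. The following is a rough sketch, not part of the
+compiler itself; it assumes only the `phases` method shown above and the
+`phaseName` member of `Phase`:
+
+```scala
+import dotty.tools.dotc.Compiler
+
+object ShowPhases {
+  def main(args: Array[String]): Unit =
+    // Each inner list is one fused group, traversed in a single tree pass.
+    new Compiler().phases.zipWithIndex.foreach { case (group, i) =>
+      println(s"group $i: " + group.map(_.phaseName).mkString(", "))
+    }
+}
+```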
+
+[dotty.tools]: https://github.com/lampepfl/dotty/tree/master/compiler/src/dotty/tools
+[dotc]: https://github.com/lampepfl/dotty/tree/master/compiler/src/dotty/tools/dotc
+[Main]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/Main.scala
+[Driver]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/Driver.scala
+[Compiler]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/Compiler.scala
+[Run]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/Run.scala
+[Context]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/Contexts.scala
diff --git a/scala3doc/dotty-docs/docs/docs/internals/periods.md b/scala3doc/dotty-docs/docs/docs/internals/periods.md
new file mode 100644
index 000000000000..05123298617c
--- /dev/null
+++ b/scala3doc/dotty-docs/docs/docs/internals/periods.md
@@ -0,0 +1,93 @@
+---
+layout: doc-page
+title: "Dotc's concept of time"
+---
+
+Conceptually, the `dotc` compiler's job is to maintain views of various
+artifacts associated with source code at all points in time. But what is
+*time* for `dotc`? In fact, it is a combination of compiler runs and compiler
+phases.
+
+The *hours* of the compiler's clocks are measured in compiler [runs]. Every run
+creates a new hour, which follows all the compiler runs (hours) that happened
+before. `dotc` is designed to be used as an incremental compiler that can
+support incremental builds, as well as interactions in an IDE and a REPL. This
+means that new runs can occur quite frequently. At the extreme, every
+keystroke in an editor or REPL can potentially launch a new compiler run, so
+potentially an "hour" of compiler time might take only a fraction of a second
+in real time.
+
+The *minutes* of the compiler's clocks are measured in phases. At every
+compiler run, the compiler cycles through a number of [phases]. The list of
+phases is defined in the [Compiler] object. There are currently about 60 phases
+per run, so the minutes/hours analogy works out roughly. After every phase the
+view the compiler has of the world changes: trees are transformed, types are
+gradually simplified from Scala types to JVM types, definitions are rearranged,
+and so on.
+
+Many pieces of information in the compiler are time-dependent. For instance, a
+Scala symbol representing a definition has a type, but that type will usually
+change as one goes from the higher-level Scala view of things to the
+lower-level JVM view. There are different ways to deal with this. Many
+compilers change the type of a symbol destructively according to the "current
+phase". Another, more functional approach might be to have different symbols
+representing the same definition at different phases, with each symbol
+carrying a different immutable type. `dotc` employs yet another scheme, which
+is inspired by functional reactive programming (FRP): Symbols carry not a
+single type, but a function from compiler phase to type. So the type of a
+symbol is a time-indexed function, where time ranges over compiler phases.
+
+Typically, the definition of a symbol or other quantity remains stable for a
+number of phases. This leads us to the concept of a [period]. Conceptually,
+a period is an interval of some given phases in a given compiler run.
Periods +are conceptually represented by three pieces of information + +* the ID of the current run, +* the ID of the phase starting the period +* the number of phases in the period + +All three pieces of information are encoded in a value class over a 32 bit +integer. Here's the API for class `Period`: + +```scala +class Period(val code: Int) extends AnyVal { + def runId: RunId // The run identifier of this period. + def firstPhaseId: PhaseId // The first phase of this period + def lastPhaseId: PhaseId // The last phase of this period + def phaseId: PhaseId // The phase identifier of this single-phase period + + def containsPhaseId(id: PhaseId): Boolean + def contains(that: Period): Boolean + def overlaps(that: Period): Boolean + + def & (that: Period): Period + def | (that: Period): Period +} +``` + +We can access the parts of a period using `runId`, `firstPhaseId`, +`lastPhaseId`, or using `phaseId` for periods consisting only of a single +phase. They return `RunId` or `PhaseId` values, which are aliases of `Int`. +`containsPhaseId`, `contains` and `overlaps` test whether a period contains a +phase or a period as a sub-interval, or whether the interval overlaps with +another period. Finally, `&` and `|` produce the intersection and the union of +two period intervals (the union operation `|` takes as `runId` the `runId` of +its left operand, as periods spanning different `runId`s cannot be constructed. + +Periods are constructed using two `apply` methods: + +```scala +object Period { + /** The single-phase period consisting of given run id and phase id */ + def apply(rid: RunId, pid: PhaseId): Period + + /** The period consisting of given run id, and lo/hi phase ids */ + def apply(rid: RunId, loPid: PhaseId, hiPid: PhaseId): Period +} +``` + +As a sentinel value there's `Nowhere`, a period that is empty. + +[runs]: https://github.com/lampepfl/dotty/blob/a527f3b1e49c0d48148ccfb2eb52e3302fc4a349/compiler/src/dotty/tools/dotc/Run.scala +[phases]: https://github.com/lampepfl/dotty/blob/a527f3b1e49c0d48148ccfb2eb52e3302fc4a349/compiler/src/dotty/tools/dotc/core/Phases.scala +[period]: https://github.com/lampepfl/dotty/blob/a527f3b1e49c0d48148ccfb2eb52e3302fc4a349/compiler/src/dotty/tools/dotc/core/Periods.scala diff --git a/scala3doc/dotty-docs/docs/docs/internals/syntax-3.1.md b/scala3doc/dotty-docs/docs/docs/internals/syntax-3.1.md new file mode 100644 index 000000000000..5a3e7b912161 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/internals/syntax-3.1.md @@ -0,0 +1,378 @@ +--- +layout: doc-page +title: "Scala Syntax Summary" +--- + +The following descriptions of Scala tokens uses literal characters `‘c’` when +referring to the ASCII fragment `\u0000` – `\u007F`. + +_Unicode escapes_ are used to represent the Unicode character with the given +hexadecimal code: + +```ebnf +UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit +hexDigit ::= ‘0’ | … | ‘9’ | ‘A’ | … | ‘F’ | ‘a’ | … | ‘f’ +``` + +Informal descriptions are typeset as `“some comment”`. + +### Lexical Syntax +The lexical syntax of Scala is given by the following grammar in EBNF +form. 
+ +```ebnf +whiteSpace ::= ‘\u0020’ | ‘\u0009’ | ‘\u000D’ | ‘\u000A’ +upper ::= ‘A’ | … | ‘Z’ | ‘\$’ | ‘_’ “… and Unicode category Lu” +lower ::= ‘a’ | … | ‘z’ “… and Unicode category Ll” +letter ::= upper | lower “… and Unicode categories Lo, Lt, Nl” +digit ::= ‘0’ | … | ‘9’ +paren ::= ‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’ | ‘'(’ | ‘'[’ | ‘'{’ +delim ::= ‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’ +opchar ::= “printableChar not matched by (whiteSpace | upper | lower | + letter | digit | paren | delim | opchar | Unicode_Sm | + Unicode_So)” +printableChar ::= “all characters in [\u0020, \u007F] inclusive” +charEscapeSeq ::= ‘\’ (‘b’ | ‘t’ | ‘n’ | ‘f’ | ‘r’ | ‘"’ | ‘'’ | ‘\’) + +op ::= opchar {opchar} +varid ::= lower idrest +alphaid ::= upper idrest + | varid +plainid ::= alphaid + | op +id ::= plainid + | ‘`’ { charNoBackQuoteOrNewline | UnicodeEscape | charEscapeSeq } ‘`’ +idrest ::= {letter | digit} [‘_’ op] +quoteId ::= ‘'’ alphaid + +integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’] +decimalNumeral ::= ‘0’ | nonZeroDigit {digit} +hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit {hexDigit} +digit ::= ‘0’ | nonZeroDigit +nonZeroDigit ::= ‘1’ | … | ‘9’ + +floatingPointLiteral + ::= digit {digit} ‘.’ {digit} [exponentPart] [floatType] + | ‘.’ digit {digit} [exponentPart] [floatType] + | digit {digit} exponentPart [floatType] + | digit {digit} [exponentPart] floatType +exponentPart ::= (‘E’ | ‘e’) [‘+’ | ‘-’] digit {digit} +floatType ::= ‘F’ | ‘f’ | ‘D’ | ‘d’ + +booleanLiteral ::= ‘true’ | ‘false’ + +characterLiteral ::= ‘'’ (printableChar | charEscapeSeq) ‘'’ + +stringLiteral ::= ‘"’ {stringElement} ‘"’ + | ‘"""’ multiLineChars ‘"""’ +stringElement ::= printableChar \ (‘"’ | ‘\’) + | UnicodeEscape + | charEscapeSeq +multiLineChars ::= {[‘"’] [‘"’] char \ ‘"’} {‘"’} +processedStringLiteral + ::= alphaid ‘"’ {printableChar \ (‘"’ | ‘$’) | escape} ‘"’ + | alphaid ‘"""’ {[‘"’] [‘"’] char \ (‘"’ | ‘$’) | escape} {‘"’} ‘"""’ +escape ::= ‘$$’ + | ‘$’ letter { letter | digit } + | ‘{’ Block [‘;’ whiteSpace stringFormat whiteSpace] ‘}’ +stringFormat ::= {printableChar \ (‘"’ | ‘}’ | ‘ ’ | ‘\t’ | ‘\n’)} + +comment ::= ‘/*’ “any sequence of characters; nested comments are allowed” ‘*/’ + | ‘//’ “any sequence of characters up to end of line” + +nl ::= “new line character” +semi ::= ‘;’ | nl {nl} +``` + +## Keywords + +### Regular keywords + +``` +abstract case catch class def do else enum +erased extends false final finally for given if +implied import lazy match new null object package +private protected override return super sealed then throw +trait true try type val var while yield +: = <- => <: :> # @ +``` + +### Soft keywords + +``` +derives inline opaque +* | & + - +``` + +## Context-free Syntax + +The context-free syntax of Scala is given by the following EBNF +grammar: + +### Literals and Paths +```ebnf +SimpleLiteral ::= [‘-’] integerLiteral + | [‘-’] floatingPointLiteral + | booleanLiteral + | characterLiteral + | stringLiteral +Literal ::= SimpleLiteral + | processedStringLiteral + | ‘null’ + +QualId ::= id {‘.’ id} +ids ::= id {‘,’ id} + +Path ::= StableId + | [id ‘.’] ‘this’ +StableId ::= id + | Path ‘.’ id + | [id ‘.’] ‘super’ [ClassQualifier] ‘.’ id +ClassQualifier ::= ‘[’ id ‘]’ +``` + +### Types +```ebnf +Type ::= { ‘erased’ | ‘given’} FunArgTypes ‘=>’ Type + | HkTypeParamClause ‘=>’ Type + | MatchType + | InfixType +FunArgTypes ::= InfixType + | ‘(’ [ FunArgType {‘,’ FunArgType } ] ‘)’ + | ‘(’ TypedFunParam {‘,’ TypedFunParam } ‘)’ +TypedFunParam ::= id ‘:’ Type +MatchType ::= InfixType `match` 
TypeCaseClauses +InfixType ::= RefinedType {id [nl] RefinedType} +RefinedType ::= AnnotType {[nl] Refinement} +AnnotType ::= SimpleType {Annotation} +SimpleType ::= SimpleType TypeArgs + | SimpleType ‘#’ id + | StableId + | Path ‘.’ ‘type’ + | ‘(’ ArgTypes ‘)’ + | ‘_’ SubtypeBounds + | Refinement + | SimpleLiteral + | ‘$’ ‘{’ Block ‘}’ +ArgTypes ::= Type {‘,’ Type} +FunArgType ::= Type + | ‘=>’ Type +ParamType ::= [‘=>’] ParamValueType +ParamValueType ::= Type [‘*’] +TypeArgs ::= ‘[’ ArgTypes ‘]’ +Refinement ::= ‘{’ [RefineDcl] {semi [RefineDcl]} ‘}’ +SubtypeBounds ::= [‘>:’ Type] [‘<:’ Type] +TypeParamBounds ::= SubtypeBounds {‘:’ Type} +``` + +### Expressions +```ebnf +Expr ::= [‘given’] [‘erased’] FunParams ‘=>’ Expr + | Expr1 +FunParams ::= Bindings + | id + | ‘_’ +Expr1 ::= ‘if’ Expr ‘then’ Expr [[semi] ‘else’ Expr] + | ‘while’ Expr ‘do’ Expr + | ‘try’ Expr ‘catch’ Expr [‘finally’ Expr] + | ‘try’ Expr ‘finally’ Expr + | ‘throw’ Expr + | ‘return’ [Expr] + | ‘for’ Enumerators (‘do’ Expr | ‘yield’ Expr) + | [SimpleExpr ‘.’] id ‘=’ Expr + | SimpleExpr1 ArgumentExprs ‘=’ Expr + | InfixExpr [Ascription] + | [‘inline’] InfixExpr ‘match’ ‘{’ CaseClauses ‘}’ + | ‘implied’ ‘match’ ‘{’ ImpliedCaseClauses ‘}’ +Ascription ::= ‘:’ InfixType + | ‘:’ Annotation {Annotation} +InfixExpr ::= PrefixExpr + | InfixExpr id [nl] InfixExpr + | InfixExpr ‘given’ (InfixExpr | ParArgumentExprs) +PrefixExpr ::= [‘-’ | ‘+’ | ‘~’ | ‘!’] SimpleExpr +SimpleExpr ::= ‘new’ (ConstrApp [TemplateBody] | TemplateBody) + | BlockExpr + | ‘$’ ‘{’ Block ‘}’ + | Quoted + | quoteId // only inside splices + | SimpleExpr1 +SimpleExpr1 ::= Literal + | Path + | ‘_’ + | ‘(’ ExprsInParens ‘)’ + | SimpleExpr ‘.’ id + | SimpleExpr TypeArgs + | SimpleExpr1 ArgumentExprs + | XmlExpr +Quoted ::= ‘'’ ‘{’ Block ‘}’ + | ‘'’ ‘[’ Type ‘]’ +ExprsInParens ::= ExprInParens {‘,’ ExprInParens} +ExprInParens ::= InfixExpr ‘:’ Type + | Expr +ParArgumentExprs ::= ‘(’ ExprsInParens ‘)’ + | ‘(’ [ExprsInParens ‘,’] InfixExpr ‘:’ ‘_’ ‘*’ ‘)’ +ArgumentExprs ::= ParArgumentExprs + | [nl] BlockExpr +BlockExpr ::= ‘{’ CaseClauses | Block ‘}’ +Block ::= {BlockStat semi} [Expr] +BlockStat ::= Import + | {Annotation [nl]} {LocalModifier} Def + | Expr1 + +Enumerators ::= Generator {semi Enumerator | Guard} +Enumerator ::= Generator + | Guard + | Pattern1 ‘=’ Expr +Generator ::= Pattern1 ‘<-’ Expr +Guard ::= ‘if’ PostfixExpr + +CaseClauses ::= CaseClause { CaseClause } +CaseClause ::= ‘case’ Pattern [Guard] ‘=>’ Block +ImpliedCaseClauses::= ImpliedCaseClause { ImpliedCaseClause } +ImpliedCaseClause ::= ‘case’ PatVar [‘:’ RefinedType] [Guard] ‘=>’ Block +TypeCaseClauses ::= TypeCaseClause { TypeCaseClause } +TypeCaseClause ::= ‘case’ InfixType ‘=>’ Type [nl] + +Pattern ::= Pattern1 { ‘|’ Pattern1 } +Pattern1 ::= PatVar ‘:’ RefinedType + | Pattern2 +Pattern2 ::= [id ‘@’] InfixPattern +InfixPattern ::= SimplePattern { id [nl] SimplePattern } +SimplePattern ::= PatVar + | Literal + | ‘(’ [Patterns] ‘)’ + | Quoted + | XmlPattern + | SimplePattern1 [TypeArgs] [ArgumentPatterns] +SimplePattern1 ::= Path + | SimplePattern1 ‘.’ id +PatVar ::= varid + | ‘_’ +Patterns ::= Pattern {‘,’ Pattern} +ArgumentPatterns ::= ‘(’ [Patterns] ‘)’ + | ‘(’ [Patterns ‘,’] Pattern2 ‘:’ ‘_’ ‘*’ ‘)’ +``` + +### Type and Value Parameters +```ebnf +ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ +ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] id [HkTypeParamClause] TypeParamBounds + +DefTypeParamClause::= ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ +DefTypeParam ::= {Annotation} id 
[HkTypeParamClause] TypeParamBounds + +TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’ +TypTypeParam ::= {Annotation} id [HkTypeParamClause] SubtypeBounds + +HkTypeParamClause ::= ‘[’ HkTypeParam {‘,’ HkTypeParam} ‘]’ +HkTypeParam ::= {Annotation} [‘+’ | ‘-’] (Id[HkTypeParamClause] | ‘_’) SubtypeBounds + +ClsParamClause ::= [nl] [‘erased’] ‘(’ [ClsParams] ‘)’ + | ‘given’ [‘erased’] (‘(’ ClsParams ‘)’ | GivenTypes) +ClsParams ::= ClsParam {‘,’ ClsParam} +ClsParam ::= {Annotation} [{Modifier} (‘val’ | ‘var’) | ‘inline’] Param +Param ::= id ‘:’ ParamType [‘=’ Expr] + +DefParamClause ::= [nl] [‘erased’] ‘(’ [DefParams] ‘)’ | GivenParamClause +GivenParamClause ::= ‘given’ [‘erased’] (‘(’ DefParams ‘)’ | GivenTypes) +DefParams ::= DefParam {‘,’ DefParam} +DefParam ::= {Annotation} [‘inline’] Param +GivenTypes ::= AnnotType {‘,’ AnnotType} +``` + +### Bindings, Imports, and Exports +```ebnf +Bindings ::= ‘(’ Binding {‘,’ Binding} ‘)’ +Binding ::= (id | ‘_’) [‘:’ Type] + +Modifier ::= LocalModifier + | AccessModifier + | ‘override’ +LocalModifier ::= ‘abstract’ + | ‘final’ + | ‘sealed’ + | ‘lazy’ + | ‘opaque’ + | ‘inline’ + | ‘erased’ +AccessModifier ::= (‘private’ | ‘protected’) [AccessQualifier] +AccessQualifier ::= ‘[’ (id | ‘this’) ‘]’ + +Annotation ::= ‘@’ SimpleType {ParArgumentExprs} + +Import ::= ‘import’ [‘implied’] ImportExpr {‘,’ ImportExpr} +ImportExpr ::= StableId ‘.’ (id | ‘_’ | ImportSelectors) +ImportSelectors ::= ‘{’ {ImportSelector ‘,’} (ImportSelector | ‘_’) ‘}’ +ImportSelector ::= id [‘=>’ id | ‘=>’ ‘_’] +Export ::= ‘export’ [‘implied’] ImportExpr {‘,’ ImportExpr} +``` + +### Declarations and Definitions +```ebnf +RefineDcl ::= ‘val’ ValDcl + | ‘def’ DefDcl + | ‘type’ {nl} TypeDcl +Dcl ::= RefineDcl + | ‘var’ ValDcl +ValDcl ::= ids ‘:’ Type +DefDcl ::= DefSig [‘:’ Type] +DefSig ::= ‘(’ DefParam ‘)’ [nl] id [DefTypeParamClause] {DefParamClause} +TypeDcl ::= id [TypeParamClause] (SubtypeBounds | ‘=’ Type) + | id [TypeParamClause] <: Type = MatchType + +Def ::= ‘val’ PatDef + | ‘var’ VarDef + | ‘def’ DefDef + | ‘type’ {nl} TypeDcl + | ([‘case’] ‘class’ | ‘trait’) ClassDef + | [‘case’] ‘object’ ObjectDef + | ‘enum’ EnumDef + | ‘implied’ InstanceDef + +PatDef ::= Pattern2 {‘,’ Pattern2} [‘:’ Type] ‘=’ Expr +VarDef ::= PatDef + | ids ‘:’ Type ‘=’ ‘_’ +DefDef ::= DefSig [(‘:’ | ‘<:’) Type] ‘=’ Expr + | ‘this’ DefParamClause {DefParamClause} ‘=’ ConstrExpr +ClassDef ::= id ClassConstr [Template]= +ClassConstr ::= [ClsTypeParamClause] [ConstrMods] {ClsParamClause} +ConstrMods ::= {Annotation} [AccessModifier] +ObjectDef ::= id [Template] +EnumDef ::= id ClassConstr InheritClauses EnumBody +InstanceDef ::= [id] InstanceParams InstanceBody +InstanceParams ::= [DefTypeParamClause] {GivenParamClause} +InstanceBody ::= [‘for’ ConstrApp {‘,’ ConstrApp }] [TemplateBody] + | ‘for’ Type ‘=’ Expr +Template ::= InheritClauses [TemplateBody] +InheritClauses ::= [‘extends’ ConstrApps] [‘derives’ QualId {‘,’ QualId}] +ConstrApps ::= ConstrApp {‘,’ ConstrApp} +ConstrApp ::= AnnotType {ArgumentExprs} +ConstrExpr ::= SelfInvocation + | {’ SelfInvocation {semi BlockStat} ‘}’ +SelfInvocation ::= ‘this’ ArgumentExprs {ArgumentExprs} + +TemplateBody ::= [nl] ‘{’ [SelfType] TemplateStat {semi TemplateStat} ‘}’ +TemplateStat ::= Import + | Export + | {Annotation [nl]} {Modifier} Def + | {Annotation [nl]} {Modifier} Dcl + | Expr1 + | +SelfType ::= id [‘:’ InfixType] ‘=>’ + | ‘this’ ‘:’ InfixType ‘=>’ + +EnumBody ::= [nl] ‘{’ [SelfType] EnumStat {semi EnumStat} ‘}’ +EnumStat ::= TemplateStat + | 
{Annotation [nl]} {Modifier} EnumCase +EnumCase ::= ‘case’ (id ClassConstr [‘extends’ ConstrApps]] | ids) + +TopStatSeq ::= TopStat {semi TopStat} +TopStat ::= Import + | Export + | {Annotation [nl]} {Modifier} Def + | Packaging + | +Packaging ::= ‘package’ QualId [nl] ‘{’ TopStatSeq ‘}’ + +CompilationUnit ::= {‘package’ QualId semi} TopStatSeq +``` diff --git a/scala3doc/dotty-docs/docs/docs/internals/syntax.md b/scala3doc/dotty-docs/docs/docs/internals/syntax.md new file mode 100644 index 000000000000..4c0e48c6534a --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/internals/syntax.md @@ -0,0 +1,448 @@ +--- +layout: doc-page +title: "Scala Syntax Summary" +--- + +The following descriptions of Scala tokens uses literal characters `‘c’` when +referring to the ASCII fragment `\u0000` – `\u007F`. + +_Unicode escapes_ are used to represent the Unicode character with the given +hexadecimal code: + +```ebnf +UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit +hexDigit ::= ‘0’ | … | ‘9’ | ‘A’ | … | ‘F’ | ‘a’ | … | ‘f’ +``` + +Informal descriptions are typeset as `“some comment”`. + +### Lexical Syntax +The lexical syntax of Scala is given by the following grammar in EBNF +form. + +```ebnf +whiteSpace ::= ‘\u0020’ | ‘\u0009’ | ‘\u000D’ | ‘\u000A’ +upper ::= ‘A’ | … | ‘Z’ | ‘\$’ | ‘_’ “… and Unicode category Lu” +lower ::= ‘a’ | … | ‘z’ “… and Unicode category Ll” +letter ::= upper | lower “… and Unicode categories Lo, Lt, Nl” +digit ::= ‘0’ | … | ‘9’ +paren ::= ‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’ | ‘'(’ | ‘'[’ | ‘'{’ +delim ::= ‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’ +opchar ::= “printableChar not matched by (whiteSpace | upper | lower | + letter | digit | paren | delim | opchar | Unicode_Sm | + Unicode_So)” +printableChar ::= “all characters in [\u0020, \u007F] inclusive” +charEscapeSeq ::= ‘\’ (‘b’ | ‘t’ | ‘n’ | ‘f’ | ‘r’ | ‘"’ | ‘'’ | ‘\’) + +op ::= opchar {opchar} +varid ::= lower idrest +alphaid ::= upper idrest + | varid +plainid ::= alphaid + | op +id ::= plainid + | ‘`’ { charNoBackQuoteOrNewline | UnicodeEscape | charEscapeSeq } ‘`’ + | INT // interpolation id, only for quasi-quotes +idrest ::= {letter | digit} [‘_’ op] +quoteId ::= ‘'’ alphaid + +integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’] +decimalNumeral ::= ‘0’ | nonZeroDigit [{digit | ‘_’} digit] +hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit [{hexDigit | ‘_’} hexDigit] +nonZeroDigit ::= ‘1’ | … | ‘9’ + +floatingPointLiteral + ::= [decimalNumeral] ‘.’ digit [{digit | ‘_’} digit] [exponentPart] [floatType] + | decimalNumeral exponentPart [floatType] + | decimalNumeral floatType +exponentPart ::= (‘E’ | ‘e’) [‘+’ | ‘-’] digit [{digit | ‘_’} digit] +floatType ::= ‘F’ | ‘f’ | ‘D’ | ‘d’ + +booleanLiteral ::= ‘true’ | ‘false’ + +characterLiteral ::= ‘'’ (printableChar | charEscapeSeq) ‘'’ + +stringLiteral ::= ‘"’ {stringElement} ‘"’ + | ‘"""’ multiLineChars ‘"""’ +stringElement ::= printableChar \ (‘"’ | ‘\’) + | UnicodeEscape + | charEscapeSeq +multiLineChars ::= {[‘"’] [‘"’] char \ ‘"’} {‘"’} +processedStringLiteral + ::= alphaid ‘"’ {printableChar \ (‘"’ | ‘$’) | escape} ‘"’ + | alphaid ‘"""’ {[‘"’] [‘"’] char \ (‘"’ | ‘$’) | escape} {‘"’} ‘"""’ +escape ::= ‘$$’ + | ‘$’ letter { letter | digit } + | ‘{’ Block [‘;’ whiteSpace stringFormat whiteSpace] ‘}’ +stringFormat ::= {printableChar \ (‘"’ | ‘}’ | ‘ ’ | ‘\t’ | ‘\n’)} + +symbolLiteral ::= ‘'’ plainid // until 2.13 + +comment ::= ‘/*’ “any sequence of characters; nested comments are allowed” ‘*/’ + | ‘//’ “any sequence of characters up to end of line” + +nl ::= “new 
line character” +semi ::= ‘;’ | nl {nl} +colonEol ::= ": at end of line that can start a tenmplate body" +``` + +## Keywords + +### Regular keywords + +``` +abstract case catch class def do else enum +export extends false final finally for given if +implicit import lazy match new null object package +private protected override return super sealed then throw +trait true try type val var while with +yield +: = <- => <: :> # @ +=>> ?=> +``` + +### Soft keywords + +``` +as derives end extension inline opaque open transparent using +* + - +``` + +## Context-free Syntax + +The context-free syntax of Scala is given by the following EBNF +grammar: + +### Literals and Paths +```ebnf +SimpleLiteral ::= [‘-’] integerLiteral + | [‘-’] floatingPointLiteral + | booleanLiteral + | characterLiteral + | stringLiteral +Literal ::= SimpleLiteral + | processedStringLiteral + | symbolLiteral + | ‘null’ + +QualId ::= id {‘.’ id} +ids ::= id {‘,’ id} + +SimpleRef ::= id + | [id ‘.’] ‘this’ + | [id ‘.’] ‘super’ [ClassQualifier] ‘.’ id + +ClassQualifier ::= ‘[’ id ‘]’ +``` + +### Types +```ebnf +Type ::= FunType + | HkTypeParamClause ‘=>>’ Type LambdaTypeTree(ps, t) + | FunParamClause ‘=>>’ Type TermLambdaTypeTree(ps, t) + | MatchType + | InfixType +FunType ::= FunArgTypes (‘=>’ | ‘?=>’) Type Function(ts, t) + | HKTypeParamClause '=>' Type PolyFunction(ps, t) +FunArgTypes ::= InfixType + | ‘(’ [ FunArgType {‘,’ FunArgType } ] ‘)’ + | FunParamClause +FunParamClause ::= ‘(’ TypedFunParam {‘,’ TypedFunParam } ‘)’ +TypedFunParam ::= id ‘:’ Type +MatchType ::= InfixType `match` ‘{’ TypeCaseClauses ‘}’ +InfixType ::= RefinedType {id [nl] RefinedType} InfixOp(t1, op, t2) +RefinedType ::= WithType {[nl | colonEol] Refinement} RefinedTypeTree(t, ds) +WithType ::= AnnotType {‘with’ AnnotType} (deprecated) +AnnotType ::= SimpleType {Annotation} Annotated(t, annot) + +SimpleType ::= SimpleLiteral SingletonTypeTree(l) + | ‘?’ TypeBounds + | SimpleType1 { ‘(’ Singletons ‘)’ } +SimpleType1 ::= id Ident(name) + | Singleton ‘.’ id Select(t, name) + | Singleton ‘.’ ‘type’ SingletonTypeTree(p) + | ‘(’ ArgTypes ‘)’ Tuple(ts) + | Refinement RefinedTypeTree(EmptyTree, refinement) + | ‘$’ ‘{’ Block ‘}’ + | SimpleType1 TypeArgs AppliedTypeTree(t, args) + | SimpleType1 ‘#’ id Select(t, name) +Singleton ::= SimpleRef + | SimpleLiteral + | Singleton ‘.’ id +-- not yet | Singleton ‘(’ Singletons ‘)’ +-- not yet | Singleton ‘[’ ArgTypes ‘]’ +Singletons ::= Singleton { ‘,’ Singleton } +ArgTypes ::= Types +FunArgType ::= Type + | ‘=>’ Type PrefixOp(=>, t) +ParamType ::= [‘=>’] ParamValueType +ParamValueType ::= Type [‘*’] PostfixOp(t, "*") +TypeArgs ::= ‘[’ ArgTypes ‘]’ ts +NamedTypeArg ::= id ‘=’ Type NamedArg(id, t) +NamedTypeArgs ::= ‘[’ NamedTypeArg {‘,’ NamedTypeArg} ‘]’ nts +Refinement ::= ‘{’ [RefineDcl] {semi [RefineDcl]} ‘}’ ds +TypeBounds ::= [‘>:’ Type] [‘<:’ Type] TypeBoundsTree(lo, hi) +TypeParamBounds ::= TypeBounds {‘:’ Type} ContextBounds(typeBounds, tps) +Types ::= Type {‘,’ Type} +``` + +### Expressions +```ebnf +Expr ::= FunParams (‘=>’ | ‘?=>’) Expr Function(args, expr), Function(ValDef([implicit], id, TypeTree(), EmptyTree), expr) + | Expr1 +BlockResult ::= FunParams (‘=>’ | ‘?=>’) Block + | Expr1 +FunParams ::= Bindings + | id + | ‘_’ +Expr1 ::= [‘inline’] ‘if’ ‘(’ Expr ‘)’ {nl} Expr [[semi] ‘else’ Expr] If(Parens(cond), thenp, elsep?) + | [‘inline’] ‘if’ Expr ‘then’ Expr [[semi] ‘else’ Expr] If(cond, thenp, elsep?) 
+ | ‘while’ ‘(’ Expr ‘)’ {nl} Expr WhileDo(Parens(cond), body) + | ‘while’ Expr ‘do’ Expr WhileDo(cond, body) + | ‘try’ Expr Catches [‘finally’ Expr] Try(expr, catches, expr?) + | ‘try’ Expr [‘finally’ Expr] Try(expr, Nil, expr?) + | ‘throw’ Expr Throw(expr) + | ‘return’ [Expr] Return(expr?) + | ForExpr + | HkTypeParamClause ‘=>’ Expr PolyFunction(ts, expr) + | [SimpleExpr ‘.’] id ‘=’ Expr Assign(expr, expr) + | SimpleExpr1 ArgumentExprs ‘=’ Expr Assign(expr, expr) + | PostfixExpr [Ascription] + | ‘inline’ InfixExpr MatchClause +Ascription ::= ‘:’ InfixType Typed(expr, tp) + | ‘:’ Annotation {Annotation} Typed(expr, Annotated(EmptyTree, annot)*) +Catches ::= ‘catch’ (Expr | ExprCaseClause) +PostfixExpr ::= InfixExpr [id] PostfixOp(expr, op) +InfixExpr ::= PrefixExpr + | InfixExpr id [nl] InfixExpr InfixOp(expr, op, expr) + | InfixExpr MatchClause +MatchClause ::= ‘match’ ‘{’ CaseClauses ‘}’ Match(expr, cases) +PrefixExpr ::= [‘-’ | ‘+’ | ‘~’ | ‘!’] SimpleExpr PrefixOp(expr, op) +SimpleExpr ::= SimpleRef + | Literal + | ‘_’ + | BlockExpr + | ‘$’ ‘{’ Block ‘}’ + | Quoted + | quoteId // only inside splices + | ‘new’ ConstrApp {‘with’ ConstrApp} [TemplateBody] New(constr | templ) + | ‘new’ TemplateBody + | ‘(’ ExprsInParens ‘)’ Parens(exprs) + | SimpleExpr ‘.’ id Select(expr, id) + | SimpleExpr ‘.’ MatchClause + | SimpleExpr TypeArgs TypeApply(expr, args) + | SimpleExpr ArgumentExprs Apply(expr, args) + | SimpleExpr ‘_’ PostfixOp(expr, _) + | XmlExpr +Quoted ::= ‘'’ ‘{’ Block ‘}’ + | ‘'’ ‘[’ Type ‘]’ +ExprsInParens ::= ExprInParens {‘,’ ExprInParens} +ExprInParens ::= PostfixExpr ‘:’ Type -- normal Expr allows only RefinedType here + | Expr +ParArgumentExprs ::= ‘(’ [‘using’] ExprsInParens ‘)’ exprs + | ‘(’ [ExprsInParens ‘,’] PostfixExpr ‘:’ ‘_’ ‘*’ ‘)’ exprs :+ Typed(expr, Ident(wildcardStar)) +ArgumentExprs ::= ParArgumentExprs + | BlockExpr +BlockExpr ::= ‘{’ (CaseClauses | Block) ‘}’ +Block ::= {BlockStat semi} [BlockResult] Block(stats, expr?) 
+BlockStat ::= Import + | {Annotation {nl}} [‘implicit’ | ‘lazy’] Def + | {Annotation {nl}} {LocalModifier} TmplDef + | Extension + | Expr1 + | EndMarker + +ForExpr ::= ‘for’ (‘(’ Enumerators ‘)’ | ‘{’ Enumerators ‘}’) ForYield(enums, expr) + {nl} [‘yield’] Expr + | ‘for’ Enumerators (‘do’ Expr | ‘yield’ Expr) ForDo(enums, expr) +Enumerators ::= Generator {semi Enumerator | Guard} +Enumerator ::= Generator + | Guard + | Pattern1 ‘=’ Expr GenAlias(pat, expr) +Generator ::= [‘case’] Pattern1 ‘<-’ Expr GenFrom(pat, expr) +Guard ::= ‘if’ PostfixExpr + +CaseClauses ::= CaseClause { CaseClause } Match(EmptyTree, cases) +CaseClause ::= ‘case’ Pattern [Guard] ‘=>’ Block CaseDef(pat, guard?, block) // block starts at => +ExprCaseClause ::= ‘case’ Pattern [Guard] ‘=>’ Expr +ImplicitCaseClauses ::= ImplicitCaseClause { ImplicitCaseClause } +ImplicitCaseClause ::= ‘case’ PatVar [‘:’ RefinedType] [Guard] ‘=>’ Block +TypeCaseClauses ::= TypeCaseClause { TypeCaseClause } +TypeCaseClause ::= ‘case’ InfixType ‘=>’ Type [nl] + +Pattern ::= Pattern1 { ‘|’ Pattern1 } Alternative(pats) +Pattern1 ::= Pattern2 [‘:’ RefinedType] Bind(name, Typed(Ident(wildcard), tpe)) + | ‘given’ PatVar ‘:’ RefinedType +Pattern2 ::= [id ‘@’] InfixPattern Bind(name, pat) +InfixPattern ::= SimplePattern { id [nl] SimplePattern } InfixOp(pat, op, pat) +SimplePattern ::= PatVar Ident(wildcard) + | Literal Bind(name, Ident(wildcard)) + | ‘(’ [Patterns] ‘)’ Parens(pats) Tuple(pats) + | Quoted + | XmlPattern + | SimplePattern1 [TypeArgs] [ArgumentPatterns] +SimplePattern1 ::= SimpleRef + | SimplePattern1 ‘.’ id +PatVar ::= varid + | ‘_’ +Patterns ::= Pattern {‘,’ Pattern} +ArgumentPatterns ::= ‘(’ [Patterns] ‘)’ Apply(fn, pats) + | ‘(’ [Patterns ‘,’] Pattern2 ‘:’ ‘_’ ‘*’ ‘)’ +``` + +### Type and Value Parameters +```ebnf +ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ +ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] TypeDef(Modifiers, name, tparams, bounds) + id [HkTypeParamClause] TypeParamBounds Bound(below, above, context) + +DefTypeParamClause::= ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ +DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds + +TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’ +TypTypeParam ::= {Annotation} id [HkTypeParamClause] TypeBounds + +HkTypeParamClause ::= ‘[’ HkTypeParam {‘,’ HkTypeParam} ‘]’ +HkTypeParam ::= {Annotation} [‘+’ | ‘-’] (id [HkTypeParamClause] | ‘_’) + TypeBounds + +ClsParamClauses ::= {ClsParamClause} [[nl] ‘(’ [‘implicit’] ClsParams ‘)’] +ClsParamClause ::= [nl] ‘(’ ClsParams ‘)’ + | [nl] ‘(’ ‘using’ (ClsParams | Types) ‘)’ +ClsParams ::= ClsParam {‘,’ ClsParam} +ClsParam ::= {Annotation} ValDef(mods, id, tpe, expr) -- point of mods on val/var + [{Modifier} (‘val’ | ‘var’) | ‘inline’] Param +Param ::= id ‘:’ ParamType [‘=’ Expr] + | INT + +DefParamClauses ::= {DefParamClause} [[nl] ‘(’ [‘implicit’] DefParams ‘)’] +DefParamClause ::= [nl] ‘(’ DefParams ‘)’ | UsingParamClause +UsingParamClause ::= [nl] ‘(’ ‘using’ (DefParams | Types) ‘)’ +DefParams ::= DefParam {‘,’ DefParam} +DefParam ::= {Annotation} [‘inline’] Param ValDef(mods, id, tpe, expr) -- point of mods at id. 
+ClosureMods ::= { ‘implicit’ | ‘given’} +``` + +### Bindings and Imports +```ebnf +Bindings ::= ‘(’ [[‘using’] Binding {‘,’ Binding}] ‘)’ +Binding ::= (id | ‘_’) [‘:’ Type] ValDef(_, id, tpe, EmptyTree) + +Modifier ::= LocalModifier + | AccessModifier + | ‘override’ + | ‘opaque’ +LocalModifier ::= ‘abstract’ + | ‘final’ + | ‘sealed’ + | ‘open’ + | ‘implicit’ + | ‘lazy’ + | ‘inline’ +AccessModifier ::= (‘private’ | ‘protected’) [AccessQualifier] +AccessQualifier ::= ‘[’ id ‘]’ + +Annotation ::= ‘@’ SimpleType1 {ParArgumentExprs} Apply(tpe, args) + +Import ::= ‘import’ ImportExpr {‘,’ ImportExpr} +ImportExpr ::= SimpleRef {‘.’ id} ‘.’ ImportSpec Import(expr, sels) +ImportSpec ::= id + | ‘_’ + | ‘{’ ImportSelectors) ‘}’ +ImportSelectors ::= id [‘=>’ id | ‘=>’ ‘_’] [‘,’ ImportSelectors] + | WildCardSelector {‘,’ WildCardSelector} +WildCardSelector ::= ‘given’ (‘_' | InfixType) + | ‘_' +Export ::= ‘export’ [‘given’] ImportExpr {‘,’ ImportExpr} + +EndMarker ::= ‘end’ EndMarkerTag -- when followed by EOL +EndMarkerTag ::= id | ‘if’ | ‘while’ | ‘for’ | ‘match’ | ‘try’ + | ‘new’ | ‘this’ | ‘given’ | ‘extension’ | ‘val’ +``` + +### Declarations and Definitions +```ebnf +RefineDcl ::= ‘val’ ValDcl + | ‘def’ DefDcl + | ‘type’ {nl} TypeDcl + | INT +Dcl ::= RefineDcl + | ‘var’ VarDcl +ValDcl ::= ids ‘:’ Type PatDef(_, ids, tpe, EmptyTree) +VarDcl ::= ids ‘:’ Type PatDef(_, ids, tpe, EmptyTree) +DefDcl ::= DefSig ‘:’ Type DefDef(_, name, tparams, vparamss, tpe, EmptyTree) +DefSig ::= id [DefTypeParamClause] DefParamClauses + | ExtParamClause {nl} [‘.’] id DefParamClauses +TypeDcl ::= id [TypeParamClause] {FunParamClause} TypeBounds TypeDefTree(_, name, tparams, bound + [‘=’ Type] + +Def ::= ‘val’ PatDef + | ‘var’ VarDef + | ‘def’ DefDef + | ‘type’ {nl} TypeDcl + | TmplDef + | INT +PatDef ::= ids [‘:’ Type] ‘=’ Expr + | Pattern2 [‘:’ Type | Ascription] ‘=’ Expr PatDef(_, pats, tpe?, expr) +VarDef ::= PatDef + | ids ‘:’ Type ‘=’ ‘_’ +DefDef ::= DefSig [‘:’ Type] ‘=’ Expr DefDef(_, name, tparams, vparamss, tpe, expr) + | ‘this’ DefParamClause DefParamClauses ‘=’ ConstrExpr DefDef(_, , Nil, vparamss, EmptyTree, expr | Block) + +TmplDef ::= ([‘case’] ‘class’ | [‘super’] ‘trait’) ClassDef + | [‘case’] ‘object’ ObjectDef + | ‘enum’ EnumDef + | ‘given’ GivenDef +ClassDef ::= id ClassConstr [Template] ClassDef(mods, name, tparams, templ) +ClassConstr ::= [ClsTypeParamClause] [ConstrMods] ClsParamClauses with DefDef(_, , Nil, vparamss, EmptyTree, EmptyTree) as first stat +ConstrMods ::= {Annotation} [AccessModifier] +ObjectDef ::= id [Template] ModuleDef(mods, name, template) // no constructor +EnumDef ::= id ClassConstr InheritClauses EnumBody EnumDef(mods, name, tparams, template) +GivenDef ::= [GivenSig] Type ‘=’ Expr + | [GivenSig] ConstrApps [TemplateBody] +GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘as’ +Extension ::= ‘extension’ [DefTypeParamClause] ‘(’ DefParam ‘)’ + {UsingParamClause}] ExtMethods +ExtMethods ::= ExtMethod | [nl] ‘{’ ExtMethod {semi ExtMethod ‘}’ +ExtMethod ::= {Annotation [nl]} {Modifier} ‘def’ DefDef +Template ::= InheritClauses [TemplateBody] Template(constr, parents, self, stats) +InheritClauses ::= [‘extends’ ConstrApps] [‘derives’ QualId {‘,’ QualId}] +ConstrApps ::= ConstrApp {(‘,’ | ‘with’) ConstrApp} +ConstrApp ::= SimpleType1 {Annotation} {ParArgumentExprs} Apply(tp, args) +ConstrExpr ::= SelfInvocation + | ‘{’ SelfInvocation {semi BlockStat} ‘}’ +SelfInvocation ::= ‘this’ ArgumentExprs {ArgumentExprs} + +TemplateBody ::= [nl | colonEol] + ‘{’ [SelfType] 
TemplateStat {semi TemplateStat} ‘}’ (self, stats) +TemplateStat ::= Import + | Export + | {Annotation [nl]} {Modifier} Def + | {Annotation [nl]} {Modifier} Dcl + | Extension + | Expr1 + | EndMarker + | +SelfType ::= id [‘:’ InfixType] ‘=>’ ValDef(_, name, tpt, _) + | ‘this’ ‘:’ InfixType ‘=>’ + +EnumBody ::= [nl | colonEol] + ‘{’ [SelfType] EnumStat {semi EnumStat} ‘}’ +EnumStat ::= TemplateStat + | {Annotation [nl]} {Modifier} EnumCase +EnumCase ::= ‘case’ (id ClassConstr [‘extends’ ConstrApps]] | ids) + +TopStatSeq ::= TopStat {semi TopStat} +TopStat ::= Import + | Export + | {Annotation [nl]} {Modifier} Def + | Extension + | Packaging + | PackageObject + | EndMarker + | +Packaging ::= ‘package’ QualId [nl | colonEol] ‘{’ TopStatSeq ‘}’ Package(qid, stats) +PackageObject ::= ‘package’ ‘object’ ObjectDef object with package in mods. + +CompilationUnit ::= {‘package’ QualId semi} TopStatSeq Package(qid, stats) +``` diff --git a/scala3doc/dotty-docs/docs/docs/internals/type-system.md b/scala3doc/dotty-docs/docs/docs/internals/type-system.md new file mode 100644 index 000000000000..30aa907f6773 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/internals/type-system.md @@ -0,0 +1,138 @@ +--- +layout: doc-page +title: "Type System" +--- + +The types are defined in [dotty/tools/dotc/core/Types.scala][1] + +## Class diagram ## +- [PDF][2], generated with [a fork of scaladiagrams][3] + +## Proxy types and ground types ## +A type which inherits `TypeProxy` is a proxy for another type accessible using +the `underlying` method, other types are called _ground_ types and inherit +`CachedGroundType` or `UncachedGroundType`. + +Here's a diagram, copied from [dotty/tools/dotc/core/Types.scala][1]: + +``` +Type -+- ProxyType --+- NamedType ----+--- TypeRef + | | \ + | +- SingletonType-+-+- TermRef + | | | + | | +--- ThisType + | | +--- SuperType + | | +--- ConstantType + | | +--- MethodParam + | | +----RecThis + | | +--- SkolemType + | +- PolyParam + | +- RefinedOrRecType -+-- RefinedType + | | -+-- RecType + | +- HKApply + | +- TypeBounds + | +- ExprType + | +- AnnotatedType + | +- TypeVar + | +- PolyType + | + +- GroundType -+- AndType + +- OrType + +- MethodType -----+- ImplicitMethodType + | +- JavaMethodType + +- ClassInfo + | + +- NoType + +- NoPrefix + +- ErrorType + +- WildcardType + +``` + +## Representations of types ## + Type | Representation + ------------------------- | ----------------------------- + `p.x.type` | `TermRef(p, x)` + `p#T` | `TypeRef(p, T)` + `p.x.T` == `p.x.type#T` | `TypeRef(TermRef(p, x), T)` + `this.type` | `ThisType` + `A & B` | `AndType(A, B)` + A \| B | `OrType(A, B)` + `=> T` | `ExprType(T)` + `p { refinedName }` | `RefinedType(p, refinedName)` + type of the value `super` | `SuperType` + `type T >: A <: B` | `TypeRef` with underlying type `RealTypeBounds(A, B)` + `type T = A` | `TypeRef` with underlying type `TypeAlias(A)` + `class p.C ...` | `ClassInfo(p, C, ...)` + +### Representation of methods ### +```scala +def f[A, B <: Ord[A]](x: A, y: B): Unit +``` +is represented as: + +```scala +val p = PolyType(List("A", "B"))( + List(TypeBounds(Nothing, Any), + TypeBounds(Nothing, + RefinedType(Ordering, + scala$math$Ordering$$T, TypeAlias(PolyParam(p, 0))))), + m) + +val m = MethodType(List("x", "y"), + List(PolyParam(p, 0), PolyParam(p, 1)))(Unit) +``` +(This is a slightly simplified version, e.g. we write `Unit` instead of +`TypeRef(TermRef(ThisType(TypeRef(NoPrefix,)),scala),Unit)`). 
+ +Note that a PolyParam refers to a type parameter using its index (here A is 0 +and B is 1). + +## Subtyping checks ## +`topLevelSubType(tp1, tp2)` in [dotty/tools/dotc/core/TypeComparer.scala][4] +checks if `tp1` is a subtype of `tp2`. + +### Type rebasing ### +**FIXME**: This section is no longer accurate because +https://github.com/lampepfl/dotty/pull/331 changed the handling of refined +types. + +Consider [tests/pos/refinedSubtyping.scala][5] +```scala +class Test { + + class C { type T; type Coll } + + type T1 = C { type T = Int } + + type T11 = T1 { type Coll = Set[Int] } + + type T2 = C { type Coll = Set[T] } + + type T22 = T2 { type T = Int } + + var x: T11 = _ + var y: T22 = _ + + x = y + y = x + +} +``` +We want to do the subtyping checks recursively, since it would be nice if we +could check if `T22 <: T11` by first checking if `T2 <: T1`. To achieve this +recursive subtyping check, we remember that `T2#T` is really `T22#T`. This +procedure is called rebasing and is done by storing refined names in +`pendingRefinedBases` and looking them up using `rebase`. + +## Type caching ## +TODO + +## Type inference via constraint solving ## +TODO + +[1]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/Types.scala +[2]: https://github.com/samuelgruetter/dotty/blob/classdiagrampdf/dotty-types.pdf +[3]: https://github.com/samuelgruetter/scaladiagrams/tree/print-descendants +[4]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +[5]: https://github.com/lampepfl/dotty/blob/master/tests/pos/refinedSubtyping.scala diff --git a/scala3doc/dotty-docs/docs/docs/reference/changed-features/compiler-plugins.md b/scala3doc/dotty-docs/docs/docs/reference/changed-features/compiler-plugins.md new file mode 100644 index 000000000000..92cb75e0d6bf --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/changed-features/compiler-plugins.md @@ -0,0 +1,129 @@ +--- +layout: doc-page +title: "Changes in Compiler Plugins" +--- + +Compiler plugins are supported by Dotty since 0.9. There are two notable changes +compared to `scalac`: + +- No support for analyzer plugins +- Added support for research plugins + +[Analyzer plugins][1] in `scalac` run during type checking and may influence +normal type checking. This is a very powerful feature but for production usages, +a predictable and consistent type checker is more important. + +For experimentation and research, Dotty introduces _research plugin_. Research plugins +are more powerful than `scalac` analyzer plugins as they let plugin authors customize +the whole compiler pipeline. One can easily replace the standard typer by a custom one or +create a parser for a domain-specific language. However, research plugins are only +enabled for nightly or snaphot releases of Dotty. + +Common plugins that add new phases to the compiler pipeline are called +_standard plugins_ in Dotty. In terms of features, they are similar to +`scalac` plugins, despite minor changes in the API. + +## Using Compiler Plugins + +Both standard and research plugins can be used with `dotc` by adding the `-Xplugin:` option: + +```shell +dotc -Xplugin:pluginA.jar -Xplugin:pluginB.jar Test.scala +``` + +The compiler will examine the jar provided, and look for a property file named +`plugin.properties` in the root directory of the jar. The property file specifies +the fully qualified plugin class name. 
The format of a property file is as follows: + +```properties +pluginClass=dividezero.DivideZero +``` + +This is different from `scalac` plugins that required a `scalac-plugin.xml` file. + +Starting from 1.1.5, `sbt` also supports Dotty compiler plugins. Please refer to the +`sbt` [documentation][2] for more information. + +## Writing a Standard Compiler Plugin + +Here is the source code for a simple compiler plugin that reports integer divisions by +zero as errors. + +```scala +package dividezero + +import dotty.tools.dotc.ast.Trees._ +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.core.Constants.Constant +import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.StdNames._ +import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.plugins.{PluginPhase, StandardPlugin} +import dotty.tools.dotc.transform.{Pickler, Staging} + +class DivideZero extends StandardPlugin { + val name: String = "divideZero" + override val description: String = "divide zero check" + + def init(options: List[String]): List[PluginPhase] = (new DivideZeroPhase) :: Nil +} + +class DivideZeroPhase extends PluginPhase { + import tpd._ + + val phaseName = "divideZero" + + override val runsAfter = Set(Pickler.name) + override val runsBefore = Set(Staging.name) + + override def transformApply(tree: Apply)(implicit ctx: Context): Tree = { + tree match { + case Apply(Select(rcvr, nme.DIV), List(Literal(Constant(0)))) + if rcvr.tpe <:< defn.IntType => + report.error("dividing by zero", tree.pos) + case _ => + () + } + tree + } +} +``` + +The plugin main class (`DivideZero`) must extend the trait `StandardPlugin` +and implement the method `init` that takes the plugin's options as argument +and returns a list of `PluginPhase`s to be inserted into the compilation pipeline. + +Our plugin adds one compiler phase to the pipeline. A compiler phase must extend +the `PluginPhase` trait. In order to specify when the phase is executed, we also +need to specify a `runsBefore` and `runsAfter` constraints that are list of phase +names. + +We can now transform trees by overriding methods like `transformXXX`. + +## Writing a Research Compiler Plugin + +Here is a template for research plugins. + +```scala +import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.dotc.core.Phases.Phase +import dotty.tools.dotc.plugins.ResearchPlugin + +class DummyResearchPlugin extends ResearchPlugin { + val name: String = "dummy" + override val description: String = "dummy research plugin" + + def init(options: List[String], phases: List[List[Phase]])(implicit ctx: Context): List[List[Phase]] = + phases +} +``` + +A research plugin must extend the trait `ResearchPlugin` and implement the +method `init` that takes the plugin's options as argument as well as the compiler +pipeline in the form of a list of compiler phases. The method can replace, remove +or add any phases to the pipeline and return the updated pipeline. 
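+
+For illustration only, here is a hypothetical research plugin that uses the
+same `init` signature to remove a phase from the pipeline; `"somePhase"` is a
+placeholder name, not an actual compiler phase:
+
+```scala
+import dotty.tools.dotc.core.Contexts.Context
+import dotty.tools.dotc.core.Phases.Phase
+import dotty.tools.dotc.plugins.ResearchPlugin
+
+class DropPhasePlugin extends ResearchPlugin {
+  val name: String = "dropPhase"
+  override val description: String = "drops a phase from the pipeline"
+
+  def init(options: List[String], phases: List[List[Phase]])(implicit ctx: Context): List[List[Phase]] =
+    phases
+      .map(_.filterNot(_.phaseName == "somePhase")) // remove the phase from its fused group
+      .filter(_.nonEmpty)                           // drop any group left empty
+}
+```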
+
+
+[1]: https://github.com/scala/scala/blob/2.13.x/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
+[2]: https://www.scala-sbt.org/1.x/docs/Compiler-Plugins.html
diff --git a/scala3doc/dotty-docs/docs/docs/reference/changed-features/eta-expansion-spec.md b/scala3doc/dotty-docs/docs/docs/reference/changed-features/eta-expansion-spec.md
new file mode 100644
index 000000000000..bcfe488607ad
--- /dev/null
+++ b/scala3doc/dotty-docs/docs/docs/reference/changed-features/eta-expansion-spec.md
@@ -0,0 +1,74 @@
+---
+layout: doc-page
+title: "Automatic Eta Expansion - More Details"
+---
+
+### Motivation
+
+Scala maintains a convenient distinction between _methods_ and _functions_.
+Methods are part of the definition of a class and can be invoked on objects, while functions are complete objects themselves, making them first-class entities. For example, they can be assigned to variables.
+These two mechanisms are bridged in Scala by a mechanism called _eta-expansion_ (also called eta-abstraction), which converts a reference to a method into a function. Intuitively, a method `m` can be passed around by turning it into an object: the function `x => m(x)`.
+
+In this snippet, which assigns a method to a `val`, the compiler will perform _automatic eta-expansion_, as shown in the comment:
+
+```scala
+def m(x: Int, y: String) = ???
+val f = m // becomes: val f = (x: Int, y: String) => m(x, y)
+```
+
+In Scala 2, a method reference `m` was converted to a function value only if the expected type was a function type, which means the conversion in the example above would not have been triggered, because `val f` does not have a type ascription. To still get eta-expansion, a shortcut `m _` would force the conversion.
+
+For methods with one or more parameters like in the example above, this restriction has now been dropped. The syntax `m _` is no longer needed and will be deprecated in the future.
+
+## Automatic eta-expansion and partial application
+In the following example `m` can be partially applied to the first two parameters.
+Assigning `m` to `f1` will automatically eta-expand.
+
+```scala
+def m(x: Boolean, y: String)(z: Int): List[Int]
+val f1 = m
+val f2 = m(true, "abc")
+```
+
+This creates two function values:
+
+```scala
+f1: (Boolean, String) => Int => List[Int]
+f2: Int => List[Int]
+```
+
+## Automatic eta-expansion and implicit parameter lists
+
+Methods with implicit parameter lists will always get applied to implicit arguments.
+
+```scala
+def foo(x: Int)(implicit p: Double): Float = ???
+implicit val bla: Double = 1.0
+
+val bar = foo // val bar: Int => Float = ...
+```
+
+## Automatic eta-expansion and query types
+
+A method with context parameters can be expanded to a value of a context type by writing the expected context type explicitly.
+
+```scala
+def foo(x: Int)(using p: Double): Float = ???
+val bar: Double ?=> Float = foo(3)
+```
+
+## Rules
+
+- If `m` has an argument list with one or more parameters, we always eta-expand.
+- If `m` has an empty argument list (i.e. has type `()R`):
+  1. If the expected type is of the form `() => T`, we eta expand.
+  2. If `m` is defined by Java, or overrides a Java defined method, we insert `()`.
+  3. Otherwise we issue an error.
+
+Thus, an unapplied method with an empty argument list is only converted to a function when a function type is expected. It is considered best practice to either explicitly apply the method to `()`, or convert it to a function with `() => m()`.
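+
+A small sketch of these rules in action (the names are made up for
+illustration):
+
+```scala
+def next(): Int = 42
+
+val f: () => Int = next  // expected type is a function type, so this eta-expands
+val g = () => next()     // recommended explicit conversion when no function type is expected
+// val h = next          // error under rule 3: no expected function type, and `next` is not Java-defined
+```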
+ +The method value syntax `m _` is deprecated. + +### Reference + +For more info, see [PR #2701](https://github.com/lampepfl/dotty/pull/2701). diff --git a/scala3doc/dotty-docs/docs/docs/reference/changed-features/eta-expansion.md b/scala3doc/dotty-docs/docs/docs/reference/changed-features/eta-expansion.md new file mode 100644 index 000000000000..365a0e2d09a7 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/changed-features/eta-expansion.md @@ -0,0 +1,42 @@ +--- +layout: doc-page +title: "Automatic Eta Expansion" +--- + +The conversion of _methods_ into _functions_ has been improved and happens automatically for methods with one or more parameters. + +```scala +def m(x: Boolean, y: String)(z: Int): List[Int] +val f1 = m +val f2 = m(true, "abc") +``` + +This creates two function values: +```scala +f1: (Boolean, String) => Int => List[Int] +f2: Int => List[Int] +``` + +The syntax `m _` is no longer needed and will be deprecated in the future. + +## Automatic eta-expansion and nullary methods + +Automatic eta expansion does not apply to "nullary" methods that take an empty parameter list. + +```scala +def next(): T +``` + +Given a simple reference to `next` does not auto-convert to a function. +One has to write explicitly `() => next()` to achieve that +Once again since the `_` is going to be deprecated it's better to write it this way +rather than `next _`. + +The reason for excluding nullary methods from automatic eta expansion +is that Scala implicitly inserts the `()` argument, which would +conflict with eta expansion. Automatic `()` insertion is +[limited](../dropped-features/auto-apply.md) in Dotty, but the fundamental ambiguity +remains. + +[More details](eta-expansion-spec.md) + diff --git a/scala3doc/dotty-docs/docs/docs/reference/changed-features/implicit-conversions-spec.md b/scala3doc/dotty-docs/docs/docs/reference/changed-features/implicit-conversions-spec.md new file mode 100644 index 000000000000..9a1b52d6bbd2 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/changed-features/implicit-conversions-spec.md @@ -0,0 +1,122 @@ +--- +layout: doc-page +title: "Implicit Conversions - More Details" +--- + +## Implementation + +An implicit conversion, or _view_, from type `S` to type `T` is +defined by either: + +- An `implicit def` which has type `S => T` or `(=> S) => T` +- An implicit value which has type `Conversion[S, T]` + +The standard library defines an abstract class `Conversion`: + +```scala +package scala +@java.lang.FunctionalInterface +abstract class Conversion[-T, +U] extends Function1[T, U] +``` + +Function literals are automatically converted to `Conversion` values. + +Views are applied in three situations: + +1. If an expression `e` is of type `T`, and `T` does not conform to + the expression's expected type `pt`. In this case, an implicit `v` + which is applicable to `e` and whose result type conforms to `pt` + is searched. The search proceeds as in the case of implicit + parameters, where the implicit scope is the one of `T => pt`. If + such a view is found, the expression `e` is converted to `v(e)`. +1. In a selection `e.m` with `e` of type `T`, if the selector `m` does + not denote an accessible member of `T`. In this case, a view `v` + which is applicable to `e` and whose result contains an accessible + member named `m` is searched. The search proceeds as in the case of + implicit parameters, where the implicit scope is the one of `T`. If + such a view is found, the selection `e.m` is converted to `v(e).m`. +1. 
In an application `e.m(args)` with `e` of type `T`, if the selector + `m` denotes some accessible member(s) of `T`, but none of these + members is applicable to the arguments `args`. In this case, a view + `v` which is applicable to `e` and whose result contains a method + `m` which is applicable to `args` is searched. The search proceeds + as in the case of implicit parameters, where the implicit scope is + the one of `T`. If such a view is found, the application + `e.m(args)` is converted to `v(e).m(args)`. + +# Differences with Scala 2 implicit conversions + +In Scala 2, views whose parameters are passed by-value take precedence +over views whose parameters are passed by-name. This is no longer the +case in Scala 3. A type error reporting the ambiguous conversions will +be emitted in cases where this rule would be applied in Scala 2: + +```scala +implicit def conv1(x: Int): String = x.toString +implicit def conv2(x: => Int): String = x.toString + +val x: String = 0 // Compiles in Scala2 (uses `conv1`), + // type error in Scala 3 because of ambiguity. +``` + +In Scala 2, implicit values of a function type would be considered as +potential views. In Scala 3, these implicit value need to have type +`Conversion`: + +```scala +// Scala 2: +def foo(x: Int)(implicit conv: Int => String): String = x + +// Becomes with Scala 3: +def foo(x: Int)(implicit conv: Conversion[Int, String]): String = x + +// Call site is unchanged: +foo(4)(_.toString) + +// Scala 2: +implicit val myConverter: Int => String = _.toString + +// Becomes with Scala 3: +implicit val myConverter: Conversion[Int, String] = _.toString +``` + +Note that implicit conversions are also affected by the [changes to +implicit resolution](implicit-resolution.md) between Scala 2 and +Scala 3. + +## Motivation for the changes + +The introduction of `Conversion` in Scala 3 and the decision to +restrict implicit values of this type to be considered as potential +views comes from the desire to remove surprising behavior from the +language: + +```scala +implicit val m: Map[Int, String] = Map(1 -> "abc") + +val x: String = 1 // scalac: assigns "abc" to x + // Dotty: type error +``` + +This snippet contains a type error. The right hand side of `val x` +does not conform to type `String`. In Scala 2, the compiler will use +`m` as an implicit conversion from `Int` to `String`, whereas Scala 3 +will report a type error, because Map isn't an instance of +`Conversion`. + +## Migration path + +Implicit values that are used as views should see their type changed +to `Conversion`. + +For the migration of implicit conversions that are affected by the +changes to implicit resolution, refer to the [Changes in Implicit +Resolution](implicit-resolution.md) for more information. + +## Reference + +For more information about implicit resolution, see [Changes in +Implicit Resolution](implicit-resolution.md). +Other details are available in +[PR #2065](https://github.com/lampepfl/dotty/pull/2065) + diff --git a/scala3doc/dotty-docs/docs/docs/reference/changed-features/implicit-conversions.md b/scala3doc/dotty-docs/docs/docs/reference/changed-features/implicit-conversions.md new file mode 100644 index 000000000000..3e24d5acaa10 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/changed-features/implicit-conversions.md @@ -0,0 +1,64 @@ +--- +layout: doc-page +title: "Implicit Conversions" +--- + +An _implicit conversion_, also called _view_, is a conversion that +is applied by the compiler in several situations: + +1. 
When an expression `e` of type `T` is encountered, but the compiler + needs an expression of type `S`. +1. When an expression `e.m` where `e` has type `T` but `T` defines no + member `m` is encountered. + +In those cases, the compiler looks in the implicit scope for a +conversion that can convert an expression of type `T` to an expression +of type `S` (or to a type that defines a member `m` in the second +case). + +This conversion can be either: + +1. An `implicit def` of type `T => S` or `(=> T) => S` +1. An implicit value of type `scala.Conversion[T, S]` + +Defining an implicit conversion will emit a warning unless the import +`scala.language.implicitConversions` is in scope, or the flag +`-language:implicitConversions` is given to the compiler. + +## Examples + +The first example is taken from `scala.Predef`. Thanks to this +implicit conversion, it is possible to pass a `scala.Int` to a Java +method that expects a `java.lang.Integer` + +```scala +import scala.language.implicitConversions +implicit def int2Integer(x: Int): java.lang.Integer = + x.asInstanceOf[java.lang.Integer] +``` + +The second example shows how to use `Conversion` to define an +`Ordering` for an arbitrary type, given existing `Ordering`s for other +types: + +```scala +import scala.language.implicitConversions +implicit def ordT[T, S]( + implicit conv: Conversion[T, S], + ordS: Ordering[S] + ): Ordering[T] = { + // `ordS` compares values of type `S`, but we can convert from `T` to `S` + (x: T, y: T) => ordS.compare(x, y) +} + +class A(val x: Int) // The type for which we want an `Ordering` + +// Convert `A` to a type for which an `Ordering` is available: +implicit val AToInt: Conversion[A, Int] = _.x + +implicitly[Ordering[Int]] // Ok, exists in the standard library +implicitly[Ordering[A]] // Ok, will use the implicit conversion from + // `A` to `Int` and the `Ordering` for `Int`. +``` + +[More details](implicit-conversions-spec.md) diff --git a/scala3doc/dotty-docs/docs/docs/reference/changed-features/implicit-resolution.md b/scala3doc/dotty-docs/docs/docs/reference/changed-features/implicit-resolution.md new file mode 100644 index 000000000000..6732e7e706d5 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/changed-features/implicit-resolution.md @@ -0,0 +1,166 @@ +--- +layout: doc-page +title: "Changes in Implicit Resolution" +--- +This page describes changes to the implicit resolution that apply both to the new `given`s and to the old-style `implicit`s in Dotty. +Implicit resolution uses a new algorithm which caches implicit results +more aggressively for performance. There are also some changes that +affect implicits on the language level. + +**1.** Types of implicit values and result types of implicit methods +must be explicitly declared. Excepted are only values in local blocks +where the type may still be inferred: +```scala + class C { + + val ctx: Context = ... // ok + + /*!*/ implicit val x = ... // error: type must be given explicitly + + /*!*/ implicit def y = ... // error: type must be given explicitly + + val y = { + implicit val ctx = this.ctx // ok + ... + } +``` +**2.** Nesting is now taken into account for selecting an implicit.Consider for instance the following scenario: +```scala + def f(implicit i: C) = { + def g(implicit j: C) = { + implicitly[C] + } + } +``` +This will now resolve the `implicitly` call to `j`, because `j` is nested +more deeply than `i`. Previously, this would have resulted in an +ambiguity error. 
The previous possibility of an implicit search failure +due to _shadowing_ (where an implicit is hidden by a nested definition) +no longer applies. + +**3.** Package prefixes no longer contribute to the implicit search scope of a type. Example: +```scala + package p + given a as A + + object o { + given b as B + type C + } +``` +Both `a` and `b` are visible as implicits at the point of the definition +of `type C`. However, a reference to `p.o.C` outside of package `p` will +have only `b` in its implicit search scope but not `a`. + +In more detail, here are the rules for what constitutes the implicit scope of +a type: + +**Definition:** A reference is an _anchor_ if it refers to an object, a class, a trait, an abstract type, an opaque type alias, or a match type alias. References to packages and package objects are anchors only under -source:3.0-migration. + +**Definition:** The _anchors_ of a type _T_ is a set of references defined as follows: + + 1. If _T_ is a reference to an anchor, _T_ itself plus, if _T_ is of the form _P#A_, the anchors of _P_. + 1. If _T_ is an alias of _U_, the anchors of _U_. + 1. If _T_ is a reference to a type parameter, the union of the anchors of both of its bounds. + 1. If _T_ is a singleton reference, the anchors of its underlying type, plus, + if _T_ is of the form _(P#x).type_, the anchors of _P_. + 1. If _T_ is the this-type _o.this_ of a static object _o_, the anchors of a term reference _o.type_ to that object. + 1. If _T_ is some other type, the union of the anchors of each constituent type of _T_. + + **Definition:** The _implicit scope_ of a type _T_ is the smallest set _S_ of term references such that + + 1. If _T_ is a reference to a class, _S_ includes a reference to the companion object + of the class, if it exists, as well as the implicit scopes of all of _T_'s parent classes. + 1. If _T_ is a reference to an object, _S_ includes _T_ itself as well as + the implicit scopes of all of _T_'s parent classes. + 1. If _T_ is a reference to an opaque type alias named _A_, _S_ includes + a reference to an object _A_ defined in the same scope as the type, if it exists, + as well as the implicit scope of _T_'s underlying type or bounds. + 1. If _T_ is a reference to an an abstract type or match type alias + named _A_, _S_ includes a reference to an object _A_ defined in the same scope as the type, if it exists, as well as the implicit scopes of _T_'s given bounds. + 1. If _T_ is a reference to an anchor of the form _p.A_ then _S_ also includes + all term references on the path _p_. + 1. If _T_ is some other type, _S_ includes the implicit scopes of all anchors of _T_. + + +**4.** The treatment of ambiguity errors has changed. If an ambiguity is encountered in some recursive step of an implicit search, the ambiguity is propagated to the caller. + +Example: Say you have the following definitions: +```scala + class A + class B extends C + class C + implicit def a1: A + implicit def a2: A + implicit def b(implicit a: A): B + implicit def c: C +``` +and the query `implicitly[C]`. + +This query would now be classified as ambiguous. This makes sense, after all +there are two possible solutions, `b(a1)` and `b(a2)`, neither of which is better +than the other and both of which are better than the third solution, `c`. +By contrast, Scala 2 would have rejected the search for `A` as +ambiguous, and subsequently have classified the query `b(implicitly[A])` as a normal fail, +which means that the alternative `c` would be chosen as solution! 
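To see the new behavior in a self-contained form, here is a sketch of the same example with bodies filled in only so that the snippet compiles on its own (the object name is illustrative):

```scala
object AmbiguityPropagation {
  class A
  class B extends C
  class C

  implicit def a1: A = new A
  implicit def a2: A = new A
  implicit def b(implicit a: A): B = new B
  implicit def c: C = new C

  // Does not compile in Scala 3: the ambiguity between `a1` and `a2`
  // encountered while searching for the `B` alternative propagates to
  // this query, so `c` is not silently picked as it would be in Scala 2.
  val x = implicitly[C]
}
```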
+ +Scala 2's somewhat puzzling behavior with respect to ambiguity has been exploited to implement +the analogue of a "negated" search in implicit resolution, where a query `Q1` fails if some +other query `Q2` succeeds and `Q1` succeeds if `Q2` fails. With the new cleaned up behavior +these techniques no longer work. But there is now a new special type `scala.implicits.Not` +which implements negation directly. For any query type `Q`: `Not[Q]` succeeds if and only if +the implicit search for `Q` fails. + +**5.** The treatment of divergence errors has also changed. A divergent implicit is treated as a normal failure, after which alternatives are still tried. This also makes sense: Encountering a divergent implicit means that we assume that no finite solution can be found on the corresponding path, but another path can still be tried. By contrast, +most (but not all) divergence errors in Scala 2 would terminate the implicit search as a whole. + +**6.** Scala-2 gives a lower level of priority to implicit conversions with call-by-name parameters relative to implicit conversions with call-by-value parameters. Dotty drops this distinction. So the following code snippet would be ambiguous in Dotty: + +```scala + implicit def conv1(x: Int): A = new A(x) + implicit def conv2(x: => Int): A = new A(x) + def buzz(y: A) = ??? + buzz(1) // error: ambiguous +``` +**7.** The rule for picking a _most specific_ alternative among a set of overloaded or implicit alternatives is refined to take context parameters into account. All else being equal, an alternative that takes some context parameters is taken to be less specific than an alternative that takes none. If both alternatives take context parameters, we try to choose between them as if they were methods with regular parameters. The following paragraph in the SLS is affected by this change: + +_Original version:_ + +> An alternative A is _more specific_ than an alternative B if the relative weight of A over B is greater than the relative weight of B over A. + +_Modified version:_ + +An alternative A is _more specific_ than an alternative B if + + - the relative weight of A over B is greater than the relative weight of B over A, or + - the relative weights are the same, and A takes no implicit parameters but B does, or + - the relative weights are the same, both A and B take implicit parameters, and A is more specific than B if all implicit parameters in either alternative are replaced by regular parameters. + +**8.** The previous disambiguation of implicits based on inheritance depth is refined to make it transitive. Transitivity is important to guarantee that search outcomes are compilation-order independent. Here's a scenario where the previous rules violated transitivity: +```scala + class A extends B + object A { given a ... } + class B + object B extends C { given b ... } + class C { given c } +``` + Here `a` is more specific than `b` since the companion class `A` is a subclass of the companion class `B`. Also, `b` is more specific than `c` + since `object B` extends class `C`. But `a` is not more specific than `c`. This means if `a, b, c` are all applicable implicits, it makes + a difference in what order they are compared. If we compare `b` and `c` + first, we keep `b` and drop `c`. Then, comparing `a` with `b` we keep `a`. But if we compare `a` with `c` first, we fail with an ambiguity error. 
+ +The new rules are as follows: An implicit `a` defined in `A` is more specific than an implicit `b` defined in `B` if + + - `A` extends `B`, or + - `A` is an object and the companion class of `A` extends `B`, or + - `A` and `B` are objects, + `B` does not inherit any implicit members from base classes (*), + and the companion class of `A` extends the companion class of `B`. + +Condition (*) is new. It is necessary to ensure that the defined relation is transitive. + + + + + +[//]: # todo: expand with precise rules diff --git a/scala3doc/dotty-docs/docs/docs/reference/changed-features/interpolation-escapes.md b/scala3doc/dotty-docs/docs/docs/reference/changed-features/interpolation-escapes.md new file mode 100644 index 000000000000..1b313bebe8d6 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/changed-features/interpolation-escapes.md @@ -0,0 +1,13 @@ +--- +layout: doc-page +title: Escapes in interpolations +--- + +In Scala 2 there was no straightforward way to represent a single quote character `"` in a single quoted interpolation. A \ character can't be used for that because interpolators themselves decide how to handle escaping, so the parser doesn't know whether the " should be escaped or used as a terminator. + +In Dotty, you can use the `$` meta character of interpolations to escape a `"` character. + +```scala + val inventor = "Thomas Edison" + val interpolation = s"as $inventor said: $"The three great essentials to achieve anything worth while are: Hard work, Stick-to-itiveness, and Common sense.$"" +``` diff --git a/scala3doc/dotty-docs/docs/docs/reference/changed-features/lazy-vals-init.md b/scala3doc/dotty-docs/docs/docs/reference/changed-features/lazy-vals-init.md new file mode 100644 index 000000000000..4e5a4031f158 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/changed-features/lazy-vals-init.md @@ -0,0 +1,79 @@ +--- +layout: doc-page +title: Lazy Vals initialization +--- + +Dotty implements [Version 6](https://docs.scala-lang.org/sips/improved-lazy-val-initialization.html#version-6---no-synchronization-on-this-and-concurrent-initialization-of-fields) +of the [SIP-20] improved lazy vals initialization proposal. + +## Motivation + +The newly proposed lazy val initialization mechanism aims to eliminate the acquisition of resources +during the execution of the lazy val initializer block, thus reducing the possibility of a deadlock. +The concrete deadlock scenarios that the new lazy val initialization scheme eliminates are +summarized in the [SIP-20] document. + +## Implementation + +Given a lazy field of the form: + +```scala +class Foo { + lazy val bar = +} +``` + +The Dotty compiler will generate code equivalent to: + +```scala +class Foo { + import dotty.runtime.LazyVals + var value_0: Int = _ + var bitmap: Long = 0L + val bitmap_offset: Long = LazyVals.getOffset(classOf[LazyCell], "bitmap") + + def bar(): Int = { + while (true) { + val flag = LazyVals.get(this, bitmap_offset) + val state = LazyVals.STATE(flag, ) + + if (state == ) { + return value_0 + } else if (state == ) { + if (LazyVals.CAS(this, bitmap_offset, flag, , )) { + try { + val result = + value_0 = result + LazyVals.setFlag(this, bitmap_offset, , ) + return result + } + catch { + case ex => + LazyVals.setFlag(this, bitmap_offset, , ) + throw ex + } + } + } else /* if (state == || state == ) */ { + LazyVals.wait4Notification(this, bitmap_offset, flag, ) + } + } + } +} +``` + +The state of the lazy val `` is represented with 4 values: 0, 1, 2 and 3. 
The state 0 +represents a non-initialized lazy val. The state 1 represents a lazy val that is currently being +initialized by some thread. The state 2 denotes that there are concurrent readers of the lazy val. +The state 3 represents a lazy val that has been initialized. `` is the id of the lazy +val. This id grows with the number of volatile lazy vals defined in the class. + +## Note on recursive lazy vals + +Ideally recursive lazy vals should be flagged as an error. The current behavior for +recursive lazy vals is undefined (initialization may result in a deadlock). + +## Reference + +* [SIP-20] + +[SIP-20]: https://docs.scala-lang.org/sips/improved-lazy-val-initialization.html diff --git a/scala3doc/dotty-docs/docs/docs/reference/changed-features/main-functions.md b/scala3doc/dotty-docs/docs/docs/reference/changed-features/main-functions.md new file mode 100644 index 000000000000..b9e933329e11 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/changed-features/main-functions.md @@ -0,0 +1,88 @@ +--- +layout: doc-page +title: "Main Methods" +--- + +Scala 3 offers a new way to define programs that can be invoked from the command line: +A `@main` annotation on a method turns this method into an executable program. +Example: +```scala +@main def happyBirthday(age: Int, name: String, others: String*) = { + val suffix = + (age % 100) match { + case 11 | 12 | 13 => "th" + case _ => + (age % 10) match { + case 1 => "st" + case 2 => "nd" + case 3 => "rd" + case _ => "th" + } + } + val bldr = new StringBuilder(s"Happy $age$suffix birthday, $name") + for other <- others do bldr.append(" and ").append(other) + bldr.toString +} +``` +This would generate a main program `happyBirthday` that could be called like this +``` +> scala happyBirthday 23 Lisa Peter +Happy 23rd Birthday, Lisa and Peter! +``` +A `@main` annotated method can be written either at the top-level or in a statically accessible object. The name of the program is in each case the name of the method, without any object prefixes. The `@main` method can have an arbitrary number of parameters. +For each parameter type there must be an instance of the `scala.util.FromString` type class +that is used to convert an argument string to the required parameter type. +The parameter list of a main method can end in a repeated parameter that then +takes all remaining arguments given on the command line. + +The program implemented from a `@main` method checks that there are enough arguments on +the command line to fill in all parameters, and that argument strings are convertible to +the required types. If a check fails, the program is terminated with an error message. + +Examples: + +``` +> scala happyBirthday 22 +Illegal command line after first argument: more arguments expected + +> scala happyBirthday sixty Fred +Illegal command line: java.lang.NumberFormatException: For input string: "sixty" +``` + +The Scala compiler generates a program from a `@main` method `f` as follows: + + - It creates a class named `f` in the package where the `@main` method was found + - The class has a static method `main` with the usual signature. It takes an `Array[String]` + as argument and returns `Unit`. + - The generated `main` method calls method `f` with arguments converted using + methods in the `scala.util.CommandLineParser` object. 
+ +For instance, the `happyBirthDay` method above would generate additional code equivalent to the following class: +```scala +final class happyBirthday { + import scala.util.{CommandLineParser => CLP} + def main(args: Array[String]): Unit = + try + happyBirthday( + CLP.parseArgument[Int](args, 0), + CLP.parseArgument[String](args, 1), + CLP.parseRemainingArguments[String](args, 2)) + catch { + case error: CLP.ParseError => CLP.showError(error) + } +} +``` +**Note**: The `` modifier above expresses that the `main` method is generated +as a static method of class `happyBirthDay`. It is not available for user programs in Scala. Regular "static" members are generated in Scala using objects instead. + +`@main` methods are the recommended scheme to generate programs that can be invoked from the command line in Scala 3. They replace the previous scheme to write program as objects with a special `App` parent class. In Scala 2, `happyBirthday` could be written also like this: + +```scala +object happyBirthday extends App { + // needs by-hand parsing of arguments vector + ... +} +``` + +The previous functionality of `App`, which relied on the "magic" `DelayedInit` trait, is no longer available. `App` still exists in limited form for now, but it does not support command line arguments and will be deprecated in the future. If programs need to cross-build +between Scala 2 and Scala 3, it is recommended to use an explicit `main` method with an `Array[String]` argument instead. diff --git a/scala3doc/dotty-docs/docs/docs/reference/changed-features/match-syntax.md b/scala3doc/dotty-docs/docs/docs/reference/changed-features/match-syntax.md new file mode 100644 index 000000000000..e83f5f9ead86 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/changed-features/match-syntax.md @@ -0,0 +1,42 @@ +--- +layout: doc-page +title: Match Expressions +--- + +The syntactical precedence of match expressions has been changed. +`match` is still a keyword, but it is used like an alphabetical operator. This has several consequences: + + 1. `match` expressions can be chained: + + ```scala + xs match { + case Nil => "empty" + case x :: xs1 => "nonempty" + } match { + case "empty" => 0 + case "nonempty" => 1 + } + + 2. `match` may follow a period: + + ```scala + if xs.match { + case Nil => false + case _ => true + } + then "nonempty" + else "empty" + + 3. The scrutinee of a match expression must be an `InfixExpr`. Previously the scrutinee could be followed by a type ascription `: T`, but this is no longer supported. So `x : T match { ... }` now has to be + written `(x: T) match { ... }`. + +## Syntax + +The new syntax of match expressions is as follows. +``` +InfixExpr ::= ... + | InfixExpr MatchClause +SimpleExpr ::= ... + | SimpleExpr ‘.’ MatchClause +MatchClause ::= ‘match’ ‘{’ CaseClauses ‘}’ +``` diff --git a/scala3doc/dotty-docs/docs/docs/reference/changed-features/numeric-literals.md b/scala3doc/dotty-docs/docs/docs/reference/changed-features/numeric-literals.md new file mode 100644 index 000000000000..9678a9616d87 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/changed-features/numeric-literals.md @@ -0,0 +1,240 @@ +--- +layout: doc-page +title: Numeric Literals +--- + +In Scala 2, numeric literals were confined to the primitive numeric types `Int`, `Long`, `Float`, and `Double`. Scala 3 allows to write numeric literals also for user defined types. 
Example: +```scala +val x: Long = -10_000_000_000 +val y: BigInt = 0x123_abc_789_def_345_678_901 +val z: BigDecimal = 110_222_799_799.99 + +(y: BigInt) match { + case 123_456_789_012_345_678_901 => +} +``` +The syntax of numeric literals is the same as before, except there are no pre-set limits +how large they can be. + +### Meaning of Numeric Literals + +The meaning of a numeric literal is determined as follows: + + - If the literal ends with `l` or `L`, it is a `Long` integer (and must fit + in its legal range). + - If the literal ends with `f` or `F`, it is a single precision floating point number of type `Float`. + - If the literal ends with `d` or `D`, it is a double precision floating point number of type `Double`. + +In each of these cases the conversion to a number is exactly as in Scala 2 or in Java. If a numeric literal does _not_ end in one of these suffixes, its meaning is determined by the expected type: + + 1. If the expected type is `Int`, `Long`, `Float`, or `Double`, the literal is + treated as a standard literal of that type. + 2. If the expected type is a fully defined type `T` that has a given instance of type + `scala.util.FromDigits[T]`, the literal is converted to a value of type `T` by passing it as an argument to + the `fromDigits` method of that instance (more details below). + 3. Otherwise, the literal is treated as a `Double` literal (if it has a decimal point or an + exponent), or as an `Int` literal (if not). (This last possibility is again as in Scala 2 or Java.) + +With these rules, the definition +```scala +val x: Long = -10_000_000_000 +``` +is legal by rule (1), since the expected type is `Long`. The definitions +```scala +val y: BigInt = 0x123_abc_789_def_345_678_901 +val z: BigDecimal = 111222333444.55 +``` +are legal by rule (2), since both `BigInt` and `BigDecimal` have `FromDigits` instances +(which implement the `FromDigits` subclasses `FromDigits.WithRadix` and `FromDigits.Decimal`, respectively). +On the other hand, +```scala +val x = -10_000_000_000 +``` +gives a type error, since without an expected type `-10_000_000_000` is treated by rule (3) as an `Int` literal, but it is too large for that type. + +### The FromDigits Trait + +To allow numeric literals, a type simply has to define a `given` instance of the +`scala.util.FromDigits` type class, or one of its subclasses. `FromDigits` is defined +as follows: +```scala +trait FromDigits[T] { + def fromDigits(digits: String): T +} +``` +Implementations of the `fromDigits` convert strings of digits to the values of the +implementation type `T`. +The `digits` string consists of digits between `0` and `9`, possibly preceded by a +sign ("+" or "-"). Number separator characters `_` are filtered out before +the string is passed to `fromDigits`. + +The companion object `FromDigits` also defines subclasses of `FromDigits` for +whole numbers with a given radix, for numbers with a decimal point, and for +numbers that can have both a decimal point and an exponent: +```scala +object FromDigits { + + /** A subclass of `FromDigits` that also allows to convert whole number literals + * with a radix other than 10 + */ + trait WithRadix[T] extends FromDigits[T] { + def fromDigits(digits: String): T = fromDigits(digits, 10) + def fromDigits(digits: String, radix: Int): T + } + + /** A subclass of `FromDigits` that also allows to convert number + * literals containing a decimal point ".". 
+ */ + trait Decimal[T] extends FromDigits[T] + + /** A subclass of `FromDigits`that allows also to convert number + * literals containing a decimal point "." or an + * exponent `('e' | 'E')['+' | '-']digit digit*`. + */ + trait Floating[T] extends Decimal[T] + ... +} +``` +A user-defined number type can implement one of those, which signals to the compiler +that hexadecimal numbers, decimal points, or exponents are also accepted in literals +for this type. + +### Error Handling + +`FromDigits` implementations can signal errors by throwing exceptions of some subtype +of `FromDigitsException`. `FromDigitsException` is defined with three subclasses in the +`FromDigits` object as follows: +```scala +abstract class FromDigitsException(msg: String) extends NumberFormatException(msg) + +class NumberTooLarge (msg: String = "number too large") extends FromDigitsException(msg) +class NumberTooSmall (msg: String = "number too small") extends FromDigitsException(msg) +class MalformedNumber(msg: String = "malformed number literal") extends FromDigitsException(msg) +``` + +### Example + +As a fully worked out example, here is an implementation of a new numeric class, `BigFloat`, that accepts numeric literals. `BigFloat` is defined in terms of a `BigInt` mantissa and an `Int` exponent: +```scala +case class BigFloat(mantissa: BigInt, exponent: Int) { + override def toString = s"${mantissa}e${exponent}" +} +``` +`BigFloat` literals can have a decimal point as well as an exponent. E.g. the following expression +should produce the `BigFloat` number `BigFloat(-123, 997)`: +```scala +-0.123E+1000: BigFloat +``` +The companion object of `BigFloat` defines an `apply` constructor method to construct a `BigFloat` +from a `digits` string. Here is a possible implementation: +```scala +object BigFloat { + import scala.util.FromDigits + + def apply(digits: String): BigFloat = { + val (mantissaDigits, givenExponent) = digits.toUpperCase.split('E') match { + case Array(mantissaDigits, edigits) => + val expo = + try FromDigits.intFromDigits(edigits) + catch { + case ex: FromDigits.NumberTooLarge => + throw FromDigits.NumberTooLarge(s"exponent too large: $edigits") + } + (mantissaDigits, expo) + case Array(mantissaDigits) => + (mantissaDigits, 0) + } + val (intPart, exponent) = mantissaDigits.split('.') match { + case Array(intPart, decimalPart) => + (intPart ++ decimalPart, givenExponent - decimalPart.length) + case Array(intPart) => + (intPart, givenExponent) + } + BigFloat(BigInt(intPart), exponent) + } +``` +To accept `BigFloat` literals, all that's needed in addition is a `given` instance of type +`FromDigits.Floating[BigFloat]`: +```scala + given FromDigits as FromDigits.Floating[BigFloat] { + def fromDigits(digits: String) = apply(digits) + } +} // end BigFloat +``` +Note that the `apply` method does not check the format of the `digits` argument. It is +assumed that only valid arguments are passed. For calls coming from the compiler +that assumption is valid, since the compiler will first check whether a numeric +literal has the correct format before it gets passed on to a conversion method. + +### Compile-Time Errors + +With the setup of the previous section, a literal like +```scala +1e10_0000_000_000: BigFloat +``` +would be expanded by the compiler to +```scala +BigFloat.FromDigits.fromDigits("1e100000000000") +``` +Evaluating this expression throws a `NumberTooLarge` exception at run time. We would like it to +produce a compile-time error instead. 
We can achieve this by tweaking the `BigFloat` class +with a small dose of metaprogramming. The idea is to turn the `fromDigits` method +into a macro, i.e. make it an inline method with a splice as right hand side. +To do this, replace the `FromDigits` instance in the `BigFloat` object by the following two definitions: +```scala +object BigFloat { + ... + + class FromDigits extends FromDigits.Floating[BigFloat] { + def fromDigits(digits: String) = apply(digits) + } + + given FromDigits { + override inline def fromDigits(digits: String) = ${ + fromDigitsImpl('digits) + } + } +``` +Note that an inline method cannot directly fill in for an abstract method, since it produces +no code that can be executed at runtime. That is why we define an intermediary class +`FromDigits` that contains a fallback implementation which is then overridden by the inline +method in the `FromDigits` given instance. That method is defined in terms of a macro +implementation method `fromDigitsImpl`. Here is its definition: +```scala + private def fromDigitsImpl(digits: Expr[String])(using ctx: QuoteContext): Expr[BigFloat] = + digits match { + case Const(ds) => + try { + val BigFloat(m, e) = apply(ds) + '{BigFloat(${Expr(m)}, ${Expr(e)})} + } + catch { + case ex: FromDigits.FromDigitsException => + ctx.error(ex.getMessage) + '{BigFloat(0, 0)} + } + case digits => + '{apply($digits)} + } +} // end BigFloat +``` +The macro implementation takes an argument of type `Expr[String]` and yields +a result of type `Expr[BigFloat]`. It tests whether its argument is a constant +string. If that is the case, it converts the string using the `apply` method +and lifts the resulting `BigFloat` back to `Expr` level. For non-constant +strings `fromDigitsImpl(digits)` is simply `apply(digits)`, i.e. everything is +evaluated at runtime in this case. + +The interesting part is the `catch` part of the case where `digits` is constant. +If the `apply` method throws a `FromDigitsException`, the exception's message is issued as a compile time error in the `ctx.error(ex.getMessage)` call. + +With this new implementation, a definition like +```scala +val x: BigFloat = 1234.45e3333333333 +``` +would give a compile time error message: +```scala +3 | val x: BigFloat = 1234.45e3333333333 + | ^^^^^^^^^^^^^^^^^^ + | exponent too large: 3333333333 +``` diff --git a/scala3doc/dotty-docs/docs/docs/reference/changed-features/operators.md b/scala3doc/dotty-docs/docs/docs/reference/changed-features/operators.md new file mode 100644 index 000000000000..5ff0d12a3e13 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/changed-features/operators.md @@ -0,0 +1,182 @@ +--- +layout: doc-page +title: Rules for Operators +--- + +The rules for infix operators have changed. There are two annotations that regulate operators: `infix` and `alpha`. +Furthermore, a syntax change allows infix operators to be written on the left in a multi-line expression. + +## The @alpha Annotation + +An `@alpha` annotation on a method definition defines an alternate name for the implementation of that method: Example: +```scala +import scala.annotation.alpha + +object VecOps { + @alpha("append") def (xs: Vec[T]) ++= [T] (ys: Vec[T]): Vec[T] = ... +} +``` +Here, the `++=` operation is implemented (in Byte code or native code) under the name `append`. The implementation name affects the code that is generated, and is the name under which code from other languages can call the method. 
For instance, `++=` could be invoked from Java like this: +``` +VecOps.append(vec1, vec2) +``` +The `@alpha` annotation has no bearing on Scala usages. Any application of that method in Scala has to use `++=`, not `append`. + +An `@alpha` annotation will be _mandatory_ if the method name is symbolic. Symbolic methods without `@alpha` annotations are deprecated. + +### Motivation + +The `@alpha` annotation serves a dual purpose: + + - It helps interoperability between Scala and other languages. + - It serves as a documentation tool by providing an alternative regular name + as an alias of a symbolic operator. + +### Details + + 1. `@alpha` is defined in package `scala.annotation`. It takes a single argument + of type `String`. That string is called the _external name_ of the definition + that's annotated. + + 2. An `@alpha` annotation can be given for all kinds of definitions. + + 3. The name given in an `@alpha` annotation must be a legal name + for the defined entities on the host platform. + + 4. Definitions with symbolic names should have an `@alpha` annotation. Lack of such + an annotation will raise a deprecation warning. + + 5. Definitions with names in backticks that are not legal host platform names + should have an `@alpha` annotation. Lack of such an annotation will raise a deprecation warning. + + 6. `@alpha` annotations must agree: If two definitions are members of an object or class with the same name and matching types, then either none of them has an `@alpha` annotation, or both have `@alpha` annotations with the same name. + + 7. There must be a one-to-one relationship between external and internal names: + If two definitions are members of an object or class with matching types and both have `@alpha` annotations with the same external name, then their internal method names must also be the same. + +## The @infix Annotation + +An `@infix` annotation on a method definition allows using the method as an infix operation. Example: +```scala +import scala.annotation.alpha + +trait MultiSet[T] { + + @infix + def union(other: MultiSet[T]): MultiSet[T] + + def difference(other: MultiSet[T]): MultiSet[T] + + @alpha("intersection") + def *(other: MultiSet[T]): MultiSet[T] +} + +val s1, s2: MultiSet[Int] + +s1 union s2 // OK +s1 `union` s2 // also OK but unusual +s1.union(s2) // also OK + +s1.difference(s2) // OK +s1 `difference` s2 // OK +s1 difference s2 // gives a deprecation warning + +s1 * s2 // OK +s1 `*` s2 // also OK, but unusual +s1.*(s2) // also OK, but unusual +``` +Infix operations involving alphanumeric operators are deprecated, unless +one of the following conditions holds: + + - the operator definition carries an `@infix` annotation, or + - the operator was compiled with Scala 2, or + - the operator is followed by an opening brace. + +An alphanumeric operator is an operator consisting entirely of letters, digits, the `$` and `_` characters, or +any unicode character `c` for which `java.lang.Character.isIdentifierPart(c)` returns `true`. + +Infix operations involving symbolic operators are always allowed, so `@infix` is redundant for methods with symbolic names. + +The `@infix` annotation can also be given to a type: +``` +@infix type or[X, Y] +val x: String or Int = ... +``` + +### Motivation + +The purpose of the `@infix` annotation is to achieve consistency across a code base in how a method or type is applied. The idea is that the author of a method decides whether that method should be applied as an infix operator or in a regular application. 
Use sites then implement that decision consistently. + +### Details + + 1. `@infix` is defined in package `scala.annotation`. + + 2. If a method overrides another, their infix annotations must agree. Either both are annotated with `@infix`, or none of them are. + + 3. `@infix` annotations can be given to method definitions. The first non-receiver parameter list of an `@infix` method must define exactly one parameter. Examples: + + ```scala + @infix def op(x: S): R // ok + @infix def op[T](x: T)(y: S): R // ok + @infix def op[T](x: T, y: S): R // error: two parameters + + @infix def (x: A) op (y: B): R // ok + @infix def (x: A) op (y1: B, y2: B): R // error: two parameters + ``` + + 4. `@infix` annotations can also be given to type, trait or class definitions that have exactly two type parameters. An infix type like + + ```scala + @infix type op[X, Y] + ``` + + can be applied using infix syntax, i.e. `A op B`. + + 5. To smooth migration to Scala 3.0, alphanumeric operators will only be deprecated from Scala 3.1 onwards, +or if the `-strict` option is given in Dotty/Scala 3. + +## Syntax Change + +Infix operators can now appear at the start of lines in a multi-line expression. Examples: +```scala +val str = "hello" + ++ " world" + ++ "!" + +def condition = + x > 0 + || xs.exists(_ > 0) + || xs.isEmpty +``` +Previously, these expressions would have been rejected, since the compiler's semicolon inference +would have treated the continuations `++ " world"` or `|| xs.isEmpty` as separate statements. + +To make this syntax work, the rules are modified to not infer semicolons in front of leading infix operators. +A _leading infix operator_ is + - a symbolic identifier such as `+`, or `approx_==`, or an identifier in backticks, + - that starts a new line, + - that precedes a token on the same line that can start an expression, + - and that is immediately followed by at least one space character `' '`. + +Example: + +```scala + freezing + | boiling +``` +This is recognized as a single infix operation. Compare with: +```scala + freezing + !boiling +``` +This is seen as two statements, `freezing` and `!boiling`. The difference is that only the operator in the first example +is followed by a space. + +Another example: +```scala + println("hello") + ??? + ??? match { case 0 => 1 } +``` +This code is recognized as three different statements. `???` is syntactically a symbolic identifier, but +neither of its occurrences is followed by a space and a token that can start an expression. diff --git a/scala3doc/dotty-docs/docs/docs/reference/changed-features/overload-resolution.md b/scala3doc/dotty-docs/docs/docs/reference/changed-features/overload-resolution.md new file mode 100644 index 000000000000..557f9de18bc8 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/changed-features/overload-resolution.md @@ -0,0 +1,84 @@ +--- +layout: doc-page +title: "Changes in Overload Resolution" +--- + +Overload resolution in Dotty improves on Scala 2 in two ways. +First, it takes all argument lists into account instead of +just the first argument list. +Second, it can infer parameter types of function values even if they +are in the first argument list. + +## Looking Beyond the First Argument List + +Overloading resolution now can take argument lists into account when +choosing among a set of overloaded alternatives. 
+For example, the following code compiles in Dotty, while it results in an +ambiguous overload error in Scala2: + +```scala +def f(x: Int)(y: String): Int = 0 +def f(x: Int)(y: Int): Int = 0 + +f(3)("") // ok +``` + +The following code compiles as well: + +```scala +def g(x: Int)(y: Int)(z: Int): Int = 0 +def g(x: Int)(y: Int)(z: String): Int = 0 + +g(2)(3)(4) // ok +g(2)(3)("") // ok +``` + +To make this work, the rules for overloading resolution in section 6.23.3 of the SLS are augmented +as follows: + +> In a situation where a function is applied to more than one argument list, if overloading +resolution yields several competing alternatives when `n >= 1` parameter lists are taken +into account, then resolution re-tried using `n + 1` argument lists. + +This change is motivated by the new language feature [extension +methods](../contextual/extension-methods.md), where emerges the need to do +overload resolution based on additional argument blocks. + +## Parameter Types of Function Values + +The handling of function values with missing parameter types has been improved. We can now +pass such values in the first argument list of an overloaded application, provided +that the remaining parameters suffice for picking a variant of the overloaded function. +For example, the following code compiles in Dotty, while it results in an +missing parameter type error in Scala2: +```scala +def f(x: Int, f: Int => Int) = f(x) +def f(x: String, f: String => String) = f(x) +f("a", _.length) +``` +To make this work, the rules for overloading resolution in section 6.23.3 of the SLS are modified +as follows: + +Replace the sentence + +> Otherwise, let `S1,…,Sm` be the vector of types obtained by typing each argument with an undefined expected type. + +with the following paragraph: + +> Otherwise, let `S1,…,Sm` be the vector of known types of all argument types, where the _known type_ of an argument `E` +is determined as followed: + + - If `E` is a function value `(p_1, ..., p_n) => B` that misses some parameter types, the known type + of `E` is `(S_1, ..., S_n) => ?`, where each `S_i` is the type of parameter `p_i` if it is given, or `?` + otherwise. Here `?` stands for a _wildcard type_ that is compatible with every other type. + - Otherwise the known type of `E` is the result of typing `E` with an undefined expected type. + +A pattern matching closure +```scala +{ case P1 => B1 ... case P_n => B_n } +```` +is treated as if it was expanded to the function value +```scala +x => x match { case P1 => B1 ... case P_n => B_n } +``` +and is therefore also approximated with a `? => ?` type. diff --git a/scala3doc/dotty-docs/docs/docs/reference/changed-features/pattern-bindings.md b/scala3doc/dotty-docs/docs/docs/reference/changed-features/pattern-bindings.md new file mode 100644 index 000000000000..1a585442e404 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/changed-features/pattern-bindings.md @@ -0,0 +1,61 @@ +--- +layout: doc-page +title: "Pattern Bindings" +--- + +In Scala 2, pattern bindings in `val` definitions and `for` expressions are +loosely typed. Potentially failing matches are still accepted at compile-time, +but may influence the program's runtime behavior. +From Scala 3.1 on, type checking rules will be tightened so that errors are reported at compile-time instead. 
+ +## Bindings in Pattern Definitions + +```scala +val xs: List[Any] = List(1, 2, 3) +val (x: String) :: _ = xs // error: pattern's type String is more specialized + // than the right hand side expression's type Any +``` +This code gives a compile-time error in Scala 3.1 (and also in Scala 3.0 under the `-strict` setting) whereas it will fail at runtime with a `ClassCastException` in Scala 2. In Scala 3.1, a pattern binding is only allowed if the pattern is _irrefutable_, that is, if the right-hand side's type conforms to the pattern's type. For instance, the following is OK: +```scala + val pair = (1, true) + val (x, y) = pair +``` +Sometimes one wants to decompose data anyway, even though the pattern is refutable. For instance, if at some point one knows that a list `elems` is non-empty one might +want to decompose it like this: +```scala +val first :: rest = elems // error +``` +This works in Scala 2. In fact it is a typical use case for Scala 2's rules. But in Scala 3.1 it will give a type error. One can avoid the error by marking the pattern with an @unchecked annotation: +```scala +val first :: rest : @unchecked = elems // OK +``` +This will make the compiler accept the pattern binding. It might give an error at runtime instead, if the underlying assumption that `elems` can never be empty is wrong. + +## Pattern Bindings in For Expressions + +Analogous changes apply to patterns in `for` expressions. For instance: + +```scala +val elems: List[Any] = List((1, 2), "hello", (3, 4)) +for ((x, y) <- elems) yield (y, x) // error: pattern's type (Any, Any) is more specialized + // than the right hand side expression's type Any +``` +This code gives a compile-time error in Scala 3.1 whereas in Scala 2 the list `elems` +is filtered to retain only the elements of tuple type that match the pattern `(x, y)`. +The filtering functionality can be obtained in Scala 3 by prefixing the pattern with `case`: +```scala + for (case (x, y) <- elems) yield (y, x) // returns List((2, 1), (4, 3)) +``` + +## Syntax Changes + +There are two syntax changes relative to Scala 2: First, pattern definitions can carry ascriptions such as `: @unchecked`. Second, generators in for expressions may be prefixed with `case`. +``` +PatDef ::= ids [‘:’ Type] ‘=’ Expr + | Pattern2 [‘:’ Type | Ascription] ‘=’ Expr +Generator ::= [‘case’] Pattern1 ‘<-’ Expr +``` + +## Migration + +The new syntax is supported in Dotty and Scala 3.0. However, to enable smooth cross compilation between Scala 2 and Scala 3, the changed behavior and additional type checks are only enabled under the `-strict` setting. They will be enabled by default in version 3.1 of the language. diff --git a/scala3doc/dotty-docs/docs/docs/reference/changed-features/pattern-matching.md b/scala3doc/dotty-docs/docs/docs/reference/changed-features/pattern-matching.md new file mode 100644 index 000000000000..6f88fd06348d --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/changed-features/pattern-matching.md @@ -0,0 +1,243 @@ +--- +layout: doc-page +title: "Option-less pattern matching" +--- + +Dotty implementation of pattern matching was greatly simplified compared to scalac. From a user perspective, this means that Dotty generated patterns are a *lot* easier to debug, as variables all show up in debug modes and positions are correctly preserved. + +Dotty supports a superset of scalac's [extractors](https://www.scala-lang.org/files/archive/spec/2.13/08-pattern-matching.html#extractor-patterns). 
+ +## Extractors + +Extractors are objects that expose a method `unapply` or `unapplySeq`: + +```Scala +def unapply[A](x: T)(implicit x: B): U +def unapplySeq[A](x: T)(implicit x: B): U +``` + +Extractors that expose the method `unapply` are called fixed-arity extractors, which +work with patterns of fixed arity. Extractors that expose the method `unapplySeq` are +called variadic extractors, which enable variadic patterns. + +### Fixed-Arity Extractors + +Fixed-arity extractors expose the following signature: + +```Scala +def unapply[A](x: T)(implicit x: B): U +``` + +The type `U` conforms to one of the following matches: + +- Boolean match +- Product match + +Or `U` conforms to the type `R`: + +```Scala +type R = { + def isEmpty: Boolean + def get: S +} +``` + +and `S` conforms to one of the following matches: + +- single match +- name-based match + +The former form of `unapply` has higher precedence, and _single match_ has higher +precedence than _name-based match_. + +A usage of a fixed-arity extractor is irrefutable if one of the following conditions holds: + +- `U = true` +- the extractor is used as a product match +- `U = Some[T]` (for Scala 2 compatibility) +- `U <: R` and `U <: { def isEmpty: false }` + +### Variadic Extractors + +Variadic extractors expose the following signature: + +```Scala +def unapplySeq[A](x: T)(implicit x: B): U +``` + +The type `U` conforms to one of the following matches: + +- sequence match +- product-sequence match + +Or `U` conforms to the type `R`: + +```Scala +type R = { + def isEmpty: Boolean + def get: S +} +``` + +and `S` conforms to one of the two matches above. + +The former form of `unapplySeq` has higher precedence, and _sequence match_ has higher +precedence than _product-sequence match_. + +A usage of a variadic extractor is irrefutable if one of the following conditions holds: + +- the extractor is used directly as a sequence match or product-sequence match +- `U = Some[T]` (for Scala 2 compatibility) +- `U <: R` and `U <: { def isEmpty: false }` + +## Boolean Match + +- `U =:= Boolean` +- Pattern-matching on exactly `0` patterns + +For example: + + + +```scala +object Even { + def unapply(s: String): Boolean = s.size % 2 == 0 +} + +"even" match { + case s @ Even() => println(s"$s has an even number of characters") + case s => println(s"$s has an odd number of characters") +} +// even has an even number of characters +``` + +## Product Match + +- `U <: Product` +- `N > 0` is the maximum number of consecutive (parameterless `def` or `val`) `_1: P1` ... `_N: PN` members in `U` +- Pattern-matching on exactly `N` patterns with types `P1, P2, ..., PN` + +For example: + + + +```scala +class FirstChars(s: String) extends Product { + def _1 = s.charAt(0) + def _2 = s.charAt(1) + + // Not used by pattern matching: Product is only used as a marker trait. + def canEqual(that: Any): Boolean = ??? + def productArity: Int = ??? + def productElement(n: Int): Any = ??? +} + +object FirstChars { + def unapply(s: String): FirstChars = new FirstChars(s) +} + +"Hi!" 
match { + case FirstChars(char1, char2) => + println(s"First: $char1; Second: $char2") +} +// First: H; Second: i +``` + +## Single Match + +- If there is exactly `1` pattern, pattern-matching on `1` pattern with type `U` + + + +```scala +class Nat(val x: Int) { + def get: Int = x + def isEmpty = x < 0 +} + +object Nat { + def unapply(x: Int): Nat = new Nat(x) +} + +5 match { + case Nat(n) => println(s"$n is a natural number") + case _ => () +} +// 5 is a natural number +``` + +## Name-based Match + +- `N > 1` is the maximum number of consecutive (parameterless `def` or `val`) `_1: P1 ... _N: PN` members in `U` +- Pattern-matching on exactly `N` patterns with types `P1, P2, ..., PN` + +```Scala +object ProdEmpty { + def _1: Int = ??? + def _2: String = ??? + def isEmpty = true + def unapply(s: String): this.type = this + def get = this +} + +"" match { + case ProdEmpty(_, _) => ??? + case _ => () +} +``` + + +## Sequence Match + +- `U <: X`, `T2` and `T3` conform to `T1` + +```Scala +type X = { + def lengthCompare(len: Int): Int // or, `def length: Int` + def apply(i: Int): T1 + def drop(n: Int): scala.Seq[T2] + def toSeq: scala.Seq[T3] +} +``` + +- Pattern-matching on _exactly_ `N` simple patterns with types `T1, T1, ..., T1`, where `N` is the runtime size of the sequence, or +- Pattern-matching on `>= N` simple patterns and _a vararg pattern_ (e.g., `xs: _*`) with types `T1, T1, ..., T1, Seq[T1]`, where `N` is the minimum size of the sequence. + + + +```scala +object CharList { + def unapplySeq(s: String): Option[Seq[Char]] = Some(s.toList) +} + +"example" match { + case CharList(c1, c2, c3, c4, _, _, _) => + println(s"$c1,$c2,$c3,$c4") + case _ => + println("Expected *exactly* 7 characters!") +} +// e,x,a,m +``` + +## Product-Sequence Match + +- `U <: Product` +- `N > 0` is the maximum number of consecutive (parameterless `def` or `val`) `_1: P1` ... `_N: PN` members in `U` +- `PN` conforms to the signature `X` defined in Seq Pattern +- Pattern-matching on exactly `>= N` patterns, the first `N - 1` patterns have types `P1, P2, ... P(N-1)`, + the type of the remaining patterns are determined as in Seq Pattern. + +```Scala +class Foo(val name: String, val children: Int *) +object Foo { + def unapplySeq(f: Foo): Option[(String, Seq[Int])] = Some((f.name, f.children)) +} + +def foo(f: Foo) = f match { + case Foo(name, ns : _*) => + case Foo(name, x, y, ns : _*) => +} +``` + +There are plans for further simplification, in particular to factor out *product +match* and *name-based match* into a single type of extractor. \ No newline at end of file diff --git a/scala3doc/dotty-docs/docs/docs/reference/changed-features/structural-types-spec.md b/scala3doc/dotty-docs/docs/docs/reference/changed-features/structural-types-spec.md new file mode 100644 index 000000000000..f9951df9368a --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/changed-features/structural-types-spec.md @@ -0,0 +1,94 @@ +--- +layout: doc-page +title: "Programmatic Structural Types - More Details" +--- + +## Syntax + +``` +SimpleType ::= ... | Refinement +Refinement ::= ‘{’ RefineStatSeq ‘}’ +RefineStatSeq ::= RefineStat {semi RefineStat} +RefineStat ::= ‘val’ VarDcl | ‘def’ DefDcl | ‘type’ {nl} TypeDcl +``` + +## Implementation of structural types + +The standard library defines a universal marker trait `Selectable` in the package `scala`: + +```scala +trait Selectable extends Any +``` + +An implementation of `Selectable` that relies on Java reflection is +available in the standard library: `scala.reflect.Selectable`. 
Other +implementations can be envisioned for platforms where Java reflection +is not available. + +Implementations of `Selectable` have to make available one or both of +the methods `selectDynamic` and `applyDynamic`. The methods could be members of the `Selectable` implementation or they could be extension methods. + +The `selectDynamic` method takes a field name and returns the value associated with that name in the `Selectable`. +It should have a signature of the form: +```scala +def selectDynamic(name: String): T +``` +Often, the return type `T` is `Any`. + +The `applyDynamic` method is used for selections that are applied to arguments. It takes a method name and possibly `ClassTag`s representing its parameters types as well as the arguments to pass to the function. +Its signature should be of one of the two following forms: +```scala +def applyDynamic(name: String)(args: Any*): T +def applyDynamic(name: String, ctags: ClassTag[?]*)(args: Any*): T +``` +Both versions are passed the actual arguments in the `args` parameter. The second version takes in addition a vararg argument of class tags that identify the method's parameter classes. Such an argument is needed +if `applyDynamic` is implemented using Java reflection, but it could be +useful in other cases as well. `selectDynamic` and `applyDynamic` can also take additional context parameters in using clauses. These are resolved in the normal way at the callsite. + +Given a value `v` of type `C { Rs }`, where `C` is a class reference +and `Rs` are structural refinement declarations, and given `v.a` of type `U`, we consider three distinct cases: + +- If `U` is a value type, we map `v.a` to: + ```scala + v.selectDynamic("a").asInstanceOf[U] + ``` + +- If `U` is a method type `(T11, ..., T1n)...(TN1, ..., TNn): R` and it is not a dependent method type, we map `v.a(a11, ..., a1n)...(aN1, ..., aNn)` to: + ```scala + v.applyDynamic("a")(a11, ..., a1n, ..., aN1, ..., aNn) + .asInstanceOf[R] + ``` + If this call resolves to an `applyDynamic` method of the second form that takes a `ClassTag[?]*` argument, we further rewrite this call to + ```scala + v.applyDynamic("a", CT11, ..., CT1n, ..., CTN1, ... CTNn)( + a11, ..., a1n, ..., aN1, ..., aNn) + .asInstanceOf[R] + ``` + where each `CT_ij` is the class tag of the type of the formal parameter `Tij` + +- If `U` is neither a value nor a method type, or a dependent method + type, an error is emitted. + +Note that `v`'s static type does not necessarily have to conform to `Selectable`, nor does it need to have `selectDynamic` and `applyDynamic` as members. It suffices that there is an implicit +conversion that can turn `v` into a `Selectable`, and the selection methods could also be available as extension methods. + +## Limitations of structural types + +- Dependent methods cannot be called via structural call. +- Overloaded methods cannot be called via structural call. +- Refinements do not handle polymorphic methods. + +## Differences with Scala 2 structural types + +- Scala 2 supports structural types by means of Java reflection. Unlike + Scala 3, structural calls do not rely on a mechanism such as + `Selectable`, and reflection cannot be avoided. +- In Scala 2, structural calls to overloaded methods are possible. +- In Scala 2, mutable `var`s are allowed in refinements. In Scala 3, + they are no longer allowed. + + +## Context + +For more info, see [Rethink Structural +Types](https://github.com/lampepfl/dotty/issues/1886). 
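As a concrete illustration of the translation rules above, here is a minimal sketch of a `Selectable` implementation that resolves both field selections and method calls from plain maps, without Java reflection (the `StringRecord` and `Greeter` names are invented for this example):

```scala
class StringRecord(
  fields: Map[String, Any],
  methods: Map[String, Seq[Any] => Any]
) extends Selectable {
  // Handles structural field selections such as `r.name`
  def selectDynamic(name: String): Any = fields(name)
  // Handles structural method calls such as `r.greet("Hello")`
  def applyDynamic(name: String)(args: Any*): Any = methods(name)(args)
}

type Greeter = StringRecord {
  val name: String
  def greet(prefix: String): String
}

object GreeterDemo {
  val g: Greeter = new StringRecord(
    fields  = Map("name" -> "Ada"),
    methods = Map("greet" -> { (args: Seq[Any]) => s"${args.head}, Ada!" })
  ).asInstanceOf[Greeter]

  val n: String = g.name           // g.selectDynamic("name").asInstanceOf[String]
  val s: String = g.greet("Hello") // g.applyDynamic("greet")("Hello").asInstanceOf[String]
}
```

The two commented selections show the rewriting described above: each structural member access goes through `selectDynamic` or `applyDynamic` and is then cast to the type declared in the refinement.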
diff --git a/scala3doc/dotty-docs/docs/docs/reference/changed-features/structural-types.md b/scala3doc/dotty-docs/docs/docs/reference/changed-features/structural-types.md new file mode 100644 index 000000000000..b7879abe703f --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/changed-features/structural-types.md @@ -0,0 +1,182 @@ +--- +layout: doc-page +title: "Programmatic Structural Types" +--- + +## Motivation + +Some usecases, such as modelling database access, are more awkward in +statically typed languages than in dynamically typed languages: With +dynamically typed languages, it's quite natural to model a row as a +record or object, and to select entries with simple dot notation (e.g. +`row.columnName`). + +Achieving the same experience in statically typed +language requires defining a class for every possible row arising from +database manipulation (including rows arising from joins and +projections) and setting up a scheme to map between a row and the +class representing it. + +This requires a large amount of boilerplate, which leads developers to +trade the advantages of static typing for simpler schemes where colum +names are represented as strings and passed to other operators (e.g. +`row.select("columnName")`). This approach forgoes the advantages of +static typing, and is still not as natural as the dynamically typed +version. + +Structural types help in situations where we would like to support +simple dot notation in dynamic contexts without losing the advantages +of static typing. They allow developers to use dot notation and +configure how fields and methods should be resolved. + +## Example + +Here's an example of a structural type `Person`: +```scala + class Record(elems: (String, Any)*) extends Selectable { + private val fields = elems.toMap + def selectDynamic(name: String): Any = fields(name) + } + type Person = Record { + val name: String + val age: Int + } +``` +The person type adds a _refinement_ to its parent type `Record` that defines `name` and `age` fields. We say the refinement is _structural_ since `name` and `age` are not defined in the parent type. But they exist nevertheless as members of class `Person`. For instance, the following +program would print "Emma is 42 years old.": +```scala + val person = Record("name" -> "Emma", "age" -> 42).asInstanceOf[Person] + println(s"${person.name} is ${person.age} years old.") +``` +The parent type `Record` in this example is a generic class that can represent arbitrary records in its `elems` argument. This argument is a +sequence of pairs of labels of type `String` and values of type `Any`. +When we create a `Person` as a `Record` we have to assert with a typecast +that the record defines the right fields of the right types. `Record` +itself is too weakly typed so the compiler cannot know this without +help from the user. In practice, the connection between a structural type +and its underlying generic representation would most likely be done by +a database layer, and therefore would not be a concern of the end user. + +`Record` extends the marker trait `scala.Selectable` and defines +a method `selectDynamic`, which maps a field name to its value. +Selecting a structural type member is done by calling this method. 
+The `person.name` and `person.age` selections are translated by +the Scala compiler to: +```scala + person.selectDynamic("name").asInstanceOf[String] + person.selectDynamic("age").asInstanceOf[Int] +``` + +Besides `selectDynamic`, a `Selectable` class sometimes also defines a method `applyDynamic`. This can then be used to translate function calls of structural members. So, if `a` is an instance of `Selectable`, a structural call like `a.f(b, c)` would translate to +```scala + a.applyDynamic("f")(b, c) +``` + +## Using Java Reflection + +Structural types can also be accessed using Java reflection. Example: +```scala + type Closeable = { + def close(): Unit + } + class FileInputStream { + def close(): Unit + } + class Channel { + def close(): Unit + } +``` +Here, we define a structural type `Closeable` that defines a `close` method. There are various classes that have `close` methods, we just list `FileInputStream` and `Channel` as two examples. It would be easiest if the two classes shared a common interface that factors out the `close` method. But such factorings are often not possible if different libraries are combined in one application. Yet, we can still have methods that work on +all classes with a `close` method by using the `Closeable` type. For instance, +```scala + import scala.reflect.Selectable.reflectiveSelectable + + def autoClose(f: Closeable)(op: Closeable => Unit): Unit = + try op(f) finally f.close() +``` +The call `f.close()` has to use Java reflection to identify and call the `close` method in the receiver `f`. This needs to be enabled by an import +of `reflectiveSelectable` shown above. What happens "under the hood" is then the following: + + - The import makes available an implicit conversion that turns any type into a + `Selectable`. `f` is wrapped in this conversion. + + - The compiler then transforms the `close` call on the wrapped `f` + to an `applyDynamic` call. The end result is: + + ```scala + reflectiveSelectable(f).applyDynamic("close")() + ``` + - The implementation of `applyDynamic` in `reflectiveSelectable`'s result +uses Java reflection to find and call a method `close` with zero parameters in the value referenced by `f` at runtime. + +Structural calls like this tend to be much slower than normal method calls. The mandatory import of `reflectiveSelectable` serves as a signpost that something inefficient is going on. + +**Note:** In Scala 2, Java reflection is the only mechanism available for structural types and it is automatically enabled without needing the +`reflectiveSelectable` conversion. However, to warn against inefficient +dispatch, Scala 2 requires a language import `import scala.language.reflectiveCalls`. + +Before resorting to structural calls with Java reflection one should consider alternatives. For instance, sometimes a more a modular _and_ efficient architecture can be obtained using typeclasses. + +## Extensibility + +New instances of `Selectable` can be defined to support means of +access other than Java reflection, which would enable usages such as +the database access example given at the beginning of this document. + +## Local Selectable Instances + +Local and anonymous classes that extend `Selectable` get more refined types +than other classes. Here is an example: +```scala +trait Vehicle extends reflect.Selectable { + val wheels: Int +} +val i3 = new Vehicle { // i3: Vehicle { val range: Int } + val wheels = 4 + val range = 240 +} +i3.range +``` +The type of `i3` in this example is `Vehicle { val range: Int }`. 
Hence, +`i3.range` is well-formed. Since the base class `Vehicle` does not define a `range` field or method, we need structural dispatch to access the `range` field of the anonymous class that initializes `id3`. Structural dispatch +is implemented by the base trait `reflect.Selectable` of `Vehicle`, which +defines the necessary `selectDynamic` member. + +`Vehicle` could also extend some other subclass of `scala.Selectable` that implements `selectDynamic` and `applyDynamic` differently. But if it does not extend a `Selectable` at all, the code would no longer typecheck: +```scala +class Vehicle { + val wheels: Int +} +val i3 = new Vehicle { // i3: Vehicle + val wheels = 4 + val range = 240 +} +i3.range: // error: range is not a member of `Vehicle` +``` +The difference is that the type of an anonymous class that does not extend `Selectable` is just formed from the parent type(s) of the class, without +adding any refinements. Hence, `i3` now has just type `Vehicle` and the selection `i3.range` gives a "member not found" error. + +Note that in Scala 2 all local and anonymous classes could produce values with refined types. But +members defined by such refinements could be selected only with the language import +`reflectiveCalls`. + +## Relation with `scala.Dynamic` + +There are clearly some connections with `scala.Dynamic` here, since +both select members programmatically. But there are also some +differences. + +- Fully dynamic selection is not typesafe, but structural selection + is, as long as the correspondence of the structural type with the + underlying value is as stated. + +- `Dynamic` is just a marker trait, which gives more leeway where and + how to define reflective access operations. By contrast + `Selectable` is a trait which declares the access operations. + +- Two access operations, `selectDynamic` and `applyDynamic` are shared + between both approaches. In `Selectable`, `applyDynamic` also takes + `ClassTag` indicating the method's formal parameter types. `Dynamic` + comes with `updateDynamic`. + +[More details](structural-types-spec.md) diff --git a/scala3doc/dotty-docs/docs/docs/reference/changed-features/type-checking.md b/scala3doc/dotty-docs/docs/docs/reference/changed-features/type-checking.md new file mode 100644 index 000000000000..b167a37c44cc --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/changed-features/type-checking.md @@ -0,0 +1,6 @@ +--- +layout: doc-page +title: "Changes in Type Checking" +--- + +[//]: # todo: fill in diff --git a/scala3doc/dotty-docs/docs/docs/reference/changed-features/type-inference.md b/scala3doc/dotty-docs/docs/docs/reference/changed-features/type-inference.md new file mode 100644 index 000000000000..88c361cf09d5 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/changed-features/type-inference.md @@ -0,0 +1,7 @@ +--- +layout: doc-page +title: "Changes in Type Inference" +--- + +[//]: # todo: fill in + diff --git a/scala3doc/dotty-docs/docs/docs/reference/changed-features/vararg-patterns.md b/scala3doc/dotty-docs/docs/docs/reference/changed-features/vararg-patterns.md new file mode 100644 index 000000000000..6dc3946e71f7 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/changed-features/vararg-patterns.md @@ -0,0 +1,43 @@ +--- +layout: doc-page +title: "Vararg Patterns" +--- + +The syntax of vararg patterns has changed. 
In the new syntax one writes varargs in patterns exactly +like one writes them in expressions, using a `: _*` type annotation: + +```scala +xs match { + case List(1, 2, xs: _*) => println(xs) // binds xs + case List(1, _ : _*) => // wildcard pattern +} +``` + +The old syntax, which is shorter but less regular, is no longer supported. + +```scala +/*!*/ case List(1, 2, xs @ _*) // syntax error +/*!*/ case List(1, 2, _*) => ... // syntax error +``` + +The change to the grammar is: + +```diff + SimplePattern ::= ‘_’ + | varid + | Literal + | StableId + | StableId ‘(’ [Patterns ‘)’ +- | StableId ‘(’ [Patterns ‘,’] [varid ‘@’] ‘_’ ‘*’ ‘)’ ++ | StableId ‘(’ [Patterns ‘,’] (varid | ‘_’) ‘:’ ‘_’ ‘*’ ‘)’ + | ‘(’ [Patterns] ‘)’ + | XmlPattern +``` + +## Compatibility considerations + +To enable smooth cross compilation between Scala 2 and Scala 3, Dotty will +accept both the old and the new syntax. Under the `-strict` setting, an error +will be emitted when the old syntax is encountered. They will be enabled by +default in version 3.1 of the language. + diff --git a/scala3doc/dotty-docs/docs/docs/reference/changed-features/wildcards.md b/scala3doc/dotty-docs/docs/docs/reference/changed-features/wildcards.md new file mode 100644 index 000000000000..882941b0d549 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/changed-features/wildcards.md @@ -0,0 +1,43 @@ +--- +layout: doc-page +title: Wildcard Arguments in Types +--- + +The syntax of wildcard arguments in types has changed from `_` to `?`. Example: +```scala +List[?] +Map[? <: AnyRef, ? >: Null] +``` + +### Motivation + +We would like to use the underscore syntax `_` to stand for an anonymous type parameter, aligning it with its meaning in +value parameter lists. So, just as `f(_)` is a shorthand for the lambda `x => f(x)`, in the future `C[_]` will be a shorthand +for the type lambda `[X] =>> C[X]`. This makes higher-kinded types easier to use. It also removes the wart that, used as a type +parameter, `F[_]` means `F` is a type constructor whereas used as a type, `F[_]` means it is a wildcard (i.e. existential) type. +In the future, `F[_]` will mean the same thing, no matter where it is used. + +We pick `?` as a replacement syntax for wildcard types, since it aligns with Java's syntax. + +### Migration Strategy + +The migration to the new scheme is complicated, in particular since the [kind projector](https://github.com/typelevel/kind-projector) +compiler plugin still uses the reverse convention, with `?` meaning parameter placeholder instead of wildcard. Fortunately, kind projector has added `*` as an alternative syntax for `?`. + +A step-by-step migration is made possible with the following measures: + + 1. In Scala 3.0, both `_` and `?` are legal names for wildcards. + 2. In Scala 3.1, `_` is deprecated in favor of `?` as a name for a wildcard. A `-rewrite` option is + available to rewrite one to the other. + 3. In Scala 3.2, the meaning of `_` changes from wildcard to placeholder for type parameter. + 4. The Scala 3.1 behavior is already available today under the `-strict` setting. + +To smooth the transition for codebases that use kind-projector, we adopt the following measures under the command line +option `-Ykind-projector`: + + 1. In Scala 3.0, `*` is available as a type parameter placeholder. + 2. In Scala 3.2, `*` is deprecated in favor of `_`. A `-rewrite` option is + available to rewrite one to the other. + 3. In Scala 3.3, `*` is removed again, and all type parameter placeholders will be expressed with `_`. 
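For illustration, here is a minimal sketch of code that cross compiles under these measures; the `Functor` trait is only a stand-in:

```scala
trait Functor[F[_]]

// Accepted by Scala 2 with the kind-projector plugin and by Scala 3.0 with -Ykind-projector:
def needsFunctor(f: Functor[Either[String, *]]): Unit = ()

// The explicit Scala 3 spelling of the same type argument:
// def needsFunctor(f: Functor[[X] =>> Either[String, X]]): Unit = ()
```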
+ +These rules make it possible to cross build between Scala 2 using the kind projector plugin and Scala 3.0 - 3.2 using option `-Ykind-projector`. diff --git a/scala3doc/dotty-docs/docs/docs/reference/contextual/by-name-context-parameters.md b/scala3doc/dotty-docs/docs/docs/reference/contextual/by-name-context-parameters.md new file mode 100644 index 000000000000..a259cea88ae5 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/contextual/by-name-context-parameters.md @@ -0,0 +1,67 @@ +--- +layout: doc-page +title: "By-Name Context Parameters" +--- + +Context parameters can be declared by-name to avoid a divergent inferred expansion. Example: + +```scala +trait Codec[T] { + def write(x: T): Unit +} + +given intCodec as Codec[Int] = ??? + +given optionCodec[T](using ev: => Codec[T]) as Codec[Option[T]] { + def write(xo: Option[T]) = xo match { + case Some(x) => ev.write(x) + case None => + } +} + +val s = summon[Codec[Option[Int]]] + +s.write(Some(33)) +s.write(None) +``` +As is the case for a normal by-name parameter, the argument for the context parameter `ev` +is evaluated on demand. In the example above, if the option value `x` is `None`, it is +not evaluated at all. + +The synthesized argument for a context parameter is backed by a local val +if this is necessary to prevent an otherwise diverging expansion. + +The precise steps for synthesizing an argument for a by-name context parameter of type `=> T` are as follows. + + 1. Create a new given of type `T`: + + ```scala + given lv as T = ??? + ``` + where `lv` is an arbitrary fresh name. + + 1. This given is not immediately available as candidate for argument inference (making it immediately available could result in a loop in the synthesized computation). But it becomes available in all nested contexts that look again for an argument to a by-name context parameter. + + 1. If this search succeeds with expression `E`, and `E` contains references to `lv`, replace `E` by + + + ```scala + { given lv as T = E; lv } + ``` + + Otherwise, return `E` unchanged. + +In the example above, the definition of `s` would be expanded as follows. + +```scala +val s = summon[Test.Codec[Option[Int]]]( + optionCodec[Int](using intCodec) +) +``` + +No local given instance was generated because the synthesized argument is not recursive. + +### Reference + +For more info, see [Issue #1998](https://github.com/lampepfl/dotty/issues/1998) +and the associated [Scala SIP](https://docs.scala-lang.org/sips/byname-implicits.html). diff --git a/scala3doc/dotty-docs/docs/docs/reference/contextual/context-bounds.md b/scala3doc/dotty-docs/docs/docs/reference/contextual/context-bounds.md new file mode 100644 index 000000000000..0b85f1a3a0fc --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/contextual/context-bounds.md @@ -0,0 +1,45 @@ +--- +layout: doc-page +title: "Context Bounds" +--- + +A context bound is a shorthand for expressing the common pattern of a context parameter that depends on a type parameter. Using a context bound, the `maximum` function of the last section can be written like this: + +```scala +def maximum[T: Ord](xs: List[T]): T = xs.reduceLeft(max) +``` + +A bound like `: Ord` on a type parameter `T` of a method or class indicates a context parameter `with Ord[T]`. The context parameter(s) generated from context bounds come last in the definition of the containing method or class. 
E.g., + +```scala +def f[T: C1 : C2, U: C3](x: T)(using y: U, z: V): R +``` + +would expand to + +```scala +def f[T, U](x: T)(using y: U, z: V)(using C1[T], C2[T], C3[U]): R +``` + +Context bounds can be combined with subtype bounds. If both are present, subtype bounds come first, e.g. + +```scala +def g[T <: B : C](x: T): R = ... +``` + +## Migration + +To ease migration, context bounds in Dotty map in Scala 3.0 to old-style implicit parameters +for which arguments can be passed either with a `(using ...)` clause or with a normal application. From Scala 3.1 on, they will map to context parameters instead, as is described above. + +If the source version is `3.1` and the `-migration` command-line option is set, any pairing of an evidence +context parameter stemming from a context bound with a normal argument will give a migration +warning. The warning indicates that a `(using ...)` clause is needed instead. The rewrite can be +done automatically under `-rewrite`. + +## Syntax + +``` +TypeParamBounds ::= [SubtypeBounds] {ContextBound} +ContextBound ::= ‘:’ Type +``` diff --git a/scala3doc/dotty-docs/docs/docs/reference/contextual/context-functions-spec.md b/scala3doc/dotty-docs/docs/docs/reference/contextual/context-functions-spec.md new file mode 100644 index 000000000000..39006116907c --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/contextual/context-functions-spec.md @@ -0,0 +1,74 @@ +--- +layout: doc-page +title: "Context Functions - More Details" +--- + +## Syntax + + Type ::= ... + | FunArgTypes ‘?=>’ Type + Expr ::= ... + | FunParams ‘?=>’ Expr + +Context function types associate to the right, e.g. +`S ?=> T ?=> U` is the same as `S ?=> (T ?=> U)`. + +## Implementation + +Context function types are shorthands for class types that define `apply` +methods with context parameters. Specifically, the `N`-ary function type +`T1, ..., TN => R` is a shorthand for the class type +`ContextFunctionN[T1 , ... , TN, R]`. Such class types are assumed to have the following definitions, for any value of `N >= 1`: +```scala +package scala +trait ContextFunctionN[-T1 , ... , -TN, +R] { + def apply(using x1: T1 , ... , xN: TN): R +} +``` +Context function types erase to normal function types, so these classes are +generated on the fly for typechecking, but not realized in actual code. + +Context function literals `(using x1: T1, ..., xn: Tn) => e` map +context parameters `xi` of types `Ti` to the result of evaluating the expression `e`. +The scope of each context parameter `xi` is `e`. The parameters must have pairwise distinct names. + +If the expected type of the context function literal is of the form +`scala.ContextFunctionN[S1, ..., Sn, R]`, the expected type of `e` is `R` and +the type `Ti` of any of the parameters `xi` can be omitted, in which case `Ti += Si` is assumed. If the expected type of the context function literal is +some other type, all context parameter types must be explicitly given, and the expected type of `e` is undefined. +The type of the context function literal is `scala.ContextFunctionN[S1, ...,Sn, T]`, where `T` is the widened +type of `e`. `T` must be equivalent to a type which does not refer to any of +the context parameters `xi`. + +The context function literal is evaluated as the instance creation +expression +```scala +new scala.ContextFunctionN[T1, ..., Tn, T] { + def apply(using x1: T1, ..., xn: Tn): T = e +} +``` +A context parameter may also be a wildcard represented by an underscore `_`. 
In that case, a fresh name for the parameter is chosen arbitrarily. + +Note: The closing paragraph of the +[Anonymous Functions section](https://www.scala-lang.org/files/archive/spec/2.12/06-expressions.html#anonymous-functions) +of Scala 2.12 is subsumed by context function types and should be removed. + +Context function literals `(using x1: T1, ..., xn: Tn) => e` are +automatically created for any expression `e` whose expected type is +`scala.ContextFunctionN[T1, ..., Tn, R]`, unless `e` is +itself a context function literal. This is analogous to the automatic +insertion of `scala.Function0` around expressions in by-name argument position. + +Context function types generalize to `N > 22` in the same way that function types do, see [the corresponding +documentation](../dropped-features/limit22.md). + +## Examples + +See the section on Expressiveness from [Simplicitly: foundations and +applications of implicit function +types](https://dl.acm.org/citation.cfm?id=3158130). + +### Type Checking + +After desugaring no additional typing rules are required for context function types. diff --git a/scala3doc/dotty-docs/docs/docs/reference/contextual/context-functions.md b/scala3doc/dotty-docs/docs/docs/reference/contextual/context-functions.md new file mode 100644 index 000000000000..21e2dcf39fa3 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/contextual/context-functions.md @@ -0,0 +1,158 @@ +--- +layout: doc-page +title: "Context Functions" +--- + +_Context functions_ are functions with (only) context parameters. +Their types are _context function types_. Here is an example of a context function type: + +```scala +type Executable[T] = ExecutionContext ?=> T +``` +Context functions are written using `?=>` as the "arrow" sign. +They are applied to synthesized arguments, in +the same way methods with context parameters are applied. For instance: +```scala + given ec as ExecutionContext = ... + + def f(x: Int): ExecutionContext ?=> Int = ... + + // could be written as follows with the type alias from above + // def f(x: Int): Executable[Int] = ... + + f(2)(using ec) // explicit argument + f(2) // argument is inferred +``` +Conversely, if the expected type of an expression `E` is a context function type +`(T_1, ..., T_n) ?=> U` and `E` is not already an +context function literal, `E` is converted to a context function literal by rewriting it to +```scala + (using x_1: T1, ..., x_n: Tn) => E +``` +where the names `x_1`, ..., `x_n` are arbitrary. This expansion is performed +before the expression `E` is typechecked, which means that `x_1`, ..., `x_n` +are available as givens in `E`. + +Like their types, context function literals are written using `?=>` as the arrow between parameters and results. They differ from normal function literals in that their types are context function types. + +For example, continuing with the previous definitions, +```scala + def g(arg: Executable[Int]) = ... + + g(22) // is expanded to g((using ev: ExecutionContext) => 22) + + g(f(2)) // is expanded to g((using ev: ExecutionContext) => f(2)(using ev)) + + g(ExecutionContext ?=> f(3)) // is expanded to g((using ev: ExecutionContext) => f(3)(using ev)) + g((using ctx: ExecutionContext) => f(22)(using ctx)) // is left as it is +``` + +### Example: Builder Pattern + +Context function types have considerable expressive power. 
For +instance, here is how they can support the "builder pattern", where +the aim is to construct tables like this: +```scala + table { + row { + cell("top left") + cell("top right") + } + row { + cell("bottom left") + cell("bottom right") + } + } +``` +The idea is to define classes for `Table` and `Row` that allow the +addition of elements via `add`: +```scala + class Table { + val rows = new ArrayBuffer[Row] + def add(r: Row): Unit = rows += r + override def toString = rows.mkString("Table(", ", ", ")") + } + + class Row { + val cells = new ArrayBuffer[Cell] + def add(c: Cell): Unit = cells += c + override def toString = cells.mkString("Row(", ", ", ")") + } + + case class Cell(elem: String) +``` +Then, the `table`, `row` and `cell` constructor methods can be defined +with context function types as parameters to avoid the plumbing boilerplate +that would otherwise be necessary. +```scala + def table(init: Table ?=> Unit) = { + given t as Table // note the use of a creator application; same as: given t as Table = new Table + init + t + } + + def row(init: Row ?=> Unit)(using t: Table) = { + given r as Row + init + t.add(r) + } + + def cell(str: String)(using r: Row) = + r.add(new Cell(str)) +``` +With that setup, the table construction code above compiles and expands to: +```scala + table { (using $t: Table) => + + row { (using $r: Row) => + cell("top left")(using $r) + cell("top right")(using $r) + }(using $t) + + row { (using $r: Row) => + cell("bottom left")(using $r) + cell("bottom right")(using $r) + }(using $t) + } +``` +### Example: Postconditions + +As a larger example, here is a way to define constructs for checking arbitrary postconditions using an extension method `ensuring` so that the checked result can be referred to simply by `result`. The example combines opaque type aliases, context function types, and extension methods to provide a zero-overhead abstraction. + +```scala +object PostConditions { + opaque type WrappedResult[T] = T + + def result[T](using r: WrappedResult[T]): T = r + + extension [T](x: T) def ensuring(condition: WrappedResult[T] ?=> Boolean): T = { + assert(condition(using x)) + x + } +} +import PostConditions.{ensuring, result} + +val s = List(1, 2, 3).sum.ensuring(result == 6) +``` +**Explanations**: We use a context function type `WrappedResult[T] ?=> Boolean` +as the type of the condition of `ensuring`. An argument to `ensuring` such as +`(result == 6)` will therefore have a given of type `WrappedResult[T]` in +scope to pass along to the `result` method. `WrappedResult` is a fresh type, to make sure +that we do not get unwanted givens in scope (this is good practice in all cases +where context parameters are involved). Since `WrappedResult` is an opaque type alias, its +values need not be boxed, and since `ensuring` is added as an extension method, its argument +does not need boxing either. Hence, the implementation of `ensuring` is as about as efficient +as the best possible code one could write by hand: + +```scala +{ val result = List(1, 2, 3).sum + assert(result == 6) + result +} +``` +### Reference + +For more info, see the [blog article](https://www.scala-lang.org/blog/2016/12/07/implicit-function-types.html), +(which uses a different syntax that has been superseded). 
+ +[More details](./context-functions-spec.md) diff --git a/scala3doc/dotty-docs/docs/docs/reference/contextual/conversions.md b/scala3doc/dotty-docs/docs/docs/reference/contextual/conversions.md new file mode 100644 index 000000000000..7faeb3aac0d5 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/contextual/conversions.md @@ -0,0 +1,75 @@ +--- +layout: doc-page +title: "Implicit Conversions" +--- + +Implicit conversions are defined by given instances of the `scala.Conversion` class. +This class is defined in package `scala` as follows: +```scala +abstract class Conversion[-T, +U] extends (T => U) +``` +For example, here is an implicit conversion from `String` to `Token`: +```scala +given Conversion[String, Token] { + def apply(str: String): Token = new KeyWord(str) +} +``` +Using an alias this can be expressed more concisely as: +```scala +given Conversion[String, Token] = new KeyWord(_) +``` +An implicit conversion is applied automatically by the compiler in three situations: + +1. If an expression `e` has type `T`, and `T` does not conform to the expression's expected type `S`. +2. In a selection `e.m` with `e` of type `T`, but `T` defines no member `m`. +3. In an application `e.m(args)` with `e` of type `T`, if `T` does define + some member(s) named `m`, but none of these members can be applied to the arguments `args`. + +In the first case, the compiler looks for a given `scala.Conversion` instance that maps +an argument of type `T` to type `S`. In the second and third +case, it looks for a given `scala.Conversion` instance that maps an argument of type `T` +to a type that defines a member `m` which can be applied to `args` if present. +If such an instance `C` is found, the expression `e` is replaced by `C.apply(e)`. + +## Examples + +1. The `Predef` package contains "auto-boxing" conversions that map +primitive number types to subclasses of `java.lang.Number`. For instance, the +conversion from `Int` to `java.lang.Integer` can be defined as follows: +```scala +given int2Integer as Conversion[Int, java.lang.Integer] = + java.lang.Integer.valueOf(_) +``` + +2. The "magnet" pattern is sometimes used to express many variants of a method. Instead of defining overloaded versions of the method, one can also let the method take one or more arguments of specially defined "magnet" types, into which various argument types can be converted. E.g. +```scala +object Completions { + + // The argument "magnet" type + enum CompletionArg { + case Error(s: String) + case Response(f: Future[HttpResponse]) + case Status(code: Future[StatusCode]) + } + object CompletionArg { + + // conversions defining the possible arguments to pass to `complete` + // these always come with CompletionArg + // They can be invoked explicitly, e.g. + // + // CompletionArg.fromStatusCode(statusCode) + + given fromString as Conversion[String, CompletionArg] = Error(_) + given fromFuture as Conversion[Future[HttpResponse], CompletionArg] = Response(_) + given fromStatusCode as Conversion[Future[StatusCode], CompletionArg] = Status(_) + } + import CompletionArg._ + + def complete[T](arg: CompletionArg) = arg match { + case Error(s) => ... + case Response(f) => ... + case Status(code) => ... + } +} +``` +This setup is more complicated than simple overloading of `complete`, but it can still be useful if normal overloading is not available (as in the case above, since we cannot have two overloaded methods that take `Future[...]` arguments), or if normal overloading would lead to a combinatorial explosion of variants. 
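3. The second situation above, a selection `e.m` where the type of `e` has no member `m`, can be seen in the following minimal sketch; the `KeyWord` class and its `str` member are illustrative, not part of the standard library:
```scala
import scala.language.implicitConversions

class KeyWord(val str: String)

given Conversion[String, KeyWord] = new KeyWord(_)

// `String` has no member `str`, so the compiler searches for a given
// `Conversion` from `String` to a type that defines one and rewrites
// the selection to `theConversion.apply("if").str`.
val s: String = "if".str
```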
diff --git a/scala3doc/dotty-docs/docs/docs/reference/contextual/delegates.md b/scala3doc/dotty-docs/docs/docs/reference/contextual/delegates.md new file mode 100644 index 000000000000..ea5ffcfee205 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/contextual/delegates.md @@ -0,0 +1,6 @@ +--- +layout: doc-page +title: "Given Instances" +--- + +The contents of this page have [moved](./givens.md). diff --git a/scala3doc/dotty-docs/docs/docs/reference/contextual/derivation-macro.md b/scala3doc/dotty-docs/docs/docs/reference/contextual/derivation-macro.md new file mode 100644 index 000000000000..9a49aef73c34 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/contextual/derivation-macro.md @@ -0,0 +1,225 @@ +--- +layout: doc-page +title: How to write a type class `derived` method using macros +--- + +In the main [derivation](./derivation.md) documentation page, we explained the +details behind `Mirror`s and type class derivation. Here we demonstrate how to +implement a type class `derived` method using macros only. We follow the same +example of deriving `Eq` instances and for simplicity we support a `Product` +type e.g., a case class `Person`. The low-level method we will use to implement +the `derived` method exploits quotes, splices of both expressions and types and +the `scala.quoted.Expr.summon` method which is the equivalent of +`summonFrom`. The former is suitable for use in a quote context, used within +macros. + +As in the original code, the type class definition is the same: + +```scala +trait Eq[T] { + def eqv(x: T, y: T): Boolean +} +``` + +we need to implement a method `Eq.derived` on the companion object of `Eq` that +produces a quoted instance for `Eq[T]`. Here is a possible signature, + +```scala +given derived[T: Type](using qctx: QuoteContext) as Expr[Eq[T]] +``` + +and for comparison reasons we give the same signature we had with `inline`: + +```scala +inline given derived[T] as (m: Mirror.Of[T]) => Eq[T] = ??? +``` + +Note, that since a type is used in a subsequent stage it will need to be lifted +to a `Type` by using the corresponding context bound. Also, not that we can +summon the quoted `Mirror` inside the body of the `derived` this we can omit it +from the signature. The body of the `derived` method is shown below: + + +```scala +given derived[T: Type](using qctx: QuoteContext) as Expr[Eq[T]] = { + import qctx.tasty._ + + val ev: Expr[Mirror.Of[T]] = Expr.summon(using '[Mirror.Of[T]]).get + + ev match { + case '{ $m: Mirror.ProductOf[T] { type MirroredElemTypes = $elementTypes }} => + val elemInstances = summonAll(elementTypes) + val eqProductBody: (Expr[T], Expr[T]) => Expr[Boolean] = (x, y) => { + elemInstances.zipWithIndex.foldLeft(Expr(true: Boolean)) { + case (acc, (elem, index)) => + val e1 = '{$x.asInstanceOf[Product].productElement(${Expr(index)})} + val e2 = '{$y.asInstanceOf[Product].productElement(${Expr(index)})} + + '{ $acc && $elem.asInstanceOf[Eq[Any]].eqv($e1, $e2) } + } + } + '{ + eqProduct((x: T, y: T) => ${eqProductBody('x, 'y)}) + } + + // case for Mirror.ProductOf[T] + // ... + } +} +``` + +Note, that in the `inline` case we can merely write +`summonAll[m.MirroredElemTypes]` inside the inline method but here, since +`Expr.summon` is required, we can extract the element types in a macro fashion. +Being inside a macro, our first reaction would be to write the code below. 
Since +the path inside the type argument is not stable this cannot be used: + +```scala +'{ + summonAll[$m.MirroredElemTypes] +} +``` + +Instead we extract the tuple-type for element types using pattern matching over +quotes and more specifically of the refined type: + +```scala + case '{ $m: Mirror.ProductOf[T] { type MirroredElemTypes = $elementTypes } } => ... +``` + +The implementation of `summonAll` as a macro can be show below assuming that we +have the given instances for our primitive types: + +```scala + def summonAll[T](t: Type[T])(using qctx: QuoteContext): List[Expr[Eq[_]]] = t match { + case '[String *: $tpes] => '{ summon[Eq[String]] } :: summonAll(tpes) + case '[Int *: $tpes] => '{ summon[Eq[Int]] } :: summonAll(tpes) + case '[$tpe *: $tpes] => derived(using tpe, qctx) :: summonAll(tpes) + case '[EmptyTuple] => Nil + } +``` + +One additional difference with the body of `derived` here as opposed to the one +with `inline` is that with macros we need to synthesize the body of the code during the +macro-expansion time. That is the rationale behind the `eqProductBody` function. +Assuming that we calculate the equality of two `Person`s defined with a case +class that holds a name of type `String` and an age of type `Int`, the equality +check we want to generate is the following: + +```scala +true + && Eq[String].eqv(x.productElement(0),y.productElement(0)) + && Eq[Int].eqv(x.productElement(1), y.productElement(1)) +``` + +### Calling the derived method inside the macro + +Following the rules in [Macros](../metaprogramming/toc.md) we create two methods. +One that hosts the top-level splice `eqv` and one that is the implementation. +Alternatively and what is shown below is that we can call the `eqv` method +directly. The `eqGen` can trigger the derivation. 
+ +```scala +extension [T](x: =>T) + inline def === (y: =>T)(using eq: Eq[T]): Boolean = eq.eqv(x, y) + +implicit inline def eqGen[T]: Eq[T] = ${ Eq.derived[T] } +``` + +Note, that we use inline method syntax and we can compare instance such as +`Sm(Person("Test", 23)) === Sm(Person("Test", 24))` for e.g., the following two +types: + +```scala +case class Person(name: String, age: Int) + +enum Opt[+T] { + case Sm(t: T) + case Nn +} +``` + +The full code is shown below: + +```scala +import scala.deriving._ +import scala.quoted._ +import scala.quoted.matching._ + +trait Eq[T] { + def eqv(x: T, y: T): Boolean +} + +object Eq { + given Eq[String] { + def eqv(x: String, y: String) = x == y + } + + given Eq[Int] { + def eqv(x: Int, y: Int) = x == y + } + + def eqProduct[T](body: (T, T) => Boolean): Eq[T] = + new Eq[T] { + def eqv(x: T, y: T): Boolean = body(x, y) + } + + def eqSum[T](body: (T, T) => Boolean): Eq[T] = + new Eq[T] { + def eqv(x: T, y: T): Boolean = body(x, y) + } + + def summonAll[T](t: Type[T])(using qctx: QuoteContext): List[Expr[Eq[_]]] = t match { + case '[String *: $tpes] => '{ summon[Eq[String]] } :: summonAll(tpes) + case '[Int *: $tpes] => '{ summon[Eq[Int]] } :: summonAll(tpes) + case '[$tpe *: $tpes] => derived(using tpe, qctx) :: summonAll(tpes) + case '[EmptyTuple] => Nil + } + + given derived[T: Type](using qctx: QuoteContext) as Expr[Eq[T]] = { + import qctx.tasty._ + + val ev: Expr[Mirror.Of[T]] = Expr.summon(using '[Mirror.Of[T]]).get + + ev match { + case '{ $m: Mirror.ProductOf[T] { type MirroredElemTypes = $elementTypes }} => + val elemInstances = summonAll(elementTypes) + val eqProductBody: (Expr[T], Expr[T]) => Expr[Boolean] = (x, y) => { + elemInstances.zipWithIndex.foldLeft(Expr(true: Boolean)) { + case (acc, (elem, index)) => + val e1 = '{$x.asInstanceOf[Product].productElement(${Expr(index)})} + val e2 = '{$y.asInstanceOf[Product].productElement(${Expr(index)})} + + '{ $acc && $elem.asInstanceOf[Eq[Any]].eqv($e1, $e2) } + } + } + '{ + eqProduct((x: T, y: T) => ${eqProductBody('x, 'y)}) + } + + case '{ $m: Mirror.SumOf[T] { type MirroredElemTypes = $elementTypes }} => + val elemInstances = summonAll(elementTypes) + val eqSumBody: (Expr[T], Expr[T]) => Expr[Boolean] = (x, y) => { + val ordx = '{ $m.ordinal($x) } + val ordy = '{ $m.ordinal($y) } + + val elements = Expr.ofList(elemInstances) + '{ + $ordx == $ordy && $elements($ordx).asInstanceOf[Eq[Any]].eqv($x, $y) + } + } + + '{ + eqSum((x: T, y: T) => ${eqSumBody('x, 'y)}) + } + } + } +} + +object Macro3 { + extension [T](x: =>T) + inline def === (y: =>T)(using eq: Eq[T]): Boolean = eq.eqv(x, y) + + implicit inline def eqGen[T]: Eq[T] = ${ Eq.derived[T] } +} +``` diff --git a/scala3doc/dotty-docs/docs/docs/reference/contextual/derivation.md b/scala3doc/dotty-docs/docs/docs/reference/contextual/derivation.md new file mode 100644 index 000000000000..08860d6eaaa8 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/contextual/derivation.md @@ -0,0 +1,399 @@ +--- +layout: doc-page +title: Type Class Derivation +--- + +Type class derivation is a way to automatically generate given instances for type classes which satisfy some simple +conditions. A type class in this sense is any trait or class with a type parameter determining the type being operated +on. Common examples are `Eq`, `Ordering`, or `Show`. 
For example, given the following `Tree` algebraic data type +(ADT), + +```scala +enum Tree[T] derives Eq, Ordering, Show { + case Branch(left: Tree[T], right: Tree[T]) + case Leaf(elem: T) +} +``` + +The `derives` clause generates the following given instances for the `Eq`, `Ordering` and `Show` type classes in the +companion object of `Tree`, + +```scala +given [T: Eq] as Eq[Tree[T]] = Eq.derived +given [T: Ordering] as Ordering[Tree] = Ordering.derived +given [T: Show] as Show[Tree] = Show.derived +``` + +We say that `Tree` is the _deriving type_ and that the `Eq`, `Ordering` and `Show` instances are _derived instances_. + +### Types supporting `derives` clauses + +All data types can have a `derives` clause. This document focuses primarily on data types which also have a given instance +of the `Mirror` type class available. Instances of the `Mirror` type class are generated automatically by the compiler +for, + ++ enums and enum cases ++ case classes and case objects ++ sealed classes or traits that have only case classes and case objects as children + +`Mirror` type class instances provide information at the type level about the components and labelling of the type. +They also provide minimal term level infrastructure to allow higher level libraries to provide comprehensive +derivation support. + +```scala +sealed trait Mirror { + + /** the type being mirrored */ + type MirroredType + + /** the type of the elements of the mirrored type */ + type MirroredElemTypes + + /** The mirrored *-type */ + type MirroredMonoType + + /** The name of the type */ + type MirroredLabel <: String + + /** The names of the elements of the type */ + type MirroredElemLabels <: Tuple +} + +object Mirror { + /** The Mirror for a product type */ + trait Product extends Mirror { + + /** Create a new instance of type `T` with elements taken from product `p`. */ + def fromProduct(p: scala.Product): MirroredMonoType + } + + trait Sum extends Mirror { self => + /** The ordinal number of the case class of `x`. For enums, `ordinal(x) == x.ordinal` */ + def ordinal(x: MirroredMonoType): Int + } +} +``` + +Product types (i.e. case classes and objects, and enum cases) have mirrors which are subtypes of `Mirror.Product`. Sum +types (i.e. sealed class or traits with product children, and enums) have mirrors which are subtypes of `Mirror.Sum`. + +For the `Tree` ADT from above the following `Mirror` instances will be automatically provided by the compiler, + +```scala +// Mirror for Tree +Mirror.Sum { + type MirroredType = Tree + type MirroredElemTypes[T] = (Branch[T], Leaf[T]) + type MirroredMonoType = Tree[_] + type MirroredLabels = "Tree" + type MirroredElemLabels = ("Branch", "Leaf") + + def ordinal(x: MirroredMonoType): Int = x match { + case _: Branch[_] => 0 + case _: Leaf[_] => 1 + } +} + +// Mirror for Branch +Mirror.Product { + type MirroredType = Branch + type MirroredElemTypes[T] = (Tree[T], Tree[T]) + type MirroredMonoType = Branch[_] + type MirroredLabels = "Branch" + type MirroredElemLabels = ("left", "right") + + def fromProduct(p: Product): MirroredMonoType = + new Branch(...) +} + +// Mirror for Leaf +Mirror.Product { + type MirroredType = Leaf + type MirroredElemTypes[T] = Tuple1[T] + type MirroredMonoType = Leaf[_] + type MirroredLabels = "Leaf" + type MirroredElemLabels = Tuple1["elem"] + + def fromProduct(p: Product): MirroredMonoType = + new Leaf(...) +} +``` + +Note the following properties of `Mirror` types, + ++ Properties are encoded using types rather than terms. 
This means that they have no runtime footprint unless used and + also that they are a compile time feature for use with Dotty's metaprogramming facilities. ++ The kinds of `MirroredType` and `MirroredElemTypes` match the kind of the data type the mirror is an instance for. + This allows `Mirrors` to support ADTs of all kinds. ++ There is no distinct representation type for sums or products (ie. there is no `HList` or `Coproduct` type as in + Scala 2 versions of shapeless). Instead the collection of child types of a data type is represented by an ordinary, + possibly parameterized, tuple type. Dotty's metaprogramming facilities can be used to work with these tuple types + as-is, and higher level libraries can be built on top of them. ++ For both product and sum types, the elements of `MirroredElemTypes` are arranged in definition order (i.e. `Branch[T]` + precedes `Leaf[T]` in `MirroredElemTypes` for `Tree` because `Branch` is defined before `Leaf` in the source file). + This means that `Mirror.Sum` differs in this respect from shapeless's generic representation for ADTs in Scala 2, + where the constructors are ordered alphabetically by name. ++ The methods `ordinal` and `fromProduct` are defined in terms of `MirroredMonoType` which is the type of kind-`*` + which is obtained from `MirroredType` by wildcarding its type parameters. + +### Type classes supporting automatic deriving + +A trait or class can appear in a `derives` clause if its companion object defines a method named `derived`. The +signature and implementation of a `derived` method for a type class `TC[_]` are arbitrary but it is typically of the +following form, + +```scala +def derived[T](using Mirror.Of[T]): TC[T] = ... +``` + +That is, the `derived` method takes a context parameter of (some subtype of) type `Mirror` which defines the shape of +the deriving type `T`, and computes the type class implementation according to that shape. This is all that the +provider of an ADT with a `derives` clause has to know about the derivation of a type class instance. + +Note that `derived` methods may have context `Mirror` parameters indirectly (e.g. by having a context argument which in turn +has a context `Mirror` parameter, or not at all (e.g. they might use some completely different user-provided mechanism, for +instance using Dotty macros or runtime reflection). We expect that (direct or indirect) `Mirror` based implementations +will be the most common and that is what this document emphasises. + +Type class authors will most likely use higher level derivation or generic programming libraries to implement +`derived` methods. An example of how a `derived` method might be implemented using _only_ the low level facilities +described above and Dotty's general metaprogramming features is provided below. It is not anticipated that type class +authors would normally implement a `derived` method in this way, however this walkthrough can be taken as a guide for +authors of the higher level derivation libraries that we expect typical type class authors will use (for a fully +worked out example of such a library, see [shapeless 3](https://github.com/milessabin/shapeless/tree/shapeless-3)). + +#### How to write a type class `derived` method using low level mechanisms + +The low-level method we will use to implement a type class `derived` method in this example exploits three new +type-level constructs in Dotty: inline methods, inline matches, and implicit searches via `summonInline` or `summonFrom`. 
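As a quick, hedged sketch of the implicit-search constructs (the `TC` trait and both helper methods below are stand-ins, not part of the example that follows):

```scala
import scala.compiletime.{summonFrom, summonInline}

trait TC[T]

// Fails with a compile-time error at the expansion site if no instance is found
inline def required[T]: TC[T] = summonInline[TC[T]]

// Falls back gracefully when no instance is in scope
inline def optional[T]: Option[TC[T]] = summonFrom {
  case tc: TC[T] => Some(tc)
  case _         => None
}
```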
Given this definition of the +`Eq` type class, + + +```scala +trait Eq[T] { + def eqv(x: T, y: T): Boolean +} +``` + +we need to implement a method `Eq.derived` on the companion object of `Eq` that produces a given instance for `Eq[T]` given +a `Mirror[T]`. Here is a possible implementation, + +```scala +inline given derived[T](using m: Mirror.Of[T]) as Eq[T] = { + val elemInstances = summonAll[m.MirroredElemTypes] // (1) + inline m match { // (2) + case s: Mirror.SumOf[T] => eqSum(s, elemInstances) + case p: Mirror.ProductOf[T] => eqProduct(p, elemInstances) + } +} +``` + +Note that `derived` is defined as an `inline` given. This means that the method will be expanded at +call sites (for instance the compiler generated instance definitions in the companion objects of ADTs which have a +`derived Eq` clause), and also that it can be used recursively if necessary, to compute instances for children. + +The body of this method (1) first materializes the `Eq` instances for all the child types of type the instance is +being derived for. This is either all the branches of a sum type or all the fields of a product type. The +implementation of `summonAll` is `inline` and uses Dotty's `summonInline` construct to collect the instances as a +`List`, + +```scala + +inline def summonAll[T <: Tuple]: List[Eq[_]] = inline erasedValue[T] match { + case _: EmptyTuple => Nil + case _: (t *: ts) => summonInline[Eq[t]] :: summonAll[ts] +} +``` + +with the instances for children in hand the `derived` method uses an `inline match` to dispatch to methods which can +construct instances for either sums or products (2). Note that because `derived` is `inline` the match will be +resolved at compile-time and only the left-hand side of the matching case will be inlined into the generated code with +types refined as revealed by the match. + +In the sum case, `eqSum`, we use the runtime `ordinal` values of the arguments to `eqv` to first check if the two +values are of the same subtype of the ADT (3) and then, if they are, to further test for equality based on the `Eq` +instance for the appropriate ADT subtype using the auxiliary method `check` (4). 
+ +```scala +def eqSum[T](s: Mirror.SumOf[T], elems: List[Eq[_]]): Eq[T] = + new Eq[T] { + def eqv(x: T, y: T): Boolean = { + val ordx = s.ordinal(x) // (3) + (s.ordinal(y) == ordx) && check(elems(ordx))(x, y) // (4) + } + } +``` + +In the product case, `eqProduct` we test the runtime values of the arguments to `eqv` for equality as products based +on the `Eq` instances for the fields of the data type (5), + +```scala +def eqProduct[T](p: Mirror.ProductOf[T], elems: List[Eq[_]]): Eq[T] = + new Eq[T] { + def eqv(x: T, y: T): Boolean = + iterator(x).zip(iterator(y)).zip(elems.iterator).forall { // (5) + case ((x, y), elem) => check(elem)(x, y) + } + } +``` + +Pulling this all together we have the following complete implementation, + +```scala +import scala.deriving._ +import scala.compiletime.{erasedValue, summonInline} + +inline def summonAll[T <: Tuple]: List[Eq[_]] = inline erasedValue[T] match { + case _: EmptyTuple => Nil + case _: (t *: ts) => summonInline[Eq[t]] :: summonAll[ts] +} + +trait Eq[T] { + def eqv(x: T, y: T): Boolean +} + +object Eq { + given Eq[Int] { + def eqv(x: Int, y: Int) = x == y + } + + def check(elem: Eq[_])(x: Any, y: Any): Boolean = + elem.asInstanceOf[Eq[Any]].eqv(x, y) + + def iterator[T](p: T) = p.asInstanceOf[Product].productIterator + + def eqSum[T](s: Mirror.SumOf[T], elems: List[Eq[_]]): Eq[T] = + new Eq[T] { + def eqv(x: T, y: T): Boolean = { + val ordx = s.ordinal(x) + (s.ordinal(y) == ordx) && check(elems(ordx))(x, y) + } + } + + def eqProduct[T](p: Mirror.ProductOf[T], elems: List[Eq[_]]): Eq[T] = + new Eq[T] { + def eqv(x: T, y: T): Boolean = + iterator(x).zip(iterator(y)).zip(elems.iterator).forall { + case ((x, y), elem) => check(elem)(x, y) + } + } + + inline given derived[T](using m: Mirror.Of[T]) as Eq[T] = { + val elemInstances = summonAll[m.MirroredElemTypes] + inline m match { + case s: Mirror.SumOf[T] => eqSum(s, elemInstances) + case p: Mirror.ProductOf[T] => eqProduct(p, elemInstances) + } + } +} +``` + +we can test this relative to a simple ADT like so, + +```scala +enum Opt[+T] derives Eq { + case Sm(t: T) + case Nn +} + +object Test extends App { + import Opt._ + val eqoi = summon[Eq[Opt[Int]]] + assert(eqoi.eqv(Sm(23), Sm(23))) + assert(!eqoi.eqv(Sm(23), Sm(13))) + assert(!eqoi.eqv(Sm(23), Nn)) +} +``` + +In this case the code that is generated by the inline expansion for the derived `Eq` instance for `Opt` looks like the +following, after a little polishing, + +```scala +given derived$Eq[T](using eqT: Eq[T]) as Eq[Opt[T]] = + eqSum(summon[Mirror[Opt[T]]], + List( + eqProduct(summon[Mirror[Sm[T]]], List(summon[Eq[T]])) + eqProduct(summon[Mirror[Nn.type]], Nil) + ) + ) +``` + +Alternative approaches can be taken to the way that `derived` methods can be defined. For example, more aggressively +inlined variants using Dotty macros, whilst being more involved for type class authors to write than the example +above, can produce code for type classes like `Eq` which eliminate all the abstraction artefacts (eg. the `Lists` of +child instances in the above) and generate code which is indistinguishable from what a programmer might write by hand. 
+As a third example, using a higher level library such as shapeless the type class author could define an equivalent +`derived` method as, + +```scala +given eqSum[A](using inst: => K0.CoproductInstances[Eq, A]) as Eq[A] { + def eqv(x: A, y: A): Boolean = inst.fold2(x, y)(false)( + [t] => (eqt: Eq[t], t0: t, t1: t) => eqt.eqv(t0, t1) + ) +} + +given eqProduct[A](using inst: K0.ProductInstances[Eq, A]) as Eq[A] { + def eqv(x: A, y: A): Boolean = inst.foldLeft2(x, y)(true: Boolean)( + [t] => (acc: Boolean, eqt: Eq[t], t0: t, t1: t) => Complete(!eqt.eqv(t0, t1))(false)(true) + ) +} + +inline def derived[A](using gen: K0.Generic[A]) as Eq[A] = gen.derive(eqSum, eqProduct) +``` + +The framework described here enables all three of these approaches without mandating any of them. + +For a brief discussion on how to use macros to write a type class `derived` +method please read more at [How to write a type class `derived` method using +macros](./derivation-macro.md). + +### Deriving instances elsewhere + +Sometimes one would like to derive a type class instance for an ADT after the ADT is defined, without being able to +change the code of the ADT itself. To do this, simply define an instance using the `derived` method of the type class +as right-hand side. E.g, to implement `Ordering` for `Option` define, + +```scala +given [T: Ordering] as Ordering[Option[T]] = Ordering.derived +``` + +Assuming the `Ordering.derived` method has a context parameter of type `Mirror[T]` it will be satisfied by the +compiler generated `Mirror` instance for `Option` and the derivation of the instance will be expanded on the right +hand side of this definition in the same way as an instance defined in ADT companion objects. + +### Syntax + +``` +Template ::= InheritClauses [TemplateBody] +EnumDef ::= id ClassConstr InheritClauses EnumBody +InheritClauses ::= [‘extends’ ConstrApps] [‘derives’ QualId {‘,’ QualId}] +ConstrApps ::= ConstrApp {‘with’ ConstrApp} + | ConstrApp {‘,’ ConstrApp} +``` + +### Discussion + +This type class derivation framework is intentionally very small and low-level. There are essentially two pieces of +infrastructure in compiler-generated `Mirror` instances, + ++ type members encoding properties of the mirrored types. ++ a minimal value level mechanism for working generically with terms of the mirrored types. + +The `Mirror` infrastructure can be seen as an extension of the existing `Product` infrastructure for case classes: +typically `Mirror` types will be implemented by the ADTs companion object, hence the type members and the `ordinal` or +`fromProduct` methods will be members of that object. The primary motivation for this design decision, and the +decision to encode properties via types rather than terms was to keep the bytecode and runtime footprint of the +feature small enough to make it possible to provide `Mirror` instances _unconditionally_. + +Whilst `Mirrors` encode properties precisely via type members, the value level `ordinal` and `fromProduct` are +somewhat weakly typed (because they are defined in terms of `MirroredMonoType`) just like the members of `Product`. +This means that code for generic type classes has to ensure that type exploration and value selection proceed in +lockstep and it has to assert this conformance in some places using casts. If generic type classes are correctly +written these casts will never fail. 
+ +As mentioned, however, the compiler-provided mechanism is intentionally very low level and it is anticipated that +higher level type class derivation and generic programming libraries will build on this and Dotty's other +metaprogramming facilities to hide these low-level details from type class authors and general users. Type class +derivation in the style of both shapeless and Magnolia are possible (a prototype of shapeless 3, which combines +aspects of both shapeless 2 and Magnolia has been developed alongside this language feature) as is a more aggressively +inlined style, supported by Dotty's new quote/splice macro and inlining facilities. diff --git a/scala3doc/dotty-docs/docs/docs/reference/contextual/extension-methods.md b/scala3doc/dotty-docs/docs/docs/reference/contextual/extension-methods.md new file mode 100644 index 000000000000..486e07995378 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/contextual/extension-methods.md @@ -0,0 +1,284 @@ +--- +layout: doc-page +title: "Extension Methods" +--- + +Extension methods allow one to add methods to a type after the type is defined. Example: + +```scala +case class Circle(x: Double, y: Double, radius: Double) + +extension (c: Circle) + def circumference: Double = c.radius * math.Pi * 2 +``` + +Like regular methods, extension methods can be invoked with infix `.`: + +```scala +val circle = Circle(0, 0, 1) +circle.circumference +``` + +### Translation of Extension Methods + +Extension methods are methods that have a parameter clause in front of the defined identifier. +An extension method named `f` translates to method named `extension_f` that takes the leading parameter section as its first argument list. +So, the definition of `circumference` above translates to the following method, and can also be invoked as such: + +```scala +def extension_circumference(c: Circle): Double = c.radius * math.Pi * 2 + +assert(circle.circumference == extension_circumference(circle)) +``` + +### Operators + +The extension method syntax can also be used to define operators. Examples: + +```scala +extension (x: String) + def < (y: String): Boolean = ... +extension (x: Elem) + def +: (xs: Seq[Elem]): Seq[Elem] = ... +extension (x: Number) + @infix def min (y: Number): Number = ... + +"ab" < "c" +1 +: List(2, 3) +x min 3 +``` + +The three definitions above translate to + +```scala +def extension_< (x: String)(y: String): Boolean = ... +def extension_+: (xs: Seq[Elem])(x: Elem): Seq[Elem] = ... +@infix def extension_min(x: Number)(y: Number): Number = ... +``` + +Note the swap of the two parameters `x` and `xs` when translating +the right-associative operator `+:` to an extension method. This is analogous +to the implementation of right binding operators as normal methods. The Scala +compiler preprocesses an infix operation `x +: xs` to `xs.+:(x)`, so the extension +method ends up being applied to the sequence as first argument (in other words, +the two swaps cancel each other out). + +### Generic Extensions + +It is also possible to extend generic types by adding type parameters to an extension. For instance: + +```scala +extension [T](xs: List[T]) + def second = xs.tail.head + +extension [T: Numeric](x: T) + def + (y: T): T = summon[Numeric[T]].plus(x, y) +``` + +If an extension method has type parameters, they come immediately after `extension` and are followed by the extended parameter. +When calling a generic extension method, any explicitly given type arguments follow the method name. 
So the `second` method could be instantiated as follows. + +```scala +List(1, 2, 3).second[Int] +``` + +Of course, the type argument here would usually be left out since it can be inferred. + +Extensions can also take using clauses. For instance, the `+` extension above could equivalently be written with a using clause: + +```scala +extension [T](x: T)(using n: Numeric[T]) + def + (y: T): T = n.plus(x, y) +``` + +**Note**: Type parameters have to be given after the `extension` keyword; +they cannot be given after the `def`. This restriction might be lifted in the future once we support multiple type parameter clauses in a method. By contrast, there can be using clauses in front as well as after the `def`. + +### Collective Extensions + +Sometimes, one wants to define several extension methods that share the same +left-hand parameter type. In this case one can "pull out" the common parameters into +a single extension and enclose all methods in braces or an indented region following a '`:`'. +Example: + +```scala +extension (ss: Seq[String]): + + def longestStrings: Seq[String] = + val maxLength = ss.map(_.length).max + ss.filter(_.length == maxLength) + + def longestString: String = longestStrings.head +``` + +Note the right-hand side of `longestString`: it calls `longestStrings` directly, implicitly +assuming the common extended value `ss` as receiver. + +Collective extensions like these are a shorthand for individual extensions +where each method is defined separately. For instance, the first extension above expands to + +```scala +extension (ss: Seq[String]) + def longestStrings: Seq[String] = + val maxLength = ss.map(_.length).max + ss.filter(_.length == maxLength) + +extension (ss: Seq[String]) + def longestString: String = ss.longestStrings.head +``` + +Collective extensions also can take type parameters and have using clauses. Example + +```scala +extension [T](xs: List[T])(using Ordering[T]): + def smallest(n: Int): List[T] = xs.sorted.take(n) + def smallestIndices(n: Int): List[Int] = + val limit = smallest(n).max + xs.zipWithIndex.collect { case (x, i) if x <= limit => i } +``` + +### Translation of Calls to Extension Methods + +To convert a reference to an extension method, the compiler has to know about the extension +method. We say in this case that the extension method is _applicable_ at the point of reference. +There are four possible ways for an extension method to be applicable: + + 1. The extension method is visible under a simple name, by being defined or inherited or imported in a scope enclosing the reference. + 2. The extension method is a member of some given + instance that is visible at the point of the reference. + 3. The reference is of the form `r.m` and the extension method + is defined in the implicit scope of the type of `r`. + 4. The reference is of the form `r.m` and the extension method + is defined in some given instance in the implicit scope of the type of `r`. 
+ +Here is an example for the first rule: + +```scala +trait IntOps: + extension (i: Int) def isZero: Boolean = i == 0 + + extension (i: Int) def safeMod(x: Int): Option[Int] = + // extension method defined in same scope IntOps + if x.isZero then None + else Some(i % x) + +object IntOpsEx extends IntOps: + extension (i: Int) def safeDiv(x: Int): Option[Int] = + // extension method brought into scope via inheritance from IntOps + if x.isZero then None + else Some(i / x) + +trait SafeDiv: + import IntOpsEx._ // brings safeDiv and safeMod into scope + + extension (i: Int) def divide(d: Int) : Option[(Int, Int)] = + // extension methods imported and thus in scope + (i.safeDiv(d), i.safeMod(d)) match + case (Some(d), Some(r)) => Some((d, r)) + case _ => None +``` + +By the second rule, an extension method can be made available by defining a given instance containing it, like this: + +```scala +given ops1 as IntOps // brings safeMod into scope + +1.safeMod(2) +``` + +By the third and fourth rule, an extension method is available if it is in the implicit scope of the receiver type or in a given instance in that scope. Example: + +```scala +class List[T]: + ... +object List: + + extension [T](xs: List[List[T]]) + def flatten: List[T] = xs.foldLeft(Nil: List[T])(_ ++ _) + + given [T: Ordering] as Ordering[List[T]]: + extension (xs: List[T]) + def < (ys: List[T]): Boolean = ... +end List + +// extension method available since it is in the implicit scope of List[List[Int]] +List(List(1, 2), List(3, 4)).flatten + +// extension method available since it is in the given Ordering[List[T]], +// which is itself in the implicit scope of List[Int] +List(1, 2) < List(3) +``` + +The precise rules for resolving a selection to an extension method are as follows. + +Assume a selection `e.m[Ts]` where `m` is not a member of `e`, where the type arguments `[Ts]` are optional, and where `T` is the expected type. The following two rewritings are tried in order: + + 1. The selection is rewritten to `extension_m[Ts](e)`. + 2. If the first rewriting does not typecheck with expected type `T`, + and there is an extension method `m` in some eligible object `o`, the selection is rewritten to `o.extension_m[Ts](e)`. An object `o` is _eligible_ if + + - `o` forms part of the implicit scope of `T`, or + - `o` is a given instance that is visible at the point of the application, or + - `o` is a given instance in the implicit scope of `T`. + + This second rewriting is attempted at the time where the compiler also tries an implicit conversion + from `T` to a type containing `m`. If there is more than one way of rewriting, an ambiguity error results. + +An extension method can also be used as an identifier by itself. If an identifier `m` does not +resolve, the identifier is rewritten to: + +- `x.m` if the identifier appears in an extension with parameter `x` +- `this.m` otherwise + +and the rewritten term is again tried as an application of an extension method. Example: + +```scala +extension (s: String) + def position(ch: Char, n: Int): Int = + if n < s.length && s(n) != ch then position(ch, n + 1) + else n +``` + +The recursive call `position(ch, n + 1)` expands to `s.position(ch, n + 1)` in this case. The whole extension method rewrites to + +```scala +def extension_position(s: String)(ch: Char, n: Int): Int = + if n < s.length && s(n) != ch then extension_position(s)(ch, n + 1) + else n +``` + +### More Details + +1. To avoid confusion, names of normal methods are not allowed to start with `extension_`. + +2. 
A named import such as `import a.m` of an extension method in `a` will make `m` only available as an extension method. + To access it under `extension_m` that name has to be imported separately. Example: + + ```scala + object DoubleOps: + extension (x: Double) def ** (exponent: Int): Double = + require(exponent >= 0) + if exponent == 0 then 1 else x * (x ** (exponent - 1)) + + import DoubleOps.{**, extension_**} + assert(2.0 ** 3 == extension_**(2.0)(3)) + ``` + +### Syntax + +Here are the syntax changes for extension methods and collective extensions relative +to the [current syntax](../../internals/syntax.md). + +``` +BlockStat ::= ... | Extension +TemplateStat ::= ... | Extension +TopStat ::= ... | Extension +Extension ::= ‘extension’ [DefTypeParamClause] ‘(’ DefParam ‘)’ + {UsingParamClause} ExtMethods +ExtMethods ::= ExtMethod | [nl] ‘{’ ExtMethod {semi ExtMethod} ‘}’ +ExtMethod ::= {Annotation [nl]} {Modifier} ‘def’ DefDef +``` + +`extension` is a soft keyword. It is recognized as a keyword only if it appears +at the start of a statement and is followed by `[` or `(`. In all other cases +it is treated as an identifier. diff --git a/scala3doc/dotty-docs/docs/docs/reference/contextual/given-clauses.md b/scala3doc/dotty-docs/docs/docs/reference/contextual/given-clauses.md new file mode 100644 index 000000000000..85a691e55bbe --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/contextual/given-clauses.md @@ -0,0 +1,7 @@ +--- +layout: doc-page +title: "Given Parameters" +--- + +The contents of this page have [moved](./using-clauses.md). + diff --git a/scala3doc/dotty-docs/docs/docs/reference/contextual/given-imports.md b/scala3doc/dotty-docs/docs/docs/reference/contextual/given-imports.md new file mode 100644 index 000000000000..69c2b55e1bf1 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/contextual/given-imports.md @@ -0,0 +1,132 @@ +--- +layout: doc-page +title: "Importing Givens" +--- + +A special form of import wildcard selector is used to import given instances. Example: + +```scala +object A { + class TC + given tc as TC + def f(using TC) = ??? +} + +object B { + import A._ + import A.{given _} +} +``` + +In the code above, the `import A._` clause of object `B` will import all members +of `A` _except_ the given instance `tc`. Conversely, the second import `import A.{given _}` will import _only_ that given instance. +The two import clauses can also be merged into one: + +```scala +object B { + import A.{given _, _} +} +``` + +Generally, a normal wildcard selector `_` brings all definitions other than givens or extensions into scope +whereas a `given _` selector brings all givens (including those resulting from extensions) into scope. + +There are two main benefits arising from these rules: + +- It is made clearer where givens in scope are coming from. + In particular, it is not possible to hide imported givens in a long list of regular wildcard imports. +- It enables importing all givens + without importing anything else. This is particularly important since givens + can be anonymous, so the usual recourse of using named imports is not + practical. + +### Importing By Type + +Since givens can be anonymous it is not always practical to import them by their name, and wildcard imports are typically used instead. By-type imports provide a more specific alternative to wildcard imports, which makes it clearer what is imported. Example: + +```scala +import A.{given TC} +``` + +This imports any given in `A` that has a type which conforms to `TC`.
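+
+For a quick illustrative sketch (reusing the object `A` from the beginning of this page; the client object `C` below is hypothetical and not part of the original example), the by-type selector brings the given `tc` into scope but none of `A`'s ordinary members:
+
+```scala
+object C {
+  import A.{given TC}         // imports the given `tc`, whose type conforms to TC
+
+  val evidence = summon[A.TC] // resolves to the imported `tc`
+  // `f` and the class `TC` itself are ordinary members, so this selector does
+  // not import them; they remain accessible as `A.f` and `A.TC`.
+}
+```
+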
Importing givens of several types `T1,...,Tn` +is expressed by multiple `given` selectors. + +```scala +import A.{given T1, ..., given Tn} +``` + +Importing all given instances of a parameterized type is expressed by wildcard arguments. +For instance, assuming the object + +```scala +object Instances { + given intOrd as Ordering[Int] + given listOrd[T: Ordering] as Ordering[List[T]] + given ec as ExecutionContext = ... + given im as Monoid[Int] +} +``` + +the import + +```scala +import Instances.{given Ordering[?], given ExecutionContext} +``` + +would import the `intOrd`, `listOrd`, and `ec` instances but leave out the `im` instance, since it fits none of the specified bounds. + +By-type imports can be mixed with by-name imports. If both are present in an import clause, by-type imports come last. For instance, the import clause + +```scala +import Instances.{im, given Ordering[?]} +``` + +would import `im`, `intOrd`, and `listOrd` but leave out `ec`. + + + +### Migration + +The rules for imports stated above have the consequence that a library +would have to migrate in lockstep with all its users from old style implicits and +normal imports to givens and given imports. + +The following modifications avoid this hurdle to migration. + + 1. A `given` import selector also brings old style implicits into scope. So, in Scala 3.0 + an old-style implicit definition can be brought into scope either by a `_` or a `given _` wildcard selector. + + 2. In Scala 3.1, old-style implicits accessed through a `_` wildcard import will give a deprecation warning. + + 3. In some version after 3.1, old-style implicits accessed through a `_` wildcard import will give a compiler error. + +These rules mean that library users can use `given _` selectors to access old-style implicits in Scala 3.0, +and will be gently nudged and then forced to do so in later versions. Libraries can then switch to +given instances once their user base has migrated. + +### Syntax + +``` +Import ::= ‘import’ ImportExpr {‘,’ ImportExpr} +ImportExpr ::= StableId ‘.’ ImportSpec +ImportSpec ::= id + | ‘_’ + | ‘{’ ImportSelectors) ‘}’ +ImportSelectors ::= id [‘=>’ id | ‘=>’ ‘_’] [‘,’ ImportSelectors] + | WildCardSelector {‘,’ WildCardSelector} +WildCardSelector ::= ‘_' + | ‘given’ (‘_' | InfixType) +Export ::= ‘export’ ImportExpr {‘,’ ImportExpr} +``` diff --git a/scala3doc/dotty-docs/docs/docs/reference/contextual/givens.md b/scala3doc/dotty-docs/docs/docs/reference/contextual/givens.md new file mode 100644 index 000000000000..e4b72543545c --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/contextual/givens.md @@ -0,0 +1,109 @@ +--- +layout: doc-page +title: "Given Instances" +--- + +Given instances (or, simply, "givens") define "canonical" values of certain types +that serve for synthesizing arguments to [context parameters](./using-clauses.html). Example: + +```scala +trait Ord[T] { + def compare(x: T, y: T): Int + extension (x: T) def < (y: T) = compare(x, y) < 0 + extension (x: T) def > (y: T) = compare(x, y) > 0 +} + +given intOrd as Ord[Int] { + def compare(x: Int, y: Int) = + if (x < y) -1 else if (x > y) +1 else 0 +} + +given listOrd[T](using ord: Ord[T]) as Ord[List[T]] { + + def compare(xs: List[T], ys: List[T]): Int = (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = ord.compare(x, y) + if (fst != 0) fst else compare(xs1, ys1) +} +``` +This code defines a trait `Ord` with two given instances. 
`intOrd` defines +a given for the type `Ord[Int]` whereas `listOrd[T]` defines givens +for `Ord[List[T]]` for all types `T` that come with a given instance for `Ord[T]` +themselves. The `using` clause in `listOrd` defines a condition: There must be a +given of type `Ord[T]` for a given of type `Ord[List[T]]` to exist. +Such conditions are expanded by the compiler to [context +parameters](./using-clauses.html). + +## Anonymous Givens + +The name of a given can be left out. So the definitions +of the last section can also be expressed like this: +```scala +given Ord[Int] { ... } +given [T](using Ord[T]) as Ord[List[T]] { ... } +``` +If the name of a given is missing, the compiler will synthesize a name from +the implemented type(s). + +**Note** The name synthesized by the compiler is chosen to be readable and reasonably concise. For instance, the two instances above would get the names: +```scala +given_Ord_Int +given_Ord_List_T +``` +The precise rules for synthesizing names are found [here](./relationship-implicits.html#anonymous-given-instances). These rules do not guarantee absence of name conflicts between +given instances of types that are "too similar". To avoid conflicts one can +use named instances. + +**Note** To ensure robust binary compatibility, publicly available libraries should prefer named instances. + +## Alias Givens + +An alias can be used to define a given instance that is equal to some expression. E.g.: +```scala +given global as ExecutionContext = new ForkJoinPool() +``` +This creates a given `global` of type `ExecutionContext` that resolves to the right +hand side `new ForkJoinPool()`. +The first time `global` is accessed, a new `ForkJoinPool` is created, which is then +returned for this and all subsequent accesses to `global`. This operation is thread-safe. + +Alias givens can be anonymous as well, e.g. +```scala +given Position = enclosingTree.position +given (using config: Config) as Factory = MemoizingFactory(config) +``` + +An alias given can have type parameters and context parameters just like any other given, +but it can only implement a single type. + +## Given Macros + +Given aliases can have the `inline` and `transparent` modifiers. +Example: +```scala +transparent inline given mkAnnotations[A, T] as Annotations[A, T] = ${ + // code producing a value of a subtype of Annotations +} +``` +Since `mkAnnotations` is `transparent`, the type of an application is the type of its right hand side, which can be a proper subtype of the declared result type `Annotations[A, T]`. + +## Given Instance Initialization + +A given instance without type or context parameters is initialized on-demand, the first +time it is accessed. If a given has type or context parameters, a fresh instance +is created for each reference. + +## Syntax + +Here is the new syntax for given instances, seen as a delta from the [standard context free syntax of Scala 3](../../internals/syntax.md). + +``` +TmplDef ::= ...
+ | ‘given’ GivenDef +GivenDef ::= [GivenSig] Type ‘=’ Expr + | [GivenSig] ConstrApps [TemplateBody] +GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘as’ +``` diff --git a/scala3doc/dotty-docs/docs/docs/reference/contextual/implicit-by-name-parameters.md b/scala3doc/dotty-docs/docs/docs/reference/contextual/implicit-by-name-parameters.md new file mode 100644 index 000000000000..52aa41b50daf --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/contextual/implicit-by-name-parameters.md @@ -0,0 +1,7 @@ +--- +layout: doc-page +title: "Implicit By-Name Parameters" +--- + +The contents of this page have [moved](./by-name-context-parameters.md). + diff --git a/scala3doc/dotty-docs/docs/docs/reference/contextual/implicit-function-types-spec.md b/scala3doc/dotty-docs/docs/docs/reference/contextual/implicit-function-types-spec.md new file mode 100644 index 000000000000..5bceb18649af --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/contextual/implicit-function-types-spec.md @@ -0,0 +1,7 @@ +--- +layout: doc-page +title: "Implicit Function Types - More Details" +--- + +The contents of this page have [moved](./context-functions-spec.md). + diff --git a/scala3doc/dotty-docs/docs/docs/reference/contextual/implicit-function-types.md b/scala3doc/dotty-docs/docs/docs/reference/contextual/implicit-function-types.md new file mode 100644 index 000000000000..1507a3d46559 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/contextual/implicit-function-types.md @@ -0,0 +1,6 @@ +--- +layout: doc-page +title: "Implicit Function Types" +--- + +The contents of this page have [moved](./context-functions.md). diff --git a/scala3doc/dotty-docs/docs/docs/reference/contextual/import-delegate.md b/scala3doc/dotty-docs/docs/docs/reference/contextual/import-delegate.md new file mode 100644 index 000000000000..09af1f3cc30c --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/contextual/import-delegate.md @@ -0,0 +1,7 @@ +--- +layout: doc-page +title: "Import Given" +--- + +The contents of this page have [moved](./given-imports.md). + diff --git a/scala3doc/dotty-docs/docs/docs/reference/contextual/import-implied.md b/scala3doc/dotty-docs/docs/docs/reference/contextual/import-implied.md new file mode 100644 index 000000000000..6db3715fb218 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/contextual/import-implied.md @@ -0,0 +1 @@ +The contents of this page have [moved](./given-imports.md). diff --git a/scala3doc/dotty-docs/docs/docs/reference/contextual/inferable-by-name-parameters.md b/scala3doc/dotty-docs/docs/docs/reference/contextual/inferable-by-name-parameters.md new file mode 100644 index 000000000000..fd5984334404 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/contextual/inferable-by-name-parameters.md @@ -0,0 +1 @@ +The contents of this page have [moved](./by-name-context-parameters.md). diff --git a/scala3doc/dotty-docs/docs/docs/reference/contextual/inferable-params.md b/scala3doc/dotty-docs/docs/docs/reference/contextual/inferable-params.md new file mode 100644 index 000000000000..74b6b683301b --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/contextual/inferable-params.md @@ -0,0 +1 @@ +The contents of this page have [moved](./using-clauses.md). 
diff --git a/scala3doc/dotty-docs/docs/docs/reference/contextual/instance-defs.md b/scala3doc/dotty-docs/docs/docs/reference/contextual/instance-defs.md new file mode 100644 index 000000000000..df5513602021 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/contextual/instance-defs.md @@ -0,0 +1 @@ +The contents of this page have [moved](./givens.md). diff --git a/scala3doc/dotty-docs/docs/docs/reference/contextual/motivation.md b/scala3doc/dotty-docs/docs/docs/reference/contextual/motivation.md new file mode 100644 index 000000000000..2dc2781fdcec --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/contextual/motivation.md @@ -0,0 +1,81 @@ +--- +layout: doc-page +title: "Overview" +--- + +### Critique of the Status Quo + +Scala's implicits are its most distinguished feature. They are _the_ fundamental way to abstract over context. They represent a unified paradigm with a great variety of use cases, among them: implementing type classes, establishing context, dependency injection, expressing capabilities, computing new types and proving relationships between them. + +Following Haskell, Scala was the second popular language to have some form of implicits. Other languages have followed suit. E.g Rust's traits or Swift's protocol extensions. Design proposals are also on the table for Kotlin as [compile time dependency resolution](https://github.com/Kotlin/KEEP/blob/e863b25f8b3f2e9b9aaac361c6ee52be31453ee0/proposals/compile-time-dependency-resolution.md), for C# as [Shapes and Extensions](https://github.com/dotnet/csharplang/issues/164) +or for F# as [Traits](https://github.com/MattWindsor91/visualfsharp/blob/hackathon-vs/examples/fsconcepts.md). Implicits are also a common feature of theorem provers such as Coq or Agda. + +Even though these designs use widely different terminology, they are all variants of the core idea of _term inference_. Given a type, the compiler synthesizes a "canonical" term that has that type. Scala embodies the idea in a purer form than most other languages: An implicit parameter directly leads to an inferred argument term that could also be written down explicitly. By contrast, type class based designs are less direct since they hide term inference behind some form of type classification and do not offer the option of writing the inferred quantities (typically, dictionaries) explicitly. + +Given that term inference is where the industry is heading, and given that Scala has it in a very pure form, how come implicits are not more popular? In fact, it's fair to say that implicits are at the same time Scala's most distinguished and most controversial feature. I believe this is due to a number of aspects that together make implicits harder to learn than necessary and also make it harder to prevent abuses. + +Particular criticisms are: + +1. Being very powerful, implicits are easily over-used and mis-used. This observation holds in almost all cases when we talk about _implicit conversions_, which, even though conceptually different, share the same syntax with other implicit definitions. For instance, regarding the two definitions + + ```scala + implicit def i1(implicit x: T): C[T] = ... + implicit def i2(x: T): C[T] = ... + ``` + + the first of these is a conditional implicit _value_, the second an implicit _conversion_. Conditional implicit values are a cornerstone for expressing type classes, whereas most applications of implicit conversions have turned out to be of dubious value. 
The problem is that many newcomers to the language start with defining implicit conversions since they are easy to understand and seem powerful and convenient. Scala 3 will put under a language flag both definitions and applications of "undisciplined" implicit conversions between types defined elsewhere. This is a useful step to push back against overuse of implicit conversions. But the problem remains that syntactically, conversions and values just look too similar for comfort. + + 2. Another widespread abuse is over-reliance on implicit imports. This often leads to inscrutable type errors that go away with the right import incantation, leaving a feeling of frustration. Conversely, it is hard to see what implicits a program uses since implicits can hide anywhere in a long list of imports. + + 3. The syntax of implicit definitions is too minimal. It consists of a single modifier, `implicit`, that can be attached to a large number of language constructs. A problem with this for newcomers is that it conveys mechanism instead of intent. For instance, a type class instance is an implicit object or val if unconditional and an implicit def with implicit parameters referring to some class if conditional. This describes precisely what the implicit definitions translate to -- just drop the `implicit` modifier, and that's it! But the cues that define intent are rather indirect and can be easily misread, as demonstrated by the definitions of `i1` and `i2` above. + + 4. The syntax of implicit parameters also has shortcomings. While implicit _parameters_ are designated specifically, arguments are not. Passing an argument to an implicit parameter looks like a regular application `f(arg)`. This is problematic because it means there can be confusion regarding what parameter gets instantiated in a call. For instance, in + ```scala + def currentMap(implicit ctx: Context): Map[String, Int] + ``` + one cannot write `currentMap("abc")` since the string "abc" is taken as explicit argument to the implicit `ctx` parameter. One has to write `currentMap.apply("abc")` instead, which is awkward and irregular. For the same reason, a method definition can only have one implicit parameter section and it must always come last. This restriction not only reduces orthogonality, but also prevents some useful program constructs, such as a method with a regular parameter whose type depends on an implicit value. Finally, it's also a bit annoying that implicit parameters must have a name, even though in many cases that name is never referenced. + + 5. Implicits pose challenges for tooling. The set of available implicits depends on context, so command completion has to take context into account. This is feasible in an IDE but docs like ScalaDoc that are based on static web pages can only provide an approximation. Another problem is that failed implicit searches often give very unspecific error messages, in particular if some deeply recursive implicit search has failed. Note that the Dotty compiler has already made a lot of progress in the error diagnostics area. If a recursive search fails some levels down, it shows what was constructed and what is missing. Also, it suggests imports that can bring missing implicits in scope. + +None of the shortcomings is fatal; after all, implicits are very widely used, and many libraries and applications rely on them. But together, they make code using implicits a lot more cumbersome and less clear than it could be.
+ +Historically, many of these shortcomings come from the way implicits were gradually "discovered" in Scala. Scala originally had only implicit conversions with the intended use case of "extending" a class or trait after it was defined, i.e. what is expressed by implicit classes in later versions of Scala. Implicit parameters and instance definitions came later in 2006 and we picked similar syntax since it seemed convenient. For the same reason, no effort was made to distinguish implicit imports or arguments from normal ones. + +Existing Scala programmers by and large have gotten used to the status quo and see little need for change. But for newcomers this status quo presents a big hurdle. I believe if we want to overcome that hurdle, we should take a step back and allow ourselves to consider a radically new design. + +### The New Design + +The following pages introduce a redesign of contextual abstractions in Scala. They introduce four fundamental changes: + + 1. [Given Instances](./givens.md) are a new way to define basic terms that can be synthesized. They replace implicit definitions. The core principle of the proposal is that, rather than mixing the `implicit` modifier with a large number of features, we have a single way to define terms that can be synthesized for types. + + 2. [Using Clauses](./using-clauses.md) are a new syntax for implicit _parameters_ and their _arguments_. It unambiguously aligns parameters and arguments, solving a number of language warts. It also allows us to have several `using` clauses in a definition. + + 3. ["Given" Imports](./given-imports.md) are a new class of import selectors that specifically import + givens and nothing else. + + 4. [Implicit Conversions](./conversions.md) are now expressed as given instances of a standard `Conversion` class. All other forms of implicit conversions will be phased out. + +This section also contains pages describing other language features that are related to context abstraction. These are: + + - [Context Bounds](./context-bounds.md), which carry over unchanged. + - [Extension Methods](./extension-methods.md) replace implicit classes in a way that integrates better with type classes. + - [Implementing Type classes](type-classes.md) demonstrates how some common type classes can be implemented using the new constructs. + - [Type class Derivation](./derivation.md) introduces constructs to automatically derive type class instances for ADTs. + - [Multiversal Equality](./multiversal-equality.md) introduces a special type class to support type safe equality. + - [Context Functions](./context-functions.md) provide a way to abstract over context parameters. + - [By-Name Context Parameters](./by-name-context-parameters.md) are an essential tool to define recursive synthesized values without looping. + - [Relationship with Scala 2 Implicits](./relationship-implicits.md) discusses the relationship between old-style implicits and new-style givens and how to migrate from one to the other. + +Overall, the new design achieves a better separation of term inference from the rest of the language: There is a single way to define givens instead of a multitude of forms all taking an `implicit` modifier. There is a single way to introduce implicit parameters and arguments instead of conflating implicit with normal arguments. There is a separate way to import givens that does not allow them to hide in a sea of normal imports. 
And there is a single way to define an implicit conversion which is clearly marked as such and does not require special syntax. + +This design thus avoids feature interactions and makes the language more consistent and orthogonal. It will make implicits easier to learn and harder to abuse. It will greatly improve the clarity of the 95% of Scala programs that use implicits. It has thus the potential to fulfil the promise of term inference in a principled way that is also accessible and friendly. + +Could we achieve the same goals by tweaking existing implicits? After having tried for a long time, I believe now that this is impossible. + + - First, some of the problems are clearly syntactic and require different syntax to solve them. + - Second, there is the problem how to migrate. We cannot change the rules in mid-flight. At some stage of language evolution we need to accommodate both the new and the old rules. With a syntax change, this is easy: Introduce the new syntax with new rules, support the old syntax for a while to facilitate cross compilation, deprecate and phase out the old syntax at some later time. Keeping the same syntax does not offer this path, and in fact does not seem to offer any viable path for evolution + - Third, even if we would somehow succeed with migration, we still have the problem + how to teach this. We cannot make existing tutorials go away. Almost all existing tutorials start with implicit conversions, which will go away; they use normal imports, which will go away, and they explain calls to methods with implicit parameters by expanding them to plain applications, which will also go away. This means that we'd have + to add modifications and qualifications to all existing literature and courseware, likely causing more confusion with beginners instead of less. By contrast, with a new syntax there is a clear criterion: Any book or courseware that mentions `implicit` is outdated and should be updated. + diff --git a/scala3doc/dotty-docs/docs/docs/reference/contextual/multiversal-equality.md b/scala3doc/dotty-docs/docs/docs/reference/contextual/multiversal-equality.md new file mode 100644 index 000000000000..046d849ac9bf --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/contextual/multiversal-equality.md @@ -0,0 +1,214 @@ +--- +layout: doc-page +title: "Multiversal Equality" +--- + +Previously, Scala had universal equality: Two values of any types +could be compared with each other with `==` and `!=`. This came from +the fact that `==` and `!=` are implemented in terms of Java's +`equals` method, which can also compare values of any two reference +types. + +Universal equality is convenient. But it is also dangerous since it +undermines type safety. For instance, let's assume one is left after some refactoring +with an erroneous program where a value `y` has type `S` instead of the correct type `T`. + +```scala +val x = ... // of type T +val y = ... // of type S, but should be T +x == y // typechecks, will always yield false +``` + +If `y` gets compared to other values of type `T`, +the program will still typecheck, since values of all types can be compared with each other. +But it will probably give unexpected results and fail at runtime. + +Multiversal equality is an opt-in way to make universal equality +safer. It uses a binary type class `Eql` to indicate that values of +two given types can be compared with each other. +The example above would not typecheck if `S` or `T` was a class +that derives `Eql`, e.g. 
+```scala +class T derives Eql +``` +Alternatively, one can also provide an `Eql` given instance directly, like this: +```scala +given Eql[T, T] = Eql.derived +``` +This definition effectively says that values of type `T` can (only) be +compared to other values of type `T` when using `==` or `!=`. The definition +affects type checking but it has no significance for runtime +behavior, since `==` always maps to `equals` and `!=` always maps to +the negation of `equals`. The right hand side `Eql.derived` of the definition +is a value that has any `Eql` instance as its type. Here is the definition of class +`Eql` and its companion object: +```scala +package scala +import annotation.implicitNotFound + +@implicitNotFound("Values of types ${L} and ${R} cannot be compared with == or !=") +sealed trait Eql[-L, -R] + +object Eql { + object derived extends Eql[Any, Any] +} +``` + +One can have several `Eql` given instances for a type. For example, the four +definitions below make values of type `A` and type `B` comparable with +each other, but not comparable to anything else: + +```scala +given Eql[A, A] = Eql.derived +given Eql[B, B] = Eql.derived +given Eql[A, B] = Eql.derived +given Eql[B, A] = Eql.derived +``` +The `scala.Eql` object defines a number of `Eql` given instances that together +define a rule book for what standard types can be compared (more details below). + +There is also a "fallback" instance named `eqlAny` that allows comparisons +over all types that do not themselves have an `Eql` given. `eqlAny` is defined as follows: + +```scala +def eqlAny[L, R]: Eql[L, R] = Eql.derived +``` + +Even though `eqlAny` is not declared as `given`, the compiler will still construct an `eqlAny` instance as answer to an implicit search for the +type `Eql[L, R]`, unless `L` or `R` have `Eql` instances +defined on them, or the language feature `strictEquality` is enabled. + +The primary motivation for having `eqlAny` is backwards compatibility. +If this is of no concern, one can disable `eqlAny` by enabling the language +feature `strictEquality`. As for all language features this can be either +done with an import + +```scala +import scala.language.strictEquality +``` +or with a command line option `-language:strictEquality`. + +## Deriving Eql Instances + +Instead of defining `Eql` instances directly, it is often more convenient to derive them. Example: +```scala +class Box[T](x: T) derives Eql +``` +By the usual rules of [type class derivation](./derivation.md), +this generates the following `Eql` instance in the companion object of `Box`: +```scala +given [T, U](using Eql[T, U]) as Eql[Box[T], Box[U]] = Eql.derived +``` +That is, two boxes are comparable with `==` or `!=` if their elements are. Examples: +```scala +new Box(1) == new Box(1L) // ok since there is an instance for `Eql[Int, Long]` +new Box(1) == new Box("a") // error: can't compare +new Box(1) == 1 // error: can't compare +``` + +## Precise Rules for Equality Checking + +The precise rules for equality checking are as follows. + +If the `strictEquality` feature is enabled then +a comparison using `x == y` or `x != y` between values `x: T` and `y: U` +is legal if there is a `given` of type `Eql[T, U]`. + +In the default case where the `strictEquality` feature is not enabled the comparison is +also legal if + + 1. `T` and `U` are the same, or + 2. one of `T`, `U` is a subtype of the _lifted_ version of the other type, or + 3. neither `T` nor `U` have a _reflexive_ `Eql` instance. 
+ +Explanations: + + - _lifting_ a type `S` means replacing all references to abstract types + in covariant positions of `S` by their upper bound, and replacing + all refinement types in covariant positions of `S` by their parent. + - a type `T` has a _reflexive_ `Eql` instance if the implicit search for `Eql[T, T]` + succeeds. + +## Predefined Eql Instances + +The `Eql` object defines instances for comparing + - the primitive types `Byte`, `Short`, `Char`, `Int`, `Long`, `Float`, `Double`, `Boolean`, and `Unit`, + - `java.lang.Number`, `java.lang.Boolean`, and `java.lang.Character`, + - `scala.collection.Seq`, and `scala.collection.Set`. + +Instances are defined so that every one of these types has a _reflexive_ `Eql` instance, and the following holds: + + - Primitive numeric types can be compared with each other. + - Primitive numeric types can be compared with subtypes of `java.lang.Number` (and _vice versa_). + - `Boolean` can be compared with `java.lang.Boolean` (and _vice versa_). + - `Char` can be compared with `java.lang.Character` (and _vice versa_). + - Two sequences (of arbitrary subtypes of `scala.collection.Seq`) can be compared + with each other if their element types can be compared. The two sequence types + need not be the same. + - Two sets (of arbitrary subtypes of `scala.collection.Set`) can be compared + with each other if their element types can be compared. The two set types + need not be the same. + - Any subtype of `AnyRef` can be compared with `Null` (and _vice versa_). + +## Why Two Type Parameters? + +One particular feature of the `Eql` type is that it takes _two_ type parameters, representing the types of the two items to be compared. By contrast, conventional +implementations of an equality type class take only a single type parameter which represents the common type of _both_ operands. +One type parameter is simpler than two, so why go through the additional complication? The reason has to do with the fact that, rather than coming up with a type class where no operation existed before, +we are dealing with a refinement of pre-existing, universal equality. It is best illustrated through an example. + +Say you want to come up with a safe version of the `contains` method on `List[T]`. The original definition of `contains` in the standard library was: +```scala +class List[+T] { + ... + def contains(x: Any): Boolean +} +``` +That uses universal equality in an unsafe way since it permits arguments of any type to be compared with the list's elements. The "obvious" alternative definition +```scala + def contains(x: T): Boolean +``` +does not work, since it refers to the covariant parameter `T` in a nonvariant context. The only variance-correct way to use the type parameter `T` in `contains` is as a lower bound: +```scala + def contains[U >: T](x: U): Boolean +``` +This generic version of `contains` is the one used in the current (Scala 2.13) version of `List`. +It looks different but it admits exactly the same applications as the `contains(x: Any)` definition we started with. +However, we can make it more useful (i.e. restrictive) by adding an `Eql` parameter: +```scala + def contains[U >: T](x: U)(using Eql[T, U]): Boolean // (1) +``` +This version of `contains` is equality-safe! More precisely, given +`x: T`, `xs: List[T]` and `y: U`, then `xs.contains(y)` is type-correct if and only if +`x == y` is type-correct. 
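+
+To illustrate this claim with code, here is a hypothetical standalone helper (the name `containsSafe` and its setup are ours, used only as a sketch of signature (1) above):
+
+```scala
+def containsSafe[T, U >: T](xs: List[T], x: U)(using Eql[T, U]): Boolean =
+  xs.exists(_ == x)   // the comparison between T and U is justified by the Eql[T, U] parameter
+
+containsSafe(List(1, 2, 3), 2)     // ok: the reflexive Eql[Int, Int] instance is found
+containsSafe(List(1, 2, 3), "a")   // error: the Eql constraint cannot be satisfied for a String argument
+```
+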
+ +Unfortunately, the crucial ability to "lift" equality type checking from simple equality and pattern matching to arbitrary user-defined operations gets lost if we restrict ourselves to an equality class with a single type parameter. Consider the following signature of `contains` with a hypothetical `Eql1[T]` type class: +```scala + def contains[U >: T](x: U)(using Eql1[U]): Boolean // (2) +``` +This version could be applied just as widely as the original `contains(x: Any)` method, +since the `Eql1[Any]` fallback is always available! So we have gained nothing. What got lost in the transition to a single parameter type class was the original rule that `Eql[A, B]` is available only if neither `A` nor `B` have a reflexive `Eql` instance. That rule simply cannot be expressed if there is a single type parameter for `Eql`. + +The situation is different under `-language:strictEquality`. In that case, +the `Eql[Any, Any]` or `Eql1[Any]` instances would never be available, and the +single and two-parameter versions would indeed coincide for most practical purposes. + +But assuming `-language:strictEquality` immediately and everywhere poses migration problems which might well be unsurmountable. Consider again `contains`, which is in the standard library. Parameterizing it with the `Eql` type class as in (1) is an immediate win since it rules out non-sensical applications while still allowing all sensible ones. +So it can be done almost at any time, modulo binary compatibility concerns. +On the other hand, parameterizing `contains` with `Eql1` as in (2) would make `contains` +unusable for all types that have not yet declared an `Eql1` instance, including all +types coming from Java. This is clearly unacceptable. It would lead to a situation where, +rather than migrating existing libraries to use safe equality, the only upgrade path is to have parallel libraries, with the new version only catering to types deriving `Eql1` and the old version dealing with everything else. Such a split of the ecosystem would be very problematic, which means the cure is likely to be worse than the disease. + +For these reasons, it looks like a two-parameter type class is the only way forward because it can take the existing ecosystem where it is and migrate it towards a future where more and more code uses safe equality. + +In applications where `-language:strictEquality` is the default one could also introduce a one-parameter type alias such as +```scala +type Eq[-T] = Eql[T, T] +``` +Operations needing safe equality could then use this alias instead of the two-parameter `Eql` class. But it would only +work under `-language:strictEquality`, since otherwise the universal `Eq[Any]` instance would be available everywhere. + + +More on multiversal equality is found in a [blog post](http://www.scala-lang.org/blog/2016/05/06/multiversal-equality.html) +and a [GitHub issue](https://github.com/lampepfl/dotty/issues/1247). diff --git a/scala3doc/dotty-docs/docs/docs/reference/contextual/query-types-spec.md b/scala3doc/dotty-docs/docs/docs/reference/contextual/query-types-spec.md new file mode 100644 index 000000000000..174d14b94946 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/contextual/query-types-spec.md @@ -0,0 +1 @@ +The contents of this page have [moved](./context-functions-spec.md). 
diff --git a/scala3doc/dotty-docs/docs/docs/reference/contextual/query-types.md b/scala3doc/dotty-docs/docs/docs/reference/contextual/query-types.md new file mode 100644 index 000000000000..a1288ab659dd --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/contextual/query-types.md @@ -0,0 +1 @@ +The contents of this page have [moved](./context-functions.md). diff --git a/scala3doc/dotty-docs/docs/docs/reference/contextual/relationship-implicits.md b/scala3doc/dotty-docs/docs/docs/reference/contextual/relationship-implicits.md new file mode 100644 index 000000000000..b0a7295e74e5 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/contextual/relationship-implicits.md @@ -0,0 +1,204 @@ +--- +layout: doc-page +title: Relationship with Scala 2 Implicits +--- + +Many, but not all, of the new contextual abstraction features in Scala 3 can be mapped to Scala 2's implicits. This page gives a rundown on the relationships between new and old features. + +## Simulating Scala 3 Contextual Abstraction Concepts with Scala 2 Implicits + +### Given Instances + +Given instances can be mapped to combinations of implicit objects, classes and implicit methods. + + 1. Given instances without parameters are mapped to implicit objects. E.g., + + ```scala + given intOrd as Ord[Int] { ... } + ``` + + maps to + + ```scala + implicit object IntOrd extends Ord[Int] { ... } + ``` + + 2. Parameterized givens are mapped to combinations of classes and implicit methods. E.g., + + ```scala + given listOrd[T](using ord: Ord[T]) as Ord[List[T]] { ... } + ``` + + maps to + + ```scala + class ListOrd[T](implicit ord: Ord[T]) extends Ord[List[T]] { ... } + final implicit def ListOrd[T](implicit ord: Ord[T]): ListOrd[T] = new ListOrd[T] + ``` + + 3. Alias givens map to implicit methods or implicit lazy vals. If an alias has neither type nor context parameters, + it is treated as a lazy val, unless the right hand side is a simple reference, in which case we can use a forwarder to + that reference without caching it. + +Examples: + +```scala +given global as ExecutionContext = new ForkJoinContext() + +val ctx: Context +given Context = ctx +``` + +would map to + +```scala +final implicit lazy val global: ExecutionContext = new ForkJoinContext() +final implicit def given_Context = ctx +``` + +### Anonymous Given Instances + +Anonymous given instances get compiler synthesized names, which are generated in a reproducible way from the implemented type(s). For example, if the names of the `IntOrd` and `ListOrd` givens above were left out, the following names would be synthesized instead: + +```scala +given given_Ord_Int as Ord[Int] { ... } +given given_Ord_List_T[T](using ord: Ord[T]) as Ord[List[T]] { ... } +``` + +The synthesized type names are formed from + +1. the prefix `given_`, +2. the simple name(s) of the implemented type(s), leaving out any prefixes, +3. the simple name(s) of the toplevel argument type constructors to these types. + +Tuples are treated as transparent, i.e. a type `F[(X, Y)]` would get the synthesized name +`F_X_Y`. Directly implemented function types `A => B` are represented as `A_to_B`. Function types used as arguments to other type constructors are represented as `Function`. + +### Using Clauses + +Using clauses correspond largely to Scala-2's implicit parameter clauses. E.g. + +```scala +def max[T](x: T, y: T)(using ord: Ord[T]): T +``` + +would be written + +```scala +def max[T](x: T, y: T)(implicit ord: Ord[T]): T +``` + +in Scala 2. 
The main difference concerns applications of such parameters. +Explicit arguments to parameters of using clauses _must_ be written using `(using ...)`, +mirroring the definition syntax. E.g, `max(2, 3)(using IntOrd)`. +Scala 2 uses normal applications `max(2, 3)(IntOrd)` instead. The Scala 2 syntax has some inherent ambiguities and restrictions which are overcome by the new syntax. For instance, multiple implicit parameter lists are not available in the old syntax, even though they can be simulated using auxiliary objects in the "Aux" pattern. + +The `summon` method corresponds to `implicitly` in Scala 2. +It is precisely the same as the `the` method in Shapeless. +The difference between `summon` (or `the`) and `implicitly` is +that `summon` can return a more precise type than the type that was +asked for. + +### Context Bounds + +Context bounds are the same in both language versions. They expand to the respective forms of implicit parameters. + +**Note:** To ease migration, context bounds in Dotty map for a limited time to old-style implicit parameters for which arguments can be passed either in a using clause or +in a normal argument list. Once old-style implicits are deprecated, context bounds +will map to using clauses instead. + +### Extension Methods + +Extension methods have no direct counterpart in Scala 2, but they can be simulated with implicit classes. For instance, the extension method + +```scala +extension (c: Circle) def circumference: Double = c.radius * math.Pi * 2 +``` + +could be simulated to some degree by + +```scala +implicit class CircleDecorator(c: Circle) extends AnyVal { + def circumference: Double = c.radius * math.Pi * 2 +} +``` + +Abstract extension methods in traits that are implemented in given instances have no direct counterpart in Scala-2. The only way to simulate these is to make implicit classes available through imports. The Simulacrum macro library can automate this process in some cases. + +### Type class Derivation + +Type class derivation has no direct counterpart in the Scala 2 language. Comparable functionality can be achieved by macro-based libraries such as Shapeless, Magnolia, or scalaz-deriving. + +### Context Function Types + +Context function types have no analogue in Scala 2. + +### Implicit By-Name Parameters + +Implicit by-name parameters are not supported in Scala 2, but can be emulated to some degree by the `Lazy` type in Shapeless. + +## Simulating Scala 2 Implicits in Scala 3 + +### Implicit Conversions + +Implicit conversion methods in Scala 2 can be expressed as given instances of the `scala.Conversion` class in Dotty. E.g. instead of + +```scala +implicit def stringToToken(str: String): Token = new Keyword(str) +``` + +one can write + +```scala +given stringToToken as Conversion[String, Token] { + def apply(str: String): Token = KeyWord(str) +} +``` + +or + +```scala +given stringToToken as Conversion[String, Token] = KeyWord(_) +``` + +### Implicit Classes + +Implicit classes in Scala 2 are often used to define extension methods, which are directly supported in Dotty. Other uses of implicit classes can be simulated by a pair of a regular class and a given `Conversion` instance. + +### Implicit Values + +Implicit `val` definitions in Scala 2 can be expressed in Dotty using a regular `val` definition and an alias given. 
+E.g., Scala 2's + +```scala +lazy implicit val pos: Position = tree.sourcePos +``` + +can be expressed in Dotty as + +```scala +lazy val pos: Position = tree.sourcePos +given Position = pos +``` + +### Abstract Implicits + +An abstract implicit `val` or `def` in Scala 2 can be expressed in Dotty using a regular abstract definition and an alias given. E.g., Scala 2's + +```scala +implicit def symDecorator: SymDecorator +``` + +can be expressed in Dotty as + +```scala +def symDecorator: SymDecorator +given SymDecorator = symDecorator +``` + +## Implementation Status and Timeline + +The Dotty implementation implements both Scala-2's implicits and the new abstractions. In fact, support for Scala-2's implicits is an essential part of the common language subset between 2.13/2.14 and Dotty. +Migration to the new abstractions will be supported by making automatic rewritings available. + +Depending on adoption patterns, old style implicits might start to be deprecated in a version following Scala 3.0. diff --git a/scala3doc/dotty-docs/docs/docs/reference/contextual/type-classes.md b/scala3doc/dotty-docs/docs/docs/reference/contextual/type-classes.md new file mode 100644 index 000000000000..412ef7834103 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/contextual/type-classes.md @@ -0,0 +1,281 @@ +--- +layout: doc-page +title: "Implementing Type classes" +--- + +A _type class_ is an abstract, parameterized type that lets you add new behavior to any closed data type without using sub-typing. This can be useful in multiple use-cases, for example: + +* expressing how a type you don't own (from the standard or 3rd-party library) conforms to such behavior +* expressing such a behavior for multiple types without involving sub-typing relationships (one `extends` another) between those types (see: [ad hoc polymorphism](https://en.wikipedia.org/wiki/Ad_hoc_polymorphism) for instance) + +Therefore in Scala 3, _type classes_ are just _traits_ with one or more parameters whose implementations are not defined through the `extends` keyword, but by **given instances**. +Here are some examples of common type classes: + +### Semigroups and monoids + +Here's the `Monoid` type class definition: + +```scala +trait SemiGroup[T]: + extension (x: T) def combine (y: T): T + +trait Monoid[T] extends SemiGroup[T]: + def unit: T +``` + +An implementation of this `Monoid` type class for the type `String` can be the following: + +```scala +given Monoid[String]: + extension (x: String) def combine (y: String): String = x.concat(y) + def unit: String = "" +``` + +Whereas for the type `Int` one could write the following: + +```scala +given Monoid[Int]: + extension (x: Int) def combine (y: Int): Int = x + y + def unit: Int = 0 +``` + +This monoid can now be used as _context bound_ in the following `combineAll` method: + +```scala +def combineAll[T: Monoid](xs: List[T]): T = + xs.foldLeft(summon[Monoid[T]].unit)(_.combine(_)) +``` + +To get rid of the `summon[...]` we can define a `Monoid` object as follows: + +```scala +object Monoid: + def apply[T](using m: Monoid[T]) = m +``` + +Which would allow to re-write the `combineAll` method this way: + +```scala +def combineAll[T: Monoid](xs: List[T]): T = + xs.foldLeft(Monoid[T].unit)(_.combine(_)) +``` + +### Functors + +A `Functor` for a type provides the ability for its values to be "mapped over", i.e. apply a function that transforms inside a value while remembering its shape. For example, to modify every element of a collection without dropping or adding elements. 
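+
+As a first, informal illustration of "mapping over" (using only plain standard-library calls):
+
+```scala
+List(1, 2, 3).map(_ + 1)       // List(2, 3, 4): every element is transformed, the shape is kept
+Option("hello").map(_.length)  // Some(5): still an Option, with its content transformed
+```
+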
+We can represent all types that can be "mapped over" with `F`. It's a type constructor: the type of its values becomes concrete when provided a type argument. +Therefore we write it `F[_]`, hinting that the type `F` takes another type as argument. +The definition of a generic `Functor` would thus be written as: + +```scala +trait Functor[F[_]]: + def map[A, B](x: F[A], f: A => B): F[B] +``` + +Which could read as follows: "A `Functor` for the type constructor `F[_]` represents the ability to transform `F[A]` to `F[B]` through the application of function `f` with type `A => B`". We call the `Functor` definition here a _type class_. +This way, we could define an instance of `Functor` for the `List` type: + +```scala +given Functor[List]: + def map[A, B](x: List[A], f: A => B): List[B] = + x.map(f) // List already has a `map` method +``` + +With this `given` instance in scope, everywhere a `Functor` is expected, the compiler will accept a `List` to be used. + +For instance, we may write such a testing method: + +```scala +def assertTransformation[F[_]: Functor, A, B](expected: F[B], original: F[A], mapping: A => B): Unit = + assert(expected == summon[Functor[F]].map(original, mapping)) +``` + +And use it this way, for example: + +```scala +assertTransformation(List("a1", "b1"), List("a", "b"), elt => s"${elt}1") +``` + +That's a first step, but in practice we probably would like the `map` function to be a method directly accessible on the type `F`. So that we can call `map` directly on instances of `F`, and get rid of the `summon[Functor[F]]` part. +As in the previous example of Monoids, [`extension` methods](extension-methods.html) help achieving that. Let's re-define the `Functor` type class with extension methods. + +```scala +trait Functor[F[_]]: + extension [A, B](x: F[A]) + def map(f: A => B): F[B] +``` + +The instance of `Functor` for `List` now becomes: + +```scala +given Functor[List]: + extension [A, B](xs: List[A]) + def map(f: A => B): List[B] = + xs.map(f) // List already has a `map` method + +``` + +It simplifies the `assertTransformation` method: + +```scala +def assertTransformation[F[_]: Functor, A, B](expected: F[B], original: F[A], mapping: A => B): Unit = + assert(expected == original.map(mapping)) +``` + +The `map` method is now directly used on `original`. It is available as an extension method +since `original`'s type is `F[A]` and a given instance for `Functor[F[A]]` which defines `map` +is in scope. + +### Monads + +Applying `map` in `Functor[List]` to a mapping function of type `A => B` results in a `List[B]`. So applying it to a mapping function of type `A => List[B]` results in a `List[List[B]]`. To avoid managing lists of lists, we may want to "flatten" the values in a single list. + +That's where `Monad` comes in. A `Monad` for type `F[_]` is a `Functor[F]` with two more operations: + +* `flatMap`, which turns an `F[A]` into an `F[B]` when given a function of type `A => F[B]`, +* `pure`, which creates an `F[A]` from a single value `A`. 
+ +Here is the translation of this definition in Scala 3: + +```scala +trait Monad[F[_]] extends Functor[F]: + + /** The unit value for a monad */ + def pure[A](x: A): F[A] + + extension [A, B](x: F[A]) + /** The fundamental composition operation */ + def flatMap(f: A => F[B]): F[B] + + /** The `map` operation can now be defined in terms of `flatMap` */ + def map(f: A => B) = x.flatMap(f.andThen(pure)) + +end Monad +``` + +#### List + +A `List` can be turned into a monad via this `given` instance: + +```scala +given listMonad as Monad[List]: + def pure[A](x: A): List[A] = + List(x) + extension [A, B](xs: List[A]) + def flatMap(f: A => List[B]): List[B] = + xs.flatMap(f) // rely on the existing `flatMap` method of `List` +``` + +Since `Monad` is a subtype of `Functor`, `List` is also a functor. The Functor's `map` +operation is already provided by the `Monad` trait, so the instance does not need to define +it explicitly. + +#### Option + +`Option` is another type having the same kind of behaviour: + +```scala +given optionMonad as Monad[Option]: + def pure[A](x: A): Option[A] = + Option(x) + extension [A, B](xo: Option[A]) + def flatMap(f: A => Option[B]): Option[B] = xo match + case Some(x) => f(x) + case None => None +``` + +#### Reader + +Another example of a `Monad` is the _Reader_ Monad, which acts on functions instead of +data types like `List` or `Option`. It can be used to combine multiple functions +that all need the same parameter. For instance, multiple functions needing access to some configuration, context, environment variables, etc. + +Let's define a `Config` type, and two functions using it: + +```scala +trait Config +// ... +def compute(i: Int)(config: Config): String = ??? +def show(str: String)(config: Config): Unit = ??? +``` + +We may want to combine `compute` and `show` into a single function, accepting a `Config` as parameter, and showing the result of the computation, and we'd like to use +a monad to avoid passing the parameter explicitly multiple times. So postulating +the right `flatMap` operation, we could write: + +```scala +def computeAndShow(i: Int): Config => Unit = compute(i).flatMap(show) +``` + +instead of + +```scala +show(compute(i)(config))(config) +``` + +Let's define this monad, then. First, we are going to define a type named `ConfigDependent` representing a function that when passed a `Config` produces a `Result`. + +```scala +type ConfigDependent[Result] = Config => Result +``` + +The monad instance will look like this: + +```scala +given configDependentMonad as Monad[ConfigDependent]: + + def pure[A](x: A): ConfigDependent[A] = + config => x + + extension [A, B](x: ConfigDependent[A]) + def flatMap(f: A => ConfigDependent[B]): ConfigDependent[B] = + config => f(x(config))(config) + +end configDependentMonad +``` + +The type `ConfigDependent` can be written using [type lambdas](../new-types/type-lambdas.html): + +```scala +type ConfigDependent = [Result] =>> Config => Result +``` + +Using this syntax would turn the previous `configDependentMonad` into: + +```scala +given configDependentMonad as Monad[[Result] =>> Config => Result]: + + def pure[A](x: A): Config => A = + config => x + + extension [A, B](x: Config => A) + def flatMap(f: A => Config => B): Config => B = + config => f(x(config))(config) + +end configDependentMonad +``` + +It is likely that we would like to use this pattern with other kinds of environments than our `Config` trait.
The Reader monad allows us to abstract away `Config` as a type _parameter_, named `Ctx` in the following definition: + +```scala +given readerMonad[Ctx] as Monad[[X] =>> Ctx => X]: + + def pure[A](x: A): Ctx => A = + ctx => x + + extension [A, B](x: Ctx => A) + def flatMap(f: A => Ctx => B): Ctx => B = + ctx => f(x(ctx))(ctx) + +end readerMonad +``` + +### Summary + +The definition of a _type class_ is expressed with a parameterised type with abstract members, such as a `trait`. +The main difference between subtype polymorphism and ad-hoc polymorphism with _type classes_ is how the definition of the _type class_ is implemented, in relation to the type it acts upon. +In the case of a _type class_, its implementation for a concrete type is expressed through a `given` instance definition, which is supplied as an implicit argument alongside the value it acts upon. With subtype polymorphism, the implementation is mixed into the parents of a class, and only a single term is required to perform a polymorphic operation. The type class solution +takes more effort to set up, but is more extensible: Adding a new interface to a +class requires changing the source code of that class. By contrast, instances for type classes can be defined anywhere. + +To conclude, we have seen that traits and given instances, combined with other constructs like extension methods, context bounds and type lambdas allow a concise and natural expression of _type classes_. diff --git a/scala3doc/dotty-docs/docs/docs/reference/contextual/using-clauses.md b/scala3doc/dotty-docs/docs/docs/reference/contextual/using-clauses.md new file mode 100644 index 000000000000..7f494f7b5845 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/contextual/using-clauses.md @@ -0,0 +1,106 @@ +--- +layout: doc-page +title: "Using Clauses" +--- + +Functional programming tends to express most dependencies as simple function parameterization. +This is clean and powerful, but it sometimes leads to functions that take many parameters where the same value is passed over and over again in long call chains to many +functions. Context parameters can help here since they enable the compiler to synthesize +repetitive arguments instead of the programmer having to write them explicitly. + +For example, with the [given instances](./givens.md) defined previously, +a maximum function that works for any arguments for which an ordering exists can be defined as follows: +```scala +def max[T](x: T, y: T)(using ord: Ord[T]): T = + if ord.compare(x, y) < 0 then y else x +``` +Here, `ord` is a _context parameter_ introduced with a `using` clause. +The `max` method can be applied as follows: +```scala +max(2, 3)(using intOrd) +``` +The `(using intOrd)` part passes `intOrd` as an argument for the `ord` parameter. But the point of context parameters is that this argument can also be left out (and it usually is). So the following applications are equally valid: +```scala +max(2, 3) +max(List(1, 2, 3), Nil) +``` + +## Anonymous Context Parameters + +In many situations, the name of a context parameter need not be +mentioned explicitly at all, since it is used only in synthesized arguments for +other context parameters. In that case one can avoid defining a parameter name +and just provide its type. Example: +```scala +def maximum[T](xs: List[T])(using Ord[T]): T = + xs.reduceLeft(max) +``` +`maximum` takes a context parameter of type `Ord` only to pass it on as an +inferred argument to `max`. The name of the parameter is left out.
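+
+As a small usage sketch (assuming the `intOrd` instance from the previous page is in scope), the following two calls are equivalent:
+
+```scala
+maximum(List(1, 42, 3))                // the Ord[Int] argument is synthesized from intOrd
+maximum(List(1, 42, 3))(using intOrd)  // the same call with the context argument written out
+```
+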
+ +Generally, context parameters may be defined either as a full parameter list `(p_1: T_1, ..., p_n: T_n)` or just as a sequence of types `T_1, ..., T_n`. Vararg parameters are not supported in using clauses. + +## Inferring Complex Arguments + +Here are two other methods that have a context parameter of type `Ord[T]`: +```scala +def descending[T](using asc: Ord[T]): Ord[T] = new Ord[T] { + def compare(x: T, y: T) = asc.compare(y, x) +} + +def minimum[T](xs: List[T])(using Ord[T]) = + maximum(xs)(using descending) +``` +The `minimum` method's right hand side passes `descending` as an explicit argument to `maximum(xs)`. +With this setup, the following calls are all well-formed, and they all normalize to the last one: +```scala +minimum(xs) +maximum(xs)(using descending) +maximum(xs)(using descending(using listOrd)) +maximum(xs)(using descending(using listOrd(using intOrd))) +``` + +## Multiple Using Clauses + +There can be several using clauses in a definition and using clauses can be freely mixed with normal parameter clauses. Example: +```scala +def f(u: Universe)(using ctx: u.Context)(using s: ctx.Symbol, k: ctx.Kind) = ... +``` +Multiple using clauses are matched left-to-right in applications. Example: +```scala +object global extends Universe { type Context = ... } +given ctx as global.Context { type Symbol = ...; type Kind = ... } +given sym as ctx.Symbol +given kind as ctx.Kind +``` +Then the following calls are all valid (and normalize to the last one) +```scala +f(global) +f(global)(using ctx) +f(global)(using ctx)(using sym, kind) +``` +But `f(global)(using sym, kind)` would give a type error. + + +## Summoning Instances + +The method `summon` in `Predef` returns the given of a specific type. For example, +the given instance for `Ord[List[Int]]` is produced by +```scala +summon[Ord[List[Int]]] // reduces to listOrd(using intOrd) +``` +The `summon` method is simply defined as the (non-widening) identity function over a context parameter. +```scala +def summon[T](using x: T): x.type = x +``` + +## Syntax + +Here is the new syntax of parameters and arguments seen as a delta from the [standard context free syntax of Scala 3](../../internals/syntax.md). `using` is a soft keyword, recognized only at the start of a parameter or argument list. It can be used as a normal identifier everywhere else. +``` +ClsParamClause ::= ... | UsingClsParamClause +DefParamClauses ::= ... | UsingParamClause +UsingClsParamClause ::= ‘(’ ‘using’ (ClsParams | Types) ‘)’ +UsingParamClause ::= ‘(’ ‘using’ (DefParams | Types) ‘)’ +ParArgumentExprs ::= ... | ‘(’ ‘using’ ExprsInParens ‘)’ +``` diff --git a/scala3doc/dotty-docs/docs/docs/reference/dropped-features/auto-apply.md b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/auto-apply.md new file mode 100644 index 000000000000..29bed8a7fe20 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/auto-apply.md @@ -0,0 +1,84 @@ +--- +layout: doc-page +title: "Dropped: Auto-Application" +--- + +Previously an empty argument list `()` was implicitly inserted when +calling a nullary method without arguments. E.g. +```scala +def next(): T = ... +next // is expanded to next() +``` +In Dotty, this idiom is an error. +```scala +next +^ +missing arguments for method next +``` +In Dotty, the application syntax has to follow exactly the parameter +syntax. Excluded from this rule are methods that are defined in Java +or that override methods defined in Java. 
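For instance (a small sketch), a Scala definition that overrides a method coming from Java, such as `java.lang.Object#hashCode`, keeps the old flexibility:

```scala
class Point(x: Int, y: Int) {
  // overrides a method ultimately defined in Java (java.lang.Object)
  override def hashCode(): Int = 31 * x + y
}

val p = new Point(1, 2)
p.hashCode     // still accepted: the Java-override exemption applies
p.hashCode()   // also accepted, matching the definition
```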
The reason for being more +lenient with such methods is that otherwise everyone would have to +write +```scala +xs.toString().length() +``` +instead of +```scala +xs.toString.length +``` +The latter is idiomatic Scala because it conforms to the _uniform +access principle_. This principle states that one should be able to +change an object member from a field to a non-side-effecting method +and back without affecting clients that access the +member. Consequently, Scala encourages to define such "property" +methods without a `()` parameter list whereas side-effecting methods +should be defined with it. Methods defined in Java cannot make this +distinction; for them a `()` is always mandatory. So Scala fixes the +problem on the client side, by allowing the parameterless references. +But where Scala allows that freedom for all method references, Dotty +restricts it to references of external methods that are not defined +themselves in Dotty. + +For reasons of backwards compatibility, Dotty for the moment also +auto-inserts `()` for nullary methods that are defined in Scala 2, or +that override a method defined in Scala 2. It turns out that, because +the correspondence between definition and call was not enforced in +Scala so far, there are quite a few method definitions in Scala 2 +libraries that use `()` in an inconsistent way. For instance, we +find in `scala.math.Numeric` +```scala +def toInt(): Int +``` +whereas `toInt` is written without parameters everywhere +else. Enforcing strict parameter correspondence for references to +such methods would project the inconsistencies to client code, which +is undesirable. So Dotty opts for more leniency when type-checking +references to such methods until most core libraries in Scala 2 have +been cleaned up. + +Stricter conformance rules also apply to overriding of nullary +methods. It is no longer allowed to override a parameterless method +by a nullary method or _vice versa_. Instead, both methods must agree +exactly in their parameter lists. +```scala +class A { + def next(): Int +} +class B extends A { + def next: Int // overriding error: incompatible type +} +``` +Methods overriding Java or Scala-2 methods are again exempted from this +requirement. + +### Migrating code + +Existing Scala code with inconsistent parameters can still be compiled +in Dotty under `-source 3.0-migration`. When paired with the `-rewrite` +option, the code will be automatically rewritten to conform to Dotty's +stricter checking. + +### Reference + +For more info, see [Issue #2570](https://github.com/lampepfl/dotty/issues/2570) and [PR #2716](https://github.com/lampepfl/dotty/pull/2716). diff --git a/scala3doc/dotty-docs/docs/docs/reference/dropped-features/class-shadowing-spec.md b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/class-shadowing-spec.md new file mode 100644 index 000000000000..84e4cfdba9ce --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/class-shadowing-spec.md @@ -0,0 +1,25 @@ +--- +layout: doc-page +title: "Dropped: Class Shadowing - More Details" +--- + +Spec diff: in section [5.1.4 Overriding](https://www.scala-lang.org/files/archive/spec/2.12/05-classes-and-objects.html), add *M' must not be a class*. + +> Why do we want to make this change to the language? + +Class shadowing is irregular compared to other types of overrides. Indeed, inner classes are not actually overriden but simply shadowed. + + +> How much existing code is going to be affected? 
+ +From all the code compiled so far with Dotty the only instance of this I could find is in the stdlib. Looking at [this commit](https://github.com/lampepfl/scala/commit/68f13bf39979b631ed211ec1751934306ceb5d6c#diff-7aa508b70e055b47c823764e3e5646b8) it seems like the usage of class shadowing was accidental. + + +> How exactly is existing code going to be affected? + +Code that relies on overridden inner classes will stop compiling. + + +> Is this change going to be migratable automatically? + +No. diff --git a/scala3doc/dotty-docs/docs/docs/reference/dropped-features/class-shadowing.md b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/class-shadowing.md new file mode 100644 index 000000000000..721a4d18139d --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/class-shadowing.md @@ -0,0 +1,27 @@ +--- +layout: doc-page +title: Dropped: Class Shadowing +--- + +Scala so far allowed patterns like this: +```scala +class Base { + class Ops { ... } +} + +class Sub extends Base { + class Ops { ... } +} +``` +Dotty rejects this with the error message: +```scala +6 | class Ops { } + | ^ + |class Ops cannot have the same name as class Ops in class Base -- class definitions cannot be overridden +``` +The issue is that the two `Ops` classes _look_ like one overrides the +other, but classes in Scala cannot be overridden. To keep things clean +(and its internal operations consistent) the Dotty compiler forces you +to rename the inner classes so that their names are different. + +[More details](./class-shadowing-spec.md) diff --git a/scala3doc/dotty-docs/docs/docs/reference/dropped-features/delayed-init.md b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/delayed-init.md new file mode 100644 index 000000000000..d38c84e6222f --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/delayed-init.md @@ -0,0 +1,26 @@ +--- +layout: doc-page +title: Dropped: Delayedinit +--- + +The special handling of the `DelayedInit` trait is no longer +supported. + +One consequence is that the `App` class, which used `DelayedInit` is +now partially broken. You can still use `App` for an easy and concise +way to set up a main program. Example: +```scala +object HelloWorld extends App { + println("Hello, world!") +} +``` +However, the code is now run in the initializer of the object, which on +some JVM's means that it will only be interpreted. So, better not use it +for benchmarking! Also, if you want to access the command line arguments, +you need to use an explicit `main` method for that. +```scala +object Hello { + def main(args: Array[String]) = + println(s"Hello, ${args(0)}") +} +``` diff --git a/scala3doc/dotty-docs/docs/docs/reference/dropped-features/do-while.md b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/do-while.md new file mode 100644 index 000000000000..6f76207208c6 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/do-while.md @@ -0,0 +1,49 @@ +--- +layout: doc-page +title: Dropped: Do-While +--- + +The syntax construct +```scala +do while +``` +is no longer supported. 
Instead, it is recommended to use the equivalent `while` loop +below: +```scala +while ({ ; }) () +``` +For instance, instead of +```scala +do + i += 1 +while (f(i) == 0) +``` +one writes +```scala +while ({ + i += 1 + f(i) == 0 +}) () +``` +Under the [new syntax rules](../other-new-features/control-syntax), this code can be written also without the awkward `({...})` bracketing like this: +```scala +while { + i += 1 + f(i) == 0 +} do () +``` +The idea to use a block as the condition of a while also gives a solution +to the "loop-and-a-half" problem. For instance: +```scala +while { + val x: Int = iterator.next + x >= 0 +} do print(".") +``` + +### Why Drop The Construct? + + - `do-while` is used relatively rarely and it can expressed faithfully using just while. So there seems to be little point in having it as a separate syntax construct. + - Under the [new syntax rules](../other-new-features/control-syntax) `do` is used + as a statement continuation, which would clash with its meaning as a statement + introduction. diff --git a/scala3doc/dotty-docs/docs/docs/reference/dropped-features/early-initializers.md b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/early-initializers.md new file mode 100644 index 000000000000..91dc3f1946f5 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/early-initializers.md @@ -0,0 +1,11 @@ +--- +layout: doc-page +title: Dropped: Early Initializers +--- + +Early initializers of the form +```scala +class C extends { ... } with SuperClass ... +``` +have been dropped. They were rarely used, and mostly to compensate for the lack of +[trait parameters](../other-new-features/trait-parameters.md), which are now directly supported in Dotty. diff --git a/scala3doc/dotty-docs/docs/docs/reference/dropped-features/existential-types.md b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/existential-types.md new file mode 100644 index 000000000000..3a9d638adeff --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/existential-types.md @@ -0,0 +1,32 @@ +--- +layout: doc-page +title: Dropped: Existential Types +--- + +Existential types using `forSome` have been dropped. The reasons for dropping them were: + + - Existential types violate a type soundness principle on which DOT + and Dotty are constructed. That principle says that every + prefix (`p`, respectvely `S`) of a type selection `p.T` or `S#T` + must either come from a value constructed at runtime or refer to a + type that is known to have only good bounds. + + - Existential types create many difficult feature interactions + with other Scala constructs. + + - Existential types largely overlap with path-dependent types, + so the gain of having them is relatively minor. + +Existential types that can be expressed using only wildcards (but not +`forSome`) are still supported, but are treated as refined types. +For instance, the type +```scala +Map[_ <: AnyRef, Int] +``` +is treated as the type `Map`, where the first type parameter +is upper-bounded by `AnyRef` and the second type parameter is an alias +of `Int`. + +When reading classfiles compiled with _scalac_, Dotty will do a best +effort to approximate existential types with its own types. It will +issue a warning that a precise emulation is not possible. 
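As a rough illustration (the `forSome` signature is shown only in a comment for comparison; it is not valid Dotty code), an existential that merely hides a type can usually be replaced by a wildcard, and one whose hidden type must be named can use an ordinary type parameter:

```scala
// Scala 2 (dropped):
//   def keys(m: Map[K, Int] forSome { type K <: AnyRef }): Iterable[AnyRef]

// Scala 3: a wildcard expresses the same thing
def keys(m: Map[_ <: AnyRef, Int]): Iterable[AnyRef] = m.keys

// If the hidden type has to be referred to, a type parameter takes over
def sameKeys[K <: AnyRef](m1: Map[K, Int], m2: Map[K, Int]): Boolean =
  m1.keySet == m2.keySet
```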
diff --git a/scala3doc/dotty-docs/docs/docs/reference/dropped-features/limit22.md b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/limit22.md
new file mode 100644
index 000000000000..ed1fe7e44435
--- /dev/null
+++ b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/limit22.md
@@ -0,0 +1,14 @@
+---
+layout: doc-page
+title: Dropped: Limit 22
+---
+
+The limits of 22 for the maximal number of parameters of function types
+and the maximal number of fields in tuple types have been dropped.
+
+Functions can now have an arbitrary number of
+parameters. Functions beyond Function22 are erased to a new trait
+`scala.FunctionXXL` and tuples beyond Tuple22 are erased to a new trait `scala.TupleXXL`.
+Both of these are implemented using arrays.
+
+Tuples can also have an arbitrary number of fields. Furthermore, they support generic operations such as concatenation and indexing.
diff --git a/scala3doc/dotty-docs/docs/docs/reference/dropped-features/macros.md b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/macros.md
new file mode 100644
index 000000000000..8cd8861ebad2
--- /dev/null
+++ b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/macros.md
@@ -0,0 +1,13 @@
+---
+layout: doc-page
+title: Dropped: Scala 2 Macros
+---
+
+The previous, experimental macro system has been dropped. Instead, there is a cleaner, more restricted system based on two complementary concepts: `inline` and `'{ ... }`/`${ ... }` code generation.
+`'{ ... }` delays the compilation of the code and produces an object containing the code; dually, `${ ... }` evaluates an expression which produces code and inserts it into the surrounding code.
+In this setting, a definition marked as `inline` that contains a `${ ... }` is a macro: the code inside the `${ ... }` is executed at compile time and produces code in the form of `'{ ... }`.
+Additionally, the contents of code can be inspected and created with a more complex reflection API (TASTy Reflect) as an extension of the `'{ ... }`/`${ ... }` framework.
+
+* `inline` has been [implemented](../metaprogramming/inline.md) in Dotty.
+* Quotes `'{ ... }` and splices `${ ... }` have been [implemented](../metaprogramming/macros.md) in Dotty.
+  * [TASTy reflect](../metaprogramming/tasty-reflect.md) provides more complex tree-based APIs to inspect or create quoted code.
diff --git a/scala3doc/dotty-docs/docs/docs/reference/dropped-features/nonlocal-returns.md b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/nonlocal-returns.md
new file mode 100644
index 000000000000..075eef088f75
--- /dev/null
+++ b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/nonlocal-returns.md
@@ -0,0 +1,14 @@
+---
+layout: doc-page
+title: Deprecated: Nonlocal Returns
+---
+
+Returning from nested anonymous functions has been deprecated. Nonlocal returns are implemented by throwing and catching `scala.runtime.NonLocalReturnException`s. This is rarely what is intended by the programmer. It can be problematic because of the hidden performance cost of throwing and catching exceptions. Furthermore, it is a leaky implementation: a catch-all exception handler can intercept a `NonLocalReturnException`.
+
+A drop-in library replacement is provided in `scala.util.control.NonLocalReturns`:
+
+```scala
+import scala.util.control.NonLocalReturns._
+
+returning { ... throwReturn(x) ...
} +``` diff --git a/scala3doc/dotty-docs/docs/docs/reference/dropped-features/package-objects.md b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/package-objects.md new file mode 100644 index 000000000000..beb304a48b16 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/package-objects.md @@ -0,0 +1,47 @@ +--- +layout: doc-page +title: "Dropped: Package Objects" +--- + +Package objects +```scala +package object p { + val a = ... + def b = ... +} +``` +will be dropped. They are still available in Scala 3.0, but will be deprecated and removed afterwards. + +Package objects are no longer needed since all kinds of definitions can now be written at the top-level. E.g. +```scala +package p +type Labelled[T] = (String, T) +val a: Labelled[Int] = ("count", 1) +def b = a._2 + +case class C() + +implicit object Cops { + extension (x: C) def pair(y: C) = (x, y) +} +``` +There may be several source files in a package containing such toplevel definitions, and source files can freely mix toplevel value, method, and type definitions with classes and objects. + +The compiler generates synthetic objects that wrap toplevel definitions falling into one of the following categories: + + - all pattern, value, method, and type definitions, + - implicit classes and objects, + - companion objects of opaque type aliases. + +If a source file `src.scala` contains such toplevel definitions, they will be put in a synthetic object named `src$package`. The wrapping is transparent, however. The definitions in `src` can still be accessed as members of the enclosing package. + +**Note 1:** This means that the name of a source file containing wrapped toplevel definitions is relevant for binary compatibility. If the name changes, so does the name of the generated object and its class. + +**Note 2:** A toplevel main method `def main(args: Array[String]): Unit = ...` is wrapped as any other method. If it appears +in a source file `src.scala`, it could be invoked from the command line using a command like `scala src$package`. Since the +"program name" is mangled it is recommended to always put `main` methods in explicitly named objects. + +**Note 3:** The notion of `private` is independent of whether a definition is wrapped or not. A `private` toplevel definition is always visible from everywhere in the enclosing package. + +**Note 4:** If several toplevel definitions are overloaded variants with the same name, +they must all come from the same source file. diff --git a/scala3doc/dotty-docs/docs/docs/reference/dropped-features/procedure-syntax.md b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/procedure-syntax.md new file mode 100644 index 000000000000..9a5416e37db4 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/procedure-syntax.md @@ -0,0 +1,18 @@ +--- +layout: doc-page +title: Dropped: Procedure Syntax +--- + +Procedure syntax +```scala +def f() { ... } +``` +has been dropped. You need to write one of the following instead: +```scala +def f() = { ... } +def f(): Unit = { ... } +``` +Dotty will accept the old syntax under the `-source:3.0-migration` option. +If the `-migration` option is set, it can even rewrite old syntax to new. +The [ScalaFix](https://scalacenter.github.io/scalafix/) tool also +can rewrite procedure syntax to make it Dotty-compatible. 
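As a hedged sketch of what such a rewrite produces on a typical definition:

```scala
// Before (Scala 2 procedure syntax, rejected by Dotty):
//   def log(msg: String) { println(s"[info] $msg") }

// After the rewrite, the result type and the `=` are explicit:
def log(msg: String): Unit = { println(s"[info] $msg") }
```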
diff --git a/scala3doc/dotty-docs/docs/docs/reference/dropped-features/symlits.md b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/symlits.md new file mode 100644 index 000000000000..211ceab1aef0 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/symlits.md @@ -0,0 +1,7 @@ +--- +layout: doc-page +title: Dropped: Symbol Literals +--- + +Symbol literals are no longer supported. The `scala.Symbol` class still exists, so a +literal translation of the symbol literal `'xyz` is `Symbol("xyz")`. However, it is recommended to use a plain string literal `"xyz"` instead. (The `Symbol` class will be deprecated and removed in the future). diff --git a/scala3doc/dotty-docs/docs/docs/reference/dropped-features/this-qualifier.md b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/this-qualifier.md new file mode 100644 index 000000000000..4e50048b8d76 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/this-qualifier.md @@ -0,0 +1,14 @@ +--- +layout: doc-page +title: Dropped: private[this] and protected[this] +--- + +The `private[this]` and `protected[this]` access modifiers are deprecated and will be phased out. + +Previously, these modifiers were needed + + - for avoiding the generation of getters and setters + - for excluding code under a `private[this]` from variance checks. (Scala 2 also excludes `protected[this]` but this was found to be unsound and was therefore removed). + +The compiler now infers for `private` members the fact that they are only accessed via `this`. Such members are treated as if they had been declared `private[this]`. `protected[this]` is dropped without a replacement. + diff --git a/scala3doc/dotty-docs/docs/docs/reference/dropped-features/type-projection.md b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/type-projection.md new file mode 100644 index 000000000000..ccc4259e2230 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/type-projection.md @@ -0,0 +1,17 @@ +--- +layout: doc-page +title: Dropped: General Type Projection +--- + +Scala so far allowed general type projection `T#A` where `T` is an arbitrary type +and `A` names a type member of `T`. + +Dotty disallows this if `T` is an abstract type (class types and type aliases +are fine). This change was made because unrestricted type projection +is [unsound](https://github.com/lampepfl/dotty/issues/1050). + +This restriction rules out the [type-level encoding of a combinator +calculus](https://michid.wordpress.com/2010/01/29/scala-type-level-encoding-of-the-ski-calculus/). + +To rewrite code using type projections on abstract types, consider using +path-dependent types or implicit parameters. diff --git a/scala3doc/dotty-docs/docs/docs/reference/dropped-features/weak-conformance-spec.md b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/weak-conformance-spec.md new file mode 100644 index 000000000000..50dafcb5d946 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/weak-conformance-spec.md @@ -0,0 +1,48 @@ +--- +layout: doc-page +title: Dropped: Weak Conformance - More Details +--- + +To simplify the underlying type theory, Dotty drops the notion of weak +conformance altogether. Instead, it provides more flexibility when +assigning a type to a constant expression. 
The new rule is: + + - If a list of expressions `Es` appears as one of + + - the elements of a vararg parameter, or + - the alternatives of an if-then-else or match expression, or + - the body and catch results of a try expression, + + + and all expressions have primitive numeric types, but they do not + all have the same type, then the following is attempted: + + - the expressions `Es` are partitioned into `Int` constants on the + one hand, and all other expressions on the other hand, + - if all the other expressions have the same numeric type `T` + (which can be one of `Byte`, `Short`, `Char`, `Int`, `Long`, `Float`, + `Double`), possibly after widening, and if none of the `Int` + literals would incur a loss of precision when converted to `T`, + then they are thus converted (the other expressions are left + unchanged regardless), + - otherwise, the expressions `Es` are used unchanged. + + A loss of precision occurs for an `Int -> Float` conversion of a constant + `c` if `c.toFloat.toInt != c`. For an `Int -> Byte` conversion it occurs + if `c.toByte.toInt != c`. For an `Int -> Short` conversion, it occurs + if `c.toShort.toInt != c`. + +### Examples + +```scala +inline val b = 33 +def f(): Int = b + 1 +Array(b, 33, 5.5) : Array[Double] // b is an inline val +Array(f(), 33, 5.5) : Array[AnyVal] // f() is not a constant +Array(5, 11L) : Array[Long] +Array(5, 11L, 5.5) : Array[AnyVal] // Long and Double found +Array(1.0f, 2) : Array[Float] +Array(1.0f, 1234567890): Array[AnyVal] // loss of precision +Array(b, 33, 'a') : Array[Char] +Array(5.toByte, 11) : Array[Byte] +``` diff --git a/scala3doc/dotty-docs/docs/docs/reference/dropped-features/weak-conformance.md b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/weak-conformance.md new file mode 100644 index 000000000000..2ed20cd07af1 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/weak-conformance.md @@ -0,0 +1,46 @@ +--- +layout: doc-page +title: Dropped: Weak Conformance +--- + +In some situations, Scala used a _weak conformance_ relation when +testing type compatibility or computing the least upper bound of a set +of types. The principal motivation behind weak conformance was to +make an expression like this have type `List[Double]`: + +```scala +List(1.0, math.sqrt(3.0), 0, -3.3) // : List[Double] +``` + +It's "obvious" that this should be a `List[Double]`. However, without +some special provision, the least upper bound of the lists's element +types `(Double, Double, Int, Double)` would be `AnyVal`, hence the list +expression would be given type `List[AnyVal]`. + +A less obvious example is the following one, which was also typed as a +`List[Double]`, using the weak conformance relation. + +```scala +val n: Int = 3 +val c: Char = 'X' +val d: Double = math.sqrt(3.0) +List(n, c, d) // used to be: List[Double], now: List[AnyVal] +``` + +Here, it is less clear why the type should be widened to +`List[Double]`, a `List[AnyVal]` seems to be an equally valid -- and +more principled -- choice. + +Weak conformance applies to all "numeric" types (including `Char`), and +independently of whether the expressions are literals or not. However, +in hindsight, the only intended use case is for *integer literals* to +be adapted to the type of the other expressions. Other types of numerics +have an explicit type annotation embedded in their syntax (`f`, `d`, +`.`, `L` or `'` for `Char`s) which ensures that their author really +meant them to have that specific type). 
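For instance, under the single rule that Scala 3 keeps (stated below), only the bare integer literal adapts:

```scala
val n: Int = 2
List(1.5f, 2)   // the Int literal 2 adapts to Float: inferred as List[Float]
List(1.5f, n)   // n is not a literal, nothing adapts: inferred as List[AnyVal]
```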
+ +Therefore, Dotty drops the general notion of weak conformance, and +instead keeps one rule: `Int` literals are adapted to other numeric +types if necessary. + +[More details](weak-conformance-spec.md) diff --git a/scala3doc/dotty-docs/docs/docs/reference/dropped-features/xml.md b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/xml.md new file mode 100644 index 000000000000..8e8769336e53 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/dropped-features/xml.md @@ -0,0 +1,10 @@ +--- +layout: doc-page +title: Dropped: XML Literals +--- + +XML Literals are still supported, but will be dropped in the near future, to +be replaced with XML string interpolation: +```scala +xml""" ... """ +``` diff --git a/scala3doc/dotty-docs/docs/docs/reference/enums/adts.md b/scala3doc/dotty-docs/docs/docs/reference/enums/adts.md new file mode 100644 index 000000000000..8e05280384b4 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/enums/adts.md @@ -0,0 +1,116 @@ +--- +layout: doc-page +title: "Algebraic Data Types" +--- + +The [`enum` concept](./enums.html) is general enough to also support algebraic data +types (ADTs) and their generalized version (GADTs). Here is an example +how an `Option` type can be represented as an ADT: + +```scala +enum Option[+T] { + case Some(x: T) + case None +} +``` + +This example introduces an `Option` enum with a covariant type +parameter `T` consisting of two cases, `Some` and `None`. `Some` is +parameterized with a value parameter `x`. It is a shorthand for writing a +case class that extends `Option`. Since `None` is not parameterized, it +is treated as a normal enum value. + +The `extends` clauses that were omitted in the example above can also +be given explicitly: + +```scala +enum Option[+T] { + case Some(x: T) extends Option[T] + case None extends Option[Nothing] +} +``` + +Note that the parent type of the `None` value is inferred as +`Option[Nothing]`. Generally, all covariant type parameters of the enum +class are minimized in a compiler-generated extends clause whereas all +contravariant type parameters are maximized. If `Option` was non-variant, +you would need to give the extends clause of `None` explicitly. + +As for normal enum values, the cases of an `enum` are all defined in +the `enum`s companion object. So it's `Option.Some` and `Option.None` +unless the definitions are "pulled out" with an import: + +```scala +scala> Option.Some("hello") +val res1: t2.Option[String] = Some(hello) + +scala> Option.None +val res2: t2.Option[Nothing] = None +``` + +Note that the type of the expressions above is always `Option`. That +is, the implementation case classes are not visible in the result +types of their `apply` methods. This is a subtle difference with +respect to normal case classes. The classes making up the cases do +exist, and can be unveiled by constructing them directly with a `new`. + +```scala +scala> new Option.Some(2) +val res3: t2.Option.Some[Int] = Some(2) +``` + +As all other enums, ADTs can define methods. For instance, here is `Option` again, with an +`isDefined` method and an `Option(...)` constructor in its companion object. + +```scala +enum Option[+T] { + case Some(x: T) + case None + + def isDefined: Boolean = this match { + case None => false + case some => true + } +} +object Option { + def apply[T >: Null](x: T): Option[T] = + if (x == null) None else Some(x) +} +``` + +Enumerations and ADTs have been presented as two different +concepts. 
But since they share the same syntactic construct, they can +be seen simply as two ends of a spectrum and it is perfectly possible +to construct hybrids. For instance, the code below gives an +implementation of `Color` either with three enum values or with a +parameterized case that takes an RGB value. + +```scala +enum Color(val rgb: Int) { + case Red extends Color(0xFF0000) + case Green extends Color(0x00FF00) + case Blue extends Color(0x0000FF) + case Mix(mix: Int) extends Color(mix) +} +``` + +### Syntax of Enums + +Changes to the syntax fall in two categories: enum definitions and cases inside enums. +The changes are specified below as deltas with respect to the Scala syntax given [here](../../internals/syntax.md) + + 1. Enum definitions are defined as follows: + ``` + TmplDef ::= `enum' EnumDef + EnumDef ::= id ClassConstr [`extends' [ConstrApps]] EnumBody + EnumBody ::= [nl] ‘{’ [SelfType] EnumStat {semi EnumStat} ‘}’ + EnumStat ::= TemplateStat + | {Annotation [nl]} {Modifier} EnumCase + ``` + 2. Cases of enums are defined as follows: + ``` + EnumCase ::= `case' (id ClassConstr [`extends' ConstrApps]] | ids) + ``` +### Reference + +For more info, see [Issue #1970](https://github.com/lampepfl/dotty/issues/1970). diff --git a/scala3doc/dotty-docs/docs/docs/reference/enums/desugarEnums.md b/scala3doc/dotty-docs/docs/docs/reference/enums/desugarEnums.md new file mode 100644 index 000000000000..3070602c6c11 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/enums/desugarEnums.md @@ -0,0 +1,208 @@ +--- +layout: doc-page +title: "Translation of Enums and ADTs" +--- + +The compiler expands enums and their cases to code that only uses +Scala's other language features. As such, enums in Scala are +convenient _syntactic sugar_, but they are not essential to understand +Scala's core. + +We now explain the expansion of enums in detail. First, +some terminology and notational conventions: + + - We use `E` as a name of an enum, and `C` as a name of a case that appears in `E`. + - We use `<...>` for syntactic constructs that in some circumstances might be empty. For instance, + `` represents one or more parameter lists `(...)` or nothing at all. + + - Enum cases fall into three categories: + + - _Class cases_ are those cases that are parameterized, either with a type parameter section `[...]` or with one or more (possibly empty) parameter sections `(...)`. + - _Simple cases_ are cases of a non-generic enum that have neither parameters nor an extends clause or body. That is, they consist of a name only. + - _Value cases_ are all cases that do not have a parameter section but that do have a (possibly generated) `extends` clause and/or a body. + + Simple cases and value cases are collectively called _singleton cases_. + +The desugaring rules imply that class cases are mapped to case classes, and singleton cases are mapped to `val` definitions. + +There are nine desugaring rules. Rule (1) desugar enum definitions. Rules +(2) and (3) desugar simple cases. Rules (4) to (6) define `extends` clauses for cases that +are missing them. Rules (7) to (9) define how such cases with `extends` clauses +map into `case class`es or `val`s. + +1. An `enum` definition + ```scala + enum E ... { } + ``` + expands to a `sealed abstract` class that extends the `scala.Enum` trait and + an associated companion object that contains the defined cases, expanded according + to rules (2 - 8). 
The enum trait starts with a compiler-generated import that imports + the names `` of all cases so that they can be used without prefix in the trait. + ```scala + sealed abstract class E ... extends with scala.Enum { + import E.{ } + + } + object E { } + ``` + +2. A simple case consisting of a comma-separated list of enum names + ```scala + case C_1, ..., C_n + ``` + expands to + ```scala + case C_1; ...; case C_n + ``` + Any modifiers or annotations on the original case extend to all expanded + cases. + +3. A simple case + ```scala + case C + ``` + of an enum `E` that does not take type parameters expands to + ```scala + val C = $new(n, "C") + ``` + Here, `$new` is a private method that creates an instance of `E` (see + below). + +4. If `E` is an enum with type parameters + ```scala + V1 T1 >: L1 <: U1 , ... , Vn Tn >: Ln <: Un (n > 0) + ``` + where each of the variances `Vi` is either `'+'` or `'-'`, then a simple case + ```scala + case C + ``` + expands to + ```scala + case C extends E[B1, ..., Bn] + ``` + where `Bi` is `Li` if `Vi = '+'` and `Ui` if `Vi = '-'`. This result is then further + rewritten with rule (8). Simple cases of enums with non-variant type + parameters are not permitted (however value cases with explicit `extends` clause are) + +5. A class case without an extends clause + ```scala + case C + ``` + of an enum `E` that does not take type parameters expands to + ```scala + case C extends E + ``` + This result is then further rewritten with rule (9). + +6. If `E` is an enum with type parameters `Ts`, a class case with neither type parameters nor an extends clause + ```scala + case C + ``` + expands to + ```scala + case C[Ts] extends E[Ts] + ``` + This result is then further rewritten with rule (9). For class cases that have type parameters themselves, an extends clause needs to be given explicitly. + +7. If `E` is an enum with type parameters `Ts`, a class case without type parameters but with an extends clause + ```scala + case C extends + ``` + expands to + ```scala + case C[Ts] extends + ``` + provided at least one of the parameters `Ts` is mentioned in a parameter type in + `` or in a type argument in ``. + +8. A value case + ```scala + case C extends + ``` + expands to a value definition in `E`'s companion object: + ```scala + val C = new { ; def ordinal = n; $values.register(this) } + ``` + where `n` is the ordinal number of the case in the companion object, + starting from 0. The statement `$values.register(this)` registers the value + as one of the `values` of the enumeration (see below). `$values` is a + compiler-defined private value in the companion object. The anonymous class also + implements the abstract `Product` methods that it inherits from `Enum`. + + + It is an error if a value case refers to a type parameter of the enclosing `enum` + in a type argument of ``. + +9. A class case + ```scala + case C extends + ``` + expands analogous to a final case class in `E`'s companion object: + ```scala + final case class C extends + ``` + However, unlike for a regular case class, the return type of the associated + `apply` method is a fully parameterized type instance of the enum class `E` + itself instead of `C`. Also the enum case defines an `ordinal` method of + the form + ```scala + def ordinal = n + ``` + where `n` is the ordinal number of the case in the companion object, + starting from 0. 
+ + It is an error if a value case refers to a type parameter of the enclosing `enum` + in a parameter type in `` or in a type argument of ``, unless that parameter is already + a type parameter of the case, i.e. the parameter name is defined in ``. + + +### Translation of Enumerations + +Non-generic enums `E` that define one or more singleton cases +are called _enumerations_. Companion objects of enumerations define +the following additional synthetic members. + + - A method `valueOf(name: String): E`. It returns the singleton case value whose + `toString` representation is `name`. + - A method `values` which returns an `Array[E]` of all singleton case + values in `E`, in the order of their definitions. + +Companion objects of enumerations that contain at least one simple case define in addition: + + - A private method `$new` which defines a new simple case value with given + ordinal number and name. This method can be thought as being defined as + follows. + ```scala + private def $new(_$ordinal: Int, $name: String) = new E { + def $ordinal = $_ordinal + override def toString = $name + $values.register(this) // register enum value so that `valueOf` and `values` can return it. + } + ``` + +The anonymous class also implements the abstract `Product` methods that it inherits from `Enum`. +The `$ordinal` method above is used to generate the `ordinal` method if the enum does not extend a `java.lang.Enum` (as Scala enums do not extend `java.lang.Enum`s unless explicitly specified). In case it does, there is no need to generate `ordinal` as `java.lang.Enum` defines it. + +### Scopes for Enum Cases + +A case in an `enum` is treated similarly to a secondary constructor. It can access neither the enclosing `enum` using `this`, nor its value parameters or instance members using simple +identifiers. + +Even though translated enum cases are located in the enum's companion object, referencing +this object or its members via `this` or a simple identifier is also illegal. The compiler typechecks enum cases in the scope of the enclosing companion object but flags any such illegal accesses as errors. + +### Translation of Java-compatible enums +A Java-compatible enum is an enum that extends `java.lang.Enum`. The translation rules are the same as above, with the reservations defined in this section. + +It is a compile-time error for a Java-compatible enum to have class cases. + +Cases such as `case C` expand to a `@static val` as opposed to a `val`. This allows them to be generated as static fields of the enum type, thus ensuring they are represented the same way as Java enums. + +### Other Rules + + - A normal case class which is not produced from an enum case is not allowed to extend +`scala.Enum`. This ensures that the only cases of an enum are the ones that are +explicitly declared in it. + + - If an enum case has an extends clause, the enum class must be one of the + classes that's extended. diff --git a/scala3doc/dotty-docs/docs/docs/reference/enums/enums.md b/scala3doc/dotty-docs/docs/docs/reference/enums/enums.md new file mode 100644 index 000000000000..90e52bdb2414 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/enums/enums.md @@ -0,0 +1,150 @@ +--- +layout: doc-page +title: "Enumerations" +--- + +An enumeration is used to define a type consisting of a set of named values. + +```scala +enum Color { + case Red, Green, Blue +} +``` + +This defines a new `sealed` class, `Color`, with three values, `Color.Red`, +`Color.Green`, `Color.Blue`. 
The color values are members of `Color`s +companion object. + +### Parameterized enums + +Enums can be parameterized. + +```scala +enum Color(val rgb: Int) { + case Red extends Color(0xFF0000) + case Green extends Color(0x00FF00) + case Blue extends Color(0x0000FF) +} +``` + +As the example shows, you can define the parameter value by using an +explicit extends clause. + +### Methods defined for enums + +The values of an enum correspond to unique integers. The integer +associated with an enum value is returned by its `ordinal` method: + +```scala +scala> val red = Color.Red +val red: Color = Red +scala> red.ordinal +val res0: Int = 0 +``` + +The companion object of an enum also defines two utility methods. +The `valueOf` method obtains an enum value +by its name. The `values` method returns all enum values +defined in an enumeration in an `Array`. + +```scala +scala> Color.valueOf("Blue") +val res0: Color = Blue +scala> Color.values +val res1: Array[Color] = Array(Red, Green, Blue) +``` + +### User-defined members of enums + +It is possible to add your own definitions to an enum. Example: + +```scala +enum Planet(mass: Double, radius: Double) { + private final val G = 6.67300E-11 + def surfaceGravity = G * mass / (radius * radius) + def surfaceWeight(otherMass: Double) = otherMass * surfaceGravity + + case Mercury extends Planet(3.303e+23, 2.4397e6) + case Venus extends Planet(4.869e+24, 6.0518e6) + case Earth extends Planet(5.976e+24, 6.37814e6) + case Mars extends Planet(6.421e+23, 3.3972e6) + case Jupiter extends Planet(1.9e+27, 7.1492e7) + case Saturn extends Planet(5.688e+26, 6.0268e7) + case Uranus extends Planet(8.686e+25, 2.5559e7) + case Neptune extends Planet(1.024e+26, 2.4746e7) +} +``` + +It is also possible to define an explicit companion object for an enum: + +```scala +object Planet { + def main(args: Array[String]) = { + val earthWeight = args(0).toDouble + val mass = earthWeight / Earth.surfaceGravity + for (p <- values) + println(s"Your weight on $p is ${p.surfaceWeight(mass)}") + } +} +``` + +### Compatibility with Java Enums +If you want to use the Scala-defined enums as Java enums, you can do so by extending `java.lang.Enum` class as follows: + +```scala +enum Color extends java.lang.Enum[Color] { case Red, Green, Blue } +``` + +The type parameter comes from the Java enum [definition](https://docs.oracle.com/javase/8/docs/api/index.html?java/lang/Enum.html) and should be the same as the type of the enum. +There is no need to provide constructor arguments (as defined in the Java API docs) to `java.lang.Enum` when extending it – the compiler will generate them automatically. + +After defining `Color` like that, you can use it like you would a Java enum: + +```scala +scala> Color.Red.compareTo(Color.Green) +val res15: Int = -1 +``` + +For a more in-depth example of using Scala 3 enums from Java, see [this test](https://github.com/lampepfl/dotty/tree/master/tests/run/enum-java). In the test, the enums are defined in the `MainScala.scala` file and used from a Java source, `Test.java`. + +### Implementation + +Enums are represented as `sealed` classes that extend the `scala.Enum` trait. +This trait defines a single public method, `ordinal`: + +```scala +package scala + +/** A base trait of all enum classes */ +trait Enum extends Product with Serializable { + + /** A number uniquely identifying a case of an enum */ + def ordinal: Int +} +``` + +Enum values with `extends` clauses get expanded to anonymous class instances. 
+For instance, the `Venus` value above would be defined like this: + +```scala +val Venus: Planet = + new Planet(4.869E24, 6051800.0) { + def ordinal: Int = 1 + override def toString: String = "Venus" + // internal code to register value + } +``` + +Enum values without `extends` clauses all share a single implementation +that can be instantiated using a private method that takes a tag and a name as arguments. +For instance, the first +definition of value `Color.Red` above would expand to: + +```scala +val Red: Color = $new(0, "Red") +``` + +### Reference + +For more info, see [Issue #1970](https://github.com/lampepfl/dotty/issues/1970) and +[PR #4003](https://github.com/lampepfl/dotty/pull/4003). diff --git a/scala3doc/dotty-docs/docs/docs/reference/features-classification.md b/scala3doc/dotty-docs/docs/docs/reference/features-classification.md new file mode 100644 index 000000000000..1b74cde065c9 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/features-classification.md @@ -0,0 +1,202 @@ +--- +layout: doc-page +title: A Classification of Proposed Language Features +date: April 6, 2019 +author: Martin Odersky +--- + +This document provides an overview of the constructs proposed for Scala 3 with the aim to facilitate the discussion what to include and when to include it. It classifies features into eight groups: (1) essential foundations, (2) simplifications, (3) restrictions, (4) dropped features, (5) changed features, (6) new features, (7) features oriented towards metaprogramming with the aim to replace existing macros, and (8) changes to type checking and inference. + +Each group contains sections classifying the status (i.e. relative importance to be a part of Scala 3, and relative urgency when to decide this) and the migration cost +of the constructs in it. + +The current document reflects the state of things as of April, 2019. It will be updated to reflect any future changes in that status. + +## Essential Foundations + +These new constructs directly model core features of DOT, higher-kinded types, and the [SI calculus for implicit resolution](https://infoscience.epfl.ch/record/229878/files/simplicitly_1.pdf). + + - [Intersection types](new-types/intersection-types.md), replacing compound types, + - [Union types](new-types/union-types.md), + - [Type lambdas](new-types/type-lambdas.md), + replacing encodings using structural types and type projection. + - [Implicit Function Types](contextual/implicit-function-types.md) offering abstraction over given parameters. + +**Status: essential** + +These are essential core features of Scala 3. Without them, Scala 3 would be a completely different language, with different foundations. + +**Migration cost: none to low** + +Since these are additions, there's generally no migration cost for old code. An exception are intersection types which replace compound types with slightly cleaned-up semantics. But few programs would be affected by this change. + +## Simplifications + +These constructs replace existing constructs with the aim of making the language safer and simpler to use, and to promote uniformity in code style. + + - [Trait Parameters](other-new-features/trait-parameters.md) replace [early initializers](dropped-features/early-initializers.md) with a more generally useful construct. + - [Given Instances](contextual/delegates.md) + replace implicit objects and defs, focussing on intent over mechanism. + - [Given Clauses](contextual/given-clauses.md) replace implicit parameters, avoiding their ambiguities. 
+ - [Extension Methods](contextual/extension-methods.md) replace implicit classes with a clearer and simpler mechanism. + - [Opaque Type Aliases](other-new-features/opaques.md) replace most uses + of value classes while guaranteeing absence of boxing. + - [Toplevel definitions](dropped-features/package-objects.md) replace package objects, dropping syntactic boilerplate. + - [Export clauses](other-new-features/export.md) + provide a simple and general way to express aggregation, which can replace the + previous facade pattern of package objects inheriting from classes. + - [Vararg patterns](changed-features/vararg-patterns.md) now use the form `: _*` instead of `@ _*`, mirroring vararg expressions, + - [Creator applications](other-new-features/creator-applications.md) allow using simple function call syntax + instead of `new` expressions. `new` expressions stay around as a fallback for + the cases where creator applications cannot be used. + +With the exception of early initializers and old-style vararg patterns, all superseded constructs continue to be available in Scala 3.0. The plan is to deprecate and phase them out later. + +Value classes (superseded by opaque type aliases) are a special case. There are currently no deprecation plans for value classes, since we might want to bring them back in a more general form if they are supported natively by the JVM as is planned by project Valhalla. + +**Status: bimodal: now or never / can delay** + +These are essential simplifications. If we decide to adopt them, we should do it for 3.0. Otherwise we are faced with the awkward situation that the Scala 3 documentation has to describe an old feature that will be replaced or superseded by a simpler one in the future. + +On the other hand, we need to decide now only about the new features in this list. The decision to drop the superseded features can be delayed. Of course, adopting a new feature without deciding to drop the superseded feature will make the language larger. + +**Migration cost: moderate** + +For the next several versions, old features will remain available and deprecation and rewrite techniques can make any migration effort low and gradual. + + +## Restrictions + +These constructs are restricted to make the language safer. + + - [Implicit Conversions](contextual/conversions.md): there is only one way to define implicit conversions instead of many, and potentially surprising implicit conversions require a language import. + - [Given Imports](contextual/import-delegate.md): implicits now require a special form of import, to make the import clearly visible. + - [Type Projection](dropped-features/type-projection.md): only classes can be used as prefix `C` of a type projection `C#A`. Type projection on abstract types is no longer supported since it is unsound. + - [Multiversal Equality](contextual/multiversal-equality.md) implements an "opt-in" scheme to rule out nonsensical comparisons with `==` and `!=`. + - [@infix and @alpha](https://github.com/lampepfl/dotty/pull/5975) + make method application syntax uniform across code bases and require alphanumeric aliases for all symbolic names (proposed, not implemented). + +Unrestricted implicit conversions continue to be available in Scala 3.0, but will be deprecated and removed later. Unrestricted versions of the other constructs in the list above are available only under `-source 3.0-migration`. + +**Status: now or never** + +These are essential restrictions. If we decide to adopt them, we should do it for 3.0. 
Otherwise we are faced with the awkward situation that the Scala 3 documentation has to describe a feature that will be restricted in the future. + +**Migration cost: low to high** + + - _low_: multiversal equality rules out code that is nonsensical, so any rewrites required by its adoption should be classified as bug fixes. + - _moderate_: Restrictions to implicits can be accommodated by straightforward rewriting. + - _high_: Unrestricted type projection cannot always rewritten directly since it is unsound in general. + +## Dropped Constructs + +These constructs are proposed to be dropped without a new construct replacing them. The motivation for dropping these constructs is to simplify the language and its implementation. + + - [DelayedInit](dropped-features/delayed-init.md), + - [Existential types](dropped-features/existential-types.md), + - [Procedure syntax](dropped-features/procedure-syntax.md), + - [Class shadowing](dropped-features/class-shadowing.md), + - [XML literals](dropped-features/xml.md), + - [Symbol literals](dropped-features/symlits.md), + - [Auto application](dropped-features/auto-apply.md), + - [Weak conformance](dropped-features/weak-conformance.md), + - [Compound types](new-types/intersection-types.md), + - [Auto tupling](https://github.com/lampepfl/dotty/pull/4311) (implemented, but not merged). + +The date when these constructs are dropped varies. The current status is: + + - Not implemented at all: + - DelayedInit, existential types, weak conformance. + - Supported under `-source 3.0-migration`: + - procedure syntax, class shadowing, symbol literals, auto application, auto tupling in a restricted form. + - Supported in 3.0, to be deprecated and phased out later: + - XML literals, compound types. + +**Status: mixed** + +Currently unimplemented features would require considerable implementation effort which would in most cases make the compiler more buggy and fragile and harder to understand. If we do not decide to drop them, they will probably show up as "not yet implemented" in the Scala 3.0 release. + +Currently implemented features could stay around indefinitely. Updated docs may simply ignore them, in the expectation that they might go away eventually. So the decision about their removal can be delayed. + +**Migration cost: moderate to high** + +Dropped features require rewrites to avoid their use in programs. These rewrites can sometimes be automatic (e.g. for procedure syntax, symbol literals, auto application) +and sometimes need to be manual (e.g. class shadowing, auto tupling). Sometimes the rewrites would have to be non-local, affecting use sites as well as definition sites (e.g., in the case of DelayedInit, unless we find a solution). + +## Changes + +These constructs have undergone changes to make them more regular and useful. + + - [Structural Types](changed-features/structural-types.md): They now allow pluggable implementations, which greatly increases their usefulness. Some usage patterns are restricted compared to the status quo. + - [Name-based pattern matching](changed-features/pattern-matching.md): The existing undocumented Scala 2 implementation has been codified in a slightly simplified form. + - [Eta expansion](changed-features/eta-expansion.md) is now performed universally also in the absence of an expected type. The postfix `_` operator is thus made redundant. It will be deprecated and dropped after Scala 3.0. 
+ - [Implicit Resolution](changed-features/implicit-resolution.md): The implicit resolution rules have been cleaned up to make them more useful and less surprising. Implicit scope is restricted to no longer include package prefixes. + +Most aspects of old-style implicit resolution are still available under `-source 3.0-migration`. The other changes in this list are applied unconditionally. + +**Status: strongly advisable** + +The features have been implemented in their new form in Scala 3.0's compiler. They provide clear improvements in simplicity and functionality compared to the status quo. Going back would require significant implementation effort for a net loss of functionality. + +**Migration cost: low to high** + +Only a few programs should require changes, but some necessary changes might be non-local (as in the case of restrictions to implicit scope). + +## New Constructs + +These are additions to the language that make it more powerful or pleasant to use. + + - [Enums](enums/enums.md) provide concise syntax for enumerations and [algebraic data types](enums/adts.md). + - [Parameter Untupling](other-new-features/parameter-untupling.md) avoids having to use `case` for tupled parameter destructuring. + - [Dependent Function Types](new-types/dependent-function-types.md) generalize dependent methods to dependent function values and types. + - [Polymorphic Function Types](https://github.com/lampepfl/dotty/pull/4672) generalize polymorphic methods to dependent function values and types. _Current status_: There is a proposal, and a prototype implementation, but the implementation has not been finalized or merged yet. + - [Kind Polymorphism](other-new-features/kind-polymorphism.md) allows the definition of operators working equally on types and type constructors. + +**Status: mixed** + +Enums offer an essential simplification of fundamental use patterns, so they should be adopted for Scala 3.0. Auto-parameter tupling is a very small change that removes some awkwardness, so it might as well be adopted now. The other features constitute more specialized functionality which could be introduced in later versions. On the other hand, except for polymorphic function types they are all fully implemented, so if the Scala 3.0 spec does not include them, they might be still made available under a language flag. + +**Migration cost: none** + +Being new features, existing code migrates without changes. To be sure, sometimes it would be attractive to rewrite code to make use of the new features in order to increase clarity and conciseness. + +## Metaprogramming + +The following constructs together aim to put metaprogramming in Scala on a new basis. So far, metaprogramming was achieved by a combination of macros and libraries such as Shapeless that were in turn based on some key macros. Current Scala 2 macro mechanisms are a thin veneer on top the current Scala 2 compiler, which makes them fragile and in many cases impossible to port to Scala 3. + +It's worth noting that macros were never included in the Scala 2 language specification and were so far made available only under an `-experimental` flag. This has not prevented their widespread usage. + +To enable porting most uses of macros, we are experimenting with the advanced language constructs listed below. These designs are more provisional than the rest of the proposed language constructs for Scala 3.0. There might still be some changes until the final release. Stabilizing the feature set needed for metaprogramming is our first priority. 
+ +- [Match Types](new-types/match-types.md) allow computation on types. +- [Inline](metaprogramming/inline.md) provides +by itself a straightforward implementation of some simple macros and is at the same time an essential building block for the implementation of complex macros. +- [Quotes and Splices](metaprogramming/macros.md) provide a principled way to express macros and staging with a unified set of abstractions. +- [Type class derivation](contextual/derivation.md) provides an in-language implementation of the `Gen` macro in Shapeless and other foundational libraries. The new implementation is more robust, efficient and easier to use than the macro. +- [Implicit by-name parameters](contextual/implicit-by-name-parameters.md) provide a more robust in-language implementation of the `Lazy` macro in Shapeless. + +**Status: not yet settled** + +We know we need a practical replacement for current macros. The features listed above are very promising in that respect, but we need more complete implementations and more use cases to reach a final verdict. + +**Migration cost: very high** + +Existing macro libraries will have to be rewritten from the ground up. In many cases the rewritten libraries will turn out to be simpler and more robust than the old ones, but that does not relieve one of the cost of the rewrites. It's currently unclear to what degree users of macro libraries will be affected. We aim to provide sufficient functionality so that core macros can be re-implemented fully, but given the vast feature set of the various macro extensions to Scala 2 it is difficult to arrive at a workable limitation of scope. + +## Changes to Type Checking and Inference + +The Scala 3 compiler uses a new algorithm for type inference, which relies on +a general subtype constraint solver. The new algorithm often +[works better than the old](https://contributors.scala-lang.org/t/better-type-inference-for-scala-send-us-your-problematic-cases/2410), but there are inevitably situations where the results of both algorithms differ, leading to errors diagnosed by Scala 3 for programs that the Scala 2 compiler accepts. + +**Status: essential** + +The new type-checking and inference algorithms are the essential core of the new compiler. They cannot be reverted without dropping the whole implementation of Scala 3. + +**Migration cost: high** + +Some existing programs will break and, given the complex nature of type inference, it will not always be clear what change caused the breakage and how to fix it. + +In our experience, macros and changes in type and implicit argument inference together cause the large majority of problems encountered when porting existing code to Scala 3. The latter source of problems could be addressed systematically by a tool that added all inferred types and implicit arguments to a Scala 2 source code file. Most likely such a tool would be implemented as a Scala 2 compiler plugin. The resulting code would have a greatly increased likelihood to compile under Scala 3, but would often be bulky to the point of being unreadable. A second part of the rewriting tool should then selectively and iteratively remove type and implicit annotations that were synthesized by the first part as long as they compile under Scala 3. This second part could be implemented as a program that invokes the Scala 3 compiler `dotc` programmatically. + +Several people have proposed such a tool for some time now. I believe it is time we find the will and the resources to actually implement it. 
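To make that second rewriting step a little more concrete, here is a minimal sketch (such a tool does not exist yet): it assumes only the `dotty.tools.dotc.Main.process` entry point for invoking the compiler programmatically, and it models the annotations synthesized by the first step as hypothetical textual removals.

```scala
import java.nio.file.{Files, Path}
import dotty.tools.dotc.Main

object InferenceShrinker {
  // Compile one file with the Scala 3 compiler and report success.
  // The classpath required by the file is supplied by the caller.
  def compilesWithScala3(file: Path, classpath: String): Boolean = {
    val reporter = Main.process(Array("-classpath", classpath, file.toString))
    !reporter.hasErrors
  }

  // Try to undo, one by one, the annotations inserted by the first phase
  // (modelled here as simple text rewrites). Keep a removal only if the
  // file still compiles without it.
  def shrink(file: Path, candidateRemovals: List[String => String], classpath: String): Unit =
    for (removal <- candidateRemovals) {
      val original = Files.readString(file)
      Files.writeString(file, removal(original))
      if (!compilesWithScala3(file, classpath))
        Files.writeString(file, original) // this annotation is still needed; revert
    }
}
```

Each candidate removal is kept only if the file still compiles, so the process converges on the smallest set of synthesized annotations that Scala 3 actually needs.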
\ No newline at end of file diff --git a/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/erased-terms-spec.md b/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/erased-terms-spec.md new file mode 100644 index 000000000000..639ab241b177 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/erased-terms-spec.md @@ -0,0 +1,63 @@ +--- +layout: doc-page +title: "Erased Terms Spec" +--- + +# Implementation + +## Rules + +1. The `erased` modifier can appear: + * At the start of a parameter block of a method, function or class + * In a method definition + * In a `val` definition (but not `lazy val` or `var`) + + ```scala + erased val x = ... + erased def f = ... + + def g(erased x: Int) = ... + + (erased x: Int) => ... + def h(x: (erased Int) => Int) = ... + + class K(erased x: Int) { ... } + ``` + + +2. A reference to an `erased` definition can only be used + * Inside the expression of argument to an `erased` parameter + * Inside the body of an `erased` `val` or `def` + + +3. Functions + * `(erased x1: T1, x2: T2, ..., xN: TN) => y : (erased T1, T2, ..., TN) => R` + * `(given erased x1: T1, x2: T2, ..., xN: TN) => y as (given erased T1, T2, ..., TN) => R` + * `(given erased T1) => R <:< erased T1 => R` + * `(given erased T1, T2) => R <:< (erased T1, T2) => R` + * ... + + Note that there is no subtype relation between `(erased T) => R` and `T => R` (or `(given erased T) => R` and `(given T) => R`) + + +4. Eta expansion + + if `def f(erased x: T): U` then `f: (erased T) => U`. + + +5. Erasure Semantics + * All `erased` parameters are removed from the function + * All argument to `erased` parameters are not passed to the function + * All `erased` definitions are removed + * All `(erased T1, T2, ..., TN) => R` and `(given erased T1, T2, ..., TN) => R` become `() => R` + + +6. Overloading + + Method with `erased` parameters will follow the normal overloading constraints after erasure. + + +7. Overriding + * Member definitions overriding each other must both be `erased` or not be `erased` + * `def foo(x: T): U` cannot be overridden by `def foo(erased x: T): U` and vice-versa + diff --git a/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/erased-terms.md b/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/erased-terms.md new file mode 100644 index 000000000000..2313538c570a --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/erased-terms.md @@ -0,0 +1,199 @@ +--- +layout: doc-page +title: "Erased Terms" +--- + +# Why erased terms? + +Let's describe the motivation behind erased terms with an example. In the +following we show a simple state machine which can be in a state `On` or `Off`. +The machine can change state from `Off` to `On` with `turnedOn` only if it is +currently `Off`. This last constraint is captured with the `IsOff[S]` implicit +evidence which only exists for `IsOff[Off]`. For example, not allowing calling +`turnedOn` on in an `On` state as we would require an evidence of type +`IsOff[On]` that will not be found. 
+
+```scala
+import scala.annotation.implicitNotFound
+
+sealed trait State
+final class On extends State
+final class Off extends State
+
+@implicitNotFound("State must be Off")
+class IsOff[S <: State]
+object IsOff {
+  implicit def isOff: IsOff[Off] = new IsOff[Off]
+}
+
+class Machine[S <: State] {
+  def turnedOn(implicit ev: IsOff[S]): Machine[On] = new Machine[On]
+}
+
+val m = new Machine[Off]
+m.turnedOn
+m.turnedOn.turnedOn // ERROR
+//         ^
+//         State must be Off
+```
+
+Note that in the code above the actual context arguments for `IsOff` are never
+used at runtime; they serve only to establish the right constraints at compile
+time. As these terms are never used at runtime there is no real need to have
+them around, but they still need to be present in some form in the generated
+code to be able to do separate compilation and retain binary compatibility. We
+introduce _erased terms_ to overcome this limitation: we are able to enforce the
+right constraints on terms at compile time. These terms have no runtime
+semantics and they are completely erased.
+
+# How to define erased terms?
+
+Parameters of methods and functions can be declared as erased, placing `erased`
+in front of a parameter list (like `given`).
+
+```scala
+def methodWithErasedEv(erased ev: Ev): Int = 42
+
+val lambdaWithErasedEv: erased Ev => Int =
+  (erased ev: Ev) => 42
+```
+
+`erased` parameters will not be usable for computations, though they can be used
+as arguments to other `erased` parameters.
+
+```scala
+def methodWithErasedInt1(erased i: Int): Int =
+  i + 42 // ERROR: cannot use i
+
+def methodWithErasedInt2(erased i: Int): Int =
+  methodWithErasedInt1(i) // OK
+```
+
+Not only parameters can be marked as erased; `val` and `def` definitions can also
+be marked with `erased`. These will also only be usable as arguments to `erased`
+parameters.
+
+```scala
+erased val erasedEvidence: Ev = ...
+methodWithErasedEv(erasedEvidence)
+```
+
+# What happens with erased values at runtime?
+
+As `erased` terms are guaranteed not to be used in computations, they can and
+will be erased.
+
+```scala
+// becomes def methodWithErasedEv(): Int at runtime
+def methodWithErasedEv(erased ev: Ev): Int = ...
+
+def evidence1: Ev = ...
+erased def erasedEvidence2: Ev = ... // does not exist at runtime
+erased val erasedEvidence3: Ev = ... // does not exist at runtime
+
+// evidence1 is not evaluated and no value is passed to methodWithErasedEv
+methodWithErasedEv(evidence1)
+```
+
+# State machine with erased evidence example
+
+The following example is an extended implementation of a simple state machine
+which can be in a state `On` or `Off`. The machine can change state from `Off`
+to `On` with `turnedOn` only if it is currently `Off`, and conversely from `On`
+to `Off` with `turnedOff` only if it is currently `On`. These last constraints
+are captured with the `IsOff[S]` and `IsOn[S]` given evidences, which only exist
+for `IsOff[Off]` and `IsOn[On]`. For example, calling `turnedOff` in an `Off`
+state is not allowed, as it would require an evidence `IsOn[Off]` that will not
+be found.
+
+As the given evidences of `turnedOn` and `turnedOff` are not used in the
+bodies of those functions, we can mark them as `erased`. This will remove the
+evidence parameters at runtime, but we would still evaluate the `isOn` and
+`isOff` givens that were found as arguments. As `isOn` and `isOff` are not
+used except as `erased` arguments, we can mark them as `erased`, hence removing
+the evaluation of the `isOn` and `isOff` evidences.
+ +```scala +import scala.annotation.implicitNotFound + +sealed trait State +final class On extends State +final class Off extends State + +@implicitNotFound("State must be Off") +class IsOff[S <: State] +object IsOff { + // will not be called at runtime for turnedOn, the compiler will only require that this evidence exists + given IsOff[Off] = new IsOff[Off] +} + +@implicitNotFound("State must be On") +class IsOn[S <: State] +object IsOn { + // will not exist at runtime, the compiler will only require that this evidence exists at compile time + erased given IsOn[On] = new IsOn[On] +} + +class Machine[S <: State] private { + // ev will disappear from both functions + def turnedOn(using erased ev: IsOff[S]): Machine[On] = new Machine[On] + def turnedOff(using erased ev: IsOn[S]): Machine[Off] = new Machine[Off] +} + +object Machine { + def newMachine(): Machine[Off] = new Machine[Off] +} + +object Test { + def main(args: Array[String]): Unit = { + val m = Machine.newMachine() + m.turnedOn + m.turnedOn.turnedOff + + // m.turnedOff + // ^ + // State must be On + + // m.turnedOn.turnedOn + // ^ + // State must be Off + } +} +``` + +Note that in [Inline](./inline.md) we discussed `erasedValue` and inline +matches. `erasedValue` is implemented with `erased`, so the state machine above +can be encoded as follows: + +```scala +import scala.compiletime._ + +sealed trait State +final class On extends State +final class Off extends State + +class Machine[S <: State] { + transparent inline def turnOn(): Machine[On] = inline erasedValue[S] match { + case _: Off => new Machine[On] + case _: On => error("Turning on an already turned on machine") + } + transparent inline def turnOff(): Machine[Off] = inline erasedValue[S] match { + case _: On => new Machine[Off] + case _: Off => error("Turning off an already turned off machine") + } +} + +object Machine { + def newMachine(): Machine[Off] = { + println("newMachine") + new Machine[Off] + } +} + +object Test { + val m = Machine.newMachine() + m.turnOn() + m.turnOn().turnOff() + m.turnOn().turnOn() // error: Turning on an already turned on machine +} +``` + +[More Details](./erased-terms-spec.md) diff --git a/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/inline.md b/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/inline.md new file mode 100644 index 000000000000..998da914f615 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/inline.md @@ -0,0 +1,622 @@ +--- +layout: doc-page +title: Inline +--- + +## Inline Definitions + +`inline` is a new [soft modifier](../soft-modifier.md) that guarantees that a +definition will be inlined at the point of use. Example: + +```scala +object Config { + inline val logging = false +} + +object Logger { + + private var indent = 0 + + inline def log[T](msg: String, indentMargin: =>Int)(op: => T): T = + if (Config.logging) { + println(s"${" " * indent}start $msg") + indent += indentMargin + val result = op + indent -= indentMargin + println(s"${" " * indent}$msg = $result") + result + } + else op +} +``` + +The `Config` object contains a definition of the **inline value** `logging`. +This means that `logging` is treated as a _constant value_, equivalent to its +right-hand side `false`. The right-hand side of such an `inline val` must itself +be a [constant expression](https://scala-lang.org/files/archive/spec/2.13/06-expressions.html#constant-expressions). +Used in this way, `inline` is equivalent to Java and Scala 2's `final`. 
Note that `final`, meaning +_inlined constant_, is still supported in Dotty, but will be phased out. + +The `Logger` object contains a definition of the **inline method** `log`. This +method will always be inlined at the point of call. + +In the inlined code, an `if-then-else` with a constant condition will be rewritten +to its `then`- or `else`-part. Consequently, in the `log` method above the +`if (Config.logging)` with `Config.logging == true` will get rewritten into its +`then`-part. + +Here's an example: + +```scala +var indentSetting = 2 + +def factorial(n: BigInt): BigInt = { + log(s"factorial($n)", indentSetting) { + if (n == 0) 1 + else n * factorial(n - 1) + } +} +``` + +If `Config.logging == false`, this will be rewritten (simplified) to: + +```scala +def factorial(n: BigInt): BigInt = { + if (n == 0) 1 + else n * factorial(n - 1) +} +``` + +As you notice, since neither `msg` or `indentMargin` were used, they do not +appear in the generated code for `factorial`. Also note the body of our `log` +method: the `else-` part reduces to just an `op`. In the generated code we do +not generate any closures because we only refer to a by-name parameter *once*. +Consequently, the code was inlined directly and the call was beta-reduced. + +In the `true` case the code will be rewritten to: + +```scala +def factorial(n: BigInt): BigInt = { + val msg = s"factorial($n)" + println(s"${" " * indent}start $msg") + Logger.inline$indent_=(indent.+(indentSetting)) + val result = + if (n == 0) 1 + else n * factorial(n - 1) + Logger.inline$indent_=(indent.-(indentSetting)) + println(s"${" " * indent}$msg = $result") + result +} +``` + +Note, that the by-value parameter `msg` is evaluated only once, per the usual Scala +semantics, by binding the value and reusing the `msg` through the body of +`factorial`. Also, note the special handling of the assignment to the private var +`indent`. It is achieved by generating a setter method `def inline$indent_=` and calling it instead. + +### Recursive Inline Methods + +Inline methods can be recursive. For instance, when called with a constant +exponent `n`, the following method for `power` will be implemented by +straight inline code without any loop or recursion. + +```scala +inline def power(x: Double, n: Int): Double = { + if (n == 0) 1.0 + else if (n == 1) x + else { + val y = power(x, n / 2) + if (n % 2 == 0) y * y else y * y * x + } +} + +power(expr, 10) +// translates to +// +// val x = expr +// val y1 = x * x // ^2 +// val y2 = y1 * y1 // ^4 +// val y3 = y2 * x // ^5 +// y3 * y3 // ^10 +``` + +Parameters of inline methods can have an `inline` modifier as well. This means +that actual arguments to these parameters will be inlined in the body of the +`inline def`. `inline` parameters have call semantics equivalent to by-name parameters +but allow for duplication of the code in the argument. It is usually useful when constant +values need to be propagated to allow further optimizations/reductions. 
+ +The following example shows the difference in translation between by-value, by-name and `inline` +parameters: + +```scala +inline def funkyAssertEquals(actual: Double, expected: =>Double, inline delta: Double): Unit = + if (actual - expected).abs > delta then + throw new AssertionError(s"difference between ${expected} and ${actual} was larger than ${delta}") + +funkyAssertEquals(computeActual(), computeExpected(), computeDelta()) +// translates to +// +// val actual = computeActual() +// def expected = computeExpected() +// if (actual - expected).abs > computeDelta() then +// throw new AssertionError(s"difference between ${expected} and ${actual} was larger than ${computeDelta()}") +``` +### Rules for Overriding + +Inline methods can override other non-inline methods. The rules are as follows: + +1. If an inline method `f` implements or overrides another, non-inline method, the inline method can also be invoked at runtime. For instance, consider the scenario: + ```scala + abstract class A { + def f(): Int + def g(): Int = f() + } + class B extends A { + inline def f() = 22 + override inline def g() = f() + 11 + } + val b = B() + val a: A = b + // inlined invocatons + assert(b.f() == 22) + assert(b.g() == 33) + // dynamic invocations + assert(a.f() == 22) + assert(a.g() == 33) + ``` + The inlined invocations and the dynamically dispatched invocations give the same results. + +2. Inline methods are effectively final. + +3. Inline methods can also be abstract. An abstract inline method can be implemented only by other inline methods. It cannot be invoked directly: + ```scala + abstract class A { + inline def f(): Int + } + object B extends A { + inline def f(): Int = 22 + } + B.f() // OK + val a: A = B + a.f() // error: cannot inline f() in A. + ``` + +### Relationship to @inline + +Scala also defines a `@inline` annotation which is used as a hint +for the backend to inline. The `inline` modifier is a more powerful +option: Expansion is guaranteed instead of best effort, +it happens in the frontend instead of in the backend, and it also applies +to recursive methods. + +To cross compile between both Dotty and Scalac, we introduce a new `@forceInline` +annotation which is equivalent to the new `inline` modifier. Note that +Scala 2 ignores the `@forceInline` annotation, so one must use both +annotations to guarantee inlining for Dotty and at the same time hint inlining +for Scala 2 (i.e. `@forceInline @inline`). + + + +#### The definition of constant expression + +Right-hand sides of inline values and of arguments for inline parameters must be +constant expressions in the sense defined by the [SLS § +6.24](https://www.scala-lang.org/files/archive/spec/2.12/06-expressions.html#constant-expressions), +including _platform-specific_ extensions such as constant folding of pure +numeric computations. + +An inline value must have a literal type such as `1` or `true`. +```scala +inline val four = 4 +// equivalent to +inline val four: 4 = 4 +``` + +It is also possible to have inline vals of types that do not have a syntax, such as `Short(4)`. + +```scala +trait InlineConstants { + inline val myShort: Short +} + +object Constants extends InlineConstants { + inline val myShort/*: Short(4)*/ = 4 +} +``` + +## Transparent Inline Methods + +Inline methods can additionally be declared `transparent`. +This means that the return type of the inline method can be +specialized to a more precise type upon expansion. 
Example: + +```scala +class A +class B extends A { + def m() = true +} + +transparent inline def choose(b: Boolean): A = + if b then new A() else new B() + +val obj1 = choose(true) // static type is A +val obj2 = choose(false) // static type is B + +// obj1.m() // compile-time error: `m` is not defined on `A` +obj2.m() // OK +``` +Here, the inline method `choose` returns an instance of either of the two types `A` or `B`. +If `choose` had not been declared to be `transparent`, the result +of its expansion would always be of type `A`, even though the computed value might be of the subtype `B`. +The inline method is a "blackbox" in the sense that details of its implementation do not leak out. +But if a `transparent` modifier is given, the expansion is the type of the expanded body. If the argument `b` +is `true`, that type is `A`, otherwise it is `B`. Consequently, calling `m` on `obj2` +type-checks since `obj2` has the same type as the expansion of `choose(false)`, which is `B`. +Transparent inline methods are "whitebox" in the sense that the type +of an application of such a method can be more specialized than its declared +return type, depending on how the method expands. + +In the following example, we see how the return type of `zero` is specialized to +the singleton type `0` permitting the addition to be ascribed with the correct +type `1`. + +```scala +transparent inline def zero(): Int = 0 + +val one: 1 = zero() + 1 +``` + +## Inline Conditionals + +If the condition of an if-then-else expressions is a constant expression then it simplifies to +the selected branch. Prefixing an if-then-else expression with `inline` enforces that +the condition has to be a constant expression, and thus guarantees that the conditional will always +simplify. + +Example: + +```scala +inline def update(delta: Int) = + inline if (delta >= 0) increaseBy(delta) + else decreaseBy(-delta) +``` +A call `update(22)` would rewrite to `increaseBy(22)`. But if `update` was called with +a value that was not a compile-time constant, we would get a compile time error like the one +below: + +```scala + | inline if (delta >= 0) ??? + | ^ + | cannot reduce inline if + | its condition + | delta >= 0 + | is not a constant value + | This location is in code that was inlined at ... +``` + +## Inline Matches + +A `match` expression in the body of an `inline` method definition may be +prefixed by the `inline` modifier. If there is enough static information to +unambiguously take a branch, the expression is reduced to that branch and the +type of the result is taken. If not, a compile-time error is raised that +reports that the match cannot be reduced. + +The example below defines an inline method with a +single inline match expression that picks a case based on its static type: + +```scala +transparent inline def g(x: Any): Any = inline x match { + case x: String => (x, x) // Tuple2[String, String](x, x) + case x: Double => x +} + +g(1.0d) // Has type 1.0d which is a subtype of Double +g("test") // Has type (String, String) +``` + +The scrutinee `x` is examined statically and the inline match is reduced +accordingly returning the corresponding value (with the type specialized because `g` is declared `transparent`). This example performs a simple type test over the +scrutinee. The type can have a richer structure like the simple ADT below. +`toInt` matches the structure of a number in [Church-encoding](https://en.wikipedia.org/wiki/Church_encoding) +and _computes_ the corresponding integer. 
+ +```scala +trait Nat +case object Zero extends Nat +case class Succ[N <: Nat](n: N) extends Nat + +transparent inline def toInt(n: Nat): Int = inline n match { + case Zero => 0 + case Succ(n1) => toInt(n1) + 1 +} + +final val natTwo = toInt(Succ(Succ(Zero))) +val intTwo: 2 = natTwo +``` + +`natTwo` is inferred to have the singleton type 2. + +## The `scala.compiletime` Package + +The `scala.compiletime` package contains helper definitions that provide support for compile time operations over values. They are described in the following. + +### `constValue`, `constValueOpt`, and the `S` combinator + +`constValue` is a function that produces the constant value represented by a +type. + +```scala +import scala.compiletime.{constValue, S} + +transparent inline def toIntC[N]: Int = + inline constValue[N] match { + case 0 => 0 + case _: S[n1] => 1 + toIntC[n1] + } + +final val ctwo = toIntC[2] +``` + +`constValueOpt` is the same as `constValue`, however returning an `Option[T]` +enabling us to handle situations where a value is not present. Note that `S` is +the type of the successor of some singleton type. For example the type `S[1]` is +the singleton type `2`. + +### `erasedValue` + +So far we have seen inline methods that take terms (tuples and integers) as +parameters. What if we want to base case distinctions on types instead? For +instance, one would like to be able to write a function `defaultValue`, that, +given a type `T`, returns optionally the default value of `T`, if it exists. +We can already express this using rewrite match expressions and a simple +helper function, `scala.compiletime.erasedValue`, which is defined as follows: + +```scala +erased def erasedValue[T]: T = ??? +``` + +The `erasedValue` function _pretends_ to return a value of its type argument +`T`. In fact, it would always raise a `NotImplementedError` exception when +called. But the function can in fact never be called, since it is declared +`erased`, so can only be used at compile-time during type checking. + +Using `erasedValue`, we can then define `defaultValue` as follows: + +```scala +import scala.compiletime.erasedValue + +inline def defaultValue[T] = inline erasedValue[T] match { + case _: Byte => Some(0: Byte) + case _: Char => Some(0: Char) + case _: Short => Some(0: Short) + case _: Int => Some(0) + case _: Long => Some(0L) + case _: Float => Some(0.0f) + case _: Double => Some(0.0d) + case _: Boolean => Some(false) + case _: Unit => Some(()) + case _ => None +} +``` + +Then: +```scala + val dInt: Some[Int] = defaultValue[Int] + val dDouble: Some[Double] = defaultValue[Double] + val dBoolean: Some[Boolean] = defaultValue[Boolean] + val dAny: None.type = defaultValue[Any] +``` + +As another example, consider the type-level version of `toInt` below: +given a _type_ representing a Peano number, +return the integer _value_ corresponding to it. +Consider the definitions of numbers as in the _Inline +Match_ section above. Here is how `toIntT` can be defined: + +```scala +transparent inline def toIntT[N <: Nat]: Int = + inline scala.compiletime.erasedValue[N] match { + case _: Zero.type => 0 + case _: Succ[n] => toIntT[n] + 1 + } + +final val two = toIntT[Succ[Succ[Zero.type]]] +``` + +`erasedValue` is an `erased` method so it cannot be used and has no runtime +behavior. Since `toIntT` performs static checks over the static type of `N` we +can safely use it to scrutinize its return type (`S[S[Z]]` in this case). 
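As one more illustration of the same pattern (a small sketch, not part of the standard library or the original docs), an inline match over `erasedValue` can pick a constant from the static type alone; the helper `byteWidth` below is hypothetical:

```scala
import scala.compiletime.erasedValue

// Sketch: pick a constant per primitive type at compile time. For any other
// type argument the inline match cannot be reduced and compilation fails.
inline def byteWidth[T]: Int = inline erasedValue[T] match {
  case _: Byte  => 1
  case _: Short => 2
  case _: Int   => 4
  case _: Long  => 8
}

val intWidth: Int = byteWidth[Int]   // reduces to 4 at compile time
val longWidth: Int = byteWidth[Long] // reduces to 8 at compile time
```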
+ +### `error` + +The `error` method is used to produce user-defined compile errors during inline expansion. +It has the following signature: + +```scala +inline def error(inline msg: String): Nothing +``` + +If an inline expansion results in a call `error(msgStr)` the compiler +produces an error message containing the given `msgStr`. + +```scala +import scala.compiletime.{error, code} + +inline def fail() = { + error("failed for a reason") +} +fail() // error: failed for a reason +``` + +or + +```scala +inline def fail(p1: => Any) = { + error(code"failed on: $p1") +} +fail(identity("foo")) // error: failed on: identity("foo") +``` + +### The `scala.compiletime.ops` package + +The `scala.compiletime.ops` package contains types that provide support for +primitive operations on singleton types. For example, +`scala.compiletime.ops.int.*` provides support for multiplying two singleton +`Int` types, and `scala.compiletime.ops.boolean.&&` for the conjunction of two +`Boolean` types. When all arguments to a type in `scala.compiletime.ops` are +singleton types, the compiler can evaluate the result of the operation. + +```scala +import scala.compiletime.ops.int._ +import scala.compiletime.ops.boolean._ + +val conjunction: true && true = true +val multiplication: 3 * 5 = 15 +``` + +Many of these singleton operation types are meant to be used infix (as in [SLS § +3.2.8](https://www.scala-lang.org/files/archive/spec/2.12/03-types.html#infix-types)), +and are annotated with [`@infix`](scala.annotation.infix) accordingly. + +Since type aliases have the same precedence rules as their term-level +equivalents, the operations compose with the expected precedence rules: + +```scala +import scala.compiletime.ops.int._ +val x: 1 + 2 * 3 = 7 +``` + +The operation types are located in packages named after the type of the +left-hand side parameter: for instance, `scala.compiletime.ops.int.+` represents +addition of two numbers, while `scala.compiletime.ops.string.+` represents string +concatenation. To use both and distinguish the two types from each other, a +match type can dispatch to the correct implementation: + +```scala +import scala.compiletime.ops._ +import scala.annotation.infix + +@infix type +[X <: Int | String, Y <: Int | String] = (X, Y) match { + case (Int, Int) => int.+[X, Y] + case (String, String) => string.+[X, Y] +} + +val concat: "a" + "b" = "ab" +val addition: 1 + 1 = 2 +``` + +## Summoning Implicits Selectively + +It is foreseen that many areas of typelevel programming can be done with rewrite +methods instead of implicits. But sometimes implicits are unavoidable. The +problem so far was that the Prolog-like programming style of implicit search +becomes viral: Once some construct depends on implicit search it has to be +written as a logic program itself. Consider for instance the problem of creating +a `TreeSet[T]` or a `HashSet[T]` depending on whether `T` has an `Ordering` or +not. We can create a set of implicit definitions like this: + +```scala +trait SetFor[T, S <: Set[T]] +class LowPriority { + implicit def hashSetFor[T]: SetFor[T, HashSet[T]] = ... +} +object SetsFor extends LowPriority { + implicit def treeSetFor[T: Ordering]: SetFor[T, TreeSet[T]] = ... +} +``` + +Clearly, this is not pretty. Besides all the usual indirection of implicit +search, we face the problem of rule prioritization where we have to ensure that +`treeSetFor` takes priority over `hashSetFor` if the element type has an +ordering. 
This is solved (clumsily) by putting `hashSetFor` in a superclass +`LowPriority` of the object `SetsFor` where `treeSetFor` is defined. Maybe the +boilerplate would still be acceptable if the crufty code could be contained. +However, this is not the case. Every user of the abstraction has to be +parameterized itself with a `SetFor` implicit. Considering the simple task _"I +want a `TreeSet[T]` if `T` has an ordering and a `HashSet[T]` otherwise"_, this +seems like a lot of ceremony. + +There are some proposals to improve the situation in specific areas, for +instance by allowing more elaborate schemes to specify priorities. But they all +keep the viral nature of implicit search programs based on logic programming. + +By contrast, the new `summonFrom` construct makes implicit search available +in a functional context. To solve the problem of creating the right set, one +would use it as follows: +```scala +import scala.compiletime.summonFrom + +inline def setFor[T]: Set[T] = summonFrom { + case given ord: Ordering[T] => new TreeSet[T] + case _ => new HashSet[T] +} +``` +A `summonFrom` call takes a pattern matching closure as argument. All patterns +in the closure are type ascriptions of the form `identifier : Type`. + +Patterns are tried in sequence. The first case with a pattern `x: T` such that +an implicit value of type `T` can be summoned is chosen. If the pattern is prefixed +with `given`, the variable `x` is bound to the implicit value for the remainder of the case. It can in turn be used as an implicit in the right hand side of the case. It is an error if one of the tested patterns gives rise to an ambiguous implicit search. + +`summonFrom` applications must be reduced at compile time. + +Consequently, if we summon an `Ordering[String]` the code above will return a +new instance of `TreeSet[String]`. + +```scala +summon[Ordering[String]] + +println(setFor[String].getClass) // prints class scala.collection.immutable.TreeSet +``` + +**Note** `summonFrom` applications can raise ambiguity errors. Consider the following +code with two implicit values in scope of type `A`. The pattern match in `f` will raise +an ambiguity error of `f` is applied. + +```scala +class A +implicit val a1: A = new A +implicit val a2: A = new A + +inline def f: Any = summonFrom { + case given _: A => ??? // error: ambiguous implicits +} +``` + +## `summonInline` + +The shorthand `summonInline` provides a simple way to write a `summon` that is delayed until the call is inlined. +```scala +transparent inline def summonInline[T]: T = summonFrom { + case t: T => t +} +``` + +### Reference + +For more info, see [PR #4768](https://github.com/lampepfl/dotty/pull/4768), +which explains how `summonFrom`'s predecessor (implicit matches) can be used for typelevel programming and code specialization and [PR #7201](https://github.com/lampepfl/dotty/pull/7201) which explains the new `summonFrom` syntax. diff --git a/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/macros-spec.md b/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/macros-spec.md new file mode 100644 index 000000000000..dde74371dadb --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/macros-spec.md @@ -0,0 +1,245 @@ +--- +layout: doc-page +title: "Macros Spec" +--- + +## Implementation + +### Syntax + +Compared to the [Dotty reference grammar](../../internals/syntax.md) +there are the following syntax changes: +``` +SimpleExpr ::= ... 
+ | ‘'’ ‘{’ Block ‘}’ + | ‘'’ ‘[’ Type ‘]’ + | ‘$’ ‘{’ Block ‘}’ +SimpleType ::= ... + | ‘$’ ‘{’ Block ‘}’ +``` +In addition, an identifier `$x` starting with a `$` that appears inside +a quoted expression or type is treated as a splice `${x}` and a quoted identifier +`'x` that appears inside a splice is treated as a quote `'{x}` + +### Implementation in `dotc` + +Quotes and splices are primitive forms in the generated abstract syntax trees. +Top-level splices are eliminated during macro expansion while typing. On the +other hand, top-level quotes are eliminated in an expansion phase `ReifyQuotes` +phase (after typing and pickling). PCP checking occurs while preparing the RHS +of an inline method for top-level splices and in the `Staging` phase (after +typing and before pickling). + +Macro-expansion works outside-in. If the outermost scope is a splice, +the spliced AST will be evaluated in an interpreter. A call to a +previously compiled method can be implemented as a reflective call to +that method. With the restrictions on splices that are currently in +place that’s all that’s needed. We might allow more interpretation in +splices in the future, which would allow us to loosen the +restriction. Quotes in spliced, interpreted code are kept as they +are, after splices nested in the quotes are expanded. + +If the outermost scope is a quote, we need to generate code that +constructs the quoted tree at run-time. We implement this by +serializing the tree as a Tasty structure, which is stored +in a string literal. At runtime, an unpickler method is called to +deserialize the string into a tree. + +Splices inside quoted code insert the spliced tree as is, after +expanding any quotes in the spliced code recursively. + +## Formalization + +The phase consistency principle can be formalized in a calculus that +extends simply-typed lambda calculus with quotes and splices. + +### Syntax + +The syntax of terms, values, and types is given as follows: +``` +Terms t ::= x variable + (x: T) => t lambda + t t application + 't quote + $t splice + +Values v ::= (x: T) => t lambda + 'u quote + +Simple terms u ::= x | (x: T) => u | u u | 't + +Types T ::= A base type + T -> T function type + expr T quoted +``` +Typing rules are formulated using a stack of environments +`Es`. Individual environments `E` consist as usual of variable +bindings `x: T`. Environments can be combined using the two +combinators `'` and `$`. +``` +Environment E ::= () empty + E, x: T + +Env. stack Es ::= () empty + E simple + Es * Es combined + +Separator * ::= ' + $ +``` +The two environment combinators are both associative with left and +right identity `()`. + +### Operational semantics: + +We define a small step reduction relation `-->` with the following rules: +``` + ((x: T) => t) v --> [x := v]t + + ${'u} --> u + + t1 --> t2 + ----------------- + e[t1] --> e[t2] +``` +The first rule is standard call-by-value beta-reduction. The second +rule says that splice and quotes cancel each other out. The third rule +is a context rule; it says that reduction is allowed in the hole `[ ]` +position of an evaluation context. Evaluation contexts `e` and +splice evaluation context `e_s` are defined syntactically as follows: +``` +Eval context e ::= [ ] | e t | v e | 'e_s[${e}] +Splice context e_s ::= [ ] | (x: T) => e_s | e_s t | u e_s +``` +### Typing rules + +Typing judgments are of the form `Es |- t: T`. 
There are two +substructural rules which express the fact that quotes and splices +cancel each other out: +``` + Es1 * Es2 |- t: T + --------------------------- + Es1 $ E1 ' E2 * Es2 |- t: T + + + Es1 * Es2 |- t: T + --------------------------- + Es1 ' E1 $ E2 * Es2 |- t: T +``` +The lambda calculus fragment of the rules is standard, except that we +use a stack of environments. The rules only interact with the topmost +environment of the stack. +``` + x: T in E + -------------- + Es * E |- x: T + + + Es * E, x: T1 |- t: T2 + ------------------------------- + Es * E |- (x: T1) => t: T -> T2 + + + Es |- t1: T2 -> T Es |- t2: T2 + --------------------------------- + Es |- t1 t2: T +``` +The rules for quotes and splices map between `expr T` and `T` by trading `'` and `$` between +environments and terms. +``` + Es $ () |- t: expr T + -------------------- + Es |- $t: T + + + Es ' () |- t: T + ---------------- + Es |- 't: expr T +``` +The meta theory of a slightly simplified variant 2-stage variant of this calculus +is studied [separately](./simple-smp.md). + +## Going Further + +The metaprogramming framework as presented and currently implemented is quite restrictive +in that it does not allow for the inspection of quoted expressions and +types. It’s possible to work around this by providing all necessary +information as normal, unquoted inline parameters. But we would gain +more flexibility by allowing for the inspection of quoted code with +pattern matching. This opens new possibilities. For instance, here is a +version of `power` that generates the multiplications directly if the +exponent is statically known and falls back to the dynamic +implementation of power otherwise. +```scala +inline def power(n: Int, x: Double): Double = ${ + 'n match { + case Constant(n1) => powerCode(n1, 'x) + case _ => '{ dynamicPower(n, x) } + } +} + +private def dynamicPower(n: Int, x: Double): Double = + if (n == 0) 1.0 + else if (n % 2 == 0) dynamicPower(n / 2, x * x) + else x * dynamicPower(n - 1, x) +``` + +--- + +This assumes a `Constant` extractor that maps tree nodes representing +constants to their values. + +With the right extractors, the "AsFunction" conversion +that maps expressions over functions to functions over expressions can +be implemented in user code: +```scala +given AsFunction1[T, U] as Conversion[Expr[T => U], Expr[T] => Expr[U]] { + def apply(f: Expr[T => U]): Expr[T] => Expr[U] = + (x: Expr[T]) => f match { + case Lambda(g) => g(x) + case _ => '{ ($f)($x) } + } +} +``` +This assumes an extractor +```scala +object Lambda { + def unapply[T, U](x: Expr[T => U]): Option[Expr[T] => Expr[U]] +} +``` +Once we allow inspection of code via extractors, it’s tempting to also +add constructors that create typed trees directly without going +through quotes. Most likely, those constructors would work over `Expr` +types which lack a known type argument. For instance, an `Apply` +constructor could be typed as follows: +```scala +def Apply(fn: Expr[Any], args: List[Expr[Any]]): Expr[Any] +``` +This would allow constructing applications from lists of arguments +without having to match the arguments one-by-one with the +corresponding formal parameter types of the function. We then need "at +the end" a method to convert an `Expr[Any]` to an `Expr[T]` where `T` is +given from the outside. E.g. if `code` yields a `Expr[Any]`, then +`code.atType[T]` yields an `Expr[T]`. 
The `atType` method has to be +implemented as a primitive; it would check that the computed type +structure of `Expr` is a subtype of the type structure representing +`T`. + +Before going down that route, we should evaluate in detail the tradeoffs it +presents. Constructing trees that are only verified _a posteriori_ +to be type correct loses a lot of guidance for constructing the right +trees. So we should wait with this addition until we have more +use-cases that help us decide whether the loss in type-safety is worth +the gain in flexibility. In this context, it seems that deconstructing types is +less error-prone than deconstructing terms, so one might also +envisage a solution that allows the former but not the latter. + +## Conclusion + +Metaprogramming has a reputation of being difficult and confusing. +But with explicit `Expr/Type` types and quotes and splices it can become +downright pleasant. A simple strategy first defines the underlying quoted or unquoted +values using `Expr` and `Type` and then inserts quotes and splices to make the types +line up. Phase consistency is at the same time a great guideline +where to insert a splice or a quote and a vital sanity check that +the result makes sense. diff --git a/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/macros.md b/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/macros.md new file mode 100644 index 000000000000..a9bfc0d8932c --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/macros.md @@ -0,0 +1,791 @@ +--- +layout: doc-page +title: "Macros" +--- + +### Macros: Quotes and Splices + +Macros are built on two well-known fundamental operations: quotation and +splicing. Quotation is expressed as `'{...}` for expressions and as `'[...]` +for types. Splicing is expressed as `${ ... }`. Additionally, within a quote +or a splice we can quote or splice identifiers directly (i.e. `'e` and `$e`). +Readers may notice the resemblance of the two aforementioned syntactic +schemes with the familiar string interpolation syntax. + +```scala +println(s"Hello, $name, here is the result of 1 + 1 = ${1 + 1}") +``` + +In string interpolation we _quoted_ a string and then we _spliced_ into it, two +others. The first, `name`, is a reference to a value of type `string`, and the +second is an arithmetic expression that will be _evaluated_ followed by the +splicing of its string representation. + +Quotes and splices in this section allow us to treat code in a similar way, +effectively supporting macros. The entry point for macros is an inline method +with a top-level splice. We call it a top-level because it is the only occasion +where we encounter a splice outside a quote (consider as a quote the +compilation-unit at the call-site). For example, the code below presents an +`inline` method `assert` which calls at compile-time a method `assertImpl` with +a boolean expression tree as argument. `assertImpl` evaluates the expression and +prints it again in an error message if it evaluates to `false`. + +```scala +import scala.quoted._ + +inline def assert(inline expr: Boolean): Unit = + ${ assertImpl('expr) } + +def assertImpl(expr: Expr[Boolean])(using QuoteContext) = '{ + if (!$expr) + throw new AssertionError(s"failed assertion: ${${ showExpr(expr) }}") +} + +def showExpr(expr: Expr[Boolean])(using QuoteContext): Expr[String] = + '{ "" } // Better implementation later in this document +``` + +If `e` is an expression, then `'{e}` represents the typed +abstract syntax tree representing `e`. 
If `T` is a type, then `'[T]` +represents the type structure representing `T`. The precise +definitions of "typed abstract syntax tree" or "type-structure" do not +matter for now, the terms are used only to give some +intuition. Conversely, `${e}` evaluates the expression `e`, which must +yield a typed abstract syntax tree or type structure, and embeds the +result as an expression (respectively, type) in the enclosing program. + +Quotations can have spliced parts in them; in this case the embedded +splices are evaluated and embedded as part of the formation of the +quotation. + +Quotes and splices can also be applied directly to identifiers. An identifier +`$x` starting with a `$` that appears inside a quoted expression or type is treated as a +splice `${x}`. Analogously, an quoted identifier `'x` that appears inside a splice +is treated as a quote `'{x}`. See the Syntax section below for details. + +Quotes and splices are duals of each other. For arbitrary +expressions `e` and types `T` we have: +``` +${'{e}} = e +'{${e}} = e +${'[T]} = T +'[${T}] = T +``` +### Types for Quotations + +The type signatures of quotes and splices can be described using +two fundamental types: + + - `Expr[T]`: abstract syntax trees representing expressions of type `T` + - `Type[T]`: type structures representing type `T`. + +Quoting takes expressions of type `T` to expressions of type `Expr[T]` +and it takes types `T` to expressions of type `Type[T]`. Splicing +takes expressions of type `Expr[T]` to expressions of type `T` and it +takes expressions of type `Type[T]` to types `T`. + +The two types can be defined in package `scala.quoted` as follows: +```scala +package scala.quoted + +sealed abstract class Expr[+T] +sealed abstract class Type[T] +``` +Both `Expr` and `Type` are abstract and sealed, so all constructors for +these types are provided by the system. One way to construct values of +these types is by quoting, the other is by type-specific lifting +operations that will be discussed later on. + +### The Phase Consistency Principle + +A fundamental *phase consistency principle* (PCP) regulates accesses +to free variables in quoted and spliced code: + + - _For any free variable reference `x`, the number of quoted scopes and the number of spliced scopes between the reference to `x` and the definition of `x` must be equal_. + +Here, `this`-references count as free variables. On the other +hand, we assume that all imports are fully expanded and that `_root_` is +not a free variable. So references to global definitions are +allowed everywhere. + +The phase consistency principle can be motivated as follows: First, +suppose the result of a program `P` is some quoted text `'{ ... x +... }` that refers to a free variable `x` in `P`. This can be +represented only by referring to the original variable `x`. Hence, the +result of the program will need to persist the program state itself as +one of its parts. We don’t want to do this, hence this situation +should be made illegal. Dually, suppose a top-level part of a program +is a spliced text `${ ... x ... }` that refers to a free variable `x` +in `P`. This would mean that we refer during _construction_ of `P` to +a value that is available only during _execution_ of `P`. This is of +course impossible and therefore needs to be ruled out. Now, the +small-step evaluation of a program will reduce quotes and splices in +equal measure using the cancellation rules above. But it will neither +create nor remove quotes or splices individually. 
So the PCP ensures +that program elaboration will lead to neither of the two unwanted +situations described above. + +In what concerns the range of features it covers, this form of macros introduces +a principled metaprogramming framework that is quite close to the MetaML family of +languages. One difference is that MetaML does not have an equivalent of the PCP +- quoted code in MetaML _can_ access variables in its immediately enclosing +environment, with some restrictions and caveats since such accesses involve +serialization. However, this does not constitute a fundamental gain in +expressiveness. + +### From `Expr`s to Functions and Back + +It is possible to convert any `Expr[T => R]` into `Expr[T] => Expr[R]` and back. +These conversions can be implemented as follows: + +```scala +def to[T: Type, R: Type](f: Expr[T] => Expr[R])(using QuoteContext): Expr[T => R] = + '{ (x: T) => ${ f('x) } } + +def from[T: Type, R: Type](f: Expr[T => R])(using QuoteContext): Expr[T] => Expr[R] = + (x: Expr[T]) => '{ $f($x) } +``` + +Note how the fundamental phase consistency principle works in two +different directions here for `f` and `x`. In the method `to`, the reference to `f` is +legal because it is quoted, then spliced, whereas the reference to `x` +is legal because it is spliced, then quoted. + +They can be used as follows: + +```scala +val f1: Expr[Int => String] = to((x: Expr[Int]) => '{ $x.toString }) // '{ (x: Int) => x.toString } + +val f2: Expr[Int] => Expr[String] = from('{ (x: Int) => x.toString }) // (x: Expr[Int]) => '{ ((x: Int) => x.toString)($x) } +f2('{2}) // '{ ((x: Int) => x.toString)(2) } +``` + +One limitation of `from` is that it does not β-reduce when a lambda is called immediately, as evidenced in the code `{ ((x: Int) => x.toString)(2) }`. +In some cases we want to remove the lambda from the code, for this we provide the method `Expr.betaReduce` that turns a tree +describing a function into a function mapping trees to trees. +```scala +object Expr { + ... + def betaReduce[...](...)(...): ... = ... +} +``` +The definition of `Expr.betaReduce(f)(x)` is assumed to be functionally the same as +`'{($f)($x)}`, however it should optimize this call by returning the +result of beta-reducing `f(x)` if `f` is a known lambda expression. +`Expr.betaReduce` distributes applications of `Expr` over function arrows: +```scala +Expr.betaReduce(_): Expr[(T1, ..., Tn) => R] => ((Expr[T1], ..., Expr[Tn]) => Expr[R]) +``` + +### Lifting Types + +Types are not directly affected by the phase consistency principle. +It is possible to use types defined at any level in any other level. +But, if a type is used in a subsequent stage it will need to be lifted to a `Type`. +The resulting value of `Type` will be subject to PCP. +Indeed, the definition of `to` above uses `T` in the next stage, there is a +quote but no splice between the parameter binding of `T` and its +usage. But the code can be rewritten by adding a binding of a `Type[T]` tag: +```scala +def to[T, R: Type](f: Expr[T] => Expr[R])(using t: Type[T])(using QuoteContext): Expr[T => R] = + '{ (x: $t) => ${ f('x) } } +``` +In this version of `to`, the type of `x` is now the result of +splicing the `Type` value `t`. This operation _is_ splice correct -- there +is one quote and one splice between the use of `t` and its definition. + +To avoid clutter, the Scala implementation tries to convert any type +reference to a type `T` in subsequent phases to a type-splice, by rewriting `T` to `${ summon[Type[T]] }`. 
+For instance, the user-level definition of `to`: + +```scala +def to[T: Type, R: Type](f: Expr[T] => Expr[R])(using QuoteContext): Expr[T => R] = + '{ (x: T) => ${ f('x) } } +``` +would be rewritten to +```scala +def to[T: Type, R: Type](f: Expr[T] => Expr[R])(using QuoteContext): Expr[T => R] = + '{ (x: ${ summon[Type[T]] }) => ${ f('x) } } +``` +The `summon` query succeeds because there is a given instance of +type `Type[T]` available (namely the given parameter corresponding +to the context bound `: Type`), and the reference to that value is +phase-correct. If that was not the case, the phase inconsistency for +`T` would be reported as an error. + +### Lifting Expressions + +Consider the following implementation of a staged interpreter that implements +a compiler through staging. +```scala +import scala.quoted._ + +enum Exp { + case Num(n: Int) + case Plus(e1: Exp, e2: Exp) + case Var(x: String) + case Let(x: String, e: Exp, in: Exp) +} +``` +The interpreted language consists of numbers `Num`, addition `Plus`, and variables +`Var` which are bound by `Let`. Here are two sample expressions in the language: +```scala +val exp = Plus(Plus(Num(2), Var("x")), Num(4)) +val letExp = Let("x", Num(3), exp) +``` +Here’s a compiler that maps an expression given in the interpreted +language to quoted Scala code of type `Expr[Int]`. +The compiler takes an environment that maps variable names to Scala `Expr`s. +```scala +import scala.quoted._ + +def compile(e: Exp, env: Map[String, Expr[Int]])(using QuoteContext): Expr[Int] = e match { + case Num(n) => + Expr(n) + case Plus(e1, e2) => + '{ ${ compile(e1, env) } + ${ compile(e2, env) } } + case Var(x) => + env(x) + case Let(x, e, body) => + '{ val y = ${ compile(e, env) }; ${ compile(body, env + (x -> 'y)) } } +} +``` +Running `compile(letExp, Map())` would yield the following Scala code: +```scala +'{ val y = 3; (2 + y) + 4 } +``` +The body of the first clause, `case Num(n) => Expr(n)`, looks suspicious. `n` +is declared as an `Int`, yet it is converted to an `Expr[Int]` with `Expr()`. +Shouldn’t `n` be quoted? In fact this would not +work since replacing `n` by `'n` in the clause would not be phase +correct. + +The `Expr.apply` method is defined in package `quoted`: +```scala +package quoted + +object Expr { + ... + def apply[T: Liftable](x: T)(using QuoteContext): Expr[T] = summon[Liftable[T]].toExpr(x) + ... +} +``` +This method says that values of types implementing the `Liftable` type class can be +converted ("lifted") to `Expr` values using `Expr.apply`. + +Dotty comes with given instances of `Liftable` for +several types including `Boolean`, `String`, and all primitive number +types. For example, `Int` values can be converted to `Expr[Int]` +values by wrapping the value in a `Literal` tree node. This makes use +of the underlying tree representation in the compiler for +efficiency. But the `Liftable` instances are nevertheless not _magic_ +in the sense that they could all be defined in a user program without +knowing anything about the representation of `Expr` trees. For +instance, here is a possible instance of `Liftable[Boolean]`: +```scala +given Liftable[Boolean] { + def toExpr(b: Boolean) = + if (b) '{ true } else '{ false } +} +``` +Once we can lift bits, we can work our way up. 
For instance, here is a +possible implementation of `Liftable[Int]` that does not use the underlying +tree machinery: +```scala +given Liftable[Int] { + def toExpr(n: Int) = n match { + case Int.MinValue => '{ Int.MinValue } + case _ if n < 0 => '{ - ${ toExpr(-n) } } + case 0 => '{ 0 } + case _ if n % 2 == 0 => '{ ${ toExpr(n / 2) } * 2 } + case _ => '{ ${ toExpr(n / 2) } * 2 + 1 } + } +} +``` +Since `Liftable` is a type class, its instances can be conditional. For example, +a `List` is liftable if its element type is: +```scala +given [T: Liftable : Type] as Liftable[List[T]] { + def toExpr(xs: List[T]) = xs match { + case head :: tail => '{ ${ Expr(head) } :: ${ toExpr(tail) } } + case Nil => '{ Nil: List[T] } + } +} +``` +In the end, `Liftable` resembles very much a serialization +framework. Like the latter it can be derived systematically for all +collections, case classes and enums. Note also that the synthesis +of _type-tag_ values of type `Type[T]` is essentially the type-level +analogue of lifting. + +Using lifting, we can now give the missing definition of `showExpr` in the introductory example: +```scala +def showExpr[T](expr: Expr[T])(using QuoteContext): Expr[String] = { + val code: String = expr.show + Expr(code) +} +``` +That is, the `showExpr` method converts its `Expr` argument to a string (`code`), and lifts +the result back to an `Expr[String]` using `Expr.apply`. + + +### Lifting Types + +The previous section has shown that the metaprogramming framework has +to be able to take a type `T` and convert it to a type tree of type +`Type[T]` that can be reified. This means that all free variables of +the type tree refer to types and values defined in the current stage. + +For a reference to a global class, this is easy: Just issue the fully +qualified name of the class. Members of reifiable types are handled by +just reifying the containing type together with the member name. But +what to do for references to type parameters or local type definitions +that are not defined in the current stage? Here, we cannot construct +the `Type[T]` tree directly, so we need to get it from a recursive +implicit search. For instance, to implement +```scala +summon[Type[List[T]]] +``` +where `T` is not defined in the current stage, we construct the type constructor +of `List` applied to the splice of the result of searching for a given instance for `Type[T]`: +```scala +'[ List[ ${ summon[Type[T]] } ] ] +``` +This is exactly the algorithm that Scala 2 uses to search for type tags. +In fact Scala 2's type tag feature can be understood as a more ad-hoc version of +`quoted.Type`. As was the case for type tags, the implicit search for a `quoted.Type` +is handled by the compiler, using the algorithm sketched above. + +### Relationship with Inline + +Seen by itself, principled metaprogramming looks more like a framework for +runtime metaprogramming than one for compile-time metaprogramming with macros. +But combined with Dotty’s `inline` feature it can be turned into a compile-time +system. The idea is that macro elaboration can be understood as a combination of +a macro library and a quoted program. For instance, here’s the `assert` macro +again together with a program that calls `assert`. 
+ +```scala +object Macros { + + inline def assert(inline expr: Boolean): Unit = + ${ assertImpl('expr) } + + def assertImpl(expr: Expr[Boolean])(using QuoteContext) = + val failMsg: Expr[String] = Expr("failed assertion: " + expr.show) + '{ if !($expr) then throw new AssertionError($failMsg) } +} + +object App { + val program = { + val x = 1 + Macros.assert(x != 0) + } +} +``` +Inlining the `assert` function would give the following program: +```scala +val program = { + val x = 1 + ${ Macros.assertImpl('{ x != 0) } } +} +``` +The example is only phase correct because `Macros` is a global value and +as such not subject to phase consistency checking. Conceptually that’s +a bit unsatisfactory. If the PCP is so fundamental, it should be +applicable without the global value exception. But in the example as +given this does not hold since both `assert` and `program` call +`assertImpl` with a splice but no quote. + +However, one could argue that the example is really missing +an important aspect: The macro library has to be compiled in a phase +prior to the program using it, but in the code above, macro +and program are defined together. A more accurate view of +macros would be to have the user program be in a phase after the macro +definitions, reflecting the fact that macros have to be defined and +compiled before they are used. Hence, conceptually the program part +should be treated by the compiler as if it was quoted: +```scala +val program = '{ + val x = 1 + ${ Macros.assertImpl('{ x != 0 }) } +} +``` +If `program` is treated as a quoted expression, the call to +`Macro.assertImpl` becomes phase correct even if macro library and +program are conceptualized as local definitions. + +But what about the call from `assert` to `assertImpl`? Here, we need a +tweak of the typing rules. An inline function such as `assert` that +contains a splice operation outside an enclosing quote is called a +_macro_. Macros are supposed to be expanded in a subsequent phase, +i.e. in a quoted context. Therefore, they are also type checked as if +they were in a quoted context. For instance, the definition of +`assert` is typechecked as if it appeared inside quotes. This makes +the call from `assert` to `assertImpl` phase-correct, even if we +assume that both definitions are local. + +The `inline` modifier is used to declare a `val` that is +either a constant or is a parameter that will be a constant when instantiated. This +aspect is also important for macro expansion. + +To get values out of expressions containing constants `Expr` provides the method +`unlift` (or `unliftOrError`). This will convert the `Expr[T]` into a `Some[T]` (or `T`) when the +expression contains value. Otherwise it will retrun `None` (or emit an error). +To avoid having incidental val bindings generated by the inlining of the `def` +it is recommended to use an inline parameter. 
To illustrate this, consider an +implementation of the `power` function that makes use of a statically known exponent: +```scala +inline def power(x: Double, inline n: Int) = ${ powerCode('x, 'n) } + +private def powerCode(x: Expr[Double], n: Expr[Int])(using QuoteContext): Expr[Double] = + n.unlift match + case Some(m) => powerCode(x, m) + case None => '{ Math.pow($x, $y) } + +private def powerCode(x: Expr[Double], n: Int)(using QuoteContext): Expr[Double] = + if (n == 0) '{ 1.0 } + else if (n == 1) x + else if (n % 2 == 0) '{ val y = $x * $x; ${ powerCode('y, n / 2) } } + else '{ $x * ${ powerCode(x, n - 1) } } +``` + +### Scope Extrusion + +Quotes and splices are duals as far as the PCP is concerned. But there is an +additional restriction that needs to be imposed on splices to guarantee +soundness: code in splices must be free of side effects. The restriction +prevents code like this: + +```scala +var x: Expr[T] = ... +'{ (y: T) => ${ x = 'y; 1 } } +``` + +This code, if it was accepted, would _extrude_ a reference to a quoted variable +`y` from its scope. This would subsequently allow access to a variable outside the +scope where it is defined, which is likely problematic. The code is clearly +phase consistent, so we cannot use PCP to rule it out. Instead we postulate a +future effect system that can guarantee that splices are pure. In the absence of +such a system we simply demand that spliced expressions are pure by convention, +and allow for undefined compiler behavior if they are not. This is analogous to +the status of pattern guards in Scala, which are also required, but not +verified, to be pure. + +[Multi-Stage Programming](./staging.md) introduces one additional method where +you can expand code at runtime with a method `run`. There is also a problem with +that invokation of `run` in splices. Consider the following expression: + +```scala +'{ (x: Int) => ${ run('x); 1 } } +``` +This is again phase correct, but will lead us into trouble. Indeed, evaluating +the splice will reduce the expression `run('x)` to `x`. But then the result + +```scala +'{ (x: Int) => ${ x; 1 } } +``` + +is no longer phase correct. To prevent this soundness hole it seems easiest to +classify `run` as a side-effecting operation. It would thus be prevented from +appearing in splices. In a base language with side effects we would have to do this +anyway: Since `run` runs arbitrary code it can always produce a side effect if +the code it runs produces one. + +### Example Expansion + +Assume we have two methods, one `map` that takes an `Expr[Array[T]]` and a +function `f` and one `sum` that performs a sum by delegating to `map`. 
+ +```scala +object Macros { + def map[T](arr: Expr[Array[T]], f: Expr[T] => Expr[Unit])(using t: Type[T], qctx: QuoteContext): Expr[Unit] = '{ + var i: Int = 0 + while (i < ($arr).length) { + val element: $t = ($arr)(i) + ${f('element)} + i += 1 + } + } + + def sum(arr: Expr[Array[Int]])(using QuoteContext): Expr[Int] = '{ + var sum = 0 + ${ map(arr, x => '{sum += $x}) } + sum + } + + inline def sum_m(arr: Array[Int]): Int = ${sum('arr)} +} +``` + +A call to `sum_m(Array(1,2,3))` will first inline `sum_m`: + +```scala +val arr: Array[Int] = Array.apply(1, [2,3 : Int]:Int*) +${_root_.Macros.sum('arr)} +``` + +then it will splice `sum`: + +```scala +val arr: Array[Int] = Array.apply(1, [2,3 : Int]:Int*) + +var sum = 0 +${ map(arr, x => '{sum += $x}) } +sum +``` + +then it will inline `map`: + +```scala +val arr: Array[Int] = Array.apply(1, [2,3 : Int]:Int*) + +var sum = 0 +val f = x => '{sum += $x} +${ _root_.Macros.map(arr, 'f)('[Int])} +sum +``` + +then it will expand and splice inside quotes `map`: + +```scala +val arr: Array[Int] = Array.apply(1, [2,3 : Int]:Int*) + +var sum = 0 +val f = x => '{sum += $x} +var i: Int = 0 +while (i < (arr).length) { + val element: Int = (arr)(i) + sum += element + i += 1 +} +sum +``` + +Finally cleanups and dead code elimination: +```scala +val arr: Array[Int] = Array.apply(1, [2,3 : Int]:Int*) +var sum = 0 +var i: Int = 0 +while (i < arr.length) { + val element: Int = arr(i) + sum += element + i += 1 +} +sum +``` + +### Find implicits within a macro + +Similarly to the `summonFrom` construct, it is possible to make implicit search available +in a quote context. For this we simply provide `scala.quoted.Expr.summon`: + +```scala +inline def setFor[T]: Set[T] = ${ setForExpr[T] } + +def setForExpr[T: Type](using QuoteContext): Expr[Set[T]] = { + Expr.summon[Ordering[T]] match { + case Some(ord) => '{ new TreeSet[T]()($ord) } + case _ => '{ new HashSet[T] } + } +} +``` + +### Relationship with Whitebox Inline + +[Inline](./inline.md) documents inlining. The code below introduces a whitebox +inline method that can calculate either a value of type `Int` or a value of type +`String`. + +```scala +transparent inline def defaultOf(inline str: String) = ${ defaultOfImpl('str) } + +def defaultOfImpl(strExpr: Expr[String])(using QuoteContext): Expr[Any] = + strExpr.unliftOrError match + case "int" => '{1} + case "string" => '{"a"} + +// in a separate file +val a: Int = defaultOf("int") +val b: String = defaultOf("string") + +``` + +### Defining a macro and using it in a single project +It is possible to define macros and use them in the same project as long as the implementation +of the macros does not have run-time dependencies on code in the file where it is used. +It might still have compile-time dependencies on types and quoted code that refers to the use-site file. + +To provide this functionality Dotty provides a transparent compilation mode where files that +try to expand a macro but fail because the macro has not been compiled yet are suspended. +If there are any suspended files when the compilation ends, the compiler will automatically restart +compilation of the suspended files using the output of the previous (partial) compilation as macro classpath. +In case all files are suspended due to cyclic dependencies the compilation will fail with an error. + + +### Pattern matching on quoted expressions + +It is possible to deconstruct or extract values out of `Expr` using pattern matching. 
+ +`scala.quoted` contains objects that can help extracting values from `Expr`. + +* `scala.quoted.Const`/`scala.quoted.Consts`: matches an expression of a literal value (or list of values) and returns the value (or list of values). +* `scala.quoted.Unlifted`: matches an expression of a value (or list of values) and returns the value (or list of values). +* `scala.quoted.Varargs`: matches an explicit sequence of expresions and returns them. These sequences are useful to get individual `Expr[T]` out of a varargs expression of type `Expr[Seq[T]]`. + +These could be used in the following way to optimize any call to `sum` that has statically known values. +```scala +inline def sum(inline args: Int*): Int = ${ sumExpr('args) } +private def sumExpr(argsExpr: Expr[Seq[Int]])(using QuoteContext): Expr[Int] = argsExpr match { + case Varargs(Consts(args)) => // args is of type Seq[Int] + Expr(args.sum) // precompute result of sum + case Varargs(argExprs) => // argExprs is of type Seq[Expr[Int]] + val staticSum: Int = argExprs.map { + case Const(arg) => arg + case _ => 0 + }.sum + val dynamicSum: Seq[Expr[Int]] = argExprs.filter { + case Const(_) => false + case arg => true + } + dynamicSum.foldLeft(Expr(staticSum))((acc, arg) => '{ $acc + $arg }) + case _ => + '{ $argsExpr.sum } +} +``` + +#### Quoted patterns + +Quoted pattens allow to deconstruct complex code that contains a precise structure, types or methods. +Patterns `'{ ... }` can be placed in any location where Scala expects a pattern. + +For example +```scala +optimize { + sum(sum(1, a, 2), 3, b) +} // should be optimized to 6 + a + b +``` + +```scala +def sum(args: Int*): Int = args.sum +inline def optimize(inline arg: Int): Int = ${ optimizeExpr('arg) } +private def optimizeExpr(body: Expr[Int])(using QuoteContext): Expr[Int] = body match { + // Match a call to sum without any arguments + case '{ sum() } => Expr(0) + // Match a call to sum with an argument $n of type Int. n will be the Expr[Int] representing the argument. + case '{ sum($n) } => n + // Match a call to sum and extracts all its args in an `Expr[Seq[Int]]` + case '{ sum(${Varargs(args)}: _*) } => sumExpr(args) + case body => body +} +private def sumExpr(args1: Seq[Expr[Int]])(using QuoteContext): Expr[Int] = { + def flatSumArgs(arg: Expr[Int]): Seq[Expr[Int]] = arg match { + case '{ sum(${Varargs(subArgs)}: _*) } => subArgs.flatMap(flatSumArgs) + case arg => Seq(arg) + } + val args2 = args1.flatMap(flatSumArgs) + val staticSum: Int = args2.map { + case Const(arg) => arg + case _ => 0 + }.sum + val dynamicSum: Seq[Expr[Int]] = args2.filter { + case Const(_) => false + case arg => true + } + dynamicSum.foldLeft(Expr(staticSum))((acc, arg) => '{ $acc + $arg }) +} +``` + +#### Recovering precise types using patterns + +Sometimes it is necessary to get a more precise type for an expression. This can be achived using the following pattern match. + +```scala +def f(exp: Expr[Any])(using QuoteContext) = + expr match + case '{ $x: $t } => + // If the pattern match succeeds, then there is some type `T` such that + // - `x` is bound to a variable of type `Expr[T]` + // - `t` is bound to a given instance of type `Type[T]` + // That is, we have `x: Expr[T]` and `given t: Type[T]`, for some (unknown) type `T`. 
+``` + +This might be used to then perform an implicit search as in: + + +```scala +extension (inline sc: StringContext) inline def showMe(inline args: Any*): String = ${ showMeExpr('sc, 'args) } + +private def showMeExpr(sc: Expr[StringContext], argsExpr: Expr[Seq[Any]])(using QuoteContext): Expr[String] = { + argsExpr match { + case Varargs(argExprs) => + val argShowedExprs = argExprs.map { + case '{ $arg: $tp } => + val showTp = '[Show[$tp]] + Expr.summon(using showTp) match { + case Some(showExpr) => '{ $showExpr.show($arg) } + case None => Reporting.error(s"could not find implicit for ${showTp.show}", arg); '{???} + } + } + val newArgsExpr = Varargs(argShowedExprs) + '{ $sc.s($newArgsExpr: _*) } + case _ => + // `new StringContext(...).showMeExpr(args: _*)` not an explicit `showMeExpr"..."` + Reporting.error(s"Args must be explicit", argsExpr) + '{???} + } +} + +trait Show[-T] { + def show(x: T): String +} +``` + +#### Open code patterns + +Quote pattern matching also provides higher-order patterns to match open terms. If a quoted term contains a definition, +then the rest of the quote can refer to this definition. +``` +'{ + val x: Int = 4 + x * x +} +``` + +To match such a term we need to match the definition and the rest of the code, but we need to explicitly state that the rest of the code may refer to this definition. +```scala +case '{ val y: Int = $x; $body(y): Int } => +``` +Here `$x` will match any closed expression while `$body(y)` will match an expression that is closed under `y`. Then +the subexpression of type `Expr[Int]` is bound to `body` as an `Expr[Int => Int]`. The extra argument represents the references to `y`. Usually this expression is used in combination with `Expr.betaReduce` to replace the extra argument. + +```scala +inline def eval(inline e: Int): Int = ${ evalExpr('e) } + +private def evalExpr(e: Expr[Int])(using QuoteContext): Expr[Int] = { + e match { + case '{ val y: Int = $x; $body(y): Int } => + // body: Expr[Int => Int] where the argument represents references to y + evalExpr(Expr.betaReduce(body)(evalExpr(x))) + case '{ ($x: Int) * ($y: Int) } => + (x, y) match + case (Const(a), Const(b)) => Expr(a * b) + case _ => e + case _ => e + } +} +``` + +```scala +eval { // expands to the code: (16: Int) + val x: Int = 4 + x * x +} +``` + +We can also close over several bindings using `$b(a1, a2, ..., an)`. +To match an actual application we can use braces on the function part `${b}(a1, a2, ..., an)`. + + +### More details +[More details](./macros-spec.md) diff --git a/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/simple-smp.md b/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/simple-smp.md new file mode 100644 index 000000000000..ec5709ed8f45 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/simple-smp.md @@ -0,0 +1,231 @@ +--- +layout: doc-page +title: "The Meta-theory of Symmetric Metaprogramming" +--- + +This note presents a simplified variant of +[principled metaprogramming](./macros.md) +and sketches its soundness proof. The variant treats only dialogues +between two stages. A program can have quotes which can contain +splices (which can contain quotes, which can contain splices, and so +on). Or the program could start with a splice with embedded +quotes. The essential restriction is that (1) a term can contain top-level +quotes or top-level splices, but not both, and (2) quotes cannot appear +directly inside quotes and splices cannot appear directly inside +splices. 
In other words, the universe is restricted to two phases +only. + +Under this restriction we can simplify the typing rules so that there are +always exactly two environments instead of having a stack of environments. +The variant presented here differs from the full calculus also in that we +replace evaluation contexts with contextual typing rules. While this +is more verbose, it makes it easier to set up the meta theory. + +## Syntax +``` +Terms t ::= x variable + (x: T) => t lambda + t t application + ’t quote + ~t splice + +Simple terms u ::= x | (x: T) => u | u u + +Values v ::= (x: T) => t lambda + ’u quoted value + +Types T ::= A base type + T -> T function type + ’T quoted type +``` +## Operational semantics + +### Evaluation +``` + ((x: T) => t) v --> [x := v]t + + t1 --> t2 + --------------- + t1 t --> t2 t + + t1 --> t2 + --------------- + v t1 --> v t2 + + t1 ==> t2 + ------------- + ’t1 --> ’t2 +``` + +### Splicing +``` + ~’u ==> u + + t1 ==> t2 + ------------------------------- + (x: T) => t1 ==> (x: T) => t2 + + t1 ==> t2 + --------------- + t1 t ==> t2 t + + t1 ==> t2 + --------------- + u t1 ==> u t2 + + t1 --> t2 + ------------- + ~t1 ==> ~t2 + +``` +## Typing Rules + +Typing judgments are of the form `E1 * E2 |- t: T` where `E1, E2` are environments and +`*` is one of `~` and `’`. +``` + x: T in E2 + --------------- + E1 * E2 |- x: T + + + E1 * E2, x: T1 |- t: T2 + -------------------------------- + E1 * E2 |- (x: T1) => t: T -> T2 + + + E1 * E2 |- t1: T2 -> T E1 * E2 |- t2: T2 + ------------------------------------------- + E1 * E2 |- t1 t2: T + + + E2 ’ E1 |- t: T + ----------------- + E1 ~ E2 |- ’t: ’T + + + E2 ~ E1 |- t: ’T + ---------------- + E1 ’ E2 |- ~t: T +``` + +(Curiously, this looks a bit like a Christmas tree). + +## Soundness + +The meta-theory typically requires mutual inductions over two judgments. + +### Progress Theorem + + 1. If `E1 ~ |- t: T` then either `t = v` for some value `v` or `t --> t2` for some term `t2`. + 2. If ` ’ E2 |- t: T` then either `t = u` for some simple term `u` or `t ==> t2` for some term `t2`. + +Proof by structural induction over terms. + +To prove (1): + + - the cases for variables, lambdas and applications are as in STL. + - If `t = ’t2`, then by inversion we have ` ’ E1 |- t2: T2` for some type `T2`. + By the second I.H., we have one of: + - `t2 = u`, hence `’t2` is a value, + - `t2 ==> t3`, hence `’t2 --> ’t3`. + - The case `t = ~t2` is not typable. + +To prove (2): + + - If `t = x` then `t` is a simple term. + - If `t = (x: T) => t2`, then either `t2` is a simple term, in which case `t` is as well. + Or by the second I.H. `t2 ==> t3`, in which case `t ==> (x: T) => t3`. + - If `t = t1 t2` then one of three cases applies: + + - `t1` and `t2` are a simple term, then `t` is as well a simple term. + - `t1` is not a simple term. Then by the second IH, `t1 ==> t12`, hence `t ==> t12 t2`. + - `t1` is a simple term but `t2` is not. Then by the second IH. `t2 ==> t22`, hence `t ==> t1 t22`. + + - The case `t = ’t2` is not typable. + - If `t = ~t2` then by inversion we have `E2 ~ |- t2: ’T2`, for some type `T2`. + By the first I.H., we have one of + + - `t2 = v`. Since `t2: ’T2`, we must have `v = ’u`, for some simple term `u`, hence `t = ~’u`. + By quote-splice reduction, `t ==> u`. + - `t2 --> t3`. Then by the context rule for `’t`, `t ==> ’t3`. + + +### Substitution Lemma + + 1. If `E1 ~ E2 |- s: S` and `E1 ~ E2, x: S |- t: T` then `E1 ~ E2 |- [x := s]t: T`. + 2. 
If `E1 ~ E2 |- s: S` and `E2, x: S ’ E1 |- t: T` then `E2 ’ E1 |- [x := s]t: T`. + +The proofs are by induction on typing derivations for `t`, analogous +to the proof for STL (with (2) a bit simpler than (1) since we do not +need to swap lambda bindings with the bound variable `x`). The +arguments that link the two hypotheses are as follows. + +To prove (1), let `t = ’t1`. Then `T = ’T1` for some type `T1` and the last typing rule is +``` + E2, x: S ’ E1 |- t1: T1 + ------------------------- + E1 ~ E2, x: S |- ’t1: ’T1 +``` +By the second I.H. `E2 ’ E1 |- [x := s]t1: T1`. By typing, `E1 ~ E2 |- ’[x := s]t1: ’T1`. +Since `[x := s]t = [x := s](’t1) = ’[x := s]t1` we get `[x := s]t: ’T1`. + +To prove (2), let `t = ~t1`. Then the last typing rule is +``` + E1 ~ E2, x: S |- t1: ’T + ----------------------- + E2, x: S ’ E1 |- ~t1: T +``` +By the first I.H., `E1 ~ E2 |- [x := s]t1: ’T`. By typing, `E2 ’ E1 |- ~[x := s]t1: T`. +Since `[x := s]t = [x := s](~t1) = ~[x := s]t1` we get `[x := s]t: T`. + + +### Preservation Theorem + + 1. If `E1 ~ E2 |- t1: T` and `t1 --> t2` then `E1 ~ E2 |- t2: T`. + 2. If `E1 ’ E2 |- t1: T` and `t1 ==> t2` then `E1 ’ E2 |- t2: T`. + +The proof is by structural induction on evaluation derivations. The proof of (1) is analogous +to the proof for STL, using the substitution lemma for the beta reduction case, with the addition of reduction of quoted terms, which goes as follows: + + - Assume the last rule was + ``` + t1 ==> t2 + ------------- + ’t1 --> ’t2 + ``` + By inversion of typing rules, we must have `T = ’T1` for some type `T1` such that `t1: T1`. + By the second I.H., `t2: T1`, hence `’t2: `T1`. + + +To prove (2): + + - Assume the last rule was `~’u ==> u`. The typing proof of `~’u` must have the form + + ``` + E1 ’ E2 |- u: T + ----------------- + E1 ~ E2 |- ’u: ’T + ----------------- + E1 ’ E2 |- ~’u: T + ``` + Hence, `E1 ’ E2 |- u: T`. + + - Assume the last rule was + ``` + t1 ==> t2 + ------------------------------- + (x: S) => t1 ==> (x: T) => t2 + ``` + By typing inversion, `E1 ' E2, x: S |- t1: T1` for some type `T1` such that `T = S -> T1`. + By the I.H, `t2: T1`. By the typing rule for lambdas the result follows. + + - The context rules for applications are equally straightforward. + + - Assume the last rule was + ``` + t1 ==> t2 + ------------- + ~t1 ==> ~t2 + ``` + By inversion of typing rules, we must have `t1: ’T`. + By the first I.H., `t2: ’T`, hence `~t2: T`. diff --git a/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/staging.md b/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/staging.md new file mode 100644 index 000000000000..1a64d698093c --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/staging.md @@ -0,0 +1,119 @@ +--- +layout: doc-page +title: "Multi-Stage Programming" +--- + +The framework expresses at the same time compile-time metaprogramming and +multi-stage programming. We can think of compile-time metaprogramming as a +two stage compilation process: one that we write the code in top-level splices, +that will be used for code generation (macros) and one that will perform all +necessary evaluations at compile-time and an object program that we will run +as usual. What if we could synthesize code at run-time and offer one extra stage +to the programmer? Then we can have a value of type `Expr[T]` at run-time that we +can essentially treat as a typed-syntax tree that we can either _show_ as a +string (pretty-print) or compile and run. 
If the number of quotes exceeds the +number of splices by more than one (effectively handling at run-time values of type +`Expr[Expr[T]]`, `Expr[Expr[Expr[T]]]`, ...) then we talk about Multi-Stage +Programming. + +The motivation behind this _paradigm_ is to let runtime information affect or +guide code-generation. + +Intuition: The phase in which code is run is determined by the difference +between the number of splice scopes and quote scopes in which it is embedded. + + - If there are more splices than quotes, the code is run at compile-time i.e. + as a macro. In the general case, this means running an interpreter that + evaluates the code, which is represented as a typed abstract syntax tree. The + interpreter can fall back to reflective calls when evaluating an application + of a previously compiled method. If the splice excess is more than one, it + would mean that a macro’s implementation code (as opposed to the code it + expands to) invokes other macros. If macros are realized by interpretation, + this would lead to towers of interpreters, where the first interpreter would + itself interpret an interpreter code that possibly interprets another + interpreter and so on. + + - If the number of splices equals the number of quotes, the code is compiled + and run as usual. + + - If the number of quotes exceeds the number of splices, the code is staged. + That is, it produces a typed abstract syntax tree or type structure at + run-time. A quote excess of more than one corresponds to multi-staged + programming. + +Providing an interpreter for the full language is quite difficult, and it is +even more difficult to make that interpreter run efficiently. So we currently +impose the following restrictions on the use of splices. + + 1. A top-level splice must appear in an inline method (turning that method + into a macro) + + 2. The splice must call a previously compiled + method passing quoted arguments, constant arguments or inline arguments. + + 3. Splices inside splices (but no intervening quotes) are not allowed. + + +## API + +The framework as discussed so far allows code to be staged, i.e. be prepared +to be executed at a later stage. To run that code, there is another method +in class `Expr` called `run`. Note that `$` and `run` both map from `Expr[T]` +to `T` but only `$` is subject to the PCP, whereas `run` is just a normal method. +Run provides a `QuoteContext` that can be used to show the expression in the scope of `run`. +On the other hand `withQuoteContext` provides a `QuoteContext` without evaluating the expression. + +```scala +package scala.quoted.staging + +def run[T](expr: QuoteContext ?=> Expr[T])(using toolbox: Toolbox): T = ... + +def withQuoteContext[T](thunk: QuoteContext ?=> T)(using toolbox: Toolbox): T = ... +``` + +## Create a new Dotty project with staging enabled + +```shell +sbt new lampepfl/dotty-staging.g8 +``` + +From [lampepfl/dotty-staging.g8](https://github.com/lampepfl/dotty-staging.g8). + +It will create a project with the necessary dependencies and some examples. + +In case you prefer to create the project on your own, make sure to define the following dependency in your build.sbt + +```scala +libraryDependencies += "ch.epfl.lamp" %% "dotty-staging" % scalaVersion.value +``` + +and in case you use `dotc`/`dotr` directly, then use the `-with-compiler` flag for both: + +```shell +dotc -with-compiler -d out Test.scala +dotr -with-compiler -classpath out Test +``` + +## Example + +Now take exactly the same example as in [Macros](./macros.md). 
Assume that we +do not want to pass an array statically but generate code at run-time and pass +the value, also at run-time. Note, how we make a future-stage function of type +`Expr[Array[Int] => Int]` in line 6 below. Using `run { ... }` we can evaluate an +expression at runtime. Within the scope of `run` we can also invoke `show` on an expression +to get a source-like representation of the expression. + +```scala +import scala.quoted.staging._ + +// make available the necessary toolbox for runtime code generation +given Toolbox = Toolbox.make(getClass.getClassLoader) + +val f: Array[Int] => Int = run { + val stagedSum: Expr[Array[Int] => Int] = '{ (arr: Array[Int]) => ${sum('arr)}} + println(stagedSum.show) // Prints "(arr: Array[Int]) => { var sum = 0; ... }" + stagedSum +} + +f.apply(Array(1, 2, 3)) // Returns 6 +``` diff --git a/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/tasty-inspect.md b/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/tasty-inspect.md new file mode 100644 index 000000000000..fc2764d8dd18 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/tasty-inspect.md @@ -0,0 +1,59 @@ +--- +layout: doc-page +title: "TASTy Inspection" +--- + +```scala +libraryDependencies += "ch.epfl.lamp" %% "dotty-tasty-inspector" % scalaVersion.value +``` + +TASTy files contain the full typed tree of a class including source positions +and documentation. This is ideal for tools that analyze or extract semantic +information from the code. To avoid the hassle of working directly with the TASTy +file we provide the `TastyInspector` which loads the contents and exposes it +through the TASTy reflect API. + + +## Inspecting TASTy files + +To inspect the TASTy Reflect trees of a TASTy file a consumer can be defined in +the following way. + +```scala +import scala.tasty.Reflection +import scala.tasty.file._ + +class Consumer extends TastyInspector { + final def apply(reflect: Reflection)(root: reflect.Tree): Unit = { + import reflect._ + // Do something with the tree + } +} +``` + +Then the consumer can be instantiated with the following code to get the tree of +the class `foo.Bar` for a foo in the classpath. + +```scala +object Test { + def main(args: Array[String]): Unit = { + InspectTasty("", List("foo.Bar"), new Consumer) + } +} +``` + +Note that if we need to run the main (in the example below defined in an object called `Test`) after +compilation we need to make the compiler available to the runtime: + +```shell +dotc -d out Test.scala +dotr -with-compiler -classpath out Test +``` + + +## Template project +Using sbt version `1.1.5+`, do: +``` +sbt new lampepfl/dotty-tasty-inspector.g8 +``` +in the folder where you want to clone the template. diff --git a/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/tasty-reflect.md b/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/tasty-reflect.md new file mode 100644 index 000000000000..d383c1974b97 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/tasty-reflect.md @@ -0,0 +1,157 @@ +--- +layout: doc-page +title: "TASTy Reflect" +--- + +TASTy Reflect enables inspection and construction of Typed Abstract Syntax Trees +(Typed-AST). It may be used on quoted expressions (`quoted.Expr`) and quoted +types (`quoted.Type`) from [Macros](./macros.md) or on full TASTy files. + +If you are writing macros, please first read [Macros](./macros.md). +You may find all you need without using TASTy Reflect. 
+ + +## API: From quotes and splices to TASTy reflect trees and back + +With `quoted.Expr` and `quoted.Type` we can compute code but also analyze code +by inspecting the ASTs. [Macros](./macros.md) provide the guarantee that the +generation of code will be type-correct. Using TASTy Reflect will break these +guarantees and may fail at macro expansion time, hence additional explicit +checks must be done. + +To provide reflection capabilities in macros we need to add an implicit +parameter of type `scala.quoted.QuoteContext` and import `tasty._` from it in +the scope where it is used. + +```scala +import scala.quoted._ + +inline def natConst(x: => Int): Int = ${natConstImpl('{x})} + +def natConstImpl(x: Expr[Int])(using qctx: QuoteContext): Expr[Int] = { + import qctx.tasty._ + ... +} +``` + +### Extractors + +`import qctx.tasty._` will provide all extractors and methods on TASTy Reflect +trees. For example the `Literal(_)` extractor used below. + +```scala +def natConstImpl(x: Expr[Int])(using qctx: QuoteContext): Expr[Int] = { + import qctx.tasty._ + val xTree: Term = x.unseal + xTree match { + case Inlined(_, _, Literal(Constant(n: Int))) => + if (n <= 0) { + Reporting.error("Parameter must be natural number") + '{0} + } else { + xTree.seal.cast[Int] + } + case _ => + Reporting.error("Parameter must be a known constant") + '{0} + } +} +``` + +To easily know which extractors are needed, the `showExtractors` method on a +`qctx.tasty.Term` returns the string representation of the extractors. + +The method `qctx.tasty.Term.seal` provides a way to go back to a +`quoted.Expr[Any]`. Note that the type is `Expr[Any]`. Consequently, the type +must be set explicitly with a checked `cast` call. If the type does not conform +to it an exception will be thrown at runtime. + +### Obtaining the underlying argument + +A macro can access the tree of the actual argument passed on the call-site. The +`underlyingArgument` method on a `Term` object will give access to the tree +defining the expression passed. For example the code below matches a selection +operation expression passed while calling the `macro` below. + +```scala +inline def macro(param: => Boolean): Unit = ${ macroImpl('param) } + +def macroImpl(param: Expr[Boolean])(using qctx: QuoteContext): Expr[Unit] = { + import qctx.tasty._ + import util._ + + param.unseal.underlyingArgument match { + case t @ Apply(Select(lhs, op), rhs :: Nil) => .. + } +} + +// example +macro(this.checkCondition()) +``` + +### Positions + +The tasty context provides a `rootPosition` value. It corresponds to +the expansion site for macros. The macro authors can obtain various information about that +expansion site. The example below shows how we can obtain position information +such as the start line, the end line or even the source code at the expansion +point. + +```scala +def macroImpl()(qctx: QuoteContext): Expr[Unit] = { + import qctx.tasty._ + val pos = rootPosition + + val path = pos.sourceFile.jpath.toString + val start = pos.start + val end = pos.end + val startLine = pos.startLine + val endLine = pos.endLine + val startColumn = pos.startColumn + val endColumn = pos.endColumn + val sourceCode = pos.sourceCode + ... +``` + +### Tree Utilities + +`scala.tasty.reflect` contains three facilities for tree traversal and +transformation. + +`TreeAccumulator` ties the knot of a traversal. By calling `foldOver(x, tree))` +we can dive into the `tree` node and start accumulating values of type `X` (e.g., +of type List[Symbol] if we want to collect symbols). 
The code below, for +example, collects the pattern variables of a tree. + +```scala +def collectPatternVariables(tree: Tree)(implicit ctx: Context): List[Symbol] = { + val acc = new TreeAccumulator[List[Symbol]] { + def apply(syms: List[Symbol], tree: Tree)(implicit ctx: Context) = tree match { + case Bind(_, body) => apply(tree.symbol :: syms, body) + case _ => foldOver(syms, tree) + } + } + acc(Nil, tree) +} +``` + +A `TreeTraverser` extends a `TreeAccumulator` and performs the same traversal +but without returning any value. Finally a `TreeMap` performs a transformation. + +#### Let + +`scala.tasty.Reflection` also offers a method `let` that allows us +to bind the `rhs` (right-hand side) to a `val` and use it in `body`. Additionally, `lets` binds +the given `terms` to names and allows to use them in the `body`. Their type definitions +are shown below: + +```scala +def let(rhs: Term)(body: Ident => Term): Term = ... + +def lets(terms: List[Term])(body: List[Term] => Term): Term = ... +``` + +## More Examples + +* Start experimenting with TASTy Reflect ([link](https://github.com/nicolasstucki/tasty-reflection-exercise)) + diff --git a/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/toc.md b/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/toc.md new file mode 100644 index 000000000000..edf512f8caa4 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/metaprogramming/toc.md @@ -0,0 +1,44 @@ +--- +layout: doc-page +title: "Overview" +--- + +The following pages introduce the redesign of metaprogramming in Scala. They +introduce the following fundamental facilities: + +1. [Inline](./inline.md) `inline` is a new modifier that guarantees that + a definition will be inlined at the point of use. The primary motivation + behind inline is to reduce the overhead behind function calls and access to + values. The expansion will be performed by the Scala compiler during the + `Typer` compiler phase. As opposed to inlining in some other ecosystems, + inlining in Scala is not merely a request to the compiler but is a + _command_. The reason is that inlining in Scala can drive other compile-time + operations, like inline pattern matching (enabling type-level + programming), macros (enabling compile-time, generative, metaprogramming) and + runtime code generation (multi-stage programming). + +2. [Macros](./macros.md) Macros are built on two well-known fundamental + operations: quotation and splicing. Quotation converts program code to + data, specifically, a (tree-like) representation of this code. It is + expressed as `'{...}` for expressions and as `'[...]` for types. Splicing, + expressed as `${ ... }`, goes the other way: it converts a program's representation + to program code. Together with `inline`, these two abstractions allow + to construct program code programmatically. + +3. [Staging](./staging.md) Where macros construct code at _compile-time_, + staging lets programs construct new code at _runtime_. That way, + code generation can depend not only on static data but also on data available at runtime. This splits the evaluation of the program in two or more phases or ... + stages. Consequently, this method of generative programming is called "Multi-Stage Programming". Staging is built on the same foundations as macros. It uses + quotes and splices, but leaves out `inline`. + +4. [TASTy Reflection](./tasty-reflect.md) Quotations are a "black-box" + representation of code. 
They can be parameterized and composed using + splices, but their structure cannot be analyzed from the outside. Tasty + reflection gives a way to analyze code structure by partly revealing the representation type of a piece of code in a standard API. The representation + type is a form of typed abstract syntax tree, which gives rise to the `TASTy` + moniker. + +5. [TASTy Inspection](./tasty-inspect.md) Typed abstract syntax trees are serialized + in a custom compressed binary format stored in `.tasty` files. TASTy inspection allows + to load these files and analyze their content's tree structure. + diff --git a/scala3doc/dotty-docs/docs/docs/reference/new-types/dependent-function-types-spec.md b/scala3doc/dotty-docs/docs/docs/reference/new-types/dependent-function-types-spec.md new file mode 100644 index 000000000000..7b07b8631a97 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/new-types/dependent-function-types-spec.md @@ -0,0 +1,52 @@ +--- +layout: doc-page +title: "Dependent Function Types - More Details" +--- + +Initial implementation in [#3464](https://github.com/lampepfl/dotty/pull/3464) + +## Syntax + +``` +FunArgTypes ::= InfixType + | ‘(’ [ FunArgType {',' FunArgType } ] ‘)’ + | ‘(’ TypedFunParam {',' TypedFunParam } ‘)’ +TypedFunParam ::= id ‘:’ Type +``` + +Dependent function types associate to the right, e.g. +`(s: S) ⇒ (t: T) ⇒ U` is the same as `(s: S) ⇒ ((t: T) ⇒ U)`. + +## Implementation + +Dependent function types are shorthands for class types that define `apply` +methods with a dependent result type. Dependent function types desugar to +refinement types of `scala.FunctionN`. A dependent function type +`(x1: K1, ..., xN: KN) => R` of arity `N` translates to: + +```scala +FunctionN[K1, ..., Kn, R'] { + def apply(x1: K1, ..., xN: KN): R +} +``` + +where the result type parameter `R'` is the least upper approximation of the +precise result type `R` without any reference to value parameters `x1, ..., xN`. + +The syntax and sementics of anonymous dependent functions is identical to the +one of regular functions. Eta expansion is naturally generalized to produce +dependent function types for methods with dependent result types. + +Dependent functions can be implicit, and generalize to arity `N > 22` in the +same way that other functions do, see [the corresponding +documentation](../dropped-features/limit22.md). + +## Examples + +- [depfuntype.scala](https://github.com/lampepfl/dotty/blob/master/tests/pos/depfuntype.scala) + +- [eff-dependent.scala](https://github.com/lampepfl/dotty/blob/master/tests/run/eff-dependent.scala) + +### Type Checking + +After desugaring no additional typing rules are required for dependent function types. diff --git a/scala3doc/dotty-docs/docs/docs/reference/new-types/dependent-function-types.md b/scala3doc/dotty-docs/docs/docs/reference/new-types/dependent-function-types.md new file mode 100644 index 000000000000..b7003d7c66fc --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/new-types/dependent-function-types.md @@ -0,0 +1,46 @@ +--- +layout: doc-page +title: "Dependent Function Types" +--- + +A dependent function type describes functions where the result type may depend +on the function's parameter values. 
Example: +```scala +trait Entry { type Key; val key: Key } + +def extractKey(e: Entry): e.Key = e.key // a dependent method +val extractor: (e: Entry) => e.Key = extractKey // a dependent function value +// ║ ⇓ ⇓ ⇓ ⇓ ⇓ ⇓ ⇓ ║ +// ║ Dependent ║ +// ║ Function Type ║ +// ╚═══════════════════╝ +``` +Scala already has _dependent methods_, i.e. methods where the result +type refers to some of the parameters of the method. Method +`extractKey` is an example. Its result type, `e.Key` refers to its +parameter `e` (we also say, `e.Key` _depends_ on `e`). But so far it +was not possible to turn such methods into function values, so that +they can be passed as parameters to other functions, or returned as +results. Dependent methods could not be turned into functions simply +because there was no type that could describe them. + +In Dotty this is now possible. The type of the `extractor` value above is + +```scala +(e: Entry) => e.Key +``` + +This type describes function values that take any argument `e` of type +`Entry` and return a result of type `e.Key`. + +Recall that a normal function type `A => B` is represented as an +instance of the `Function1` trait (i.e. `Function1[A, B]`) and +analogously for functions with more parameters. Dependent functions +are also represented as instances of these traits, but they get an additional +refinement. In fact, the dependent function type above is just syntactic sugar for +```scala +Function1[Entry, Entry#Key] { + def apply(e: Entry): e.Key +} +``` +[More details](./dependent-function-types-spec.md) diff --git a/scala3doc/dotty-docs/docs/docs/reference/new-types/implicit-function-types-spec.md b/scala3doc/dotty-docs/docs/docs/reference/new-types/implicit-function-types-spec.md new file mode 100644 index 000000000000..f845cff5dfd2 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/new-types/implicit-function-types-spec.md @@ -0,0 +1,87 @@ +--- +layout: doc-page +title: "Implicit Function Types - More Details" +--- + +Initial implementation in (#1775)[https://github.com/lampepfl/dotty/pull/1775]. + +## Syntax +``` +Type ::= ['implicit'] FunArgTypes '=>' Type + | HkTypeParamClause '=>' Type + | InfixType +Expr ::= ['implicit'] FunParams '=>' Expr +BlockResult ::= ['implicit'] FunParams '=>' Block + | Expr1 +``` + +Implicit function types associate to the right, e.g. +`implicit S ⇒ implicit T ⇒ U` is the same as `implicit S ⇒ (implicit T ⇒ U)`. + +## Implementation + +Implicit function types are shorthands for class types that define `apply` +methods with implicit parameters. Specifically, the `N`-ary function type +`implicit T1, ..., TN ⇒ R` is a shorthand for the class type +`ImplicitFunctionN[T1 , ... , TN, R]`. Such class types are defined in the +Scala library for `N` between 1 and 22 as follows. + +```scala +package scala +trait ImplicitFunctionN[-T1 , ... , -TN, +R] { + def apply(implicit x1: T1 , ... , xN: TN): R +} +``` + +Anonymous implicit functions `implicit (x1: T1, ..., xn: Tn) => e` map +implicit parameters `xi` of types `Ti` to a result given by expression `e`. +The scope of each implicit parameter `xi` is `e`. Implicit parameters must +have pairwise distinct names. + +If the expected type of the anonymous implicit function is of the form +`scala.ImplicitFunctionN[S1, ..., Sn, R]`, the expected type of `e` is `R` and +the type `Ti` of any of the parameters `xi` can be omitted, in which case `Ti += Si` is assumed. 
If the expected type of the anonymous implicit function is +some other type, all implicit parameter types must be explicitly given, and +the expected type of `e` is undefined. The type of the anonymous implicit +function is `scala.ImplicitFunctionN[S1, ...,Sn, T]`, where `T` is the widened +type of `e`. `T` must be equivalent to a type which does not refer to any of +the implicit parameters `xi`. + +The anonymous implicit function is evaluated as the instance creation +expression: +```scala +new scala.ImplicitFunctionN[T1, ..., Tn, T] { + def apply(implicit x1: T1, ..., xn: Tn): T = e +} +``` +In the case of a single untyped implicit parameter, `implicit (x) => e` can be +abbreviated to `implicit x => e`. If an anonymous implicit function `implicit +(x: T) => e` with a single typed parameter appears as the result expression of +a block, it can be abbreviated to `implicit x: T => e` + +A implicit parameter may also be a wildcard represented by an underscore `_`. In +that case, a fresh name for the parameter is chosen arbitrarily. + +Note: The closing paragraph of the [Anonymous Functions section](https://www +.scala-lang.org/files/archive/spec/2.12/06-expressions.html#anonymous- +functions) of the Scala 2.12 is subsumed by implicit function types and should +be removed. + +Anonymous implicit functions `implicit (x1: T1, ..., xn: Tn) => e` are +automatically inserted around any expression `e` whose expected type is +`scala.ImplicitFunctionN[T1, ..., Tn, R]`. This is analogous to the automatic +insertion of `scala.Function0` around expression in by-name argument position. + +Implicit functions generalize to `N > 22` in the same way that functions do, +see [the corresponding +documentation](../dropped-features/limit22.md). + +## Examples + +See the section on Expressiveness from [Simplicitly: foundations and applications of implicit function types](https://dl.acm.org/citation.cfm?id=3158130). + +### Type Checking + +After desugaring no additional typing rules are required for implicit function +types. diff --git a/scala3doc/dotty-docs/docs/docs/reference/new-types/intersection-types-spec.md b/scala3doc/dotty-docs/docs/docs/reference/new-types/intersection-types-spec.md new file mode 100644 index 000000000000..e4b92388e2db --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/new-types/intersection-types-spec.md @@ -0,0 +1,107 @@ +--- +layout: doc-page +title: "Intersection Types - More Details" +--- + +## Syntax + +Syntactically, the type `S & T` is an infix type, where the infix operator is `&`. +The operator `&` is a normal identifier +with the usual precedence and subject to usual resolving rules. +Unless shadowed by another definition, it resolves to the type `scala.&`, +which acts as a type alias to an internal representation of intersection types. + +``` +Type ::= ...| InfixType +InfixType ::= RefinedType {id [nl] RefinedType} +``` + +## Subtyping Rules + +``` +T <: A T <: B +---------------- + T <: A & B + + A <: T +---------------- + A & B <: T + + B <: T +---------------- + A & B <: T +``` + +From the rules above, we can show that `&` is _commutative_: `A & B <: B & A` for any type `A` and `B`. + +``` + B <: B A <: A +---------- ----------- +A & B <: B A & B <: A +--------------------------- + A & B <: B & A +``` + +In another word, `A & B` is the same type as `B & A`, in the sense that the two types +have the same values and are subtypes of each other. 
+ +If `C` is a type constructor, then `C[A] & C[B]` can be simplified using the following three rules: + +- If `C` is covariant, `C[A] & C[B] ~> C[A & B]` +- If `C` is contravariant, `C[A] & C[B] ~> C[A | B]` +- If `C` is non-variant, emit a compile error + +When `C` is covariant, `C[A & B] <: C[A] & C[B]` can be derived: + +``` + A <: A B <: B + ---------- --------- + A & B <: A A & B <: B +--------------- ----------------- +C[A & B] <: C[A] C[A & B] <: C[B] +------------------------------------------ + C[A & B] <: C[A] & C[B] +``` + +When `C` is contravariant, `C[A | B] <: C[A] & C[B]` can be derived: + +``` + A <: A B <: B + ---------- --------- + A <: A | B B <: A | B +------------------- ---------------- +C[A | B] <: C[A] C[A | B] <: C[B] +-------------------------------------------------- + C[A | B] <: C[A] & C[B] +``` + +## Erasure + +The erased type for `S & T` is the erased _glb_ (greatest lower bound) of the +erased type of `S` and `T`. The rules for erasure of intersection types are given +below in pseudocode: + +``` +|S & T| = glb(|S|, |T|) + +glb(JArray(A), JArray(B)) = JArray(glb(A, B)) +glb(JArray(T), _) = JArray(T) +glb(_, JArray(T)) = JArray(T) +glb(A, B) = A if A extends B +glb(A, B) = B if B extends A +glb(A, _) = A if A is not a trait +glb(_, B) = B if B is not a trait +glb(A, _) = A // use first +``` + +In the above, `|T|` means the erased type of `T`, `JArray` refers to +the type of Java Array. + +See also: `TypeErasure#erasedGlb` + +## Relationship with Compound Type (`with`) + +Intersection types `A & B` replace compound types `A with B` in Scala 2. For the +moment, the syntax `A with B` is still allowed and interpreted as `A & B`, but +its usage as a type (as opposed to in a `new` or `extends` clause) will be +deprecated and removed in the future. diff --git a/scala3doc/dotty-docs/docs/docs/reference/new-types/intersection-types.md b/scala3doc/dotty-docs/docs/docs/reference/new-types/intersection-types.md new file mode 100644 index 000000000000..55c3199206e5 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/new-types/intersection-types.md @@ -0,0 +1,69 @@ +--- +layout: doc-page +title: "Intersection Types" +--- + +Used on types, the `&` operator creates an intersection type. + +## Type Checking + +The type `S & T` represents values that are of the type `S` and `T` at the same time. + +```scala +trait Resettable { + def reset(): Unit +} +trait Growable[T] { + def add(t: T): Unit +} +def f(x: Resettable & Growable[String]) = { + x.reset() + x.add("first") +} +``` + +The parameter `x` is required to be _both_ a `Resettable` and a +`Growable[String]`. + +The members of an intersection type `A & B` are all the members of `A` and all +the members of `B`. For instance `Resettable & Growable[String]` +has member methods `reset` and `add`. + +`&` is _commutative_: `A & B` is the same type as `B & A`. + +If a member appears in both `A` and `B`, its type in `A & B` is the intersection +of its type in `A` and its type in `B`. For instance, assume the definitions: + +```scala +trait A { + def children: List[A] +} +trait B { + def children: List[B] +} +val x: A & B = new C +val ys: List[A & B] = x.children +``` + +The type of `children` in `A & B` is the intersection of `children`'s +type in `A` and its type in `B`, which is `List[A] & List[B]`. This +can be further simplified to `List[A & B]` because `List` is +covariant. 
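+
+As a small illustration (the method name `firstChild` below is made up), the
+simplified member type means that an element obtained from `children` on an
+`A & B` value can itself be used as an `A & B` again:
+
+```scala
+def firstChild(x: A & B): A & B =
+  x.children.head // children: List[A & B], so head: A & B
+```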
+ +One might wonder how the compiler could come up with a definition for +`children` of type `List[A & B]` since what is given are `children` +definitions of type `List[A]` and `List[B]`. The answer is the compiler does not +need to. `A & B` is just a type that represents a set of requirements for +values of the type. At the point where a value is _constructed_, one +must make sure that all inherited members are correctly defined. +So if one defines a class `C` that inherits `A` and `B`, one needs +to give at that point a definition of a `children` method with the required type. + +```scala +class C extends A with B { + def children: List[A & B] = ??? +} +``` + + +[More details](./intersection-types-spec.md) diff --git a/scala3doc/dotty-docs/docs/docs/reference/new-types/match-types.md b/scala3doc/dotty-docs/docs/docs/reference/new-types/match-types.md new file mode 100644 index 000000000000..2126ffbd8467 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/new-types/match-types.md @@ -0,0 +1,249 @@ +--- +layout: doc-page +title: "Match Types" +--- + +A match type reduces to one of its right-hand sides, depending on the type of +its scrutinee. For example: + +```scala +type Elem[X] = X match { + case String => Char + case Array[t] => t + case Iterable[t] => t +} +``` + +This defines a type that reduces as follows: + +```scala +Elem[String] =:= Char +Elem[Array[Int]] =:= Int +Elem[List[Float]] =:= Float +Elem[Nil.type] =:= Nothing +``` + +Here `=:=` is understood to mean that left and right hand sides are mutually +subtypes of each other. + +In general, a match type is of the form + +```scala +S match { P1 => T1 ... Pn => Tn } +``` + +where `S`, `T1`, ..., `Tn` are types and `P1`, ..., `Pn` are type patterns. Type +variables in patterns start with a lower case letter, as usual. + +Match types can form part of recursive type definitions. Example: + +```scala +type LeafElem[X] = X match { + case String => Char + case Array[t] => LeafElem[t] + case Iterable[t] => LeafElem[t] + case AnyVal => X +} +``` + +Recursive match type definitions can also be given an upper bound, like this: + +```scala +type Concat[Xs <: Tuple, +Ys <: Tuple] <: Tuple = Xs match { + case Unit => Ys + case x *: xs => x *: Concat[xs, Ys] +} +``` + +In this definition, every instance of `Concat[A, B]`, whether reducible or not, +is known to be a subtype of `Tuple`. This is necessary to make the recursive +invocation `x *: Concat[xs, Ys]` type check, since `*:` demands a `Tuple` as its +right operand. + +## Dependent Typing + +Match types can be used to define dependently typed methods. For instance, here +is the value level counterpart to the `LeafElem` type defined above (note the +use of the match type as the return type): + +```scala +def leafElem[X](x: X): LeafElem[X] = x match { + case x: String => x.charAt(0) + case x: Array[t] => leafElem(x(9)) + case x: Iterable[t] => leafElem(x.next()) + case x: AnyVal => x +} +``` + +This special mode of typing for match expressions is only used when the +following conditions are met: + +1. The match expression patterns do not have guards +2. The match expression scrutinee's type is a subtype of the match type + scrutinee's type +3. The match expression and the match type have the same number of cases +4. 
The match expression patterns are all [Typed Patterns](https://scala-lang.org/files/archive/spec/2.13/08-pattern-matching.html#typed-patterns), + and these types are `=:=` to their corresponding type patterns in the match + type + +## Representation of Match Types + +The internal representation of a match type +``` +S match { P1 => T1 ... Pn => Tn } +``` +is `Match(S, C1, ..., Cn) <: B` where each case `Ci` is of the form +``` +[Xs] =>> P => T +``` + +Here, `[Xs]` is a type parameter clause of the variables bound in pattern `Pi`. +If there are no bound type variables in a case, the type parameter clause is +omitted and only the function type `P => T` is kept. So each case is either a +unary function type or a type lambda over a unary function type. + +`B` is the declared upper bound of the match type, or `Any` if no such bound is +given. We will leave it out in places where it does not matter for the +discussion. The scrutinee, bound, and pattern types must all be first-order +types. + +## Match Type Reduction + +Match type reduction follows the semantics of match expressions, that is, a +match type of the form `S match { P1 => T1 ... Pn => Tn }` reduces to `Ti` if +and only if `s: S match { _: P1 => T1 ... _: Pn => Tn }` evaluates to a value of +type `Ti` for all `s: S`. + +The compiler implements the following reduction algorithm: + +- If the scrutinee type `S` is an empty set of values (such as `Nothing` or + `String & Int`), do not reduce. +- Sequentially consider each pattern `Pi` + - If `S <: Pi` reduce to `Ti`. + - Otherwise, try constructing a proof that `S` and `Pi` are disjoint, or, in + other words, that no value `s` of type `S` is also of type `Pi`. + - If such proof is found, proceed to the next case (`Pi+1`), otherwise, do + not reduce. + +Disjointness proofs rely on the following properties of Scala types: + +1. Single inheritance of classes +2. Final classes cannot be extended +3. Constant types with distinct values are nonintersecting + +Type parameters in patterns are minimally instantiated when computing `S <: Pi`. +An instantiation `Is` is _minimal_ for `Xs` if all type variables in `Xs` that +appear covariantly and nonvariantly in `Is` are as small as possible and all +type variables in `Xs` that appear contravariantly in `Is` are as large as +possible. Here, "small" and "large" are understood with respect to `<:`. + +For simplicity, we have omitted constraint handling so far. The full formulation +of subtyping tests describes them as a function from a constraint and a pair of +types to either _success_ and a new constraint or _failure_. In the context of +reduction, the subtyping test `S <: [Xs := Is] P` is understood to leave the +bounds of all variables in the input constraint unchanged, i.e. existing +variables in the constraint cannot be instantiated by matching the scrutinee +against the patterns. + +## Subtyping Rules for Match Types + +The following rules apply to match types. For simplicity, we omit environments +and constraints. + +1. The first rule is a structural comparison between two match types: + + ``` + S match { P1 => T1 ... Pm => Tm } <: T match { Q1 => U1 ... Qn => Un } + ``` + + if + + ``` + S =:= T, m >= n, Pi =:= Qi and Ti <: Ui for i in 1..n + ``` + + I.e. scrutinees and patterns must be equal and the corresponding bodies must + be subtypes. No case re-ordering is allowed, but the subtype can have more + cases than the supertype. + +2. The second rule states that a match type and its redux are mutual subtypes. + + ``` + S match { P1 => T1 ... 
Pn => Tn } <: U + U <: S match { P1 => T1 ... Pn => Tn } + ``` + + if + + ``` + S match { P1 => T1 ... Pn => Tn } reduces-to U + ``` + +3. The third rule states that a match type conforms to its upper bound: + + ``` + (S match { P1 => T1 ... Pn => Tn } <: B) <: B + ``` + +## Termination + +Match type definitions can be recursive, which means that it's possible to run +into an infinite loop while reducing match types. + +Since reduction is linked to subtyping, we already have a cycle detection +mechanism in place. As a result, the following will already give a reasonable +error message: + +```scala +type L[X] = X match { + case Int => L[X] +} +def g[X]: L[X] = ??? +``` + +```scala + | val x: Int = g[Int] + | ^ + |Recursion limit exceeded. + |Maybe there is an illegal cyclic reference? + |If that's not the case, you could also try to increase the stacksize using the -Xss JVM option. + |A recurring operation is (inner to outer): + | + | subtype LazyRef(Test.L[Int]) <:< Int +``` + +Internally, `dotc` detects these cycles by turning selected stack overflows into +type errors. If there is a stack overflow during subtyping, the exception will +be caught and turned into a compile-time error that indicates a trace of the +subtype tests that caused the overflow without showing a full stack trace. + +## Variance Laws for Match Types +NOTE: This section does not reflect the current implementation. + +Within a match type `Match(S, Cs) <: B`, all occurrences of type variables count +as covariant. By the nature of the cases `Ci` this means that occurrences in +pattern position are contravarant (since patterns are represented as function +type arguments). + +## Related Work + +Match types have similarities with +[closed type families](https://wiki.haskell.org/GHC/Type_families) in Haskell. +Some differences are: + +- Subtyping instead of type equalities. +- Match type reduction does not tighten the underlying constraint, whereas type + family reduction does unify. This difference in approach mirrors the + difference between local type inference in Scala and global type inference in + Haskell. + +Match types are also similar to Typescript's +[conditional types](https://github.com/Microsoft/TypeScript/pull/21316). The +main differences here are: + + - Conditional types only reduce if both the scrutinee and pattern are ground, + whereas match types also work for type parameters and abstract types. + - Match types can bind variables in type patterns. + - Match types support direct recursion. + - Conditional types distribute through union types. + diff --git a/scala3doc/dotty-docs/docs/docs/reference/new-types/type-lambdas-spec.md b/scala3doc/dotty-docs/docs/docs/reference/new-types/type-lambdas-spec.md new file mode 100644 index 000000000000..e17662a174a7 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/new-types/type-lambdas-spec.md @@ -0,0 +1,118 @@ +--- +layout: doc-page +title: "Type Lambdas - More Details" +--- + +## Syntax + +``` +Type ::= ... | TypeParamClause ‘=>>’ Type +TypeParamClause ::= ‘[’ TypeParam {‘,’ TypeParam} ‘]’ +TypeParam ::= {Annotation} (id [HkTypeParamClause] | ‘_’) TypeBounds +TypeBounds ::= [‘>:’ Type] [‘<:’ Type] +``` + +### Type Checking + +A type lambda such as `[X] =>> F[X]` defines a function from types to types. The parameter(s) may carry bounds. +If a parameter is bounded, as in `[X >: L <: U] =>> F[X]` it is checked that arguments to the parameters conform to the bounds `L` and `U`. +Only the upper bound `U` can be F-bounded, i.e. `X` can appear in it. 
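+
+For instance, a small sketch of these checks (the aliases below are illustrative, not part of the spec):
+
+```scala
+// A type lambda whose parameter carries an upper bound;
+// arguments are checked against that bound.
+type Boxed = [X <: AnyVal] =>> List[X]
+
+type B1 = Boxed[Int]        // ok: Int conforms to the bound AnyVal
+// type B2 = Boxed[String]  // error: String does not conform to the bound AnyVal
+
+// The upper bound may be F-bounded, i.e. mention the parameter itself.
+type Sorted = [X <: Comparable[X]] =>> List[X]
+type S1 = Sorted[Integer]   // ok: Integer <: Comparable[Integer]
+```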
+ +## Subtyping Rules + +Assume two type lambdas +```scala +type TL1 = [X >: L1 <: U1] =>> R1 +type TL2 = [X >: L2 <: U2] =>> R2 +``` +Then `TL1 <: TL2`, if + + - the type interval `L2..U2` is contained in the type interval `L1..U1` (i.e. +`L1 <: L2` and `U2 <: U1`), + - `R1 <: R2` + +Here we have relied on alpha renaming to match the two bound types `X`. + +A partially applied type constructor such as `List` is assumed to be equivalent to +its eta expansion. I.e, `List = [X] =>> List[X]`. This allows type constructors to be compared with type lambdas. + +## Relationship with Parameterized Type Definitions + +A parameterized type definition +```scala +type T[X] = R +``` +is regarded as a shorthand for an unparameterized definition with a type lambda as right-hand side: +```scala +type T = [X] =>> R +``` +If the type definition carries `+` or `-` variance annotations, +it is checked that the variance annotations are satisfied by the type lambda. +For instance, +```scala +type F2[A, +B] = A => B +``` +expands to +```scala +type F2 = [A, B] =>> A => B +``` +and at the same time it is checked that the parameter `B` appears covariantly in `A => B`. + +A parameterized abstract type +```scala +type T[X] >: L <: U +``` +is regarded as shorthand for an unparameterized abstract type with type lambdas as bounds. +```scala +type T >: ([X] =>> L) <: ([X] =>> U) +``` +However, if `L` is `Nothing` it is not parameterized, since `Nothing` is treated as a bottom type for all kinds. For instance, +```scala +type T[X] <: X => X +``` +is expanded to +```scala +type T >: Nothing <: ([X] =>> X => X) +``` +instead of +```scala +type T >: ([X] =>> Nothing) <: ([X] =>> X => X) +``` + +The same expansions apply to type parameters. E.g. +```scala +[F[X] <: Coll[X]] +``` +is treated as a shorthand for +```scala +[F >: Nothing <: [X] =>> Coll[X]] +``` +Abstract types and opaque type aliases remember the variances they were created with. So the type +```scala +def F2[-A, +B] +``` +is known to be contravariant in `A` and covariant in `B` and can be instantiated only +with types that satisfy these constraints. Likewise +```scala +opaque type O[X] = List[X] +``` +`O` is known to be invariant (and not covariant, as its right hand side would suggest). On the other hand, a transparent alias +```scala +type O2[X] = List[X] +``` +would be treated as covariant, `X` is used covariantly on its right-hand side. + +**Note**: The decision to treat `Nothing` as universal bottom type is provisional, and might be changed after further discussion. + +**Note**: Scala 2 and 3 differ in that Scala 2 also treats `Any` as universal top-type. This is not done in Scala 3. See also the discussion on [kind polymorphism](../other-new-features/kind-polymorphism.md) + +## Curried Type Parameters + +The body of a type lambda can again be a type lambda. Example: +```scala +type TL = [X] =>> [Y] =>> (X, Y) +``` +Currently, no special provision is made to infer type arguments to such curried type lambdas. This is left for future work. + + + diff --git a/scala3doc/dotty-docs/docs/docs/reference/new-types/type-lambdas.md b/scala3doc/dotty-docs/docs/docs/reference/new-types/type-lambdas.md new file mode 100644 index 000000000000..0b69b40a4af6 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/new-types/type-lambdas.md @@ -0,0 +1,16 @@ +--- +layout: doc-page +title: "Type Lambdas" +--- + +A _type lambda_ lets one express a higher-kinded type directly, without +a type definition. 
+ +```scala +[X, Y] =>> Map[Y, X] +``` + +For instance, the type above defines a binary type constructor, which maps arguments `X` and `Y` to `Map[Y, X]`. +Type parameters of type lambdas can have bounds, but they cannot carry `+` or `-` variance annotations. + +[More details](./type-lambdas-spec.md) diff --git a/scala3doc/dotty-docs/docs/docs/reference/new-types/union-types-spec.md b/scala3doc/dotty-docs/docs/docs/reference/new-types/union-types-spec.md new file mode 100644 index 000000000000..4888f0bf3880 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/new-types/union-types-spec.md @@ -0,0 +1,160 @@ +--- +layout: doc-page +title: "Union Types - More Details" +--- + +## Syntax + +Syntactically, unions follow the same rules as intersections, but have a lower precedence, see +[Intersection Types - More Details](./intersection-types-spec.md). + +### Interaction with pattern matching syntax +`|` is also used in pattern matching to separate pattern alternatives and has +lower precedence than `:` as used in typed patterns, this means that: + +```scala +case _: A | B => ... +``` +is still equivalent to: +```scala +case (_: A) | B => ... +``` +and not to: +```scala +case _: (A | B) => ... +``` + +## Subtyping Rules + +- `A` is always a subtype of `A | B` for all `A`, `B`. +- If `A <: C` and `B <: C` then `A | B <: C` +- Like `&`, `|` is commutative and associative: + ```scala + A | B =:= B | A + A | (B | C) =:= (A | B) | C + ``` +- `&` is distributive over `|`: + ```scala + A & (B | C) =:= A & B | A & C + ``` + +From these rules it follows that the _least upper bound_ (lub) of a set of types +is the union of these types. This replaces the +[definition of least upper bound in the Scala 2 specification](https://www.scala-lang.org/files/archive/spec/2.12/03-types.html#least-upper-bounds-and-greatest-lower-bounds). + +## Motivation + +The primary reason for introducing union types in Scala is that they allow us to +guarantee that for every set of types, we can always form a finite lub. This is +both useful in practice (infinite lubs in Scala 2 were approximated in an ad-hoc +way, resulting in imprecise and sometimes incredibly long types) and in theory +(the type system of Scala 3 is based on the +[DOT calculus](https://infoscience.epfl.ch/record/227176/files/soundness_oopsla16.pdf), +which has union types). + +Additionally, union types are a useful construct when trying to give types to existing +dynamically typed APIs, this is why they're [an integral part of TypeScript](https://www.typescriptlang.org/docs/handbook/advanced-types.html#union-types) +and have even been [partially implemented in Scala.js](https://github.com/scala-js/scala-js/blob/master/library/src/main/scala/scala/scalajs/js/Union.scala). + +## Join of a union type + +In some situation described below, a union type might need to be widened to +a non-union type, for this purpose we define the _join_ of a union type `T1 | +... | Tn` as the smallest intersection type of base class instances of +`T1`,...,`Tn`. Note that union types might still appear as type arguments in the +resulting type, this guarantees that the join is always finite. + +### Example + +Given + +```scala +trait C[+T] +trait D +trait E +class A extends C[A] with D +class B extends C[B] with D with E +``` + +The join of `A | B` is `C[A | B] & D` + +## Type inference + +When inferring the result type of a definition (`val`, `var`, or `def`) and the +type we are about to infer is a union type, then we replace it by its join. 
+Similarly, when instantiating a type argument, if the corresponding type
+parameter is not upper-bounded by a union type and the type we are about to
+instantiate is a union type, we replace it by its join. This mirrors the
+treatment of singleton types, which are also widened to their underlying type
+unless explicitly specified. The motivation is the same: inferring types
+which are "too precise" can lead to unintuitive typechecking issues later on.
+
+Note: Since this behavior limits the usability of union types, it might
+be changed in the future, for example by not widening unions that have been
+explicitly written down by the user and not inferred, or by not widening a type
+argument when the corresponding type parameter is covariant. See
+[#2330](https://github.com/lampepfl/dotty/pull/2330) and
+[#4867](https://github.com/lampepfl/dotty/issues/4867) for further discussions.
+
+### Example
+
+```scala
+import scala.collection.mutable.ListBuffer
+val x = ListBuffer(Right("foo"), Left(0))
+val y: ListBuffer[Either[Int, String]] = x
+```
+
+This code typechecks because the inferred type argument to `ListBuffer` in the
+right-hand side of `x` was `Left[Int, Nothing] | Right[Nothing, String]`, which
+was widened to `Either[Int, String]`. If the compiler hadn't done this widening,
+the last line wouldn't typecheck because `ListBuffer` is invariant in its
+argument.
+
+## Members
+
+The members of a union type are the members of its join.
+
+### Example
+
+The following code does not typecheck, because `hello` is not a member of
+`AnyRef`, which is the join of `A | B`.
+
+```scala
+trait A { def hello: String }
+trait B { def hello: String }
+
+def test(x: A | B) = x.hello // error: value `hello` is not a member of A | B
+```
+
+On the other hand, the following would be allowed:
+
+```scala
+trait C { def hello: String }
+trait A extends C with D
+trait B extends C with E
+
+def test(x: A | B) = x.hello // ok as `hello` is a member of the join of A | B which is C
+```
+
+## Exhaustivity checking
+
+If the selector of a pattern match is a union type, the match is considered
+exhaustive if all parts of the union are covered.
+
+## Erasure
+
+The erased type for `A | B` is the _erased least upper bound_ of the erased
+types of `A` and `B`. Quoting from the documentation of `TypeErasure#erasedLub`,
+the erased lub is computed as follows:
+
+- if both arguments are arrays of objects, an array of the erased lub of the element types
+- if both arguments are arrays of the same primitive type, an array of this primitive type
+- if one argument is an array of primitives and the other is an array of objects, Object
+- if one argument is an array, Object
+- otherwise a common superclass or trait S of the argument classes, with the
+  following two properties:
+    * S is minimal: no other common superclass or trait derives from S
+    * S is last: in the linearization of the first argument type `|A|`
+      there are no minimal common superclasses or traits that come after S.
+  The reason to pick last is that we prefer classes over traits that way,
+  which leads to more predictable bytecode and (?) faster dynamic dispatch.
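+
+For example, here is a small sketch of the last rule (the class names are made up):
+
+```scala
+trait T
+class Base
+class A extends Base with T
+class B extends Base with T
+
+// Base and T are both minimal common supertypes of A and B, but Base comes
+// last in the linearization of A, so the parameter erases to Base
+// (classes are preferred over traits).
+def f(x: A | B): String = x.toString
+```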
diff --git a/scala3doc/dotty-docs/docs/docs/reference/new-types/union-types.md b/scala3doc/dotty-docs/docs/docs/reference/new-types/union-types.md new file mode 100644 index 000000000000..0a956a93b34b --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/new-types/union-types.md @@ -0,0 +1,47 @@ +--- +layout: doc-page +title: "Union Types" +--- + +A union type `A | B` has as values all values of type `A` and also all values of type `B`. + + +```scala +case class UserName(name: String) +case class Password(hash: Hash) + +def help(id: UserName | Password) = { + val user = id match { + case UserName(name) => lookupName(name) + case Password(hash) => lookupPassword(hash) + } + ... +} +``` + +Union types are duals of intersection types. `|` is _commutative_: +`A | B` is the same type as `B | A`. + +The compiler will assign a union type to an expression only if such a +type is explicitly given. This can be seen in the following REPL transcript: + +```scala +scala> val password = Password(123) +val password: Password = Password(123) + +scala> val name = UserName("Eve") +val name: UserName = UserName(Eve) + +scala> if (true) name else password +val res2: Object & Product = UserName(Eve) + +scala> val either: Password | UserName = if (true) name else password +val either: Password | UserName = UserName(Eve) +``` + +The type of `res2` is `Object & Product`, which is a supertype of +`UserName` and `Password`, but not the least supertype `Password | +UserName`. If we want the least supertype, we have to give it +explicitly, as is done for the type of `either`. + +[More details](./union-types-spec.md) diff --git a/scala3doc/dotty-docs/docs/docs/reference/other-new-features/control-syntax.md b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/control-syntax.md new file mode 100644 index 000000000000..77e697d6ed26 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/control-syntax.md @@ -0,0 +1,42 @@ +--- +layout: doc-page +title: New Control Syntax +--- + +Scala 3 has a new "quiet" syntax for control expressions that does not rely on +enclosing the condition in parentheses, and also allows to drop parentheses or braces +around the generators of a `for`-expression. Examples: +```scala +if x < 0 then + "negative" +else if x == 0 + "zero" +else + "positive" + +if x < 0 then -x else x + +while x >= 0 do x = f(x) + +for x <- xs if x > 0 +yield x * x + +for + x <- xs + y <- ys +do + println(x + y) +``` + +The rules in detail are: + + - The condition of an `if`-expression can be written without enclosing parentheses if it is followed by a `then` + or some [indented](./indentation.html) code on a following line. + - The condition of a `while`-loop can be written without enclosing parentheses if it is followed by a `do`. + - The enumerators of a `for`-expression can be written without enclosing parentheses or braces if they are followed by a `yield` or `do`. + - A `do` in a `for`-expression expresses a `for`-loop. + +### Rewrites + +The Dotty compiler can rewrite source code from old syntax to new syntax and back. +When invoked with options `-rewrite -new-syntax` it will rewrite from old to new syntax, dropping parentheses and braces in conditions and enumerators. When invoked with options `-rewrite -old-syntax` it will rewrite in the reverse direction, inserting parentheses and braces as needed. 
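+
+For example, the rewrites are expected to relate code like the following two definitions (a sketch of the source-level effect, not actual compiler output):
+
+```scala
+// Old syntax, accepted as before (and produced by `-rewrite -old-syntax`):
+def max1(x: Int, y: Int): Int = if (x < y) y else x
+
+// New syntax, as produced by `-rewrite -new-syntax`:
+def max2(x: Int, y: Int): Int = if x < y then y else x
+```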
diff --git a/scala3doc/dotty-docs/docs/docs/reference/other-new-features/creator-applications.md b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/creator-applications.md new file mode 100644 index 000000000000..482e858ff6ed --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/creator-applications.md @@ -0,0 +1,43 @@ +--- +layout: doc-page +title: "Creator Applications" +--- + +Creator applications allow using simple function call syntax to create instances +of a class, even if there is no apply method implemented. Example: +```scala +class StringBuilder(s: String) { + def this() = this("") +} + +StringBuilder("abc") // same as new StringBuilder("abc") +StringBuilder() // same as new StringBuilder() +``` +Creator applications generalize a functionality provided so far only for case classes, but the mechanism how this is achieved is different. Instead of generating an apply method, the compiler adds a new possible interpretation to a function call `f(args)`. The previous rules are: + +Given a function call `f(args)`, + + - if `f` is a method applicable to `args`, typecheck `f(args)` unchanged, + - otherwise, if `f` has an `apply` method applicable to `args` as a member, continue with `f.apply(args)`, + - otherwise, if `f` is of the form `p.m` and there is an implicit conversion `c` applicable to `p` so that `c(p).m` is applicable to `args`, continue with `c(p).m(args)` + +There's now a fourth rule following these rules: + + - otherwise, if `f` is syntactically a stable identifier, and `new f` where `f` is interpreted as a type identifier is applicable to `args`, continue with `new f(args)`. + + Analogously, the possible interpretations of a function call with type arguments `f[targs]` are augmented with the following interpretation as a final fallback: + + - if `f` is syntactically a stable identifier, and `new f[targs]` where `f` is interpreted as a type identifier is well-typed, continue with `new f[targs]`. + +### Motivation + +Leaving out `new` hides an implementation detail and makes code more pleasant to read. Even though it requires a new rule, it will likely increase the perceived regularity of the language, since case classes already provide function call creation syntax (and are often defined for this reason alone). + +### Discussion + +An alternative design would auto-generate `apply` methods for normal classes, in the same way it is done now for case classes. This design was tried but abandoned since it +caused numerous problems, including + + - overloading ambiguities + - overriding errors + - shadowing of user-defined `apply` methods by more specific auto-generated ones. diff --git a/scala3doc/dotty-docs/docs/docs/reference/other-new-features/explicit-nulls.md b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/explicit-nulls.md new file mode 100644 index 000000000000..2ce5a55dc307 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/explicit-nulls.md @@ -0,0 +1,457 @@ +--- +layout: doc-page +title: "Explicit Nulls" +--- + +Explicit nulls is an opt-in feature that modifies the Scala type system, which makes reference types +(anything that extends `AnyRef`) _non-nullable_. 
+
+This means the following code will no longer typecheck:
+```
+val x: String = null // error: found `Null`, but required `String`
+```
+
+Instead, to mark a type as nullable we use a [union type](https://dotty.epfl.ch/docs/reference/new-types/union-types.html):
+
+```
+val x: String|Null = null // ok
+```
+
+Explicit nulls are enabled via a `-Yexplicit-nulls` flag.
+
+Read on for details.
+
+## New Type Hierarchy
+
+When explicit nulls are enabled, the type hierarchy changes so that `Null` is only a subtype of
+`Any`, as opposed to every reference type.
+
+This is the new type hierarchy:
+![Type Hierarchy for Explicit Nulls](../../../images/explicit-nulls/explicit-nulls-type-hierarchy.png)
+
+After erasure, `Null` remains a subtype of all reference types (as forced by the JVM).
+
+## Unsoundness
+
+The new type system is unsound with respect to `null`. This means there are still instances where an expression has a non-nullable type like `String`, but its value is actually `null`.
+
+The unsoundness happens because uninitialized fields in a class start out as `null`:
+```scala
+class C {
+  val f: String = foo(f)
+  def foo(f2: String): String = f2
+}
+val c = new C()
+// c.f == null
+```
+
+The unsoundness above can be caught by the compiler with the option `-Ycheck-init`.
+More details can be found in [safe initialization](./safe-initialization.md).
+
+## Equality
+
+We don't allow the double-equal (`==` and `!=`) and reference (`eq` and `ne`) comparison between
+`AnyRef` and `Null` anymore, since a variable with a non-nullable type cannot have `null` as value.
+`null` can only be compared with `Null`, a nullable union (`T | Null`), or `Any`.
+
+If, for some reason, we really want to compare `null` with non-null values, we have to provide a type hint (e.g. `: Any`).
+
+```scala
+val x: String = ???
+val y: String | Null = ???
+
+x == null // error: Values of types String and Null cannot be compared with == or !=
+x eq null // error
+"hello" == null // error
+
+y == null // ok
+y == x // ok
+
+(x: String | Null) == null // ok
+(x: Any) == null // ok
+```
+
+## Working with Null
+
+To make working with nullable values easier, we propose adding a few utilities to the standard library.
+So far, we have found the following useful:
+
+  - An extension method `.nn` to "cast away" nullability
+
+    ```scala
+    def[T] (x: T|Null) nn: x.type & T =
+      if (x == null) throw new NullPointerException("tried to cast away nullability, but value is null")
+      else x.asInstanceOf[x.type & T]
+    ```
+
+    This means that given `x: String|Null`, `x.nn` has type `String`, so we can call all the
+    usual methods on it. Of course, `x.nn` will throw an NPE if `x` is `null`.
+
+    Don't use `.nn` on mutable variables directly, because it may introduce an unknown type into the type of the variable.
+
+## Java Interop
+
+The compiler can load Java classes in two ways: from source or from bytecode. In either case,
+when a Java class is loaded, we "patch" the type of its members to reflect that Java types
+remain implicitly nullable.
+
+Specifically, we patch
+* the type of fields
+* the argument types and return types of methods
+
+`UncheckedNull` is an alias for `Null` with magic properties (see [below](#uncheckednull)). We illustrate the rules with the following examples:
+
+  * The first two rules are easy: we nullify reference types but not value types.
+
+    ```java
+    class C {
+      String s;
+      int x;
+    }
+    ```
+    ==>
+    ```scala
+    class C {
+      val s: String|UncheckedNull
+      val x: Int
+    }
+    ```
+
+  * We nullify type parameters because in Java a type parameter is always nullable, so the following code compiles.
+
+    ```java
+    class C<T> { T foo() { return null; } }
+    ```
+    ==>
+    ```scala
+    class C[T] { def foo(): T|UncheckedNull }
+    ```
+
+    Notice this rule is sometimes too conservative, as witnessed by
+
+    ```scala
+    class InScala {
+      val c: C[Boolean] = ???  // C as above
+      val b: Boolean = c.foo() // no longer typechecks, since foo now returns Boolean|UncheckedNull
+    }
+    ```
+
+  * Not nullifying the type arguments of Java-defined generic classes reduces the number of redundant nullable types we need to add. Consider
+
+    ```java
+    class Box<T> { T get(); }
+    class BoxFactory<T> { Box<T> makeBox(); }
+    ```
+    ==>
+    ```scala
+    class Box[T] { def get(): T|UncheckedNull }
+    class BoxFactory[T] { def makeBox(): Box[T]|UncheckedNull }
+    ```
+
+    Suppose we have a `BoxFactory[String]`. Notice that calling `makeBox()` on it returns a
+    `Box[String]|UncheckedNull`, not a `Box[String|UncheckedNull]|UncheckedNull`. This seems at first
+    glance unsound ("What if the box itself has `null` inside?"), but is sound because calling
+    `get()` on a `Box[String]` returns a `String|UncheckedNull`.
+
+    Notice that we need to patch _all_ Java-defined classes that transitively appear in the
+    argument or return type of a field or method accessible from the Scala code being compiled.
+    Absent crazy reflection magic, we think that all such Java classes _must_ be visible to
+    the Typer in the first place, so they will be patched.
+
+  * We will append `UncheckedNull` to the type arguments if the generic class is defined in Scala.
+
+    ```java
+    class BoxFactory<T> {
+      Box<T> makeBox();                    // Box is Scala-defined
+      List<Box<List<T>>> makeCrazyBoxes(); // List is Java-defined
+    }
+    ```
+    ==>
+    ```scala
+    class BoxFactory[T] {
+      def makeBox(): Box[T | UncheckedNull] | UncheckedNull
+      def makeCrazyBoxes(): List[Box[List[T] | UncheckedNull]] | UncheckedNull
+    }
+    ```
+
+    In this case, since `Box` is Scala-defined, we will get `Box[T|UncheckedNull]|UncheckedNull`.
+    This is needed because our nullability function is only applied (modularly) to the Java
+    classes, but not to the Scala ones, so we need a way to tell `Box` that it contains a
+    nullable value.
+
+    The `List` is Java-defined, so we don't append `UncheckedNull` to its type argument. But we
+    still need to nullify its inside.
+
+  * We don't nullify _simple_ literal constant (`final`) fields, since they are known to be non-null.
+
+    ```java
+    class Constants {
+      final String NAME = "name";
+      final int AGE = 0;
+      final char CHAR = 'a';
+
+      final String NAME_GENERATED = getNewName();
+    }
+    ```
+    ==>
+    ```scala
+    class Constants {
+      val NAME: String("name") = "name"
+      val AGE: Int(0) = 0
+      val CHAR: Char('a') = 'a'
+
+      val NAME_GENERATED: String | Null = ???
+    }
+    ```
+
+  * We don't append `UncheckedNull` to a field nor to the return type of a method which is annotated with a
+    `NotNull` annotation.
+
+    ```java
+    class C {
+      @NotNull String name;
+      @NotNull List<String> getNames(String prefix); // List is Java-defined
+      @NotNull Box<String> getBoxedName();           // Box is Scala-defined
+    }
+    ```
+    ==>
+    ```scala
+    class C {
+      val name: String
+      def getNames(prefix: String | UncheckedNull): List[String] // we still need to nullify the parameter types
+      def getBoxedName(): Box[String | UncheckedNull] // we don't append `UncheckedNull` at the outermost level, but we still need to nullify inside
+    }
+    ```
+
+    The annotation must be from the list below to be recognized as `NotNull` by the compiler.
+    Check `Definitions.scala` for an updated list.
+
+    ```scala
+    // A list of annotations that are commonly used to indicate that a field/method argument or return
+    // type is not null. These annotations are used by the nullification logic in JavaNullInterop to
+    // improve the precision of type nullification.
+    // We don't require that any of these annotations be present in the class path, but we want to
+    // create Symbols for the ones that are present, so they can be checked during nullification.
+    @tu lazy val NotNullAnnots: List[ClassSymbol] = ctx.getClassesIfDefined(
+      "javax.annotation.Nonnull" ::
+      "edu.umd.cs.findbugs.annotations.NonNull" ::
+      "androidx.annotation.NonNull" ::
+      "android.support.annotation.NonNull" ::
+      "android.annotation.NonNull" ::
+      "com.android.annotations.NonNull" ::
+      "org.eclipse.jdt.annotation.NonNull" ::
+      "org.checkerframework.checker.nullness.qual.NonNull" ::
+      "org.checkerframework.checker.nullness.compatqual.NonNullDecl" ::
+      "org.jetbrains.annotations.NotNull" ::
+      "lombok.NonNull" ::
+      "io.reactivex.annotations.NonNull" :: Nil map PreNamedString)
+    ```
+
+### UncheckedNull
+
+To enable method chaining on Java-returned values, we have a special type alias for `Null`:
+
+```scala
+type UncheckedNull = Null
+```
+
+`UncheckedNull` behaves just like `Null`, except it allows (unsound) member selections:
+
+```scala
+// Assume someJavaMethod()'s original Java signature is
+// String someJavaMethod() {}
+val s2: String = someJavaMethod().trim().substring(2).toLowerCase() // unsound
+```
+
+Here, all of `trim`, `substring` and `toLowerCase` return a `String|UncheckedNull`.
+The Typer notices the `UncheckedNull` and allows the member selection to go through.
+However, if `someJavaMethod` were to return `null`, then the first member selection
+would throw an `NPE`.
+
+Without `UncheckedNull`, the chaining becomes too cumbersome:
+
+```scala
+val ret = someJavaMethod()
+val s2 = if (ret != null) {
+  val tmp = ret.trim()
+  if (tmp != null) {
+    val tmp2 = tmp.substring(2)
+    if (tmp2 != null) {
+      tmp2.toLowerCase()
+    }
+  }
+}
+// Additionally, we need to handle the `else` branches.
+```
+
+## Flow Typing
+
+We added a simple form of flow-sensitive type inference. The idea is that if `p` is a
+stable path or a trackable variable, then we can know that `p` is non-null if it's compared
+with `null`. This information can then be propagated to the `then` and `else` branches
+of an if-statement (among other places).
+
+Example:
+
+```scala
+val s: String|Null = ???
+if (s != null) {
+  // s: String
+}
+// s: String|Null
+
+assert(s != null)
+// s: String
+```
+
+A similar inference can be made for the `else` case if the test is `p == null`:
+
+```scala
+if (s == null) {
+  // s: String|Null
+} else {
+  // s: String
+}
+```
+
+`==` and `!=` are considered comparisons for the purposes of the flow inference.
+ +### Logical Operators + +We also support logical operators (`&&`, `||`, and `!`): + +```scala +val s: String|Null = ??? +val s2: String|Null = ??? +if (s != null && s2 != null) { + // s: String + // s2: String +} + +if (s == null || s2 == null) { + // s: String|Null + // s2: String|Null +} else { + // s: String + // s2: String +} +``` + +### Inside Conditions + +We also support type specialization _within_ the condition, taking into account that `&&` and `||` are short-circuiting: + +```scala +val s: String|Null = ??? + +if (s != null && s.length > 0) { // s: String in `s.length > 0` + // s: String +} + +if (s == null || s.length > 0) { // s: String in `s.length > 0` + // s: String|Null +} else { + // s: String +} +``` + +### Match Case + +The non-null cases can be detected in match statements. + +```scala +val s: String|Null = ??? + +s match { + case _: String => // s: String + case _ => +} +``` + +### Mutable Variable + +We are able to detect the nullability of some local mutable variables. A simple example is: + +```scala +class C(val x: Int, val next: C|Null) + +var xs: C|Null = C(1, C(2, null)) +// xs is trackable, since all assignments are in the same method +while (xs != null) { + // xs: C + val xsx: Int = xs.x + val xscpy: C = xs + xs = xscpy // since xscpy is non-null, xs still has type C after this line + // xs: C + xs = xs.next // after this assignment, xs can be null again + // xs: C | Null +} +``` + +When dealing with local mutable variables, there are two questions: + +1. Whether to track a local mutable variable during flow typing. + We track a local mutable variable iff the variable is not assigned in a closure. + For example, in the following code `x` is assigned to by the closure `y`, so we do not + do flow typing on `x`. + + ```scala + var x: String|Null = ??? + def y = { + x = null + } + if (x != null) { + // y can be called here, which would break the fact + val a: String = x // error: x is captured and mutated by the closure, not trackable + } + ``` + +2. Whether to generate and use flow typing on a specific _use_ of a local mutable variable. + We only want to do flow typing on a use that belongs to the same method as the definition + of the local variable. + For example, in the following code, even `x` is not assigned to by a closure, but we can only + use flow typing in one of the occurrences (because the other occurrence happens within a nested + closure). + + ```scala + var x: String|Null = ??? + def y = { + if (x != null) { + // not safe to use the fact (x != null) here + // since y can be executed at the same time as the outer block + val _: String = x + } + } + if (x != null) { + val a: String = x // ok to use the fact here + x = null + } + ``` + +See more examples in `tests/explicit-nulls/neg/var-ref-in-closure.scala`. + +Currently, we are unable to track paths with a mutable variable prefix. +For example, `x.a` if `x` is mutable. + +### Unsupported Idioms + +We don't support: + +- flow facts not related to nullability (`if (x == 0) { // x: 0.type not inferred }`) +- tracking aliasing between non-nullable paths + ```scala + val s: String|Null = ??? + val s2: String|Null = ??? + if (s != null && s == s2) { + // s: String inferred + // s2: String not inferred + } + ``` + +## Binary Compatibility + +Our strategy for binary compatibility with Scala binaries that predate explicit nulls +and new libraries compiled without `-Yexplicit-nulls` is to leave the types unchanged +and be compatible but unsound. 
+ +[More details](../../internals/explicit-nulls.md) diff --git a/scala3doc/dotty-docs/docs/docs/reference/other-new-features/export.md b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/export.md new file mode 100644 index 000000000000..f778e4ba462a --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/export.md @@ -0,0 +1,150 @@ +--- +layout: doc-page +title: "Export Clauses" +--- + +An export clause defines aliases for selected members of an object. Example: +```scala +class BitMap +class InkJet + +class Printer { + type PrinterType + def print(bits: BitMap): Unit = ??? + def status: List[String] = ??? +} + +class Scanner { + def scan(): BitMap = ??? + def status: List[String] = ??? +} + +class Copier { + private val printUnit = new Printer { type PrinterType = InkJet } + private val scanUnit = new Scanner + + export scanUnit.scan + export printUnit.{status => _, _} + + def status: List[String] = printUnit.status ++ scanUnit.status +} +``` +The two `export` clauses define the following _export aliases_ in class `Copier`: +```scala +final def scan(): BitMap = scanUnit.scan() +final def print(bits: BitMap): Unit = printUnit.print(bits) +final type PrinterType = printUnit.PrinterType +``` +They can be accessed inside `Copier` as well as from outside: +```scala +val copier = new Copier +copier.print(copier.scan()) +``` +An export clause has the same format as an import clause. Its general form is: +```scala +export path . { sel_1, ..., sel_n } +export given path . { sel_1, ..., sel_n } +``` +It consists of a qualifier expression `path`, which must be a stable identifier, followed by +one or more selectors `sel_i` that identify what gets an alias. Selectors can be +of one of the following forms: + + - A _simple selector_ `x` creates aliases for all eligible members of `path` that are named `x`. + - A _renaming selector_ `x => y` creates aliases for all eligible members of `path` that are named `x`, but the alias is named `y` instead of `x`. + - An _omitting selector_ `x => _` prevents `x` from being aliased by a subsequent + wildcard selector. + - A _wildcard selector_ creates aliases for all eligible members of `path` except for + synthetic members generated by the compiler and those members that are named by a previous simple, renaming, or omitting selector. + +A member is _eligible_ if all of the following holds: + + - its owner is not a base class of the class containing the export clause, + - the member does not override a concrete definition that has as owner + a base class of the class containing the export clause. + - it is accessible at the export clause, + - it is not a constructor, nor the (synthetic) class part of an object, + - it is a given instance (or an old-style `implicit` value) + if and only if the export is tagged with `given`. + +It is a compile-time error if a simple or renaming selector does not identify any eligible +members. + +Type members are aliased by type definitions, and term members are aliased by method definitions. Export aliases copy the type and value parameters of the members they refer to. +Export aliases are always `final`. Aliases of given instances are again defined as givens (and aliases of old-style implicits are `implicit`). Aliases of inline methods or values are again defined `inline`. There are no other modifiers that can be given to an alias. This has the following consequences for overriding: + + - Export aliases cannot be overridden, since they are final. 
+ - Export aliases cannot override concrete members in base classes, since they are + not marked `override`. + - However, export aliases can implement deferred members of base classes. + +Export aliases for public value definitions that are accessed without +referring to private values in the qualifier path +are marked by the compiler as "stable" and their result types are the singleton types of the aliased definitions. This means that they can be used as parts of stable identifier paths, even though they are technically methods. For instance, the following is OK: +```scala +class C { type T } +object O { val c: C = ... } +export O.c +def f: c.T = ... +``` + + +Export clauses can appear in classes or they can appear at the top-level. An export clause cannot appear as a statement in a block. + +(\*) Note: Unless otherwise stated, the term "class" in this discussion also includes object and trait definitions. + +### Motivation + +It is a standard recommendation to prefer composition over inheritance. This is really an application of the principle of least power: Composition treats components as blackboxes whereas inheritance can affect the internal workings of components through overriding. Sometimes the close coupling implied by inheritance is the best solution for a problem, but where this is not necessary the looser coupling of composition is better. + +So far, object oriented languages including Scala made it much easier to use inheritance than composition. Inheritance only requires an `extends` clause whereas composition required a verbose elaboration of a sequence of forwarders. So in that sense, OO languages are pushing +programmers to a solution that is often too powerful. Export clauses redress the balance. They make composition relationships as concise and easy to express as inheritance relationships. Export clauses also offer more flexibility than extends clauses since members can be renamed or omitted. + +Export clauses also fill a gap opened by the shift from package objects to toplevel definitions. One occasionally useful idiom that gets lost in this shift is a package object inheriting from some class. The idiom is often used in a facade like pattern, to make members +of internal compositions available to users of a package. Toplevel definitions are not wrapped in a user-defined object, so they can't inherit anything. However, toplevel definitions can be export clauses, which supports the facade design pattern in a safer and +more flexible way. + +### Syntax changes: + +``` +TemplateStat ::= ... + | Export +TopStat ::= ... + | Export +Export ::= ‘export’ [‘given’] ImportExpr {‘,’ ImportExpr} +``` + +### Elaboration of Export Clauses + +Export clauses raise questions about the order of elaboration during type checking. +Consider the following example: +```scala +class B { val c: Int } +object a { val b = new B } +export a._ +export b._ +``` +Is the `export b._` clause legal? If yes, what does it export? Is it equivalent to `export a.b._`? What about if we swap the last two clauses? +``` +export b._ +export a._ +``` +To avoid tricky questions like these, we fix the elaboration order of exports as follows. + +Export clauses are processed when the type information of the enclosing object or class is completed. Completion so far consisted of the following steps: + + 1. Elaborate any annotations of the class. + 2. Elaborate the parameters of the class. + 3. Elaborate the self type of the class, if one is given. + 4. 
Enter all definitions of the class as class members, with types to be completed + on demand. + 5. Determine the types of all parents of the class. + +With export clauses, the following steps are added: + + 6. Compute the types of all paths in export clauses in a context logically + inside the class but not considering any imports or exports in that class. + 7. Enter export aliases for the eligible members of all paths in export clauses. + +It is important that steps 6 and 7 are done in sequence: We first compute the types of _all_ +paths in export clauses and only after this is done we enter any export aliases as class members. This means that a path of an export clause cannot refer to an alias made available +by another export clause of the same class. diff --git a/scala3doc/dotty-docs/docs/docs/reference/other-new-features/implicit-by-name-parameters.md b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/implicit-by-name-parameters.md new file mode 100644 index 000000000000..80c42ca4c46b --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/implicit-by-name-parameters.md @@ -0,0 +1,5 @@ +--- +layout: doc-page +title: "Implicit By-Name Parameters" +--- +The contents of this page have [moved](../contextual/by-name-context-parameters.md). diff --git a/scala3doc/dotty-docs/docs/docs/reference/other-new-features/indentation.md b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/indentation.md new file mode 100644 index 000000000000..cc90ca8a1abc --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/indentation.md @@ -0,0 +1,387 @@ +--- +layout: doc-page +title: Optional Braces +--- + +As an experimental feature, Scala 3 enforces some rules on indentation and allows +some occurrences of braces `{...}` to be optional. +It can be turned off with the compiler flag `-noindent`. + +- First, some badly indented programs are flagged with warnings. +- Second, some occurrences of braces `{...}` are made optional. Generally, the rule + is that adding a pair of optional braces will not change the meaning of a well-indented program. + +### Indentation Rules + +The compiler enforces two rules for well-indented programs, flagging violations as warnings. + + 1. In a brace-delimited region, no statement is allowed to start to the left + of the first statement after the opening brace that starts a new line. + + This rule is helpful for finding missing closing braces. It prevents errors like: + + ```scala + if (x < 0) { + println(1) + println(2) + + println("done") // error: indented too far to the left + ``` + + 2. If significant indentation is turned off (i.e. under Scala-2 mode or under `-noindent`) and we are at the start of an indented sub-part of an expression, and the indented part ends in a newline, the next statement must start at an indentation width less than the sub-part. This prevents errors where an opening brace was forgotten, as in + + ```scala + if (x < 0) + println(1) + println(2) // error: missing `{` + ``` + +These rules still leave a lot of leeway how programs should be indented. For instance, they do not impose +any restrictions on indentation within expressions, nor do they require that all statements of an indentation block line up exactly. + +The rules are generally helpful in pinpointing the root cause of errors related to missing opening or closing braces. These errors are often quite hard to diagnose, in particular in large programs. 
+ +### Optional Braces + +The compiler will insert `` or `` +tokens at certain line breaks. Grammatically, pairs of `` and `` tokens have the same effect as pairs of braces `{` and `}`. + +The algorithm makes use of a stack `IW` of previously encountered indentation widths. The stack initially holds a single element with a zero indentation width. The _current indentation width_ is the indentation width of the top of the stack. + +There are two rules: + + 1. An `` is inserted at a line break, if + + - An indentation region can start at the current position in the source, and + - the first token on the next line has an indentation width strictly greater + than the current indentation width + + An indentation region can start + + - after the condition of an `if-else`, or + - after the leading parameters of an `extension`, or + - after a ": at end of line" token (see below) + - after one of the following tokens: + + ``` + = => <- if then else while do try catch finally for yield match return + ``` + + If an `` is inserted, the indentation width of the token on the next line + is pushed onto `IW`, which makes it the new current indentation width. + + 2. An `` is inserted at a line break, if + + - the first token on the next line has an indentation width strictly less + than the current indentation width, and + - the first token on the next line is not a + [leading infix operator](../changed-features/operators.html). + + If an `` is inserted, the top element is popped from `IW`. + If the indentation width of the token on the next line is still less than the new current indentation width, step (2) repeats. Therefore, several `` tokens + may be inserted in a row. + + An `` is also inserted if the next token following a statement sequence starting with an `` closes an indentation region, i.e. is one of `then`, `else`, `do`, `catch`, `finally`, `yield`, `}`, `)`, `]` or `case`. + + An `` is finally inserted in front of a comma that follows a statement sequence starting with an `` if the indented region is itself enclosed in parentheses + +It is an error if the indentation width of the token following an `` does not match the indentation of some previous line in the enclosing indentation region. For instance, the following would be rejected. + +```scala +if x < 0 + -x + else // error: `else` does not align correctly + x +``` + +Indentation tokens are only inserted in regions where newline statement separators are also inferred: +at the toplevel, inside braces `{...}`, but not inside parentheses `(...)`, patterns or types. + +### Optional Braces Around Template Bodies + +The Scala grammar uses the term _template body_ for the definitions of a class, trait, object or given instance that are normally enclosed in braces. The braces around a template body can also be omitted by means of the following rule + +If at the point where a template body can start there is a `:` that occurs at the end +of a line, and that is followed by at least one indented statement, the recognized +token is changed from ":" to ": at end of line". The latter token is one of the tokens +that can start an indentation region. The Scala grammar is changed so an optional ": at end of line" is allowed in front of a template body. + +Analogous rules apply for enum bodies, type refinements, definitions in an instance creation expressions, and local packages containing nested definitions. 
+ +With these new rules, the following constructs are all valid: + +```scala +trait A: + def f: Int + +class C(x: Int) extends A: + def f = x + +object O: + def f = 3 + +enum Color: + case Red, Green, Blue + +type T = A: + def f: Int + +given [T](using Ord[T]) as Ord[List[T]]: + def compare(x: List[T], y: List[T]) = ??? + +extension (xs: List[Int]) + def second: Int = xs.tail.head + +new A: + def f = 3 + +package p: + def a = 1 +package q: + def b = 2 +``` + +The syntax changes allowing this are as follows: + +``` +TemplateBody ::= [colonEol] ‘{’ [SelfType] TemplateStat {semi TemplateStat} ‘}’ +EnumBody ::= [colonEol] ‘{’ [SelfType] EnumStat {semi EnumStat} ‘}’ +Packaging ::= ‘package’ QualId [colonEol] ‘{’ TopStatSeq ‘}’ +RefinedType ::= AnnotType {[colonEol] Refinement} +``` + +Here, `colonEol` stands for ": at end of line", as described above. +The lexical analyzer is modified so that a `:` at the end of a line +is reported as `colonEol` if the parser is at a point where a `colonEol` is +valid as next token. + +### Spaces vs Tabs + +Indentation prefixes can consist of spaces and/or tabs. Indentation widths are the indentation prefixes themselves, ordered by the string prefix relation. So, so for instance "2 tabs, followed by 4 spaces" is strictly less than "2 tabs, followed by 5 spaces", but "2 tabs, followed by 4 spaces" is incomparable to "6 tabs" or to "4 spaces, followed by 2 tabs". It is an error if the indentation width of some line is incomparable with the indentation width of the region that's current at that point. To avoid such errors, it is a good idea not to mix spaces and tabs in the same source file. + +### Indentation and Braces + +Indentation can be mixed freely with braces. For interpreting indentation inside braces, the following rules apply. + + 1. The assumed indentation width of a multiline region enclosed in braces is the + indentation width of the first token that starts a new line after the opening brace. + + 2. On encountering a closing brace `}`, as many `` tokens as necessary are + inserted to close all open indentation regions inside the pair of braces. + +### Special Treatment of Case Clauses + +The indentation rules for `match` expressions and `catch` clauses are refined as follows: + +- An indentation region is opened after a `match` or `catch` also if the following `case` + appears at the indentation width that's current for the `match` itself. +- In that case, the indentation region closes at the first token at that + same indentation width that is not a `case`, or at any token with a smaller + indentation width, whichever comes first. + +The rules allow to write `match` expressions where cases are not indented themselves, as in the example below: + +```scala +x match +case 1 => print("I") +case 2 => print("II") +case 3 => print("III") +case 4 => print("IV") +case 5 => print("V") + +println(".") +``` + +### The End Marker + +Indentation-based syntax has many advantages over other conventions. But one possible problem is that it makes it hard to discern when a large indentation region ends, since there is no specific token that delineates the end. Braces are not much better since a brace by itself also contains no information about what region is closed. + +To solve this problem, Scala 3 offers an optional `end` marker. Example: + +```scala +def largeMethod(...) = + ... + if ... then ... + else + ... // a large block + end if + ... 
// more code +end largeMethod +``` + +An `end` marker consists of the identifier `end` and a follow-on specifier token that together constitute all the tokes of a line. Possible specifier tokens are +identifiers or one of the following keywords + +```scala +if while for match try new this val given +``` + +End markers are allowed in statement sequences. The specifier token `s` of an end marker must correspond to the statement that precedes it. This means: + +- If the statement defines a member `x` then `s` must be the same identifier `x`. +- If the statement defines a constructor then `s` must be `this`. +- If the statement defines an anonymous given, then `s` must be `given`. +- If the statement defines an anonymous extension, then `s` must be `extension`. +- If the statement defines an anonymous class, then `s` must be `new`. +- If the statement is a `val` definition binding a pattern, then `s` must be `val`. +- If the statement is a package clause that refers to package `p`, then `s` must be the same identifier `p`. +- If the statement is an `if`, `while`, `for`, `try`, or `match` statement, then `s` must be that same token. + +For instance, the following end markers are all legal: + +```scala +package p1.p2: + + abstract class C(): + + def this(x: Int) = + this() + if x > 0 then + val a :: b = + x :: Nil + end val + var y = + x + end y + while y > 0 do + println(y) + y -= 1 + end while + try + x match + case 0 => println("0") + case _ => + end match + finally + println("done") + end try + end if + end this + + def f: String + end C + + object C: + given C = + new C: + def f = "!" + end f + end new + end given + end C + + extension (x: C) + def ff: String = x.f ++ x.f + end extension + +end p2 +``` + +#### When to Use End Markers + +It is recommended that `end` markers are used for code where the extent of an indentation region is not immediately apparent "at a glance". People will have different preferences what this means, but one can nevertheless give some guidelines that stem from experience. An end marker makes sense if + +- the construct contains blank lines, or +- the construct is long, say 15-20 lines or more, +- the construct ends heavily indented, say 4 indentation levels or more. + +If none of these criteria apply, it's often better to not use an end marker since the code will be just as clear and more concise. If there are several ending regions that satisfy one of the criteria above, we usually need an end marker only for the outermost closed region. So cascades of end markers as in the example above are usually better avoided. + +#### Syntax + +``` +EndMarker ::= ‘end’ EndMarkerTag -- when followed by EOL +EndMarkerTag ::= id | ‘if’ | ‘while’ | ‘for’ | ‘match’ | ‘try’ + | ‘new’ | ‘this’ | ‘given’ | ‘extension’ | ‘val’ +BlockStat ::= ... | EndMarker +TemplateStat ::= ... | EndMarker +TopStat ::= ... | EndMarker +``` + +### Example + +Here is a (somewhat meta-circular) example of code using indentation. It provides a concrete representation of indentation widths as defined above together with efficient operations for constructing and comparing indentation widths. 
+ +```scala +enum IndentWidth: + case Run(ch: Char, n: Int) + case Conc(l: IndentWidth, r: Run) + + def <= (that: IndentWidth): Boolean = this match + case Run(ch1, n1) => + that match + case Run(ch2, n2) => n1 <= n2 && (ch1 == ch2 || n1 == 0) + case Conc(l, r) => this <= l + case Conc(l1, r1) => + that match + case Conc(l2, r2) => l1 == l2 && r1 <= r2 + case _ => false + + def < (that: IndentWidth): Boolean = + this <= that && !(that <= this) + + override def toString: String = this match + case Run(ch, n) => + val kind = ch match + case ' ' => "space" + case '\t' => "tab" + case _ => s"'$ch'-character" + val suffix = if n == 1 then "" else "s" + s"$n $kind$suffix" + case Conc(l, r) => + s"$l, $r" + +object IndentWidth: + private inline val MaxCached = 40 + + private val spaces = IArray.tabulate(MaxCached + 1)(new Run(' ', _)) + private val tabs = IArray.tabulate(MaxCached + 1)(new Run('\t', _)) + + def Run(ch: Char, n: Int): Run = + if n <= MaxCached && ch == ' ' then + spaces(n) + else if n <= MaxCached && ch == '\t' then + tabs(n) + else + new Run(ch, n) + end Run + + val Zero = Run(' ', 0) +end IndentWidth +``` + +### Settings and Rewrites + +Significant indentation is enabled by default. It can be turned off by giving any of the options `-noindent`, `old-syntax` and `language:Scala2`. If indentation is turned off, it is nevertheless checked that indentation conforms to the logical program structure as defined by braces. If that is not the case, the compiler issues a warning. + +The Dotty compiler can rewrite source code to indented code and back. +When invoked with options `-rewrite -indent` it will rewrite braces to +indented regions where possible. When invoked with options `-rewrite -noindent` it will rewrite in the reverse direction, inserting braces for indentation regions. +The `-indent` option only works on [new-style syntax](./control-syntax.html). So to go from old-style syntax to new-style indented code one has to invoke the compiler twice, first with options `-rewrite -new-syntax`, then again with options +`-rewrite -indent`. To go in the opposite direction, from indented code to old-style syntax, it's `-rewrite -noindent`, followed by `-rewrite -old-syntax`. + +### Variant: Indentation Marker `:` + +Generally, the possible indentation regions coincide with those regions where braces `{...}` are also legal, no matter whether the braces enclose an expression or a set of definitions. There is one exception, though: Arguments to function can be enclosed in braces but they cannot be simply indented instead. Making indentation always significant for function arguments would be too restrictive and fragile. + +To allow such arguments to be written without braces, a variant of the indentation scheme is implemented under +option `-Yindent-colons`. This variant is more contentious and less stable than the rest of the significant indentation scheme. In this variant, a colon `:` at the end of a line is also one of the possible tokens that opens an indentation region. Examples: + +```scala +times(10): + println("ah") + println("ha") +``` + +or + +```scala +xs.map: + x => + val y = x - 1 + y * y +``` + +Colons at the end of lines are their own token, distinct from normal `:`. +The Scala grammar is changed in this variant so that colons at end of lines are accepted at all points +where an opening brace enclosing a function argument is legal. Special provisions are taken so that method result types can still use a colon on the end of a line, followed by the actual type on the next. 
diff --git a/scala3doc/dotty-docs/docs/docs/reference/other-new-features/kind-polymorphism.md b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/kind-polymorphism.md new file mode 100644 index 000000000000..0f3899a42a46 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/kind-polymorphism.md @@ -0,0 +1,46 @@ +--- +layout: doc-page +title: "Kind Polymorphism" +--- + +Normally type parameters in Scala are partitioned into _kinds_. First-level types are types of values. Higher-kinded types are type constructors +such as `List` or `Map`. The kind of a type is indicated by the top type of which it is a subtype. Normal types are subtypes of `Any`, +covariant single argument type constructors such as `List` are subtypes of `[+X] =>> Any`, and the `Map` type constructor is +a subtype of `[X, +Y] =>> Any`. + +A type can be used only as prescribed by its kind. Subtypes of `Any` cannot be applied to type arguments whereas subtypes of `[X] =>> Any` +_must_ be applied to a type argument, unless they are passed to type parameters of the same kind. + +Sometimes we would like to have type parameters that can have more than one kind, for instance to define an implicit +value that works for parameters of any kind. This is now possible through a form of (_subtype_) kind polymorphism. +Kind polymorphism relies on the special type `scala.AnyKind` that can be used as an upper bound of a type. + +```scala +def f[T <: AnyKind] = ... +``` + +The actual type arguments of `f` can then be types of arbitrary kinds. So the following would all be legal: + +```scala +f[Int] +f[List] +f[Map] +f[[X] =>> String] +``` + +We call type parameters and abstract types with an `AnyKind` upper bound _any-kinded types_. +Since the actual kind of an any-kinded type is unknown, its usage must be heavily restricted: An any-kinded type +can be neither the type of a value, nor can it be instantiated with type parameters. So about the only +thing one can do with an any-kinded type is to pass it to another any-kinded type argument. +Nevertheless, this is enough to achieve some interesting generalizations that work across kinds, typically +through advanced uses of implicits. + +(todo: insert good concise example) + +Some technical details: `AnyKind` is a synthesized class just like `Any`, but without any members. It extends no other class. +It is declared `abstract` and `final`, so it can be neither instantiated nor extended. + +`AnyKind` plays a special role in Scala's subtype system: It is a supertype of all other types no matter what their kind is. It is also assumed to be kind-compatible with all other types. Furthermore, `AnyKind` is treated as a higher-kinded type (so it cannot be used as a type of values), but at the same time it has no type parameters (so it cannot be instantiated). + +**Note**: This feature is considered experimental but stable and it can be disabled under compiler flag +(i.e. `-Yno-kind-polymorphism`). diff --git a/scala3doc/dotty-docs/docs/docs/reference/other-new-features/named-typeargs-spec.md b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/named-typeargs-spec.md new file mode 100644 index 000000000000..404f96852aca --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/named-typeargs-spec.md @@ -0,0 +1,38 @@ +--- +layout: doc-page +title: "Named Type Arguments - More Details" +--- + +## Syntax + +The addition to the grammar is: + +``` +SimpleExpr1 ::= ... 
+ | SimpleExpr (TypeArgs | NamedTypeArgs) +NamedTypeArgs ::= ‘[’ NamedTypeArg {‘,’ NamedTypeArg} ‘]’ +NamedTypeArg ::= id ‘=’ Type +``` + +Note in particular that named arguments cannot be passed to type constructors: + +``` scala +class C[T] + +val x: C[T = Int] = // error + new C[T = Int] // error + +class E extends C[T = Int] // error +``` + +## Compatibility considerations + +Named type arguments do not have an impact on binary compatibility, but they +have an impact on source compatibility: if the name of a method type parameter +is changed, any existing named reference to this parameter will break. This +means that the names of method type parameters are now part of the public API +of a library. + +(Unimplemented proposal: to mitigate this, +[`scala.deprecatedName`](https://www.scala-lang.org/api/current/scala/deprecatedName.html) +could be extended to also be applicable on method type parameters.) diff --git a/scala3doc/dotty-docs/docs/docs/reference/other-new-features/named-typeargs.md b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/named-typeargs.md new file mode 100644 index 000000000000..10430391188d --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/named-typeargs.md @@ -0,0 +1,32 @@ +--- +layout: doc-page +title: "Named Type Arguments" +--- + +**Note:** This feature is implemented in Dotty, but is not expected to be part of Scala 3.0. + +Type arguments of methods can now be specified by name as well as by position. Example: + +``` scala +def construct[Elem, Coll[_]](xs: Elem*): Coll[Elem] = ??? + +val xs1 = construct[Coll = List, Elem = Int](1, 2, 3) +val xs2 = construct[Coll = List](1, 2, 3) +``` + +Similar to a named value argument `(x = e)`, a named type argument +`[X = T]` instantiates the type parameter `X` to the type `T`. +Named type arguments do not have to be in order (see `xs1` above) and +unspecified arguments are inferred by the compiler (see `xs2` above). +Type arguments must be all named or un-named, mixtures of named and +positional type arguments are not supported. + +## Motivation + +The main benefit of named type arguments is that unlike positional arguments, +you are allowed to omit passing arguments for some parameters, like in the +definition of `xs2` above. A missing type argument is inferred as usual by +local type inference. This is particularly useful in situations where some type +arguments can be easily inferred from others. + +[More details](./named-typeargs-spec.md) diff --git a/scala3doc/dotty-docs/docs/docs/reference/other-new-features/opaques-details.md b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/opaques-details.md new file mode 100644 index 000000000000..6d65f4aa7eee --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/opaques-details.md @@ -0,0 +1,92 @@ +--- +layout: doc-page +title: "Opaque Type Aliases: More Details" +--- + +### Syntax + +``` +Modifier ::= ... + | ‘opaque’ +``` +`opaque` is a [soft modifier](../soft-modifier.md). It can still be used as a normal identifier when it is not in front of a definition keyword. + +Opaque type aliases must be members of classes, traits, or objects, or they are defined +at the top-level. They cannot be defined in local blocks. + +### Type Checking + +The general form of a (monomorphic) opaque type alias is +```scala +opaque type T >: L <: U = R +``` +where the lower bound `L` and the upper bound `U` may be missing, in which case they are assumed to be `scala.Nothing` and `scala.Any`, respectively. 
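+
+For instance, an alias with an explicit upper bound might be declared as follows (an illustrative sketch; `Nats` and `Nat` are made-up names):
+
+```scala
+object Nats {
+  // An opaque alias with an upper bound: outside `Nats`, the type `Nat`
+  // is abstract but still known to be a subtype of `Int`.
+  opaque type Nat <: Int = Int
+
+  def apply(n: Int): Nat = {
+    require(n >= 0, "a Nat must not be negative")
+    n
+  }
+}
+
+val three: Int = Nats(3)  // OK outside the object, thanks to the bound `Nat <: Int`
+```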
If bounds are given, it is checked that the right hand side `R` conforms to them, i.e. `L <: R` and `R <: U`. F-bounds are not supported for opaque type aliases: `T` is not allowed to appear in `L` or `U`. + +Inside the scope of the alias definition, the alias is transparent: `T` is treated +as a normal alias of `R`. Outside its scope, the alias is treated as the abstract type +```scala +type T >: L <: U +``` +A special case arises if the opaque type alias is defined in an object. Example: +``` +object o { + opaque type T = R +} +``` +In this case we have inside the object (also for non-opaque types) that `o.T` is equal to +`T` or its expanded form `o.this.T`. Equality is understood here as mutual subtyping, i.e. +`o.T <: o.this.T` and `o.this.T <: T`. Furthermore, we have by the rules of opaque type aliases +that `o.this.T` equals `R`. The two equalities compose. That is, inside `o`, it is +also known that `o.T` is equal to `R`. This means the following code type-checks: +```scala +object o { + opaque type T = Int + val x: Int = id(2) +} +def id(x: o.T): o.T = x +``` + +### Toplevel Opaque Types + +An opaque type alias on the toplevel is transparent in all other toplevel definitions in the sourcefile where it appears, but is opaque in nested +objects and classes and in all other source files. Example: +```scala +// in test1.scala +opaque type A = String +val x: A = "abc" + +object obj { + val y: A = "abc" // error: found: "abc", required: A +} + +// in test2.scala +def z: String = x // error: found: A, required: String +``` +This behavior becomes clear if one recalls that toplevel definitions are placed in their own synthetic object. For instance, the code in `test1.scala` would expand to +```scala +object test1$package { + opaque type A = String + val x: A = "abc" +} +object obj { + val y: A = "abc" // error: cannot assign "abc" to opaque type alias A +} +``` +The opaque type alias `A` is transparent in its scope, which includes the definition of `x`, but not the definitions of `obj` and `y`. + + +### Relationship to SIP 35 + +Opaque types in Dotty are an evolution from what is described in +[Scala SIP 35](https://docs.scala-lang.org/sips/opaque-types.html). + +The differences compared to the state described in this SIP are: + + 1. Opaque type aliases cannot be defined anymore in local statement sequences. + 2. The scope where an opaque type alias is visible is now the whole scope where + it is defined, instead of just a companion object. + 3. The notion of a companion object for opaque type aliases has been dropped. + 4. Opaque type aliases can have bounds. + 5. The notion of type equality involving opaque type aliases has been clarified. It was + strengthened with respect to the previous implementation of SIP 35. + diff --git a/scala3doc/dotty-docs/docs/docs/reference/other-new-features/opaques.md b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/opaques.md new file mode 100644 index 000000000000..c5840fb34e15 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/opaques.md @@ -0,0 +1,126 @@ +--- +layout: doc-page +title: "Opaque Type Aliases" +--- + +Opaque types aliases provide type abstraction without any overhead. 
Example: + +```scala +object Logarithms { + + opaque type Logarithm = Double + + object Logarithm { + + // These are the two ways to lift to the Logarithm type + + def apply(d: Double): Logarithm = math.log(d) + + def safe(d: Double): Option[Logarithm] = + if (d > 0.0) Some(math.log(d)) else None + } + + // Extension methods define opaque types' public APIs + extension (x: Logarithm) { + def toDouble: Double = math.exp(x) + def + (y: Logarithm): Logarithm = Logarithm(math.exp(x) + math.exp(y)) + def * (y: Logarithm): Logarithm = x + y + } +} +``` + +This introduces `Logarithm` as a new abstract type, which is implemented as `Double`. +The fact that `Logarithm` is the same as `Double` is only known in the scope where +`Logarithm` is defined which in the above example corresponds to the object `Logarithms`. +Or in other words, within the scope it is treated as type alias, but this is opaque to the outside world +where in consequence `Logarithm` is seen as an abstract type and has nothing to do with `Double`. + +The public API of `Logarithm` consists of the `apply` and `safe` methods defined in the companion object. +They convert from `Double`s to `Logarithm` values. Moreover, an operation `toDouble` that converts the other way, and operations `+` and `*` are defined as extension methods on `Logarithm` values. +The following operations would be valid because they use functionality implemented in the `Logarithms` object. + +```scala +import Logarithms.Logarithm + +val l = Logarithm(1.0) +val l2 = Logarithm(2.0) +val l3 = l * l2 +val l4 = l + l2 +``` + +But the following operations would lead to type errors: + +```scala +val d: Double = l // error: found: Logarithm, required: Double +val l2: Logarithm = 1.0 // error: found: Double, required: Logarithm +l * 2 // error: found: Int(2), required: Logarithm +l / l2 // error: `/` is not a member of Logarithm +``` + +### Bounds For Opaque Type Aliases + +Opaque type aliases can also come with bounds. Example: +```scala +object Access { + + opaque type Permissions = Int + opaque type PermissionChoice = Int + opaque type Permission <: Permissions & PermissionChoice = Int + + extension (x: Permissions) def & (y: Permissions): Permissions = x | y + extension (x: PermissionChoice) def | (y: PermissionChoice): PermissionChoice = x | y + extension (granted: Permissions) def is(required: Permissions) = (granted & required) == required + extension (granted: Permissions) def isOneOf(required: PermissionChoice) = (granted & required) != 0 + + val NoPermission: Permission = 0 + val Read: Permission = 1 + val Write: Permission = 2 + val ReadWrite: Permissions = Read | Write + val ReadOrWrite: PermissionChoice = Read | Write +} +``` +The `Access` object defines three opaque type aliases: + + - `Permission`, representing a single permission, + - `Permissions`, representing a set of permissions with the meaning "all of these permissions granted", + - `PermissionChoice`, representing a set of permissions with the meaning "at least one of these permissions granted". + +Outside the `Access` object, values of type `Permissions` may be combined using the `&` operator, +where `x & y` means "all permissions in `x` *and* in `y` granted". +Values of type `PermissionChoice` may be combined using the `|` operator, +where `x | y` means "a permission in `x` *or* in `y` granted". + +Note that inside the `Access` object, the `&` and `|` operators always resolve to the corresponding methods of `Int`, +because members always take precedence over extension methods. 
+Because of that, the `|` extension method in `Access` does not cause infinite recursion. +Also, the definition of `ReadWrite` must use `|`, +even though an equivalent definition outside `Access` would use `&`. + +All three opaque type aliases have the same underlying representation type `Int`. The +`Permission` type has an upper bound `Permissions & PermissionChoice`. This makes +it known outside the `Access` object that `Permission` is a subtype of the other +two types. Hence, the following usage scenario type-checks. +```scala +object User { + import Access._ + + case class Item(rights: Permissions) + + val roItem = Item(Read) // OK, since Permission <: Permissions + val rwItem = Item(ReadWrite) + val noItem = Item(NoPermission) + + assert( roItem.rights.is(ReadWrite) == false ) + assert( roItem.rights.isOneOf(ReadOrWrite) == true ) + + assert( rwItem.rights.is(ReadWrite) == true ) + assert( rwItem.rights.isOneOf(ReadOrWrite) == true ) + + assert( noItem.rights.is(ReadWrite) == false ) + assert( noItem.rights.isOneOf(ReadOrWrite) == false ) +} +``` +On the other hand, the call `roItem.rights.isOneOf(ReadWrite)` would give a type error +since `Permissions` and `PermissionChoice` are different, unrelated types outside `Access`. + +[More details](opaques-details.md) diff --git a/scala3doc/dotty-docs/docs/docs/reference/other-new-features/open-classes.md b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/open-classes.md new file mode 100644 index 000000000000..38d95d6c4158 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/open-classes.md @@ -0,0 +1,79 @@ +--- +layout: doc-page +title: "Open Classes" +--- + +An `open` modifier on a class signals that the class is planned for extensions. Example: +```scala +// File Writer.scala +package p + +open class Writer[T] { + + /** Sends to stdout, can be overridden */ + def send(x: T) = println(x) + + /** Sends all arguments using `send` */ + def sendAll(xs: T*) = xs.foreach(send) +} + +// File EncryptedWriter.scala +package p + +class EncryptedWriter[T: Encryptable] extends Writer[T] { + override def send(x: T) = super.send(encrypt(x)) +} +``` +An open class typically comes with some documentation that describes +the internal calling patterns between methods of the class as well as hooks that can be overridden. We call this the _extension contract_ of the class. It is different from the _external contract_ between a class and its users. + +Classes that are not open can still be extended, but only if at least one of two alternative conditions is met: + + - The extending class is in the same source file as the extended class. In this case, the extension is usually an internal implementation matter. + + - The language feature `adhocExtensions` is enabled for the extending class. This is typically enabled by an import statement in the source file of the extension: + ```scala + import scala.language.adhocExtensions + ``` + Alternatively, the feature can be enabled by the command line option `-language:adhocExtensions`. + If the feature is not enabled, the compiler will issue a "feature" warning. 
For instance, if the `open` modifier on class `Writer` is dropped, compiling `EncryptedWriter` would produce a warning: + ``` + -- Feature Warning: EncryptedWriter.scala:6:14 ---- + |class EncryptedWriter[T: Encryptable] extends Writer[T] + | ^ + |Unless class Writer is declared 'open', its extension in a separate file should be enabled + |by adding the import clause 'import scala.language.adhocExtensions' + |or by setting the compiler option -language:adhocExtensions. + ``` + +### Motivation + +When writing a class, there are three possible expectations of extensibility: + +1. The class is intended to allow extensions. This means one should expect +a carefully worked out and documented extension contract for the class. + +2. Extensions of the class are forbidden, for instance to make correctness or security guarantees. + +3. There is no firm decision either way. The class is not _a priori_ intended for extensions, but if others find it useful to extend on an _ad-hoc_ basis, let them go ahead. However, they are on their own in this case. There is no documented extension contract, and future versions of the class might break the extensions (by rearranging internal call patterns, for instance). + +The three cases are clearly distinguished by using `open` for (1), `final` for (2) and no modifier for (3). + +It is good practice to avoid _ad-hoc_ extensions in a code base, since they tend to lead to fragile systems that are hard to evolve. But there +are still some situations where these extensions are useful: for instance, +to mock classes in tests, or to apply temporary patches that add features or fix bugs in library classes. That's why _ad-hoc_ extensions are permitted, but only if there is an explicit opt-in via a language feature import. + +### Details + + - `open` is a soft modifier. It is treated as a normal identifier + unless it is in modifier position. + - An `open` class cannot be `final` or `sealed`. + - Traits or `abstract` classes are always `open`, so `open` is redundant for them. + +### Relationship with `sealed` + +A class that is neither `abstract` nor `open` is similar to a `sealed` class: it can still be extended, but only in the same compilation unit. The difference is what happens if an extension of the class is attempted in another compilation unit. For a `sealed` class, this is an error, whereas for a simple non-open class, this is still permitted provided the `adhocExtensions` feature is enabled, and it gives a warning otherwise. + +### Migration + +`open` is a new modifier in Scala 3. To allow cross compilation between Scala 2.13 and Scala 3.0 without warnings, the feature warning for ad-hoc extensions is produced only under `-strict`. It will be produced by default from Scala 3.1 on. diff --git a/scala3doc/dotty-docs/docs/docs/reference/other-new-features/parameter-untupling-spec.md b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/parameter-untupling-spec.md new file mode 100644 index 000000000000..77e6fff32691 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/parameter-untupling-spec.md @@ -0,0 +1,85 @@ +--- +layout: doc-page +title: "Parameter Untupling - More Details" +--- + +### Motivation + +Say you have a list of pairs + +```scala +val xs: List[(Int, Int)] +``` + +and you want to map `xs` to a list of `Int`s so that each pair of numbers is mapped to their sum. 
+Previously, the best way to do this was with a pattern-matching decomposition: +```scala +xs.map { + case (x, y) => x + y +} +``` +While correct, this is inconvenient. Instead, we propose to write it the following way: + +```scala +xs.map { + (x, y) => x + y +} +``` +or, equivalently: +```scala +xs.map(_ + _) +``` + +Generally, a function value with `n > 1` parameters can be converted to a function with tupled arguments if the expected type is a unary function type of the form `((T_1, ..., T_n)) => U`. + +### Type Checking + +Let a function `f` of the form `(p1, ..., pn) => e` for `n != 1`, parameters `p1, ..., pn`, and an expression `e`. + +If the expected type of `f` is a fully defined function type or SAM-type that has a +single parameter of a subtype of `ProductN[T1, ..., Tn]`, where each type `Ti` fits the corresponding +parameter `pi`. Then `f` will conform to the function type `ProductN[T1, ..., Tn] => R`. + +A type `Ti` fits a parameter `pi` if one of the following two cases is `true`: + +* `pi` comes without a type, i.e. it is a simple identifier or `_`. +* `pi` is of the form `x: Ui` or `_: Ui` and `Ti <: Ui`. + +Auto-tupling composes with eta-expansion. That is an n-ary function generated by eta-expansion +can in turn be adapted to the expected type with auto-tupling. + +#### Term adaptation + +If the function +```scala +(p1: T1, ..., pn: Tn) => e +``` + +is typed as `ProductN[T1, ..., Tn] => Te`, then it will be transformed to + +```scala +(x: TupleN[T1, ..., Tn]) => { + def p1: T1 = x._1 + ... + def pn: Tn = x._n + e +} +``` + +##### Generic tuples + +If we come to support generic tuples, which provide the possibility of having tuples/functions of arities larger than 22 we would need to additionally support generic tuples of the form `T1 *: T2 *: ...`. +Translation of such a tuples would use the `apply` method on the tuple to access the elements instead of the `_N` methods of `Product`. + +### Migration + +Code like this could not be written before, hence the new notation would not be ambiguous after adoption. + +Though it is possible that someone has written an implicit conversion form `(T1, ..., Tn) => R` to `TupleN[T1, ..., Tn] => R` +for some `n`. This change could be detected and fixed by `Scalafix`. Furthermore, such conversion would probably +be doing the same translation (semantically) but in a less efficient way. + +### Reference + +For more info see: +* [Issue #897](https://github.com/lampepfl/dotty/issues/897). diff --git a/scala3doc/dotty-docs/docs/docs/reference/other-new-features/parameter-untupling.md b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/parameter-untupling.md new file mode 100644 index 000000000000..335406ed8f64 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/parameter-untupling.md @@ -0,0 +1,37 @@ +--- +layout: doc-page +title: "Parameter Untupling" +--- + +Say you have a list of pairs +```scala +val xs: List[(Int, Int)] +``` +and you want to map `xs` to a list of `Int`s so that each pair of numbers is mapped to +their sum. Previously, the best way to do this was with a pattern-matching decomposition: +```scala +xs map { + case (x, y) => x + y +} +``` +While correct, this is also inconvenient and confusing, since the `case` +suggests that the pattern match could fail. 
As a shorter and clearer alternative Dotty now allows +```scala +xs.map { + (x, y) => x + y +} +``` +or, equivalently: +```scala +xs.map(_ + _) +``` +Generally, a function value with `n > 1` parameters is converted to a +pattern-matching closure using `case` if the expected type is a unary +function type of the form `((T_1, ..., T_n)) => U`. + +### Reference + +For more info see: + +* [More details](./parameter-untupling-spec.md) +* [Issue #897](https://github.com/lampepfl/dotty/issues/897). diff --git a/scala3doc/dotty-docs/docs/docs/reference/other-new-features/quoted-pattern-spec.md b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/quoted-pattern-spec.md new file mode 100644 index 000000000000..ad52b0e7a0dc --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/quoted-pattern-spec.md @@ -0,0 +1,111 @@ +--- +layout: doc-page +title: "Pattern Matching on Quoted Code" +--- + + +## Overview + +Any top-level quote `'{ ... }` in a pattern position will become a quoted pattern. Inside quoted pattern parts of the code can be spliced with `$` which extracts that part of the code. +Splices can be of two forms: +* A splice `${ ... : Expr[T] }` that can be placed in any expression position. +* A splice `${ ... : Bind[T] }` that can be placed on names of `val`s, `var`s or `def`s + +```scala +def foo(x: Expr[Int])(using tasty.Reflect): Expr[Int] = x match { + case '{ val $a: Int = $x; (${Bind(`a`)}: Int) + 1 } => '{ $x + 1 } // TODO needs fix for #6328, `a` is currently not in scope while typing +} +``` +In the example above we have `$a` which provides a `Bind[Int]`, `$x` which provides an `Expr[Int]` and `${Bind(`a`)}` which probides an `Expr[Int]` that is pattern matched against `Bind(`a`)` to check that it is a reference to `a`. + +Quoted patterns are transformed during typer to a call of `scala.internal.quoted.Expr.unapply` which splits the quoted code into the patterns and a reifiable quote that will be used as witnesses at runtime. + +```scala +def foo(x: Expr[Int])(using tasty.Reflect): Expr[Int] = x match { + case scala.internal.quoted.Expr.unapply[Tuple3[Bind[Int], Expr[Int], Expr[Int]]](Tuple3(a, x, Bind(`a`), y))('{ @patternBindHole val a: Int = patternHole[Int]; patternHole[Int] + 1 }) => +} +``` + + +## Runtime semantics + +At runtime to a `quoted.Expr` can be matched to another using `scala.internal.quoted.Expr.unapply`. + +```scala +def unapply[Tup <: Tuple](scrutineeExpr: Expr[Any])(implicit patternExpr: Expr[Any], reflection: Reflection): Option[Tup] +``` + +The `scrutineeExpr` is a normal quoted expression while `patternExpr` may contain holes representing splices. +The result of pattern matching is `None` if the expressions are not equivalent, otherwise it returns `Some` (some tuple) containing the contents of the matched holes. 
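+
+In user code this surfaces as ordinary `match` semantics: a `Some` result means the quoted `case` fires with its holes bound, while `None` means the `case` is skipped. For instance (a hedged sketch reusing the signatures from the examples above; the exact entry points differ between compiler versions):
+
+```scala
+def simplify(e: Expr[Int])(using tasty.Reflect): Expr[Int] = e match {
+  case '{ $x + 0 } => x  // unapply returned Some(...): the hole `$x` is bound to an Expr[Int]
+  case '{ 0 + $x } => x
+  case _           => e  // every pattern's unapply returned None
+}
+```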
+ +Let's define some abstractions on the possible results of this pattern matching using the alias `Matching`: +```scala +type Matching = Option[Tuple] +type Env + +def notMatched = None +def matched = Some(()) // aka Some(Tuple0()) +def matched[T](x: T) = Some(Tuple1(x)) +extension (x: Matching) def && (y: Matching) = if (x == None || y == None) None else Some(x.get ++ y.get) +def fold[T](m: Mattching*)(using Env): Matching = m.fold(matched)(_ && _) + +// `a =#= b` stands for `a` matches `b` +extension (scrutinee: Tree) def =#= pattern: Tree)(using Env): Matching // described by cases in the tables below + +def envWith(equiv: (Symbol, Symbol)*)(using Env): Env // Adds to the current environment the fact that s1 from the scrutinee is equivalent to s2 in the pattern + +def equivalent(s1: Symbol, s2: Symbol)(using Env): Env +``` + +The implementation of `=#=` + +| Tree | Pattern | Returns | +| :-----------------------: | :-------------------------: | :---------- | +| Term `a` | `patternHole[X]` | `match(quoted.Expr[X]('a))` if type of `a` is a subtype of `X` +| `val a: A` | `@patternBindHole val x: X` | `match(quoted.Bind[X](a.sym)) && '{val a: A} =#= '{val x: X}` +| Literal `a` | Literal `x` | `matched` if value of `a` is equal to the value of `x` +| `a` | `x` | `matched` if `equivalent(a.sym, x.sym)` +| `a.b` | `x.y` | `'a =#= 'x` if `equivalent(b.sym, y.sym)` +| `a: A` | `x: X` | `'a =#= 'x && '[A] =#= '[X]` +| `fa(.. ai ..)` | `fx(.. xi ..)` | `'fa =#= 'fx && fold(.. 'ai =#= 'xi) ..)` +| `fa[.. Ai ..]` | `fx[.. Xi ..]` | `'fa =#= 'fx && fold(.. '[Ai] =#= '[Xi] ..)` +| `{.. ai ..}` | `{.. xi ..}` | `fold(.. 'ai =#= 'xi ..)` +| `if (a) b else c` | `if (x) y else z` | `'a =#= 'x && 'b =#= 'y && 'c =#= 'z` +| `while (a) b` | `while (x) y` | `'a =#= 'x && 'b =#= 'y` +| Assignment `a = b` | Assignment `x = y` | `'b =#= 'y` if `'a =#= 'x.nonEmpty` +| Named argument
      `n = a` | Named argument
      `m = x` | `'a =#= 'x` +| `Seq(.. ai ..): _*` | `Seq(.. xi ..): _*` | `fold(.. 'ai =#= 'xi ..)` +| `new A` | `new X` | `'[A] =#= '[X]` +| `this` | `this` | `matched` if both refer to the same symbol +| `a.super[B]` | `x.super[Y]` | `'a =#= 'x` if `B` equals `Y` +| `val a: A = b`
      `lazy val a: A = b`
      `var a: A = b` | `val x: X = y`
      `lazy val x: X = y`
      `var x: X = y` | `'[A] =#= '[X] && 'b =#= 'y given envWith(a.sym -> b.sym)` +| `def a[..Ai..](.. bij: Bij ..): A = c` | `def x[..Xi..](.. yij: Yij ..): X = z` | `fold(..'[Ai] =#= '[Xi]..) && fold(.. 'bij =#= 'yij && '[Bij] =#= '[Yij] ..) && '[A] =#= '[X] && 'c =#= 'z given envWith(a.sym -> b.sym, .. bij.sym -> yij.sym ..)` +| `(.. ai: Ai ..) => b` | `(.. xi: Xi ..) => y` | `fold(.. 'ai =#= 'xi && '[Ai] =#= '[Xi] ..) && 'b =#= 'y given envWith(.. ai.sym -> xi.sym ..)` +| `a match { .. bi .. }` | `x match { .. yi .. }` | `'a =#= 'x && fold(.. 'bi =#= 'yi ..)` +| `try a catch { .. bi .. } finally ci` | `try x catch { .. yi .. } finally z` | `'a =#= 'x && fold(.. 'bi =#= 'yi ..) && 'c =#= 'z` +| | | +| `case a if b => c` | `case x if y => z` | `'a =#= 'x && 'b =#= 'y && c =#= z` +| | | +| Inferred `A` | Inferred `X` | `matched` if `A <:< X` +| `A[.. Bi ..]` | `X[.. Yi ..]` | `matched` if `('[A] && '[X] && fold(.. '[Bi] =#= '[Yi] ..)).nonEmpty` +| `A @annot` | `X` | `'[A] =#= '[X]` +| `A` | `X @annot` | `'[A] && '[X]` +| | | `notMatched` + + +| Pattern inside the quote | Pattern | Returns | +| :-------------------------: |:--------------------------: | :------------- | +| Value `a` | Value `x` | `'a =#= 'x` +| `a: A` | `x: X` | `'[A] && '[X]` +| `a @ b` | `x @ y` | `'b =#= 'y given envWith(a.sym -> b.sym)` +| Unapply `a(..bi..)(..ci..)` | Unapply `x(..yi..)(..zi..)` | `'a =#= 'x && fold(.. 'bi =#= 'yi ..) && fold(.. 'ci =#= 'zi ..)` +| `.. | ai | ..` | `.. | xi | ..` | `fold(.. 'ai =#= 'xi ..)` +| `_` | `_` | `matched` +| | | `notMatched` + + + + +## Quoted Patterns transformation + +Coming soon... diff --git a/scala3doc/dotty-docs/docs/docs/reference/other-new-features/safe-initialization.md b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/safe-initialization.md new file mode 100644 index 000000000000..327dd23efd49 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/safe-initialization.md @@ -0,0 +1,311 @@ +--- +layout: doc-page +title: "Safe Initialization" +--- + +Dotty implements experimental safe initialization check, which can be enabled by the compiler option `-Ycheck-init`. + +## A Quick Glance + +To get a feel of how it works, we first show several examples below. + +### Parent-Child Interaction + +Given the following code snippet: + +``` Scala +abstract class AbstractFile { + def name: String + val extension: String = name.substring(4) +} + +class RemoteFile(url: String) extends AbstractFile { + val localFile: String = url.hashCode + ".tmp" // error: usge of `localFile` before it's initialized + def name: String = localFile +} +``` + +The checker will report: + +``` scala +-- Warning: tests/init/neg/AbstractFile.scala:7:4 ------------------------------ +7 | val localFile: String = url.hashCode + ".tmp" // error + | ^ + | Access non-initialized field value localFile. Calling trace: + | -> val extension: String = name.substring(4) [ AbstractFile.scala:3 ] + | -> def name: String = localFile [ AbstractFile.scala:8 ] +``` + +### Inner-Outer Interaction + +Given the code below: + +``` scala +object Trees { + class ValDef { counter += 1 } + class EmptyValDef extends ValDef + val theEmptyValDef = new EmptyValDef + private var counter = 0 // error +} +``` + +The checker will report: + +``` scala +-- Warning: tests/init/neg/trees.scala:5:14 ------------------------------------ +5 | private var counter = 0 // error + | ^ + | Access non-initialized field variable counter. 
Calling trace: + | -> val theEmptyValDef = new EmptyValDef [ trees.scala:4 ] + | -> class EmptyValDef extends ValDef [ trees.scala:3 ] + | -> class ValDef { counter += 1 } [ trees.scala:2 ] +``` + +### Functions + +Given the code below: + +``` scala +abstract class Parent { + val f: () => String = () => this.message + def message: String +} +class Child extends Parent { + val a = f() + val b = "hello" // error + def message: String = b +} +``` + +The checker reports: + +``` scala +-- Warning: tests/init/neg/features-high-order.scala:7:6 ----------------------- +7 | val b = "hello" // error + | ^ + |Access non-initialized field value b. Calling trace: + | -> val a = f() [ features-high-order.scala:6 ] + | -> val f: () => String = () => this.message [ features-high-order.scala:2 ] + | -> def message: String = b [ features-high-order.scala:8 ] +``` + +## Design Goals + +We establish the following design goals: + +- __Sound__: checking always terminates, and is sound for common and reasonable usage (over-approximation) +- __Expressive__: support common and reasonable initialization patterns +- __Friendly__: simple rules, minimal syntactic overhead, informative error messages +- __Modular__: modular checking, no analysis beyond project boundary +- __Fast__: instant feedback +- __Simple__: no changes to core type system, explainable by a simple theory + +By _reasonable usage_, we include the following use cases (but not restricted to them): + +- Access fields on `this` and outer `this` during initialization +- Call methods on `this` and outer `this` during initialization +- Instantiate inner class and call methods on such instances during initialization +- Capture fields in functions + +## Principles and Rules + +To achieve the goals, we uphold three fundamental principles: +_stackability_, _monotonicity_ and _scopability_. + +Stackability means that objects are initialized in stack order: if the +object `b` is created during the initialization of object `a`, then +all fields of `b` should become initialized before or at the same time +as `a`. Scala enforces this property in syntax by demanding that all +fields are initialized at the end of the primary constructor, except +for the language feature below: + +``` scala +var x: T = _ +``` + +Control effects such as exceptions may break this property, as the +following example shows: + +``` scala +class MyException(val b: B) extends Exception("") +class A { + val b = try { new B } catch { case myEx: MyException => myEx.b } + println(b.a) +} +class B { + throw new MyException(this) + val a: Int = 1 +} +``` + +In the code above, the control effect teleport the uninitialized value +wrapped in an exception. In the implementation, we avoid the problem +by ensuring that the values that are thrown must be transitively initialized. + +Monotonicity means that the initialization status of an object should +not go backward: initialized fields continue to be initialized, a +field points to an initialized object may not later point to an +object under initialization. As an example, the following code will be rejected: + +``` scala +trait Reporter { def report(msg: String): Unit } +class FileReporter(ctx: Context) extends Reporter { + ctx.typer.reporter = this // ctx now reaches an uninitialized object + val file: File = new File("report.txt") + def report(msg: String) = file.write(msg) +} +``` + +In the code above, suppose `ctx` points to a transitively initialized +object. 
Now the assignment at line 3 makes `this`, which is not fully +initialized, reachable from `ctx`. This makes field usage dangerous, +as it may indirectly reach uninitialized fields. + +Monotonicity is based on a well-known technique called _heap monotonic +typestate_ to ensure soundness in the presence of aliasing +[1]. Otherwise, either soundness will be compromised or we have to +disallow the usage of already initialized fields. + +Scopability means that an expression may only access existing objects via formal +parameters and `this`. More precisely, given any environment `ρ` (which are the +value bindings for method parameters and `this`) and heap `σ` for evaluating an expression +`e`, if the resulting value reaches an object `o` pre-existent in `σ`, then `o` +is reachable from `ρ` in `σ`. Control effects like coroutines, delimited +control, resumable exceptions may break the property, as they can transport a +value upper in the stack (not in scope) to be reachable from the current scope. +Static fields can also serve as a teleport thus breaks this property. In the +implementation, we need to enforce that teleported values are transitively +initialized. + +With the established principles and design goals, following rules are imposed: + +1. In an assignment `o.x = e`, the expression `e` may only point to transitively initialized objects. + + This is how monotonicity is enforced in the system. Note that in an + initialization `val f: T = e`, the expression `e` may point to an object + under initialization. This requires a distinction between mutation and + initialization in order to enforce different rules. Scala + has different syntax for them, it thus is not an issue. + +2. References to objects under initialization may not be passed as arguments to method calls or constructors. + + Escape of `this` in the constructor is commonly regarded as an + anti-pattern, and it's rarely used in practice. This rule is simple + for the programmer to reason about initialization and it simplifies + implementation. The theory supports safe escape of `this` with the help of + annotations, we delay the extension until there is a strong need. + +3. Local definitions may only refer to transitively initialized objects. + + It means that in a local definition `val x: T = e`, the expression `e` may + only evaluate to transitively initialized objects. The same goes for local + lazy variables and methods. This rule is again motivated for simplicity in + reasoning about initialization: programmers may safely assume that all local + definitions only point to transitively initialized objects. + +## Modularity + +For modularity, we forbid subtle initialization interaction beyond project +boundaries. For example, the following code passes the check when the two +classes are defined in the same project: + +```Scala +class Base { + private val map: mutable.Map[Int, String] = mutable.Map.empty + def enter(k: Int, v: String) = map(k) = v +} +class Child extends Base { + enter(1, "one") + enter(2, "two") +} +``` + +However, when the class `Base` and `Child` are defined in two different +projects, the check will emit a warning for the calls to `enter` in the class +`Child`. This restricts subtle initialization within project boundaries, +and avoids accidental violation of contracts across library versions. + +We impose the following rules to enforce modularity: + +4. 
A class or trait that may be extended in another project should not + call virtual methods on `this` in its template/mixin evaluation, + directly or indirectly. + +5. The method call `o.m(args)` is forbidden if `o` is not transitively + initialized and the target of `m` is defined in an external project. + +6. The expression `new p.C(args)` is forbidden, if `p` is not transitively + initialized and `C` is defined in an external project. + +Theoretically, we may analyze across project boundaries based on tasty. However, +from our experience with Dotty community projects, most subtle initialization +patterns are restricted in the same project. As the rules only report warnings +instead of errors, we think it is good to first impose more strict rules, The +feedback from the community is welcome. + +## Theory + +The theory is based on type-and-effect systems [2]. We introduce two concepts, +_effects_ and _potentials_: + +``` +π = C.this | Warm(C, π) | π.f | π.m | π.super[D] | Cold | Fun(Π, Φ) | Outer(C, π) +ϕ = π↑ | π.f! | π.m! +``` + +Potentials (π) represent values that are possibly under initialization. + +- `C.this`: current object +- `Warm(C, π)`: an object of type `C` where all its fields are assigned, and the potential for `this` of its enclosing class is `π`. +- `π.f`: the potential of the field `f` in the potential `π` +- `π.m`: the potential of the field `f` in the potential `π` +- `π.super[D]`: essentially the object π, used for virtual method resolution +- `Cold`: an object with unknown initialization status +- `Fun(Π, Φ)`: a function, when called produce effects Φ and return potentials Π. +- `Outer(C, π)`: the potential of `this` for the enclosing class of `C` when `C.this` is ` π`. + +Effects are triggered from potentials: + +- `π↑`: promote the object pointed to by the potential `π` to fully-initialized +- `π.f!`: access field `f` on the potential `π` +- `π.m!`: call the method `m` on the potential `π` + +To ensure that the checking always terminate and for better +performance, we restrict the length of potentials to be finite (by +default 2). If the potential is too long, the checker stops +tracking it by checking that the potential is actually transitively +initialized. + +For an expression `e`, it may be summarized by the pair `(Π, Φ)`, +which means evaluation of `e` may produce the effects Φ and return the +potentials Π. Each field and method is associated with such a pair. +We call such a pair _summary_. The expansion of proxy potentials and effects, +such as `π.f`, `π.m` and `π.m!`, will take advantage of the summaries. +Depending on the potential `π` for `this`, the summaries need to be rebased (`asSeenFrom`) before usage. + +The checking treats the templates of concrete classes as entry points. +It maintains the set of initialized fields as initialization +progresses, and check that only initialized fields are accessed during +the initialization and there is no leaking of values under initialization. +Virtual method calls on `this` is not a problem, +as they can always be resolved statically. + +More details can be found in a forthcoming paper. + +## Back Doors + +Occasionally you may want to suppress warnings reported by the +checker. You can either write `e: @unchecked` to tell the checker to +skip checking for the expression `e`, or you may use the old trick: +mark some fields as lazy. + +## Caveats + +The system cannot handle static fields, nor does it provide safety +guarantee when extending Java or Scala 2 classes. 
Calling methods of +Java or Scala 2 is always safe. + +## References + +- Fähndrich, M. and Leino, K.R.M., 2003, July. _Heap monotonic typestates_. In International Workshop on Aliasing, Confinement and Ownership in object-oriented programming (IWACO). +- Lucassen, J.M. and Gifford, D.K., 1988, January. _Polymorphic effect systems_. In Proceedings of the 15th ACM SIGPLAN-SIGACT symposium on Principles of programming languages (pp. 47-57). ACM. diff --git a/scala3doc/dotty-docs/docs/docs/reference/other-new-features/super-traits.md b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/super-traits.md new file mode 100644 index 000000000000..f7ca5d688ea1 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/super-traits.md @@ -0,0 +1,86 @@ +--- +layout: doc-page +title: super traits +--- + +Traits are used in two roles: + + 1. As mixins for other classes and traits + 2. As types of vals, defs, or parameters + +Some traits are used primarily in the first role, and we usually do not want to see them in inferred types. An example is the `Product` trait that the compiler +adds as a super trait to every case class or case object. In Scala 2, this parent trait sometimes makes inferred types more complicated than they should be. Example: +```scala +trait Kind +case object Var extends Kind +case object Val extends Kind +val x = Set(if condition then Val else Var) +``` +Here, the inferred type of `x` is `Set[Kind & Product & Serializable]` whereas one would have hoped it to be `Set[Kind]`. The reasoning for this particular type to be inferred is as follows: + + - The type of the conditional above is the union type `Val | Var`. + - A union type is widened in type inference to the least supertype that is + not a union type. In the example, this type is `Kind & Product & Serializable` since all three traits are supertraits of both `Val` and `Var`. + So that type becomes the inferred element type of the set. + +Scala 3 allows one to mark a trait as a `super` trait, which means that it can be suppressed in type inference. Here's an example that follows the lines of the +code above, but now with a new super trait `S` instead of `Product`: +```scala +super trait S +trait Kind +object Var extends Kind, S +object Val extends Kind, S +val x = Set(if condition then Val else Var) +``` +Now `x` has inferred type `Set[Kind]`. The common super trait `S` does not +appear in the inferred type. + +### Super Traits + +The traits `scala.Product`, `java.lang.Serializable` and `java.lang.Comparable` +are treated automatically as super traits. Other traits can be turned into super traits, by adding the keyword `super` in front of `trait`, as shown above. + +Every trait can be declared as a super trait. Typically super traits are traits that influence the implementation of inheriting classes and traits and that are not usually used as types by themselves. Two examples from the +standard collection library: + + - `IterableOps`, which provides method implementations for an `Iterable` + - `StrictOptimizedSeqOps`, which optimises some of these implementations for + sequences with efficient indexing. + +Generally, any trait that is extended recursively is a good candidate to be +declared a super trait. + +### Retro-Fitting Scala 2 Libraries + +To allow cross-building between Scala 2 and 3, super traits can also be +introduced by adding the `@superTrait` annotation, which is defined in package `scala.annotation`. 
Example: +```scala +import scala.annotation.superTrait + +@superTrait trait StrictOptimizedSeqOps[+A, +CC[_], +C] ... +``` +The `@superTrait` annotation will be deprecated and removed in some later version of Scala when cross-building with Scala 2 will no longer be a concern. + +### Rules for Inference + +Super traits can be given as explicit types as usual. But they are often elided when types are inferred. Roughly, the rules for type inference say that super traits are dropped from intersections where possible. + +The precise rules are as follows: + + - When inferring a type of a type variable, or the type of a val, or the return type of a def, + - where that type is not higher-kinded, + - and where `B` is its known upper bound or `Any` if none exists: + - If the type inferred so far is of the form `T1 & ... & Tn` where + `n >= 1`, replace the maximal number of `Ti`s by `Any`, while ensuring that + the resulting type is still a subtype of the bound `B`. + - However, do not perform this widening if all types `Ti` can get replaced in that way. + +The last clause ensures that a single super trait instance such as `Product` is not widened to `Any`. Super trait instances are only dropped when they appear in conjunction with some other type. + +### Syntax + +Only the production `TmplDef` for class and trait definitions has to be changed. +The new version is: +``` +TmplDef ::= ([‘case’] ‘class’ | [‘super’] ‘trait’) ClassDef +``` diff --git a/scala3doc/dotty-docs/docs/docs/reference/other-new-features/threadUnsafe-annotation.md b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/threadUnsafe-annotation.md new file mode 100644 index 000000000000..85b8d66259c3 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/threadUnsafe-annotation.md @@ -0,0 +1,18 @@ +--- +layout: doc-page +title: threadUnsafe annotation +--- + +A new annotation `@threadUnsafe` can be used on a field which defines a `lazy +val`. When this annotation is used, the initialization of the lazy val will use a +faster mechanism which is not thread-safe. + +### Example + +```scala +import scala.annotation.threadUnsafe + +class Hello { + @threadUnsafe lazy val x: Int = 1 +} +``` diff --git a/scala3doc/dotty-docs/docs/docs/reference/other-new-features/trait-parameters.md b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/trait-parameters.md new file mode 100644 index 000000000000..04cd9b7607b2 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/trait-parameters.md @@ -0,0 +1,60 @@ +--- +layout: doc-page +title: "Trait Parameters" +--- + +Dotty allows traits to have parameters, just like classes have parameters. + +```scala +trait Greeting(val name: String) { + def msg = s"How are you, $name" +} + +class C extends Greeting("Bob") { + println(msg) +} +``` + +Arguments to a trait are evaluated immediately before the trait is initialized. + +One potential issue with trait parameters is how to prevent +ambiguities. For instance, you might try to extend `Greeting` twice, +with different parameters. + +```scala +class D extends C with Greeting("Bill") // error: parameter passed twice +``` + +Should this print "Bob" or "Bill"? In fact this program is illegal, +because it violates the second rule of the following for trait parameters: + + 1. If a class `C` extends a parameterized trait `T`, and its superclass does not, `C` _must_ pass arguments to `T`. + + 2. 
If a class `C` extends a parameterized trait `T`, and its superclass does as well, `C` _must not_ pass arguments to `T`. + + 3. Traits must never pass arguments to parent traits. + +Here's a trait extending the parameterized trait `Greeting`. + +```scala +trait FormalGreeting extends Greeting { + override def msg = s"How do you do, $name" +} +``` +As is required, no arguments are passed to `Greeting`. However, this poses an issue +when defining a class that extends `FormalGreeting`: + +```scala +class E extends FormalGreeting // error: missing arguments for `Greeting`. +``` + +The correct way to write `E` is to extend both `Greeting` and +`FormalGreeting` (in either order): + +```scala +class E extends Greeting("Bob") with FormalGreeting +``` + +### Reference + +For more info, see [Scala SIP 25](http://docs.scala-lang.org/sips/pending/trait-parameters.html). diff --git a/scala3doc/dotty-docs/docs/docs/reference/other-new-features/tupled-function.md b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/tupled-function.md new file mode 100644 index 000000000000..ad94169210ad --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/other-new-features/tupled-function.md @@ -0,0 +1,82 @@ +--- +layout: doc-page +title: "Tupled Function" +--- + +Tupled Function +---------------------- + +With functions bounded to arities up to 22 it was possible to generalize some operation on all function types using overloading. +Now that we have functions and tuples generalized to [arities above 22](../dropped-features/limit22.md) overloading is not an option anymore. +The type class `TupleFunction` provides a way to abstract directly over a function of any arity converting it to an equivalent function that receives all arguments in a single tuple. + +```scala +/** Type class relating a `FunctionN[..., R]` with an equivalent tupled function `Function1[TupleN[...], R]` + * + * @tparam F a function type + * @tparam G a tupled function type (function of arity 1 receiving a tuple as argument) + */ +@implicitNotFound("${F} cannot be tupled as ${G}") +sealed trait TupledFunction[F, G] { + def tupled(f: F): G + def untupled(g: G): F +} +``` + +The compiler will synthesize an instance of `TupledFunction[F, G]` if: + +* `F` is a function type of arity `N` +* `G` is a function with a single tuple argument of size `N` and its types are equal to the arguments of `F` +* The return type of `F` is equal to the return type of `G` +* `F` and `G` are the same sort of function (both are `(...) => R` or both are `(...) ?=> R`) +* If only one of `F` or `G` is instantiated the second one is inferred. + +Examples +-------- +`TupledFunction` can be used to generalize the `Function1.tupled`, ... `Function22.tupled` methods to functions of any arities. +The following defines `tupled` as [extension method](../contextual/extension-methods.html) ([full example](https://github.com/lampepfl/dotty/blob/master/tests/run/tupled-function-tupled.scala)). + +```scala +/** Creates a tupled version of this function: instead of N arguments, + * it accepts a single [[scala.Tuple]] with N elements as argument. 
+ * + * @tparam F the function type + * @tparam Args the tuple type with the same types as the function arguments of F + * @tparam R the return type of F + */ +extension [F, Args <: Tuple, R](f: F) + def tupled(using tf: TupledFunction[F, Args => R]): Args => R = tf.tupled(f) +``` + +`TupledFunction` can be used to generalize the `Function.untupled` to a function of any arities ([full example](https://github.com/lampepfl/dotty/blob/master/tests/run/tupled-function-untupled.scala)) + +```scala +/** Creates an untupled version of this function: instead of a single argument of type [[scala.Tuple]] with N elements, + * it accepts N arguments. + * + * This is a generalization of [[scala.Function.untupled]] that work on functions of any arity + * + * @tparam F the function type + * @tparam Args the tuple type with the same types as the function arguments of F + * @tparam R the return type of F + */ +extension [F, Args <: Tuple, R](f: Args => R) + def untupled(using tf: TupledFunction[F, Args => R]): F = tf.untupled(f) +``` + +`TupledFunction` can also be used to generalize the [`Tuple1.compose`](https://github.com/lampepfl/dotty/blob/master/tests/run/tupled-function-compose.scala) and [`Tuple1.andThen`](https://github.com/lampepfl/dotty/blob/master/tests/run/tupled-function-andThen.scala) methods to compose functions of larger arities and with functions that return tuples. + +```scala +/** Composes two instances of TupledFunction into a new TupledFunction, with this function applied last. + * + * @tparam F a function type + * @tparam G a function type + * @tparam FArgs the tuple type with the same types as the function arguments of F and return type of G + * @tparam GArgs the tuple type with the same types as the function arguments of G + * @tparam R the return type of F + */ +extension [F, G, FArgs <: Tuple, GArgs <: Tuple, R](f: F) + def compose(g: G)(using tg: TupledFunction[G, GArgs => FArgs], tf: TupledFunction[F, FArgs => R]): GArgs => R = { + (x: GArgs) => tf.tupled(f)(tg.tupled(g)(x)) +} +``` diff --git a/scala3doc/dotty-docs/docs/docs/reference/overview.md b/scala3doc/dotty-docs/docs/docs/reference/overview.md new file mode 100644 index 000000000000..7ab8705dd179 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/overview.md @@ -0,0 +1,133 @@ +--- +layout: doc-page +title: "Overview" +--- + +Dotty implements many language changes compared to Scala 2. These changes are currently discussed for inclusion in Scala 3, the new Scala language standard which will be based on the Dotty codebase. + +## Goals + +The language redesign was guided by three main goals: + + - Strengthen Scala's foundations. + Make the full programming language compatible with the foundational work on the DOT calculus and apply the lessons learned from that work. + - Make Scala easier and safer to use. Tame powerful constructs such as implicits to provide a gentler learning curve. Remove warts and puzzlers. + - Further improve the consistency and expressiveness of Scala's language constructs. 
+ +Corresponding to these goals, the language changes fall into seven categories: +(1) Core constructs to strengthen foundations, (2) simplifications and (3) restrictions, to make the language easier and safer to use, (4) dropped constructs to make the language smaller and more regular, (5) changed constructs to remove warts, and increase consistency and usability, (6) new constructs to fill gaps and increase expressiveness, (7) a new, principled approach to metaprogramming that replaces today's experimental macros. + +## Essential Foundations + +These new constructs directly model core features of DOT, higher-kinded types, and the [SI calculus for implicit resolution](https://infoscience.epfl.ch/record/229878/files/simplicitly_1.pdf). + + - [Intersection types](new-types/intersection-types.md), replacing compound types, + - [Union types](new-types/union-types.md), + - [Type lambdas](new-types/type-lambdas.md), replacing encodings using structural types and type projection. + - [Context Functions](contextual/context-functions.md), offering abstraction over given parameters. + +## Simplifications + +These constructs replace existing constructs with the aim of making the language safer and simpler to use, and to promote uniformity in code style. + + - [Trait Parameters](other-new-features/trait-parameters.md) replace [early initializers](dropped-features/early-initializers.md) with a more generally useful construct. + - [Given Instances](contextual/givens.md) + replace implicit objects and defs, focussing on intent over mechanism. + - [Using Clauses](contextual/using-clauses.md) replace implicit parameters, avoiding their ambiguities. + - [Extension Methods](contextual/extension-methods.md) replace implicit classes with a clearer and simpler mechanism. + - [Opaque Type Aliases](other-new-features/opaques.md) replace most uses + of value classes while guaranteeing absence of boxing. + - [Toplevel definitions](dropped-features/package-objects.md) replace package objects, dropping syntactic boilerplate. + - [Export clauses](other-new-features/export.md) + provide a simple and general way to express aggregation, which can replace the + previous facade pattern of package objects inheriting from classes. + - [Vararg patterns](changed-features/vararg-patterns.md) now use the form `: _*` instead of `@ _*`, mirroring vararg expressions, + - [Creator applications](other-new-features/creator-applications.md) allow using simple function call syntax + instead of `new` expressions. `new` expressions stay around as a fallback for + the cases where creator applications cannot be used. + +With the exception of early initializers and old-style vararg patterns, all superseded constructs continue to be available in Scala 3.0. The plan is to deprecate and phase them out later. + +Value classes (superseded by opaque type aliases) are a special case. There are currently no deprecation plans for value classes, since we might want to bring them back in a more general form if they are supported natively by the JVM as is planned by project Valhalla. + +## Restrictions + +These constructs are restricted to make the language safer. + + - [Implicit Conversions](contextual/conversions.md): there is only one way to define implicit conversions instead of many, and potentially surprising implicit conversions require a language import. + - [Given Imports](contextual/given-imports.md): implicits now require a special form of import, to make the import clearly visible. 
+ - [Type Projection](dropped-features/type-projection.md): only classes can be used as prefix `C` of a type projection `C#A`. Type projection on abstract types is no longer supported since it is unsound. + - [Multiversal Equality](contextual/multiversal-equality.md) implements an "opt-in" scheme to rule out nonsensical comparisons with `==` and `!=`. + - [@infix and @alpha](changed-features/operators.md) + make method application syntax uniform across code bases and require alphanumeric aliases for all symbolic names (proposed, not implemented). + +Unrestricted implicit conversions continue to be available in Scala 3.0, but will be deprecated and removed later. Unrestricted versions of the other constructs in the list above are available only under `-source 3.0-migration`. + + +## Dropped Constructs + +These constructs are proposed to be dropped without a new construct replacing them. The motivation for dropping these constructs is to simplify the language and its implementation. + + - [DelayedInit](dropped-features/delayed-init.md), + - [Existential types](dropped-features/existential-types.md), + - [Procedure syntax](dropped-features/procedure-syntax.md), + - [Class shadowing](dropped-features/class-shadowing.md), + - [XML literals](dropped-features/xml.md), + - [Symbol literals](dropped-features/symlits.md), + - [Auto application](dropped-features/auto-apply.md), + - [Weak conformance](dropped-features/weak-conformance.md), + - [Compound types](new-types/intersection-types.md), + - [Auto tupling](https://github.com/lampepfl/dotty/pull/4311) (implemented, but not merged). + +The date when these constructs are dropped varies. The current status is: + + - Not implemented at all: + - DelayedInit, existential types, weak conformance. + - Supported under `-source 3.0-migration`: + - procedure syntax, class shadowing, symbol literals, auto application, auto tupling in a restricted form. + - Supported in 3.0, to be deprecated and phased out later: + - XML literals, compound types. + + +## Changes + +These constructs have undergone changes to make them more regular and useful. + + - [Structural Types](changed-features/structural-types.md): They now allow pluggable implementations, which greatly increases their usefulness. Some usage patterns are restricted compared to the status quo. + - [Name-based pattern matching](changed-features/pattern-matching.md): The existing undocumented Scala 2 implementation has been codified in a slightly simplified form. + - [Eta expansion](changed-features/eta-expansion.md) is now performed universally also in the absence of an expected type. The postfix `_` operator is thus made redundant. It will be deprecated and dropped after Scala 3.0. + - [Implicit Resolution](changed-features/implicit-resolution.md): The implicit resolution rules have been cleaned up to make them more useful and less surprising. Implicit scope is restricted to no longer include package prefixes. + +Most aspects of old-style implicit resolution are still available under `-source 3.0-migration`. The other changes in this list are applied unconditionally. + +## New Constructs + +These are additions to the language that make it more powerful or pleasant to use. + + - [Enums](enums/enums.md) provide concise syntax for enumerations and [algebraic data types](enums/adts.md). + - [Parameter Untupling](other-new-features/parameter-untupling.md) avoids having to use `case` for tupled parameter destructuring. 
+ - [Dependent Function Types](new-types/dependent-function-types.md) generalize dependent methods to dependent function values and types. + - [Polymorphic Function Types](https://github.com/lampepfl/dotty/pull/4672) generalize polymorphic methods to dependent function values and types. _Current status_: There is a proposal, and a prototype implementation, but the implementation has not been finalized or merged yet. + - [Kind Polymorphism](other-new-features/kind-polymorphism.md) allows the definition of operators working equally on types and type constructors. + +## Metaprogramming + +The following constructs together aim to put metaprogramming in Scala on a new basis. So far, metaprogramming was achieved by a combination of macros and libraries such as Shapeless that were in turn based on some key macros. Current Scala 2 macro mechanisms are a thin veneer on top the current Scala 2 compiler, which makes them fragile and in many cases impossible to port to Scala 3. + +It's worth noting that macros were never included in the Scala 2 language specification and were so far made available only under an `-experimental` flag. This has not prevented their widespread usage. + +To enable porting most uses of macros, we are experimenting with the advanced language constructs listed below. These designs are more provisional than the rest of the proposed language constructs for Scala 3.0. There might still be some changes until the final release. Stabilizing the feature set needed for metaprogramming is our first priority. + +- [Match Types](new-types/match-types.md) allow computation on types. +- [Inline](metaprogramming/inline.md) provides +by itself a straightforward implementation of some simple macros and is at the same time an essential building block for the implementation of complex macros. +- [Quotes and Splices](metaprogramming/macros.md) provide a principled way to express macros and staging with a unified set of abstractions. +- [Type class derivation](contextual/derivation.md) provides an in-language implementation of the `Gen` macro in Shapeless and other foundational libraries. The new implementation is more robust, efficient and easier to use than the macro. +- [Implicit by-name parameters](contextual/implicit-by-name-parameters.md) provide a more robust in-language implementation of the `Lazy` macro in Shapeless. + +## See Also + +[A classification of proposed language features](./features-classification.md) is +an expanded version of this page that adds the status (i.e. relative importance to be a part of Scala 3, and relative urgency when to decide this) and expected migration cost +of each language construct. + diff --git a/scala3doc/dotty-docs/docs/docs/reference/soft-modifier.md b/scala3doc/dotty-docs/docs/docs/reference/soft-modifier.md new file mode 100644 index 000000000000..e896df1c916d --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/reference/soft-modifier.md @@ -0,0 +1,15 @@ +--- +layout: doc-page +title: Soft Modifiers +--- + +A soft modifier is one of the identifiers `opaque` and `inline`. + + +It is treated as a potential modifier of a definition, if it is followed by a hard modifier or a keyword combination starting a definition (`def`, `val`, `var`, `type`, `class`, `case class`, `trait`, `object`, `case object`, `enum`). Between the two words there may be a sequence of newline tokens and soft modifiers. + +It is treated as a potential modifier of a parameter binding unless it is followed by `:`. 
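+To give a concrete flavour of a few of the constructs listed above, here is a small, self-contained sketch. It is not taken from the linked reference pages; the names `Color`, `isWarm`, `describe` and `overviewDemo` are invented for illustration, and it assumes nothing beyond a standard Scala 3 compiler.
+
+```scala
+// An enum provides concise syntax for an ADT-like set of cases.
+enum Color:
+  case Red, Green, Blue
+
+// An extension method replaces the Scala 2 implicit-class pattern.
+extension (c: Color)
+  def isWarm: Boolean = c == Color.Red
+
+// A given instance replaces an implicit val/def/object.
+given Ordering[Color] = Ordering.by(_.ordinal)
+
+// An inline method is expanded at each call site.
+inline def describe(c: Color): String =
+  if c.isWarm then s"$c (warm)" else s"$c (cool)"
+
+@main def overviewDemo(): Unit =
+  println(Color.values.sorted.map(c => describe(c)).mkString(", "))
+```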
+ diff --git a/scala3doc/dotty-docs/docs/docs/release-notes/0.1.2.md b/scala3doc/dotty-docs/docs/docs/release-notes/0.1.2.md new file mode 100644 index 000000000000..7e9630c39757 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/release-notes/0.1.2.md @@ -0,0 +1,194 @@ +--- +layout: doc-page +title: 0.1.2 release notes +--- + +0.1.2 will be the first public release of Dotty. +Being the first one, these notes are incomplete. +This document is a work in progress until 0.1.2-final is released. +These notes are up-to-date based on 0.1.2-RC1 and will be updated +with changes that happen during the RC stabilization cycle. + +# Required Java Version + +Dotty 0.1.2 targets Java 8. We don't have plans to add support for earlier versions of Java. + +# IDE support for Dotty + +# Reporting Bugs / Known Issues + +Please [file](https://github.com/lampepfl/dotty/issues) any bugs you encounter. If you’re unsure whether something is a bug, +please ask on the Dotty [gitter channel](https://github.com/lampepfl/dotty). + +# Dotty Doc + +Dotty has added support for Dotty Doc. +Actually, the dotty documentation site [https://dotty.epfl.ch/docs/](https://dotty.epfl.ch/docs/) is generated by it. +Looks nice, doesn’t it? + +#Other implemented features: + +This release ships with the following features: + + - TASTY support by [@odersky], [@xeno-by] and [@darkdimius] [\[1\]][1] + - HK-types support by [@odersky], [@smarter] and [@darkdimius] [\[2\]][2] + - Intersection and Union Types by [@odersky] [\[3\]][3] [\[4\]][3] + - Enumerations by [@odersky] [\[4\]][4] [\[5\]][5] [\[6\]][6] + - Implicit By-Name Parameters by [@odersky] [\[7\]][7] + - Miniphases-based design by [@darkdimius], [@odersky] and [@olhotak] [\[8\]][8] + - `@static` methods support by [@darkdmius] [\[9\]][9] + - Non-blocking lazy vals by [@darkdimius] [\[10\]][10] + - JVM code emission by [@magarciaEPFL] and [@darkdimius] [\[11\]][11] + - Efficient multi-dimensional array allocation by [@darkdimius] [\[12\]][12] + - Default-method based trait composition [@odersky] and [@darkdimius] [\[13\]][13] [\[14\]][14] + - Trait parameters by [@odersky] [\[15\]][15] + - Working contravariant implicits by [@odersky] [\[16\]][16] + - Option-less pattern matching by [@odersky], [@darkdimius] and [@OlivierBlanvillain] [\[17\]][17] + - SAM-based anonymous functions by [@magarciaEPFL], [@darkdimius] and [@retronym] [\[18\]][18] + - Pattern matching support by [@darkdimius] [\[19\]][19] + - Value classes support by [@smarter] and [@darkdimius] [\[20\]][20] + - Pattern matching exhaustivity checks by [@liufengyun] [\[21\]][21] + - Tailrec optimization by [@darkdimius] [\[22\]][22] [\[23\]][23] + - Language server protocol implementation by [@smarter] [\[24\]][24] + - VS code plugin by [@smarter] [\[24\]][24] + - Vulpix: new test-suite driver by [@felixmulder] that replaces partest by [@vsalvis] [\[25\]][25] + - Java parser by [@olhotak] [\[26\]][26] + - Local optimizations(-optimise) by [@darkdimius] and [@OlivierBlanvillain] [\[27\]][27] + - sbt support by [@smarter] [\[28\]][28] + - DottyDoc by [@felixmulder] [\[29\]][29] + - Implicit Function Types by [@odersky] [\[30\]][30] + - Phantom Types by [@nicolasstucki] [\[31\]][31] + - Functions with more than 22 parameters by [@odersky] [\[32\]][32] + - Inline keyword by [@odersky] [\[33\]][33] + +[@odersky]: https://twitter.com/odersky +[@DarkDimius]: https://twitter.com/DarkDimius +[@smarter]: https://github.com/smarter +[@felixmulder]: https://twitter.com/felixmulder +[@nicolasstucki]: 
https://github.com/nicolasstucki +[@liufengyun]: https://github.com/liufengyun +[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain +[@olhotak]: https://plg.uwaterloo.ca/~olhotak/ +[@retronym]: https://github.com/retronym +[@xeno-by]: https://github.com/xeno-by +[@vsalvis]: https://github.com/vsalvis +[@magarciaEPFL]: https://github.com/magarciaEPFL + +[1]: https://docs.google.com/document/d/1h3KUMxsSSjyze05VecJGQ5H2yh7fNADtIf3chD3_wr0/edit +[2]: https://infoscience.epfl.ch/record/222780?ln=en +[3]: ../reference/new-types/intersection-types.html +[4]: ../reference/new-types/union-types.html +[5]: ../reference/enums/adts.html +[6]: ../reference/enums/desugarEnums.html +[7]: ../reference/other-new-features/implicit-by-name-parameters.html +[8]: https://infoscience.epfl.ch/record/228518 +[9]: http://docs.scala-lang.org/sips/pending/static-members.html +[10]: http://docs.scala-lang.org/sips/pending/improved-lazy-val-initialization.html +[11]: http://magarciaepfl.github.io/scala/ +[12]: https://github.com/lampepfl/dotty/commit/b2215ed23311b2c99ea638f9d7fcad9737dba588 +[13]: https://github.com/lampepfl/dotty/pull/187 +[14]: https://github.com/lampepfl/dotty/pull/217 +[15]: ../reference/other-new-features/trait-parameters.html +[16]: https://github.com/lampepfl/dotty/commit/89540268e6c49fb92b9ca61249e46bb59981bf5a +[17]: https://github.com/lampepfl/dotty/pull/174 +[18]: https://github.com/lampepfl/dotty/pull/488 +[19]: https://github.com/lampepfl/dotty/pull/174 +[20]: https://github.com/lampepfl/dotty/pull/411 +[21]: https://github.com/lampepfl/dotty/pull/1364 +[22]: https://github.com/lampepfl/dotty/pull/1227 +[23]: https://github.com/lampepfl/dotty/pull/117 +[24]: https://github.com/lampepfl/dotty/pull/2532 +[25]: https://github.com/lampepfl/dotty/pull/2194 +[26]: https://github.com/lampepfl/dotty/pull/213 +[27]: https://github.com/lampepfl/dotty/pull/2513 +[28]: https://github.com/lampepfl/dotty/pull/2361 +[29]: https://github.com/lampepfl/dotty/pull/1453 +[30]: ../reference/contextual/implicit-function-types.html +[31]: https://github.com/lampepfl/dotty/pull/2136 +[32]: https://github.com/lampepfl/dotty/pull/1758 +[33]: ../reference/metaprogramming/inline.html + +# Contributors +The Dotty team and contributors have closed 750 issues and have merged a total of 1258 pull requests. + +Concretely, according to + + ``` + git ls-tree -r -z --name-only HEAD -- |egrep -z -Z -E '\.(scala)$'| xargs -0 -n1 git blame --line-porcelain |grep "^author "|sort|uniq -c|sort -nr + ``` +71 people contributed code, tests, and/or documentation to Dotty 0.1.2-RC1. 
+The following people have contributed to this release: + +| commits | blame lines | Name | +|---------|-------------|-----------------------------| +| 4346 | 82017 | Martin Odersky | +| 1288 | 83070 | Dmitry Petrashko | +| 19 | 35382 | Samuel Gruetter | +| 826 | 20148 | Felix Mulder | +| 567 | 10454 | Guillaume Martres | +| 136 | 5311 | liu fengyun | +| 189 | 4449 | Nicolas Stucki | +| 21 | 3717 | Sébastien Doeraene | +| 30 | 1711 | Ondrej Lhotak | +| 33 | 1094 | Enno Runne | +| 47 | 735 | Olivier Blanvillain | +| 4 | 480 | Valthor Halldorsson | +| 7 | 343 | Aleksander Boruch-Gruszecki | +| 25 | 220 | VladimirNik | +| 3 | 186 | Enno | +| 8 | 143 | Clemens Winter | +| 16 | 133 | Jason Zaugg | +| 3 | 116 | Miron Aseev | +| 4 | 109 | Shane Delmore | +| 5 | 100 | Alexander Myltsev | +| 5 | 96 | Abel Nieto | +| 1 | 92 | Dmitry Melnichenko | +| 22 | 88 | Jonathan Brachthäuser | +| 5 | 87 | Guillaume Massé | +| 20 | 75 | vsalvis | +| 2 | 64 | Tobias Schlatter | +| 4 | 51 | Ólafur Páll Geirsson | +| 4 | 49 | Sebastian Harko | +| 2 | 48 | Andrew Zurn | +| 3 | 44 | jvican | +| 1 | 43 | Jarrod Janssen | +| 1 | 43 | Igor Mielientiev | +| 2 | 35 | Thiago Pereira | +| 5 | 32 | Martijn Hoekstra | +| 1 | 26 | Bartosz Krasiński | +| 1 | 24 | Adam Trousdale | +| 10 | 22 | Aggelos Biboudis | +| 12 | 20 | Paolo G. Giarrusso | +| 1 | 17 | Jon-Anders Teigen | +| 17 | 16 | Vlad Ureche | +| 1 | 16 | Jyotman Singh | +| 4 | 15 | Lucas Burson | +| 1 | 12 | Markus Hauck | +| 3 | 11 | Varunram Ganesh | +| 2 | 11 | Reto Hablützel | +| 3 | 10 | Allan Renucci | +| 4 | 9 | Sarunas Valaskevicius | +| 2 | 9 | Nikolay.Tropin | +| 1 | 9 | Csongor Kiss | +| 2 | 7 | dos65 | +| 2 | 6 | Varunram | +| 6 | 5 | Nada Amin | +| 1 | 4 | ruben | +| 1 | 4 | Kazuyoshi Kato | +| 1 | 3 | Jonathan Rodriguez | +| 1 | 3 | andreaTP | +| 1 | 1 | AlexSikia | +| 3 | 0 | Edmund Noble | +| 2 | 0 | jvican | +| 3 | 0 | Allan Renucci | +| 3 | 0 | Senia-psm | +| 2 | 0 | Lukas Rytz | +| 2 | 0 | Jan Christopher Vogt | +| 2 | 0 | Raymond Tay | +| 1 | 0 | Matthias Sperl | +| 1 | 0 | Herdy Handoko | +| 1 | 0 | Grzegorz Kossakowski | +| 1 | 0 | George Leontiev | +| 1 | 0 | Sandro Stucki | +| 1 | 0 | Adriaan Moors | +| 1 | 0 | Simon Hafner | diff --git a/scala3doc/dotty-docs/docs/docs/release-notes/syntax-changes-0.22.md b/scala3doc/dotty-docs/docs/docs/release-notes/syntax-changes-0.22.md new file mode 100644 index 000000000000..64207d72e9d1 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/release-notes/syntax-changes-0.22.md @@ -0,0 +1,109 @@ +--- +layout: doc-page +title: Syntax Changes in Dotty 0.22 +--- + +In 2019, we experimented with several syntax changes in Dotty, most notably in the area of +implicits replacements. In Dotty 0.22, released on Feb 5, 2020, we have settled on what +we believe will be the definite syntax for Scala 3. Dotty 0.23 will support only this +syntax. Previous variants will no longer be supported. + +Here is a rundown of how previous variants need to be rewritten in the +new syntax. This will be useful as a migration and learning help for people who have already +written code in one of the previous versions of Dotty. + +## Given Instances + +Given instances are always written with `given` and `as`. The previous use of `:` instead of `as` is no longer supported. Examples: +```scala +given intOrd as Ordering[Int] { ... } +given [T] as Ordering[List[T]] { ... } +given ctx as ExecutionContext = ... +given Ordering[String] { ... 
} +``` +`as` can be omitted if the instance is anonymous and does not have parameters, as in the last definition above. + +## Context Parameters + +Context parameters are the replacement of Scala 2's implicit parameters. Context parameters and arguments both start with `using`. Examples: +```scala +def max[T](x: T, y: T)(using Ordering[T]): T = ... +given [T](using Ordering[T]) as Ordering[List[T]] { ... } + +max(a, b)(using intOrd) +``` +The previous syntax that uses `given` also for context parameters and arguments is no longer supported. + +Context bounds remain supported as a shorthand for one-parameter type class constraints. So the two definitions above could also be written as +```scala +def max[T: Ordering](x: T, y: T): T = ... +given [T: Ordering] as Ordering[List[T]] { ... } +``` +Parameters of context function values are also written with `using`. So it is +```scala +(using x: A) => E +``` +instead of `(implicit x: A) => E` or `(given x: A) => E`. + +## Context Functions Types + +Implicit function types `implicit A => B` have been replaced with context function types, which are written `A ?=> B`. The syntax `(given A) => B` that was used in earlier Dotty versions is no longer supported. + +## Given Imports + +The syntax of wildcard given import selectors is now `given _`. Examples +```scala +import a.{given _} +import b.{_, given _} +``` +The previous syntax, which used just `given` without an underscore is no longer supported. The change was made to better align with typed given import selectors such as `given Ordering[T]`, +which are unchanged. + +## Collective Extensions + +Collective extensions are now a separate construct. Example: +```scala +extension [T] on List[T] { + def second: T ... + def takeRightWhile(p: T => Boolean): List[T] = ... +} +``` +Collective extensions still _expand_ to given instances with regular extension methods, but the previous syntaxes that expressed them as some syntactic variant of given instances are no longer supported. + +## Extension Methods + +There have been two syntax changes for regular extension methods. First, +any type parameters are now written in front, following the `def`. Second, +a "`.`" in front of the method name is now allowed (but not required). Example: +```scala +def [T](xs: List[T]).second: T +``` +The previous syntax which used type parameters after the method name is no longer supported. + +## Optional Braces For Definitions + +Braces around the definitions of a class, object or similar construct can now be omitted +if the leading signature of the definition is followed by a `:` at the end a line. Examples: +```scala +trait Text: + def toString: String + +class Str(str: String) extends Text: + def toString = str + +class Append(txt1: Text, txt2: Text) extends Text: + def toString = txt1 ++ txt2 + +object Empty extends Text: + def toString = "" + +extension on (t: Text): + def length = toString.length + +given Ordering[Text]: + def compare(txt1: Text, txt2: Text): Int = + summon[Ordering[String]].compare(txt1.toString, txt2.toString) +``` +Previous variants required a `with` instead of the `:` or inserted braces around indented code after class, object, ... without any leading token. These are no longer supported. + +Note that this interpretation of `:` as an alternative to braces only applies to class-level definitions. The use of `:` at the end of a line to imply braces around a following _function argument_ is not affected by this change. It still requires the `Yindent-colons` option. 
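+As a small end-to-end sketch of the Dotty 0.22 syntax summarized above (it is not taken from this page, and the `Show`/`display` names are invented for illustration), a simple type class can be declared and used like this:
+
+```scala
+trait Show[T]:
+  def show(t: T): String
+
+// An anonymous given instance, using the optional-braces form shown above.
+given Show[Int]:
+  def show(t: Int): String = t.toString
+
+// A context parameter is introduced with `using`.
+def display[T](x: T)(using s: Show[T]): String = s.show(x)
+
+val resolved = display(42)                          // the given Show[Int] is resolved implicitly
+val explicit = display(42)(using summon[Show[Int]]) // or passed explicitly with `using`
+```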
diff --git a/scala3doc/dotty-docs/docs/docs/resources/talks.md b/scala3doc/dotty-docs/docs/docs/resources/talks.md new file mode 100644 index 000000000000..8f054d7bd96e --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/resources/talks.md @@ -0,0 +1,61 @@ +--- +layout: doc-page +title: Talks +--- + +Talks on Dotty +-------------- +- (ScalaDays 2019, Lausanne) [A Tour of Scala 3](https://www.youtube.com/watch?v=_Rnrx2lo9cw) by [Martin Odersky](http://twitter.com/odersky) [\[slides\]](https://www.slideshare.net/Odersky/a-tour-of-scala-3) + +- (ScalaDays 2016, Berlin) [Scala's Road Ahead](https://www.youtube.com/watch?v=GHzWqJKFCk4) by [Martin Odersky](http://twitter.com/odersky) [\[slides\]](http://www.slideshare.net/Odersky/scala-days-nyc-2016) + +- (JVMLS 2015) [Compilers are Databases](https://www.youtube.com/watch?v=WxyyJyB_Ssc) by [Martin Odersky](http://twitter.com/odersky) [\[slides\]](http://www.slideshare.net/Odersky/compilers-are-databases) + +- (Scala World 2015) [Dotty: Exploring the future of Scala](https://www.youtube.com/watch?v=aftdOFuVU1o) by [Dmitry Petrashko](http://twitter.com/darkdimius) [\[slides\]](https://d-d.me/scalaworld2015/#/). +Dmitry covers many of the new features that Dotty brings on the table such as Intersection and Union types, improved lazy val initialization and more. +Dmitry also covers dotty internals and in particular the high-level of contextual abstractions of Dotty. You will get to +become familiar with many core concepts such as `Denotations`, their evolution through (compilation) time, their +transformations and more. + +Deep Dive with Dotty +-------------------- +- (ScalaDays 2019, Lausanne) [Metaprogramming in Dotty](https://www.youtube.com/watch?v=ZfDS_gJyPTc) by [Nicolas Stucki](https://github.com/nicolasstucki). + +- (ScalaDays 2019, Lausanne) [Future-proofing Scala: the TASTY intermediate representation](https://www.youtube.com/watch?v=zQFjC3zLYwo) by [Guillaume Martres](http://guillaume.martres.me/). + +- (Mar 21, 2017) [Dotty Internals 1: Trees & Symbols](https://www.youtube.com/watch?v=yYd-zuDd3S8) by [Dmitry Petrashko](http://twitter.com/darkdimius) [\[meeting notes\]](../internals/dotty-internals-1-notes.md). +This is a recorded meeting between EPFL and Waterloo, where we introduce first notions inside Dotty: Trees and Symbols. + +- (Mar 21, 2017) [Dotty Internals 2: Types](https://www.youtube.com/watch?v=3gmLIYlGbKc) by [Martin Odersky](http://twitter.com/odersky) and [Dmitry Petrashko](http://twitter.com/darkdimius). +This is a recorded meeting between EPFL and Waterloo, where we introduce how types are represented inside Dotty. + +- (Jun 15, 2017) [Dotty Internals 3: Denotations](https://youtu.be/9iPA7zMRGKY) by [Martin Odersky](http://twitter.com/odersky) and [Dmitry Petrashko](http://twitter.com/darkdimius). +This is a recorded meeting between EPFL and Waterloo, where we introduce denotations in Dotty. + +- (JVM Language Summit) [How do we make the Dotty compiler fast](https://www.youtube.com/watch?v=9xYoSwnSPz0) by [Dmitry Petrashko](http://twitter.com/darkdimius). +[Dmitry Petrashko](http://twitter.com/darkdimius) gives a high-level introduction on what was done to make Dotty . + + +- (Typelevel Summit Oslo, May 2016) [Dotty and types: the story so far](https://www.youtube.com/watch?v=YIQjfCKDR5A) by +Guillaume Martres [\[slides\]](http://guillaume.martres.me/talks/typelevel-summit-oslo/). 
+Guillaume focused on some of the practical improvements to the type system that Dotty makes, like the new type parameter +inference algorithm that is able to reason about the type safety of more situations than scalac. + +- (flatMap(Oslo) 2016) [AutoSpecialization in Dotty](https://vimeo.com/165928176) by [Dmitry Petrashko](http://twitter.com/darkdimius) [\[slides\]](https://d-d.me/talks/flatmap2016/#/). +The Dotty Linker analyses your program and its dependencies to +apply a new specialization scheme. It builds on our experience from Specialization, Miniboxing and the Valhalla Project, +and drastically reduces the size of the emitted bytecode. And, best of all, it's always enabled, happens behind the +scenes without annotations, and results in speedups in excess of 20x. Additionally, it "just works" on Scala collections. + +- (ScalaSphere 2016) [Hacking on Dotty: A live demo](https://www.youtube.com/watch?v=0OOYGeZLHs4) by Guillaume Martres [\[slides\]](http://guillaume.martres.me/talks/dotty-live-demo/). +Guillaume hacks on Dotty: a live demo during which he +creates a simple compiler phase to trace method calls at run-time. + +- (Scala By the Bay 2016) [Dotty: what is it and how it works](https://www.youtube.com/watch?v=wCFbYu7xEJA) by Guillaume +Martres [\[slides\]](http://guillaume.martres.me/talks/dotty-tutorial/#/). Guillaume provides a high-level view of the +compilation-pipeline of Dotty. + +- (ScalaDays 2015, Amsterdam) [Making your Scala applications smaller and faster with the Dotty linker](https://www.youtube.com/watch?v=xCeI1ArdXM4) by Dmitry Petrashko [\[slides\]](https://d-d.me/scaladays2015/#/). +Dmitry introduces the call-graph analysis algorithm +that Dotty implements and the performance benefits we can get in terms of number of methods, bytecode size, JVM code size +and the number of objects allocated in the end. diff --git a/scala3doc/dotty-docs/docs/docs/usage/cbt-projects.md b/scala3doc/dotty-docs/docs/docs/usage/cbt-projects.md new file mode 100644 index 000000000000..a40bb92a74fa --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/usage/cbt-projects.md @@ -0,0 +1,20 @@ +--- +layout: doc-page +title: "Using Dotty with cbt" +--- + +**NOTE: cbt support for Dotty is experimental and incomplete (for example, +incremental compilation is not supported), we recommend [using Dotty with sbt](sbt-projects.md) for now.** + +cbt comes with built-in Dotty support. Follow the +[cbt tutorial](https://github.com/cvogt/cbt/), then simply extend `Dotty` in the Build class. + +```scala +// build/build.scala +import cbt._ +class Build(val context: Context) extends Dotty { + ... +} +``` + +Also see the [example project](https://github.com/cvogt/cbt/tree/master/examples/dotty-example). diff --git a/scala3doc/dotty-docs/docs/docs/usage/dottydoc.md b/scala3doc/dotty-docs/docs/docs/usage/dottydoc.md new file mode 100644 index 000000000000..b6abd05bc717 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/usage/dottydoc.md @@ -0,0 +1,260 @@ +--- +layout: doc-page +title: Dottydoc +--- + +Dottydoc is a tool to generate a combined documentation and API reference for +your project. + +In previous versions of the Scaladoc tool, there is a big divide between what +is documentation and what is API reference. Dottydoc allows referencing, citing +and rendering parts of your API in your documentation, thus allowing the two to +blend naturally. + +To do this, Dottydoc is very similar to what [Jekyll](http://jekyllrb.com/) +provides in form of static site generation. 
As you probably guessed, this +whole site was created using Dottydoc. + +Creating a site is just as simple as in Jekyll. The site root contains the +layout of the site and all files placed here will be either considered static, +or processed for template expansion. + +The files that are considered for template expansion must end in `*.{html,md}` +and will from here on be referred to as "template files" or "templates". + +A simple "hello world" site could look something like this: + +``` +├── docs +│ └── getting-started.md +└── index.html +``` + +This will give you a site with the following endpoints: + +``` +_site/index.html +_site/docs/getting-started.html +``` + +Just as with Jekyll, the site is rendered in a `_site` directory. + +Using existing Templates and Layouts +==================================== +Dottydoc uses the [Liquid](https://shopify.github.io/liquid/) templating engine +and provides a number of custom filters and tags specific to Scala +documentation. + +In Dottydoc, all templates can contain YAML front-matter. The front-matter +is parsed and put into the `page` variable available in templates via Liquid. + +To perform template expansion, Dottydoc looks at `layout` in the front-matter. +Here's a simple example of the templating system in action, `index.html`: + +```html +--- +layout: main +--- + +

+<h1>Hello world!</h1>
+```
+
+With a simple main template like this:
+
+{% raw %}
+```html
+<html>
+    <head>
+        <title>Hello, world!</title>
+    </head>
+    <body>
+        {{ content }}
+    </body>
+</html>
+```
+
+Would result in `{{ content }}` being replaced by `<h1>Hello world!</h1>
      ` from +the `index.html` file. +{% endraw %} + +Layouts must be placed in a `_layouts` directory in the site root: + +``` +├── _layouts +│ └── main.html +├── docs +│ └── getting-started.md +└── index.html +``` + +It is also possible to use one of the [default layouts](#default-layouts) that ship with Dottydoc. + +Blog +==== +Dottydoc also allows for a simple blogging platform in the same vein as Jekyll. +Blog posts are placed within the `./blog/_posts` directory and have to be on +the form `year-month-day-title.{md,html}`. + +An example of this would be: + +``` +├── blog +│ └── _posts +│ └── 2016-12-05-implicit-function-types.md +└── index.html +``` + +To be rendered as templates, each blog post should have front-matter and a +`layout` declaration. + +The posts are also available in the variable `site.posts` throughout the site. +The fields of these objects are the same as in +`[BlogPost](dotty.tools.dottydoc.staticsite.BlogPost)`. + +Includes +======== +In Liquid, there is a concept of include tags, these are used in templates to +include other de facto templates: + +```html +
+<div>
+  {% raw %}{% include "sidebar.html" %}{% endraw %}
+</div>
      +``` + +You can leave out the file extension if your include ends in `.html`. + +Includes need to be kept in `_includes` in the site root. Dottydoc provides a +couple of [default includes](#default-includes), but the user-specified +includes may override these. + +An example structure with an include file "sidebar.html": + +``` +├── _includes +│ └── sidebar.html +├── blog +│ ├── _posts +│ │ └── 2016-12-05-implicit-function-types.md +│ └── index.md +└── index.html +``` + +Sidebar +======= +Dottydoc gives you the ability to create your own custom table of contents, +this can either be achieved by overriding the `toc.html` include - or by +providing a `sidebar.yml` file in the site root: + +```yaml +sidebar: + - title: Blog + url: blog/index.html + - title: Docs + url: docs/index.html + - title: Usage + subsection: + - title: Dottydoc + url: docs/usage/dottydoc.html + - title: sbt-projects + url: docs/usage/sbt-projects.html +``` + +The `sidebar` key is mandatory, as well as `title` for each element. The +default table of contents allows you to have subsections - albeit the current +depth limit is 2 - we'd love to see this change, contributions welcome! + +The items which have the `subsection` key, may not have a `url` key in the +current scheme. A site root example with this could be: + +``` +├── blog +│ └── _posts +│ └── 2016-12-05-implicit-function-types.md +├── index.html +└── sidebar.yml +``` + +Dottydoc Specific Tags and Behavior +==================================== +Linking to API +-------------- +If you for instance, want to link to `scala.collection.immutable.Seq` in a +markdown file, you can simply use the canonical path in your url: + +```markdown +[Seq](scala.collection.immutable.Seq) +``` + +Linking to members is done in the same fashion: + +```markdown +[Seq](scala.collection.immutable.Seq.isEmpty) +``` + +Dottydoc denotes objects by ending their names in "$". To select `Object.range` +you'd therefore write: + +```markdown +[Object.range](scala.collection.immutable.List$.range) +``` + +Rendering Docstrings +-------------------- +Sometimes you end up duplicating the docstring text in your documentation, +therefore Dottydoc makes it easy to render this inline: + +```html +{% raw %}{% docstring "scala.collection.immutable.Seq" %}{% endraw %} +``` + +Other extensions +---------------- +We would love to have your feedback on what you think would be good in order to +render the documentation you want! Perhaps you'd like to render method +definitions or members? Let us know by filing +[issues](https://github.com/lampepfl/dotty/issues/new)! + +Default Layouts +=============== +main.html +--------- +A wrapper for all other layouts, includes a default `` with included +JavaScripts and CSS style-sheets. + +### Variables ### +* `content`: placed in `` tag +* `extraCSS`: a list of relative paths to extra CSS style-sheets for the site +* `extraJS`: a list of relative paths to extra JavaScripts for the site +* `title`: the `` of the page + +sidebar.html +------------ +Sidebar uses `main.html` as its parent layout. It adds a sidebar generated from +a YAML file (if exists), as well as the index for the project API. + +### Variables ### +* `content`: placed in a `<div>` with class `content-body` +* `docs`: the API docs generated from supplied source files, this is included by + default and does not need to be specified. + +doc-page.html +------------- +Doc page is used for pages that need a sidebar and provides a small wrapper for +the included {% raw %}`{{ content}}`{% endraw %}. 
+ +api-page.html +------------- +The last two layouts are special, in that they are treated specially by +Dottydoc. The input to the API page is a documented +`[Entity](dotty.tools.dottydoc.model.Entity)`. As such, this page can be changed +to alter the way Dottydoc renders API documentation. + +blog-page.html +-------------- +A blog page uses files placed in `./blog/_posts/` as input to render a blog. + +Default Includes +================ +* `scala-logo.svg`: the scala in Dotty version as svg +* `toc.html`: the default table of contents template diff --git a/scala3doc/dotty-docs/docs/docs/usage/getting-started.md b/scala3doc/dotty-docs/docs/docs/usage/getting-started.md new file mode 100644 index 000000000000..cf9faf835d29 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/usage/getting-started.md @@ -0,0 +1,76 @@ +--- +layout: doc-page +title: Getting Started: Users +--- + +## Trying out Dotty + +### In your web browser +[Scastie](https://scastie.scala-lang.org/?target=dotty), the online Scala playground, supports Dotty. +This is an easy way to try Dotty without installing anything, directly in your browser. + +### sbt +The fastest way to create a new project compiled by Dotty is using [sbt (1.1.4+)](http://www.scala-sbt.org/) + +Create a simple Dotty project: +```bash +$ sbt new lampepfl/dotty.g8 +``` + +Or a Dotty project that cross compiles with Scala 2: +```bash +$ sbt new lampepfl/dotty-cross.g8 +``` + +You can then start a Dotty REPL directly from your sbt project: +```bash +$ sbt +> console +scala> +``` + +For more information, see the [Dotty Example Project](https://github.com/lampepfl/dotty-example-project) + +### IDE support +Start using the Dotty IDE in any Dotty project by following the +[IDE guide](./ide-support.md). + +### Standalone installation +Releases are available for download on the [Releases Section](https://github.com/lampepfl/dotty/releases) +of the Dotty repository. Releases include three executables: `dotc` the Dotty compiler, +`dotd` the [Dotty Documentation tool](./dottydoc.md) and `dotr` the Dotty REPL. + +``` +. +└── bin +    ├── dotc +    ├── dotd +    └── dotr +``` + +Add these executables to your `PATH` and you will be able to run the corresponding commands directly +from your console: +```bash +# Compile code using Dotty +$ dotc HelloWorld.scala + +# Run it with the proper classpath +$ dotr HelloWorld + +# Start a Dotty REPL +$ dotr +Starting dotty REPL... +scala> +``` + +If you're a Mac user, we also provide a [homebrew](https://brew.sh/) package that can be installed by running: + +```bash +brew install lampepfl/brew/dotty +``` + +In case you have already installed Dotty via brew, you should instead update it: + +```bash +brew upgrade dotty +``` diff --git a/scala3doc/dotty-docs/docs/docs/usage/ide-support.md b/scala3doc/dotty-docs/docs/docs/usage/ide-support.md new file mode 100644 index 000000000000..1ed36ab6ff7d --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/usage/ide-support.md @@ -0,0 +1,63 @@ +--- +layout: doc-page +title: "IDE support for Dotty" +--- + +Dotty comes built-in with the Dotty Language Server, an implementation of the +[Language Server Protocol](https://github.com/Microsoft/language-server-protocol), +which means that any editor that implements the LSP can be used as a Dotty IDE. +Currently, the only IDE we officially support is +[Visual Studio Code](https://code.visualstudio.com/). 
+ +Prerequisites +============ +To use this in your own Scala project, you must first get it to compile with +Dotty, please follow the instructions at https://github.com/lampepfl/dotty-example-project + +Usage +===== +1. Install [Visual Studio Code](https://code.visualstudio.com/). +2. Make sure `code`, the binary for Visual Studio Code, is on your `$PATH`, this + is the case if you can start the IDE by running `code` in a terminal. This + is the default on all systems except Mac where you'll need to follow these + instructions: https://code.visualstudio.com/docs/setup/mac#_command-line +3. In your project, run: +```shell +sbt launchIDE +``` + +Status +====== + +## Fully supported features: +- Typechecking as you type to show compiler errors/warnings +- Type information on hover +- Go to definition (in the current project) +- Find all references +- Documentation on hover +- [Worksheet mode](worksheet-mode.md) + +## Partially working features: +- Completion +- Renaming +- Go to definition in external projects + +## Unimplemented features: +- Formatting code (requires integrating with scalafmt) +- Quick fixes (probably by integrating with scalafix) + +## Current limitations, to be fixed: +- Projects should be compiled with sbt before starting the IDE, this is + automatically done for you if you run `sbt launchIDE`. +- Once the IDE is started, source files that are not opened in the IDE + should not be modified in some other editor, the IDE won't pick up + these changes. +- Not all compiler errors/warnings are displayed, just those occurring + during typechecking. + + +Feedback +======== +Please report issues on https://github.com/lampepfl/dotty/issues, +you can also come chat with use on the +[Dotty gitter channel](https://gitter.im/lampepfl/dotty)! diff --git a/scala3doc/dotty-docs/docs/docs/usage/language-versions.md b/scala3doc/dotty-docs/docs/docs/usage/language-versions.md new file mode 100644 index 000000000000..e5395b136e18 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/usage/language-versions.md @@ -0,0 +1,31 @@ +--- +layout: doc-page +title: "Language Versions" +--- + +The default Scala language version currently supported by the Dotty compiler is `3.0`. There are also other language versions that can be specified instead: + + - `3.1`: A preview of changes introduced in the next version after 3.0. Some Scala-2 specific idioms will be dropped in this version. The feature set supported by this version will be refined over time as we approach its release. + + - `3.0-migration`: Same as `3.0` but with a Scala 2 compatibility mode that helps moving Scala 2.13 sources over to Scala 3. In particular, it + + - flags some Scala 2 constructs that are disallowed in Scala 3 as migration warnings instead of hard errors, + - changes some rules to be more lenient and backwards compatible with Scala 2.13 + - gives some additional warnings where the semantics has changed between Scala 2.13 and 3.0 + - in conjunction with `-rewrite`, offer code rewrites from Scala 2.13 to 3.0. + + - `3.1-migration`: Same as `3.1` but with additional helpers to migrate from `3.0`. Similarly to the helpers available under `3.0-migration`, these include migration warnings and optional rewrites. + +There are two ways to specify a language version. + + - With a `-source` command line setting, e.g. `-source 3.0-migration`. + - With a `scala.language` import at the top of a compilation unit, e.g: + +```scala +package p +import scala.language.`3.1` + +class C { ... 
} +``` + +Language imports supersede command-line settings in the compilation units where they are specified. Only one language import is allowed in a compilation unit, and it must come before any definitions in that unit. diff --git a/scala3doc/dotty-docs/docs/docs/usage/sbt-projects.md b/scala3doc/dotty-docs/docs/docs/usage/sbt-projects.md new file mode 100644 index 000000000000..4b0b72f78f77 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/usage/sbt-projects.md @@ -0,0 +1,6 @@ +--- +layout: doc-page +title: "Using Dotty with sbt" +--- + +To try it in your project see the [Getting Started User Guide](./getting-started.md). diff --git a/scala3doc/dotty-docs/docs/docs/usage/version-numbers.md b/scala3doc/dotty-docs/docs/docs/usage/version-numbers.md new file mode 100644 index 000000000000..660cba4509c7 --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/usage/version-numbers.md @@ -0,0 +1,19 @@ +--- +layout: doc-page +title: "Version numbers" +--- + +**This documentation is outdated! Please find the newer version [here](../contributing/procedures/release.md)**. + +Dotty uses multiple schemes for version numbering. + +Stable releases have version numbers of the form `0.${x}.${y}`, where `x` is a main version and `y` is a bug-fix update id. + +Release candidates version numbers have the form `0.${x}.${y}-RC${z}`. +Every 6 weeks, the latest release candidate is promoted to stable and becomes version `0.${x}.${y}`. +The release candidates let library authors test their code in advance of each +release. Multiple release candidates may be released during each 6 weeks +period to fix regressions and are differentiated by `z`. + +Nightlies have version numbers of the form `0.${x}.${y}-bin-${date}-${sha}-NIGHTLY`. +Every 6 weeks, the latest nightly is promoted to release candidate becomes version `0.${x}.${y}-RC1`. diff --git a/scala3doc/dotty-docs/docs/docs/usage/worksheet-mode-implementation-details.md b/scala3doc/dotty-docs/docs/docs/usage/worksheet-mode-implementation-details.md new file mode 100644 index 000000000000..d06939d810ae --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/usage/worksheet-mode-implementation-details.md @@ -0,0 +1,79 @@ +--- +layout: doc-page +title: "Worksheet Mode - Implementation details" +--- + +In brief, the worksheets extend the Language Server Protocol and rely on the +Dotty REPL to evaluate code. + +## Evaluation +Each of the individual expressions and statements of the worksheet is extracted +and passed to the Dotty REPL. After the REPL has finished evaluating one unit of +input, it emits a special delimiter that indicates the end of the output for +this input. (See `dotty.tools.languageserver.worksheet.InputStreamConsumer`) + +This process continues until all input has been evaluated. + +The Dotty REPL is run in a separate JVM. The `Evaluator` (see +`dotty.tools.languageserver.worksheet.Evaluator`) will re-use a JVM if the +configuration of the project hasn't changed. + +## Communication with the client +The worksheets extend the Language Server Protocol and add one request and one +notification. + +### Run worksheet request +The worksheet run request is sent from the client to the server to request that +the server runs a given worksheet and streams the result. + +*Request:* + + - method: `worksheet/run` + - params: `WorksheetRunParams` defined as follows: + ```typescript + interface WorksheetRunParams { + /** + * The worksheet to evaluate. 
+ */ + textDocument: VersionedTextDocumentIdentifier; + } + ``` + +*Response:* + + - result: `WorksheetRunResult` defined as follows: + ```typescript + interface WorksheetRunResult { + /** + * Indicates whether evaluation was successful. + */ + success: boolean; + } + ``` + +### Worksheet output notification +The worksheet output notification is sent from the server to the client to +indicate that worksheet execution has produced some output. + +*Notification:* + + - method: `worksheet/publishOutput` + - params: `WorksheetRunOutput` defined as follows: + ```typescript + interface WorksheetRunOutput { + /** + * The worksheet that produced this output. + */ + textDocument: VersionedTextDocumentIdentifier; + + /** + * The range of the expression that produced this output. + */ + range: Range; + + /** + * The output that has been produced. + */ + content: string; + } + ``` diff --git a/scala3doc/dotty-docs/docs/docs/usage/worksheet-mode.md b/scala3doc/dotty-docs/docs/docs/usage/worksheet-mode.md new file mode 100644 index 000000000000..c33bbfc7c02c --- /dev/null +++ b/scala3doc/dotty-docs/docs/docs/usage/worksheet-mode.md @@ -0,0 +1,50 @@ +--- +layout: doc-page +title: "Worksheet mode with Dotty IDE" +--- + +A worksheet is a Scala file that is evaluated on save, and the result of each +expression is shown in a column to the right of your program. Worksheets are +like a REPL session on steroids, and enjoy 1st class editor support: completion, +hyperlinking, interactive errors-as-you-type, etc. Worksheet use the extension +`.sc`. + +How to use the worksheets +========================= +The only supported client for the Worksheet mode is [Visual Studio +Code](https://code.visualstudio.com/). + +To use the worksheets, start Dotty IDE by [following the +instruction](ide-support.md) and create a new file `MyWorksheet.sc` and +write some code: + +```scala +val xyz = 123 +println("Hello, worksheets!") +456 + xyz +``` + +On top of the buffer, the message `Run this worksheet` appears. Click it to +evaluate the code of the worksheet. Each line of output is printed on the right +of the expression that produced it. The worksheets run with the classes of your +project and its dependencies on their classpath. + +![../../images/worksheets/worksheet-run.png ]("Run worksheet") + +By default, the worksheets are also run when the file is saved. This can be +configured in VSCode preferences: + +![../../images/worksheets/config-autorun.png]("Configure run on save") + +Note that the worksheet are fully integrated with the rest of Dotty IDE: While +typing, errors are shown, completions are suggested, and you can use all the +other features of Dotty IDE such as go to definition, find all references, etc. + +![../../images/worksheets/worksheet-help.png]("IDE features in the worksheet") + +Implementation details +====================== + +The implementation details of the worksheet mode and the information necessary to add support for +other clients are available in [Worksheet mode - Implementation +details](worksheet-mode-implementation-details.md). 
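+Those implementation details describe a delimiter-based exchange with the REPL: the output for each evaluated unit of input ends with a special marker. As a rough illustration of that idea only (this is a hypothetical sketch, not the actual `InputStreamConsumer` code, and `readOneUnit` is an invented name), reading one unit of output might look like this:
+
+```scala
+import java.io.{InputStream, InputStreamReader}
+
+// Keep reading characters from the REPL's output stream until the
+// end-of-output delimiter appears (or the stream ends), then return the
+// output belonging to the current unit of worksheet input.
+// Kept deliberately simple rather than efficient.
+def readOneUnit(in: InputStream, delimiter: String): String =
+  val reader = new InputStreamReader(in, "UTF-8")
+  val sb = new StringBuilder
+  var eof = false
+  while !eof && !sb.toString.endsWith(delimiter) do
+    val c = reader.read()
+    if c == -1 then eof = true
+    else sb.append(c.toChar)
+  val text = sb.toString
+  if text.endsWith(delimiter) then text.dropRight(delimiter.length) else text
+```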
diff --git a/scala3doc/dotty-docs/docs/images/aggelos.jpg b/scala3doc/dotty-docs/docs/images/aggelos.jpg new file mode 100644 index 000000000000..050a44edb67f Binary files /dev/null and b/scala3doc/dotty-docs/docs/images/aggelos.jpg differ diff --git a/scala3doc/dotty-docs/docs/images/allan.jpg b/scala3doc/dotty-docs/docs/images/allan.jpg new file mode 100644 index 000000000000..74a229dea3ee Binary files /dev/null and b/scala3doc/dotty-docs/docs/images/allan.jpg differ diff --git a/scala3doc/dotty-docs/docs/images/anatolii.png b/scala3doc/dotty-docs/docs/images/anatolii.png new file mode 100644 index 000000000000..94d7fd28a800 Binary files /dev/null and b/scala3doc/dotty-docs/docs/images/anatolii.png differ diff --git a/scala3doc/dotty-docs/docs/images/dotty-ide/decompiler.png b/scala3doc/dotty-docs/docs/images/dotty-ide/decompiler.png new file mode 100644 index 000000000000..f0822f70b4c4 Binary files /dev/null and b/scala3doc/dotty-docs/docs/images/dotty-ide/decompiler.png differ diff --git a/scala3doc/dotty-docs/docs/images/dotty-ide/documentation-hover.png b/scala3doc/dotty-docs/docs/images/dotty-ide/documentation-hover.png new file mode 100644 index 000000000000..5ae883b2bafd Binary files /dev/null and b/scala3doc/dotty-docs/docs/images/dotty-ide/documentation-hover.png differ diff --git a/scala3doc/dotty-docs/docs/images/dotty-ide/signature-help.png b/scala3doc/dotty-docs/docs/images/dotty-ide/signature-help.png new file mode 100644 index 000000000000..5ac0b613006c Binary files /dev/null and b/scala3doc/dotty-docs/docs/images/dotty-ide/signature-help.png differ diff --git a/scala3doc/dotty-docs/docs/images/dotty-logo-white.svg b/scala3doc/dotty-docs/docs/images/dotty-logo-white.svg new file mode 100644 index 000000000000..8fd33e25e3aa --- /dev/null +++ b/scala3doc/dotty-docs/docs/images/dotty-logo-white.svg @@ -0,0 +1,30 @@ +<svg width="64px" height="109px" viewBox="0 0 64 109" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"> + <g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd"> + <g id="logo-background" transform="translate(0.000000, 16.000000)" fill="#CCC"> + <path d="M0.5,59.5 C0.5,59.5 63.4,65.8 63.4,76.3 L63.4,51.1 C63.4,51.1 63.4,40.6 0.5,34.3 L0.5,59.5 L0.5,59.5 Z" id="logo-background-bottom"></path> + <path d="M0.5,25.9 C0.5,25.9 63.4,32.2 63.4,42.7 L63.4,17.5 C63.4,17.5 63.4,7 0.5,0.7 L0.5,10.5 L0.5,25.9 L0.5,25.9 Z" id="logo-background-top"></path> + </g> + <g id="logo-foreground" fill="#FFF"> + <path d="M0.5,109 L0.5,83.8 C0.5,83.8 63.4,77.5 63.4,67 L63.4,92.2 C63.5,92.3 63.5,102.7 0.5,109" id="Logo_Foreground_Bottom"></path> + <path d="M0.5,50.3 C0.5,50.3 63.4,44 63.4,33.5 L63.4,58.7 C63.4,58.7 63.4,69.2 0.5,75.5 L0.5,50.3 L0.5,50.3 Z" id="Logo_Foreground_Middle"></path> + <path d="M63.5,0 L63.5,25.2 C63.5,25.2 63.5,35.7 0.6,42 L0.6,16.7 C0.5,16.7 63.5,10.5 63.5,0" id="logo-foreground-top"></path> + </g> + <g id="dots" transform="translate(4.000000, 10.000000)" fill="#B6B6B6"> + <g id="bottom" transform="translate(0.000000, 67.500000)"> + <path d="M1.4471104,12.2885536 C1.45019313,12.2881864 1.45019313,12.2881864 1.45327704,12.2878189 C1.91768952,12.2324787 2.24930811,11.8111359 2.1939679,11.3467234 C2.13862768,10.8823109 1.71728488,10.5506923 1.2528724,10.6060325 C1.24980572,10.606398 1.24980572,10.606398 1.24674021,10.6067632 C0.782326598,10.6620939 0.450699376,11.0834299 0.506030077,11.5478435 C0.561360778,12.0122571 0.982696791,12.3438844 1.4471104,12.2885536 L1.4471104,12.2885536 Z 
M9.34858965,12.1304907 C9.35409106,12.129756 9.35409106,12.129756 9.35959333,12.129021 C10.1807278,12.019341 10.7574756,11.2647668 10.6477955,10.4436323 C10.5381155,9.62249789 9.78354128,9.04575011 8.96240683,9.15543014 C8.95693539,9.15616095 8.95693539,9.15616095 8.95146479,9.15689157 C8.13032806,9.26655457 7.55356464,10.0211168 7.66322765,10.8422536 C7.77289065,11.6633903 8.52745292,12.2401537 9.34858965,12.1304907 L9.34858965,12.1304907 Z M17.0066591,11.0493128 C17.0121188,11.0484984 17.0121188,11.0484984 17.0175789,11.0476838 C17.8369369,10.9254384 18.4020584,10.1621181 18.279813,9.34276003 C18.1575677,8.52340201 17.3942473,7.95828052 16.5748893,8.08052587 C16.5694641,8.08133528 16.5694641,8.08133528 16.5640392,8.08214454 C15.7446783,8.20437064 15.1795389,8.96767771 15.301765,9.78703861 C15.4239911,10.6063995 16.1872982,11.1715389 17.0066591,11.0493128 L17.0066591,11.0493128 Z M24.8225082,10.4232459 C24.8297684,10.4220275 24.8297684,10.4220275 24.8370287,10.420809 C25.922252,10.2386672 26.6543441,9.21126476 26.4722023,8.12604147 C26.2900605,7.04081818 25.2626581,6.30872601 24.1774348,6.49086783 C24.1702314,6.4920768 24.1702314,6.4920768 24.1630282,6.49328559 C23.0777996,6.67539597 22.3456777,7.70277717 22.5277881,8.78800574 C22.7098984,9.87323431 23.7372796,10.6053563 24.8225082,10.4232459 L24.8225082,10.4232459 Z M32.4725265,9.05261002 C33.5532108,8.84532993 34.2612448,7.80122849 34.0539647,6.72054419 C33.8466846,5.63985989 32.8025831,4.93182589 31.7218988,5.13910599 C31.714724,5.14048211 31.714724,5.14048211 31.7075489,5.14185807 C30.6268567,5.34909665 29.9187826,6.39317088 30.1260211,7.47386314 C30.3332597,8.5545554 31.3773339,9.26262952 32.4580262,9.05539095 C32.4652764,9.05400057 32.4652764,9.05400057 32.4725265,9.05261002 Z M39.8723866,6.89476879 C39.877909,6.8935242 39.877909,6.8935242 39.883431,6.89227947 C40.6915794,6.71010471 41.1990314,5.90728916 41.0168566,5.09914075 C40.8346818,4.29099233 40.0318663,3.78354042 39.2237179,3.96571517 C39.218279,3.96694116 39.218279,3.96694116 39.2128397,3.96816703 C38.404681,4.15029588 37.8971834,4.9530826 38.0793123,5.76124136 C38.2614411,6.56940012 39.0642279,7.07689764 39.8723866,6.89476879 L39.8723866,6.89476879 Z M47.4276119,5.00828445 C47.4329844,5.00678549 47.4329844,5.00678549 47.4383562,5.00528637 C48.2362896,4.78258973 48.7026111,3.95520561 48.4799145,3.15727221 C48.2572179,2.35933881 47.4298337,1.89301728 46.6319003,2.11571391 C46.6266645,2.1171751 46.6266645,2.1171751 46.6214279,2.11863616 C45.8234736,2.34125773 45.3570742,3.16859798 45.5796958,3.96655233 C45.8023173,4.76450667 46.6296576,5.23090603 47.4276119,5.00828445 L47.4276119,5.00828445 Z M54.419759,2.30643871 C54.8556933,2.13695884 55.0716973,1.6461737 54.9022174,1.21023941 C54.7327376,0.774305114 54.2419524,0.558301127 53.8060181,0.727780997 C53.8032127,0.728871549 53.8032127,0.728871549 53.8004064,0.729962021 C53.3644359,0.89934874 53.148327,1.39008772 53.3177137,1.82605822 C53.4871005,2.26202871 53.9778394,2.47813756 54.4138099,2.30875084 C54.4167849,2.30759485 54.4167849,2.30759485 54.419759,2.30643871 Z" id="Shape"></path> + <path d="M1.67760355,20.096503 C1.68306395,20.0958524 1.68306395,20.0958524 1.68852642,20.0952015 C2.51113381,19.9971782 3.09852524,19.2508595 3.00050189,18.4282521 C2.90247854,17.6056447 2.15615986,17.0182533 1.33355246,17.1162767 C1.3281205,17.1169239 1.3281205,17.1169239 1.3226906,17.1175709 C0.500081196,17.2155774 -0.0873255124,17.961884 0.0106809923,18.7844934 C0.108687497,19.6071028 0.854994145,20.1945095 1.67760355,20.096503 
L1.67760355,20.096503 Z M9.34858965,19.1274206 C9.35409106,19.1266859 9.35409106,19.1266859 9.35959333,19.1259509 C10.1807278,19.0162709 10.7574756,18.2616967 10.6477955,17.4405622 C10.5381155,16.6194278 9.78354128,16.04268 8.96240683,16.15236 C8.95693539,16.1530908 8.95693539,16.1530908 8.95146479,16.1538215 C8.13032806,16.2634845 7.55356464,17.0180467 7.66322765,17.8391835 C7.77289065,18.6603202 8.52745292,19.2370836 9.34858965,19.1274206 L9.34858965,19.1274206 Z M17.1767435,18.6256231 C17.1839958,18.6245412 17.1839958,18.6245412 17.1912486,18.6234592 C18.27963,18.4610765 19.0303002,17.4471319 18.8679175,16.3587506 C18.7055348,15.2703693 17.6915903,14.519699 16.6032089,14.6820817 C16.5960024,14.6831569 16.5960024,14.6831569 16.5887964,14.6842319 C15.5004112,14.846589 14.7497172,15.8605159 14.9120743,16.948901 C15.0744314,18.0372862 16.0883584,18.7879802 17.1767435,18.6256231 L17.1767435,18.6256231 Z M24.8370287,17.4177379 C25.922252,17.2355961 26.6543441,16.2081937 26.4722023,15.1229704 C26.2900605,14.0377471 25.2626581,13.3056549 24.1774348,13.4877968 C24.1702314,13.4890057 24.1702314,13.4890057 24.1630282,13.4902145 C23.0777996,13.6723249 22.3456777,14.6997061 22.5277881,15.7849347 C22.7098984,16.8701632 23.7372796,17.6022852 24.8225082,17.4201748 C24.8297684,17.4189565 24.8297684,17.4189565 24.8370287,17.4177379 Z M32.4725265,16.061085 C33.5532108,15.853199 34.2612448,14.8060455 34.0539647,13.7222022 C33.8466846,12.6383589 32.8025831,11.9282552 31.7218988,12.1361412 C31.714724,12.1375214 31.714724,12.1375214 31.7075489,12.1389013 C30.6268567,12.3467457 29.9187826,13.3938719 30.1260211,14.4777232 C30.3332597,15.5615745 31.3773339,16.2717185 32.4580262,16.0638741 C32.4652764,16.0624797 32.4652764,16.0624797 32.4725265,16.061085 Z M40.0707225,14.4695476 C40.0780573,14.4678946 40.0780573,14.4678946 40.0853916,14.4662413 C41.158768,14.2242783 41.8327617,13.1579849 41.5907986,12.0846085 C41.3488355,11.011232 40.2825422,10.3372384 39.2091657,10.5792015 C39.2019419,10.5808298 39.2019419,10.5808298 39.1947175,10.582458 C38.1213273,10.8243601 37.447273,11.8906152 37.6891752,12.9640053 C37.9310773,14.0373955 38.9973324,14.7114498 40.0707225,14.4695476 L40.0707225,14.4695476 Z M47.4276119,12.0082845 C47.4329844,12.0067855 47.4329844,12.0067855 47.4383562,12.0052864 C48.2362896,11.7825897 48.7026111,10.9552056 48.4799145,10.1572722 C48.2572179,9.35933881 47.4298337,8.89301728 46.6319003,9.11571391 C46.6266645,9.1171751 46.6266645,9.1171751 46.6214279,9.11863616 C45.8234736,9.34125773 45.3570742,10.168598 45.5796958,10.9665523 C45.8023173,11.7645067 46.6296576,12.230906 47.4276119,12.0082845 L47.4276119,12.0082845 Z M54.8999721,9.57562965 C54.9052414,9.57358217 54.9052414,9.57358217 54.9105092,9.57153441 C55.6826371,9.27135123 56.0652239,8.40207131 55.7650408,7.62994336 C55.4648576,6.85781542 54.5955777,6.4752286 53.8234497,6.77541179 C53.8184808,6.77734338 53.8184808,6.77734338 53.8135101,6.77927482 C53.0413181,7.07929302 52.6585455,7.94849117 52.9585637,8.72068323 C53.2585819,9.4928753 54.12778,9.87564785 54.8999721,9.57562965 L54.8999721,9.57562965 Z" id="Shape"></path> + <path d="M1.45327704,26.6978168 C1.54647464,26.6867112 1.63432439,26.660866 1.7147722,26.6228911 C2.03520341,26.4716332 2.23820252,26.1279362 2.1939679,25.7567213 C2.13862768,25.2923089 1.71728488,24.9606903 1.2528724,25.0160305 C1.24980572,25.0163959 1.24980572,25.0163959 1.24674021,25.0167611 C0.782326598,25.0720918 0.450699376,25.4934278 0.506030077,25.9578415 C0.561360778,26.4222551 0.982696791,26.7538823 
1.4471104,26.6985516 C1.45019313,26.6981843 1.45019313,26.6981843 1.45327704,26.6978168 Z M9.34858965,26.1274206 C9.35409106,26.1266859 9.35409106,26.1266859 9.35959333,26.1259509 C10.1807278,26.0162709 10.7574756,25.2616967 10.6477955,24.4405622 C10.5381155,23.6194278 9.78354128,23.04268 8.96240683,23.15236 C8.95693539,23.1530908 8.95693539,23.1530908 8.95146479,23.1538215 C8.13032806,23.2634845 7.55356464,24.0180467 7.66322765,24.8391835 C7.77289065,25.6603202 8.52745292,26.2370836 9.34858965,26.1274206 L9.34858965,26.1274206 Z M17.0066591,25.0462427 C17.0121188,25.0454283 17.0121188,25.0454283 17.0175789,25.0446136 C17.8369369,24.9223683 18.4020584,24.1590479 18.279813,23.3396899 C18.1575677,22.5203319 17.3942473,21.9552104 16.5748893,22.0774558 C16.5694641,22.0782652 16.5694641,22.0782652 16.5640392,22.0790744 C15.7446783,22.2013005 15.1795389,22.9646076 15.301765,23.7839685 C15.4239911,24.6033294 16.1872982,25.1684688 17.0066591,25.0462427 L17.0066591,25.0462427 Z M24.8225082,24.4201748 C24.8297684,24.4189565 24.8297684,24.4189565 24.8370287,24.4177379 C25.922252,24.2355961 26.6543441,23.2081937 26.4722023,22.1229704 C26.2900605,21.0377471 25.2626581,20.3056549 24.1774348,20.4877968 C24.1702314,20.4890057 24.1702314,20.4890057 24.1630282,20.4902145 C23.0777996,20.6723249 22.3456777,21.6997061 22.5277881,22.7849347 C22.7098984,23.8701632 23.7372796,24.6022852 24.8225082,24.4201748 L24.8225082,24.4201748 Z M32.4725265,23.0495399 C33.5532108,22.8422598 34.2612448,21.7981584 34.0539647,20.7174741 C33.8466846,19.6367898 32.8025831,18.9287558 31.7218988,19.1360359 C31.714724,19.137412 31.714724,19.137412 31.7075489,19.138788 C30.6268567,19.3460265 29.9187826,20.3901008 30.1260211,21.470793 C30.3332597,22.5514853 31.3773339,23.2595594 32.4580262,23.0523208 C32.4652764,23.0509305 32.4652764,23.0509305 32.4725265,23.0495399 Z M39.8723866,20.8947688 C39.877909,20.8935242 39.877909,20.8935242 39.883431,20.8922795 C40.6915794,20.7101047 41.1990314,19.9072892 41.0168566,19.0991407 C40.8346818,18.2909923 40.0318663,17.7835404 39.2237179,17.9657152 C39.218279,17.9669412 39.218279,17.9669412 39.2128397,17.968167 C38.404681,18.1502959 37.8971834,18.9530826 38.0793123,19.7612414 C38.2614411,20.5694001 39.0642279,21.0768976 39.8723866,20.8947688 L39.8723866,20.8947688 Z M47.4276119,19.0082845 C47.4329844,19.0067855 47.4329844,19.0067855 47.4383562,19.0052864 C48.2362896,18.7825897 48.7026111,17.9552056 48.4799145,17.1572722 C48.2572179,16.3593388 47.4298337,15.8930173 46.6319003,16.1157139 C46.6266645,16.1171751 46.6266645,16.1171751 46.6214279,16.1186362 C45.8234736,16.3412577 45.3570742,17.168598 45.5796958,17.9665523 C45.8023173,18.7645067 46.6296576,19.230906 47.4276119,19.0082845 L47.4276119,19.0082845 Z M54.4138099,15.7087505 C54.4167849,15.7075945 54.4167849,15.7075945 54.419759,15.7064383 C54.8556933,15.5369585 55.0716973,15.0461733 54.9022174,14.610239 C54.7327376,14.1743047 54.2419524,13.9583007 53.8060181,14.1277806 C53.8032127,14.1288712 53.8032127,14.1288712 53.8004064,14.1299616 C53.3644359,14.2993484 53.148327,14.7900873 53.3177137,15.2260578 C53.4871005,15.6620283 53.9778394,15.8781372 54.4138099,15.7087505 L54.4138099,15.7087505 Z" id="Shape"></path> + </g> + <g id="middle" transform="translate(0.000000, 33.900002)"> + <path d="M1.4471104,12.2885536 C1.45019313,12.2881864 1.45019313,12.2881864 1.45327704,12.2878189 C1.91768952,12.2324787 2.24930811,11.8111359 2.1939679,11.3467234 C2.13862768,10.8823109 1.71728488,10.5506923 1.2528724,10.6060325 C1.24980572,10.606398 1.24980572,10.606398 
1.24674021,10.6067632 C0.782326598,10.6620939 0.450699376,11.0834299 0.506030077,11.5478435 C0.561360778,12.0122571 0.982696791,12.3438844 1.4471104,12.2885536 L1.4471104,12.2885536 Z M9.34858965,12.1304907 C9.35409106,12.129756 9.35409106,12.129756 9.35959333,12.129021 C10.1807278,12.019341 10.7574756,11.2647668 10.6477955,10.4436323 C10.5381155,9.62249789 9.78354128,9.04575011 8.96240683,9.15543014 C8.95693539,9.15616095 8.95693539,9.15616095 8.95146479,9.15689157 C8.13032806,9.26655457 7.55356464,10.0211168 7.66322765,10.8422536 C7.77289065,11.6633903 8.52745292,12.2401537 9.34858965,12.1304907 L9.34858965,12.1304907 Z M17.0066591,11.0493128 C17.0121188,11.0484984 17.0121188,11.0484984 17.0175789,11.0476838 C17.8369369,10.9254384 18.4020584,10.1621181 18.279813,9.34276003 C18.1575677,8.52340201 17.3942473,7.95828052 16.5748893,8.08052587 C16.5694641,8.08133528 16.5694641,8.08133528 16.5640392,8.08214454 C15.7446783,8.20437064 15.1795389,8.96767771 15.301765,9.78703861 C15.4239911,10.6063995 16.1872982,11.1715389 17.0066591,11.0493128 L17.0066591,11.0493128 Z M24.8225082,10.4232459 C24.8297684,10.4220275 24.8297684,10.4220275 24.8370287,10.420809 C25.922252,10.2386672 26.6543441,9.21126476 26.4722023,8.12604147 C26.2900605,7.04081818 25.2626581,6.30872601 24.1774348,6.49086783 C24.1702314,6.4920768 24.1702314,6.4920768 24.1630282,6.49328559 C23.0777996,6.67539597 22.3456777,7.70277717 22.5277881,8.78800574 C22.7098984,9.87323431 23.7372796,10.6053563 24.8225082,10.4232459 L24.8225082,10.4232459 Z M32.4725265,9.05261002 C33.5532108,8.84532993 34.2612448,7.80122849 34.0539647,6.72054419 C33.8466846,5.63985989 32.8025831,4.93182589 31.7218988,5.13910599 C31.714724,5.14048211 31.714724,5.14048211 31.7075489,5.14185807 C30.6268567,5.34909665 29.9187826,6.39317088 30.1260211,7.47386314 C30.3332597,8.5545554 31.3773339,9.26262952 32.4580262,9.05539095 C32.4652764,9.05400057 32.4652764,9.05400057 32.4725265,9.05261002 Z M39.8723866,6.89476879 C39.877909,6.8935242 39.877909,6.8935242 39.883431,6.89227947 C40.6915794,6.71010471 41.1990314,5.90728916 41.0168566,5.09914075 C40.8346818,4.29099233 40.0318663,3.78354042 39.2237179,3.96571517 C39.218279,3.96694116 39.218279,3.96694116 39.2128397,3.96816703 C38.404681,4.15029588 37.8971834,4.9530826 38.0793123,5.76124136 C38.2614411,6.56940012 39.0642279,7.07689764 39.8723866,6.89476879 L39.8723866,6.89476879 Z M47.4276119,5.00828445 C47.4329844,5.00678549 47.4329844,5.00678549 47.4383562,5.00528637 C48.2362896,4.78258973 48.7026111,3.95520561 48.4799145,3.15727221 C48.2572179,2.35933881 47.4298337,1.89301728 46.6319003,2.11571391 C46.6266645,2.1171751 46.6266645,2.1171751 46.6214279,2.11863616 C45.8234736,2.34125773 45.3570742,3.16859798 45.5796958,3.96655233 C45.8023173,4.76450667 46.6296576,5.23090603 47.4276119,5.00828445 L47.4276119,5.00828445 Z M54.419759,2.30643871 C54.8556933,2.13695884 55.0716973,1.6461737 54.9022174,1.21023941 C54.7327376,0.774305114 54.2419524,0.558301127 53.8060181,0.727780997 C53.8032127,0.728871549 53.8032127,0.728871549 53.8004064,0.729962021 C53.3644359,0.89934874 53.148327,1.39008772 53.3177137,1.82605822 C53.4871005,2.26202871 53.9778394,2.47813756 54.4138099,2.30875084 C54.4167849,2.30759485 54.4167849,2.30759485 54.419759,2.30643871 Z" id="Shape"></path> + <path d="M1.67760355,20.096503 C1.68306395,20.0958524 1.68306395,20.0958524 1.68852642,20.0952015 C2.51113381,19.9971782 3.09852524,19.2508595 3.00050189,18.4282521 C2.90247854,17.6056447 2.15615986,17.0182533 1.33355246,17.1162767 C1.3281205,17.1169239 
1.3281205,17.1169239 1.3226906,17.1175709 C0.500081196,17.2155774 -0.0873255124,17.961884 0.0106809923,18.7844934 C0.108687497,19.6071028 0.854994145,20.1945095 1.67760355,20.096503 L1.67760355,20.096503 Z M9.34858965,19.1274206 C9.35409106,19.1266859 9.35409106,19.1266859 9.35959333,19.1259509 C10.1807278,19.0162709 10.7574756,18.2616967 10.6477955,17.4405622 C10.5381155,16.6194278 9.78354128,16.04268 8.96240683,16.15236 C8.95693539,16.1530908 8.95693539,16.1530908 8.95146479,16.1538215 C8.13032806,16.2634845 7.55356464,17.0180467 7.66322765,17.8391835 C7.77289065,18.6603202 8.52745292,19.2370836 9.34858965,19.1274206 L9.34858965,19.1274206 Z M17.1767435,18.6256231 C17.1839958,18.6245412 17.1839958,18.6245412 17.1912486,18.6234592 C18.27963,18.4610765 19.0303002,17.4471319 18.8679175,16.3587506 C18.7055348,15.2703693 17.6915903,14.519699 16.6032089,14.6820817 C16.5960024,14.6831569 16.5960024,14.6831569 16.5887964,14.6842319 C15.5004112,14.846589 14.7497172,15.8605159 14.9120743,16.948901 C15.0744314,18.0372862 16.0883584,18.7879802 17.1767435,18.6256231 L17.1767435,18.6256231 Z M24.8370287,17.4177379 C25.922252,17.2355961 26.6543441,16.2081937 26.4722023,15.1229704 C26.2900605,14.0377471 25.2626581,13.3056549 24.1774348,13.4877968 C24.1702314,13.4890057 24.1702314,13.4890057 24.1630282,13.4902145 C23.0777996,13.6723249 22.3456777,14.6997061 22.5277881,15.7849347 C22.7098984,16.8701632 23.7372796,17.6022852 24.8225082,17.4201748 C24.8297684,17.4189565 24.8297684,17.4189565 24.8370287,17.4177379 Z M32.4725265,16.061085 C33.5532108,15.853199 34.2612448,14.8060455 34.0539647,13.7222022 C33.8466846,12.6383589 32.8025831,11.9282552 31.7218988,12.1361412 C31.714724,12.1375214 31.714724,12.1375214 31.7075489,12.1389013 C30.6268567,12.3467457 29.9187826,13.3938719 30.1260211,14.4777232 C30.3332597,15.5615745 31.3773339,16.2717185 32.4580262,16.0638741 C32.4652764,16.0624797 32.4652764,16.0624797 32.4725265,16.061085 Z M40.0707225,14.4695476 C40.0780573,14.4678946 40.0780573,14.4678946 40.0853916,14.4662413 C41.158768,14.2242783 41.8327617,13.1579849 41.5907986,12.0846085 C41.3488355,11.011232 40.2825422,10.3372384 39.2091657,10.5792015 C39.2019419,10.5808298 39.2019419,10.5808298 39.1947175,10.582458 C38.1213273,10.8243601 37.447273,11.8906152 37.6891752,12.9640053 C37.9310773,14.0373955 38.9973324,14.7114498 40.0707225,14.4695476 L40.0707225,14.4695476 Z M47.4276119,12.0082845 C47.4329844,12.0067855 47.4329844,12.0067855 47.4383562,12.0052864 C48.2362896,11.7825897 48.7026111,10.9552056 48.4799145,10.1572722 C48.2572179,9.35933881 47.4298337,8.89301728 46.6319003,9.11571391 C46.6266645,9.1171751 46.6266645,9.1171751 46.6214279,9.11863616 C45.8234736,9.34125773 45.3570742,10.168598 45.5796958,10.9665523 C45.8023173,11.7645067 46.6296576,12.230906 47.4276119,12.0082845 L47.4276119,12.0082845 Z M54.8999721,9.57562965 C54.9052414,9.57358217 54.9052414,9.57358217 54.9105092,9.57153441 C55.6826371,9.27135123 56.0652239,8.40207131 55.7650408,7.62994336 C55.4648576,6.85781542 54.5955777,6.4752286 53.8234497,6.77541179 C53.8184808,6.77734338 53.8184808,6.77734338 53.8135101,6.77927482 C53.0413181,7.07929302 52.6585455,7.94849117 52.9585637,8.72068323 C53.2585819,9.4928753 54.12778,9.87564785 54.8999721,9.57562965 L54.8999721,9.57562965 Z" id="Shape"></path> + <path d="M1.45327704,26.6978168 C1.54647464,26.6867112 1.63432439,26.660866 1.7147722,26.6228911 C2.03520341,26.4716332 2.23820252,26.1279362 2.1939679,25.7567213 C2.13862768,25.2923089 1.71728488,24.9606903 1.2528724,25.0160305 
C1.24980572,25.0163959 1.24980572,25.0163959 1.24674021,25.0167611 C0.782326598,25.0720918 0.450699376,25.4934278 0.506030077,25.9578415 C0.561360778,26.4222551 0.982696791,26.7538823 1.4471104,26.6985516 C1.45019313,26.6981843 1.45019313,26.6981843 1.45327704,26.6978168 Z M9.34858965,26.1274206 C9.35409106,26.1266859 9.35409106,26.1266859 9.35959333,26.1259509 C10.1807278,26.0162709 10.7574756,25.2616967 10.6477955,24.4405622 C10.5381155,23.6194278 9.78354128,23.04268 8.96240683,23.15236 C8.95693539,23.1530908 8.95693539,23.1530908 8.95146479,23.1538215 C8.13032806,23.2634845 7.55356464,24.0180467 7.66322765,24.8391835 C7.77289065,25.6603202 8.52745292,26.2370836 9.34858965,26.1274206 L9.34858965,26.1274206 Z M17.0066591,25.0462427 C17.0121188,25.0454283 17.0121188,25.0454283 17.0175789,25.0446136 C17.8369369,24.9223683 18.4020584,24.1590479 18.279813,23.3396899 C18.1575677,22.5203319 17.3942473,21.9552104 16.5748893,22.0774558 C16.5694641,22.0782652 16.5694641,22.0782652 16.5640392,22.0790744 C15.7446783,22.2013005 15.1795389,22.9646076 15.301765,23.7839685 C15.4239911,24.6033294 16.1872982,25.1684688 17.0066591,25.0462427 L17.0066591,25.0462427 Z M24.8225082,24.4201748 C24.8297684,24.4189565 24.8297684,24.4189565 24.8370287,24.4177379 C25.922252,24.2355961 26.6543441,23.2081937 26.4722023,22.1229704 C26.2900605,21.0377471 25.2626581,20.3056549 24.1774348,20.4877968 C24.1702314,20.4890057 24.1702314,20.4890057 24.1630282,20.4902145 C23.0777996,20.6723249 22.3456777,21.6997061 22.5277881,22.7849347 C22.7098984,23.8701632 23.7372796,24.6022852 24.8225082,24.4201748 L24.8225082,24.4201748 Z M32.4725265,23.0495399 C33.5532108,22.8422598 34.2612448,21.7981584 34.0539647,20.7174741 C33.8466846,19.6367898 32.8025831,18.9287558 31.7218988,19.1360359 C31.714724,19.137412 31.714724,19.137412 31.7075489,19.138788 C30.6268567,19.3460265 29.9187826,20.3901008 30.1260211,21.470793 C30.3332597,22.5514853 31.3773339,23.2595594 32.4580262,23.0523208 C32.4652764,23.0509305 32.4652764,23.0509305 32.4725265,23.0495399 Z M39.8723866,20.8947688 C39.877909,20.8935242 39.877909,20.8935242 39.883431,20.8922795 C40.6915794,20.7101047 41.1990314,19.9072892 41.0168566,19.0991407 C40.8346818,18.2909923 40.0318663,17.7835404 39.2237179,17.9657152 C39.218279,17.9669412 39.218279,17.9669412 39.2128397,17.968167 C38.404681,18.1502959 37.8971834,18.9530826 38.0793123,19.7612414 C38.2614411,20.5694001 39.0642279,21.0768976 39.8723866,20.8947688 L39.8723866,20.8947688 Z M47.4276119,19.0082845 C47.4329844,19.0067855 47.4329844,19.0067855 47.4383562,19.0052864 C48.2362896,18.7825897 48.7026111,17.9552056 48.4799145,17.1572722 C48.2572179,16.3593388 47.4298337,15.8930173 46.6319003,16.1157139 C46.6266645,16.1171751 46.6266645,16.1171751 46.6214279,16.1186362 C45.8234736,16.3412577 45.3570742,17.168598 45.5796958,17.9665523 C45.8023173,18.7645067 46.6296576,19.230906 47.4276119,19.0082845 L47.4276119,19.0082845 Z M54.4138099,15.7087505 C54.4167849,15.7075945 54.4167849,15.7075945 54.419759,15.7064383 C54.8556933,15.5369585 55.0716973,15.0461733 54.9022174,14.610239 C54.7327376,14.1743047 54.2419524,13.9583007 53.8060181,14.1277806 C53.8032127,14.1288712 53.8032127,14.1288712 53.8004064,14.1299616 C53.3644359,14.2993484 53.148327,14.7900873 53.3177137,15.2260578 C53.4871005,15.6620283 53.9778394,15.8781372 54.4138099,15.7087505 L54.4138099,15.7087505 Z" id="Shape"></path> + </g> + <g id="top"> + <path d="M1.4471104,12.2885536 C1.45019313,12.2881864 1.45019313,12.2881864 1.45327704,12.2878189 C1.91768952,12.2324787 
2.24930811,11.8111359 2.1939679,11.3467234 C2.13862768,10.8823109 1.71728488,10.5506923 1.2528724,10.6060325 C1.24980572,10.606398 1.24980572,10.606398 1.24674021,10.6067632 C0.782326598,10.6620939 0.450699376,11.0834299 0.506030077,11.5478435 C0.561360778,12.0122571 0.982696791,12.3438844 1.4471104,12.2885536 L1.4471104,12.2885536 Z M9.34858965,12.1304907 C9.35409106,12.129756 9.35409106,12.129756 9.35959333,12.129021 C10.1807278,12.019341 10.7574756,11.2647668 10.6477955,10.4436323 C10.5381155,9.62249789 9.78354128,9.04575011 8.96240683,9.15543014 C8.95693539,9.15616095 8.95693539,9.15616095 8.95146479,9.15689157 C8.13032806,9.26655457 7.55356464,10.0211168 7.66322765,10.8422536 C7.77289065,11.6633903 8.52745292,12.2401537 9.34858965,12.1304907 L9.34858965,12.1304907 Z M17.0066591,11.0493128 C17.0121188,11.0484984 17.0121188,11.0484984 17.0175789,11.0476838 C17.8369369,10.9254384 18.4020584,10.1621181 18.279813,9.34276003 C18.1575677,8.52340201 17.3942473,7.95828052 16.5748893,8.08052587 C16.5694641,8.08133528 16.5694641,8.08133528 16.5640392,8.08214454 C15.7446783,8.20437064 15.1795389,8.96767771 15.301765,9.78703861 C15.4239911,10.6063995 16.1872982,11.1715389 17.0066591,11.0493128 L17.0066591,11.0493128 Z M24.8225082,10.4232459 C24.8297684,10.4220275 24.8297684,10.4220275 24.8370287,10.420809 C25.922252,10.2386672 26.6543441,9.21126476 26.4722023,8.12604147 C26.2900605,7.04081818 25.2626581,6.30872601 24.1774348,6.49086783 C24.1702314,6.4920768 24.1702314,6.4920768 24.1630282,6.49328559 C23.0777996,6.67539597 22.3456777,7.70277717 22.5277881,8.78800574 C22.7098984,9.87323431 23.7372796,10.6053563 24.8225082,10.4232459 L24.8225082,10.4232459 Z M32.4725265,9.05261002 C33.5532108,8.84532993 34.2612448,7.80122849 34.0539647,6.72054419 C33.8466846,5.63985989 32.8025831,4.93182589 31.7218988,5.13910599 C31.714724,5.14048211 31.714724,5.14048211 31.7075489,5.14185807 C30.6268567,5.34909665 29.9187826,6.39317088 30.1260211,7.47386314 C30.3332597,8.5545554 31.3773339,9.26262952 32.4580262,9.05539095 C32.4652764,9.05400057 32.4652764,9.05400057 32.4725265,9.05261002 Z M39.8723866,6.89476879 C39.877909,6.8935242 39.877909,6.8935242 39.883431,6.89227947 C40.6915794,6.71010471 41.1990314,5.90728916 41.0168566,5.09914075 C40.8346818,4.29099233 40.0318663,3.78354042 39.2237179,3.96571517 C39.218279,3.96694116 39.218279,3.96694116 39.2128397,3.96816703 C38.404681,4.15029588 37.8971834,4.9530826 38.0793123,5.76124136 C38.2614411,6.56940012 39.0642279,7.07689764 39.8723866,6.89476879 L39.8723866,6.89476879 Z M47.4276119,5.00828445 C47.4329844,5.00678549 47.4329844,5.00678549 47.4383562,5.00528637 C48.2362896,4.78258973 48.7026111,3.95520561 48.4799145,3.15727221 C48.2572179,2.35933881 47.4298337,1.89301728 46.6319003,2.11571391 C46.6266645,2.1171751 46.6266645,2.1171751 46.6214279,2.11863616 C45.8234736,2.34125773 45.3570742,3.16859798 45.5796958,3.96655233 C45.8023173,4.76450667 46.6296576,5.23090603 47.4276119,5.00828445 L47.4276119,5.00828445 Z M54.419759,2.30643871 C54.8556933,2.13695884 55.0716973,1.6461737 54.9022174,1.21023941 C54.7327376,0.774305114 54.2419524,0.558301127 53.8060181,0.727780997 C53.8032127,0.728871549 53.8032127,0.728871549 53.8004064,0.729962021 C53.3644359,0.89934874 53.148327,1.39008772 53.3177137,1.82605822 C53.4871005,2.26202871 53.9778394,2.47813756 54.4138099,2.30875084 C54.4167849,2.30759485 54.4167849,2.30759485 54.419759,2.30643871 Z" id="Shape"></path> + <path d="M1.67760355,20.096503 C1.68306395,20.0958524 1.68306395,20.0958524 1.68852642,20.0952015 
C2.51113381,19.9971782 3.09852524,19.2508595 3.00050189,18.4282521 C2.90247854,17.6056447 2.15615986,17.0182533 1.33355246,17.1162767 C1.3281205,17.1169239 1.3281205,17.1169239 1.3226906,17.1175709 C0.500081196,17.2155774 -0.0873255124,17.961884 0.0106809923,18.7844934 C0.108687497,19.6071028 0.854994145,20.1945095 1.67760355,20.096503 L1.67760355,20.096503 Z M9.34858965,19.1274206 C9.35409106,19.1266859 9.35409106,19.1266859 9.35959333,19.1259509 C10.1807278,19.0162709 10.7574756,18.2616967 10.6477955,17.4405622 C10.5381155,16.6194278 9.78354128,16.04268 8.96240683,16.15236 C8.95693539,16.1530908 8.95693539,16.1530908 8.95146479,16.1538215 C8.13032806,16.2634845 7.55356464,17.0180467 7.66322765,17.8391835 C7.77289065,18.6603202 8.52745292,19.2370836 9.34858965,19.1274206 L9.34858965,19.1274206 Z M17.1767435,18.6256231 C17.1839958,18.6245412 17.1839958,18.6245412 17.1912486,18.6234592 C18.27963,18.4610765 19.0303002,17.4471319 18.8679175,16.3587506 C18.7055348,15.2703693 17.6915903,14.519699 16.6032089,14.6820817 C16.5960024,14.6831569 16.5960024,14.6831569 16.5887964,14.6842319 C15.5004112,14.846589 14.7497172,15.8605159 14.9120743,16.948901 C15.0744314,18.0372862 16.0883584,18.7879802 17.1767435,18.6256231 L17.1767435,18.6256231 Z M24.8370287,17.4177379 C25.922252,17.2355961 26.6543441,16.2081937 26.4722023,15.1229704 C26.2900605,14.0377471 25.2626581,13.3056549 24.1774348,13.4877968 C24.1702314,13.4890057 24.1702314,13.4890057 24.1630282,13.4902145 C23.0777996,13.6723249 22.3456777,14.6997061 22.5277881,15.7849347 C22.7098984,16.8701632 23.7372796,17.6022852 24.8225082,17.4201748 C24.8297684,17.4189565 24.8297684,17.4189565 24.8370287,17.4177379 Z M32.4725265,16.061085 C33.5532108,15.853199 34.2612448,14.8060455 34.0539647,13.7222022 C33.8466846,12.6383589 32.8025831,11.9282552 31.7218988,12.1361412 C31.714724,12.1375214 31.714724,12.1375214 31.7075489,12.1389013 C30.6268567,12.3467457 29.9187826,13.3938719 30.1260211,14.4777232 C30.3332597,15.5615745 31.3773339,16.2717185 32.4580262,16.0638741 C32.4652764,16.0624797 32.4652764,16.0624797 32.4725265,16.061085 Z M40.0707225,14.4695476 C40.0780573,14.4678946 40.0780573,14.4678946 40.0853916,14.4662413 C41.158768,14.2242783 41.8327617,13.1579849 41.5907986,12.0846085 C41.3488355,11.011232 40.2825422,10.3372384 39.2091657,10.5792015 C39.2019419,10.5808298 39.2019419,10.5808298 39.1947175,10.582458 C38.1213273,10.8243601 37.447273,11.8906152 37.6891752,12.9640053 C37.9310773,14.0373955 38.9973324,14.7114498 40.0707225,14.4695476 L40.0707225,14.4695476 Z M47.4276119,12.0082845 C47.4329844,12.0067855 47.4329844,12.0067855 47.4383562,12.0052864 C48.2362896,11.7825897 48.7026111,10.9552056 48.4799145,10.1572722 C48.2572179,9.35933881 47.4298337,8.89301728 46.6319003,9.11571391 C46.6266645,9.1171751 46.6266645,9.1171751 46.6214279,9.11863616 C45.8234736,9.34125773 45.3570742,10.168598 45.5796958,10.9665523 C45.8023173,11.7645067 46.6296576,12.230906 47.4276119,12.0082845 L47.4276119,12.0082845 Z M54.8999721,9.57562965 C54.9052414,9.57358217 54.9052414,9.57358217 54.9105092,9.57153441 C55.6826371,9.27135123 56.0652239,8.40207131 55.7650408,7.62994336 C55.4648576,6.85781542 54.5955777,6.4752286 53.8234497,6.77541179 C53.8184808,6.77734338 53.8184808,6.77734338 53.8135101,6.77927482 C53.0413181,7.07929302 52.6585455,7.94849117 52.9585637,8.72068323 C53.2585819,9.4928753 54.12778,9.87564785 54.8999721,9.57562965 L54.8999721,9.57562965 Z" id="Shape"></path> + <path d="M1.45327704,26.6978168 C1.54647464,26.6867112 1.63432439,26.660866 
1.7147722,26.6228911 C2.03520341,26.4716332 2.23820252,26.1279362 2.1939679,25.7567213 C2.13862768,25.2923089 1.71728488,24.9606903 1.2528724,25.0160305 C1.24980572,25.0163959 1.24980572,25.0163959 1.24674021,25.0167611 C0.782326598,25.0720918 0.450699376,25.4934278 0.506030077,25.9578415 C0.561360778,26.4222551 0.982696791,26.7538823 1.4471104,26.6985516 C1.45019313,26.6981843 1.45019313,26.6981843 1.45327704,26.6978168 Z M9.34858965,26.1274206 C9.35409106,26.1266859 9.35409106,26.1266859 9.35959333,26.1259509 C10.1807278,26.0162709 10.7574756,25.2616967 10.6477955,24.4405622 C10.5381155,23.6194278 9.78354128,23.04268 8.96240683,23.15236 C8.95693539,23.1530908 8.95693539,23.1530908 8.95146479,23.1538215 C8.13032806,23.2634845 7.55356464,24.0180467 7.66322765,24.8391835 C7.77289065,25.6603202 8.52745292,26.2370836 9.34858965,26.1274206 L9.34858965,26.1274206 Z M17.0066591,25.0462427 C17.0121188,25.0454283 17.0121188,25.0454283 17.0175789,25.0446136 C17.8369369,24.9223683 18.4020584,24.1590479 18.279813,23.3396899 C18.1575677,22.5203319 17.3942473,21.9552104 16.5748893,22.0774558 C16.5694641,22.0782652 16.5694641,22.0782652 16.5640392,22.0790744 C15.7446783,22.2013005 15.1795389,22.9646076 15.301765,23.7839685 C15.4239911,24.6033294 16.1872982,25.1684688 17.0066591,25.0462427 L17.0066591,25.0462427 Z M24.8225082,24.4201748 C24.8297684,24.4189565 24.8297684,24.4189565 24.8370287,24.4177379 C25.922252,24.2355961 26.6543441,23.2081937 26.4722023,22.1229704 C26.2900605,21.0377471 25.2626581,20.3056549 24.1774348,20.4877968 C24.1702314,20.4890057 24.1702314,20.4890057 24.1630282,20.4902145 C23.0777996,20.6723249 22.3456777,21.6997061 22.5277881,22.7849347 C22.7098984,23.8701632 23.7372796,24.6022852 24.8225082,24.4201748 L24.8225082,24.4201748 Z M32.4725265,23.0495399 C33.5532108,22.8422598 34.2612448,21.7981584 34.0539647,20.7174741 C33.8466846,19.6367898 32.8025831,18.9287558 31.7218988,19.1360359 C31.714724,19.137412 31.714724,19.137412 31.7075489,19.138788 C30.6268567,19.3460265 29.9187826,20.3901008 30.1260211,21.470793 C30.3332597,22.5514853 31.3773339,23.2595594 32.4580262,23.0523208 C32.4652764,23.0509305 32.4652764,23.0509305 32.4725265,23.0495399 Z M39.8723866,20.8947688 C39.877909,20.8935242 39.877909,20.8935242 39.883431,20.8922795 C40.6915794,20.7101047 41.1990314,19.9072892 41.0168566,19.0991407 C40.8346818,18.2909923 40.0318663,17.7835404 39.2237179,17.9657152 C39.218279,17.9669412 39.218279,17.9669412 39.2128397,17.968167 C38.404681,18.1502959 37.8971834,18.9530826 38.0793123,19.7612414 C38.2614411,20.5694001 39.0642279,21.0768976 39.8723866,20.8947688 L39.8723866,20.8947688 Z M47.4276119,19.0082845 C47.4329844,19.0067855 47.4329844,19.0067855 47.4383562,19.0052864 C48.2362896,18.7825897 48.7026111,17.9552056 48.4799145,17.1572722 C48.2572179,16.3593388 47.4298337,15.8930173 46.6319003,16.1157139 C46.6266645,16.1171751 46.6266645,16.1171751 46.6214279,16.1186362 C45.8234736,16.3412577 45.3570742,17.168598 45.5796958,17.9665523 C45.8023173,18.7645067 46.6296576,19.230906 47.4276119,19.0082845 L47.4276119,19.0082845 Z M54.4138099,15.7087505 C54.4167849,15.7075945 54.4167849,15.7075945 54.419759,15.7064383 C54.8556933,15.5369585 55.0716973,15.0461733 54.9022174,14.610239 C54.7327376,14.1743047 54.2419524,13.9583007 53.8060181,14.1277806 C53.8032127,14.1288712 53.8032127,14.1288712 53.8004064,14.1299616 C53.3644359,14.2993484 53.148327,14.7900873 53.3177137,15.2260578 C53.4871005,15.6620283 53.9778394,15.8781372 54.4138099,15.7087505 L54.4138099,15.7087505 Z" id="Shape"></path> + 
</g> + </g> + </g> +</svg> diff --git a/scala3doc/dotty-docs/docs/images/dotty-logo.svg b/scala3doc/dotty-docs/docs/images/dotty-logo.svg new file mode 100644 index 000000000000..0bb0651aeffb --- /dev/null +++ b/scala3doc/dotty-docs/docs/images/dotty-logo.svg @@ -0,0 +1,30 @@ +<svg width="64px" height="109px" viewBox="0 0 64 109" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"> + <g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd"> + <g id="logo-background" transform="translate(0.000000, 16.000000)" fill="#CA445E" fill-opacity="0.45"> + <path d="M0.5,59.5 C0.5,59.5 63.4,65.8 63.4,76.3 L63.4,51.1 C63.4,51.1 63.4,40.6 0.5,34.3 L0.5,59.5 L0.5,59.5 Z" id="logo-background-bottom"></path> + <path d="M0.5,25.9 C0.5,25.9 63.4,32.2 63.4,42.7 L63.4,17.5 C63.4,17.5 63.4,7 0.5,0.7 L0.5,10.5 L0.5,25.9 L0.5,25.9 Z" id="logo-background-top"></path> + </g> + <g id="logo-foreground" fill="#CA445E"> + <path d="M0.5,109 L0.5,83.8 C0.5,83.8 63.4,77.5 63.4,67 L63.4,92.2 C63.5,92.3 63.5,102.7 0.5,109" id="Logo_Foreground_Bottom"></path> + <path d="M0.5,50.3 C0.5,50.3 63.4,44 63.4,33.5 L63.4,58.7 C63.4,58.7 63.4,69.2 0.5,75.5 L0.5,50.3 L0.5,50.3 Z" id="Logo_Foreground_Middle"></path> + <path d="M63.5,0 L63.5,25.2 C63.5,25.2 63.5,35.7 0.6,42 L0.6,16.7 C0.5,16.7 63.5,10.5 63.5,0" id="logo-foreground-top"></path> + </g> + <g id="dots" transform="translate(4.000000, 10.000000)" fill="#B6B6B6"> + <g id="bottom" transform="translate(0.000000, 67.500000)"> + <path d="M1.4471104,12.2885536 C1.45019313,12.2881864 1.45019313,12.2881864 1.45327704,12.2878189 C1.91768952,12.2324787 2.24930811,11.8111359 2.1939679,11.3467234 C2.13862768,10.8823109 1.71728488,10.5506923 1.2528724,10.6060325 C1.24980572,10.606398 1.24980572,10.606398 1.24674021,10.6067632 C0.782326598,10.6620939 0.450699376,11.0834299 0.506030077,11.5478435 C0.561360778,12.0122571 0.982696791,12.3438844 1.4471104,12.2885536 L1.4471104,12.2885536 Z M9.34858965,12.1304907 C9.35409106,12.129756 9.35409106,12.129756 9.35959333,12.129021 C10.1807278,12.019341 10.7574756,11.2647668 10.6477955,10.4436323 C10.5381155,9.62249789 9.78354128,9.04575011 8.96240683,9.15543014 C8.95693539,9.15616095 8.95693539,9.15616095 8.95146479,9.15689157 C8.13032806,9.26655457 7.55356464,10.0211168 7.66322765,10.8422536 C7.77289065,11.6633903 8.52745292,12.2401537 9.34858965,12.1304907 L9.34858965,12.1304907 Z M17.0066591,11.0493128 C17.0121188,11.0484984 17.0121188,11.0484984 17.0175789,11.0476838 C17.8369369,10.9254384 18.4020584,10.1621181 18.279813,9.34276003 C18.1575677,8.52340201 17.3942473,7.95828052 16.5748893,8.08052587 C16.5694641,8.08133528 16.5694641,8.08133528 16.5640392,8.08214454 C15.7446783,8.20437064 15.1795389,8.96767771 15.301765,9.78703861 C15.4239911,10.6063995 16.1872982,11.1715389 17.0066591,11.0493128 L17.0066591,11.0493128 Z M24.8225082,10.4232459 C24.8297684,10.4220275 24.8297684,10.4220275 24.8370287,10.420809 C25.922252,10.2386672 26.6543441,9.21126476 26.4722023,8.12604147 C26.2900605,7.04081818 25.2626581,6.30872601 24.1774348,6.49086783 C24.1702314,6.4920768 24.1702314,6.4920768 24.1630282,6.49328559 C23.0777996,6.67539597 22.3456777,7.70277717 22.5277881,8.78800574 C22.7098984,9.87323431 23.7372796,10.6053563 24.8225082,10.4232459 L24.8225082,10.4232459 Z M32.4725265,9.05261002 C33.5532108,8.84532993 34.2612448,7.80122849 34.0539647,6.72054419 C33.8466846,5.63985989 32.8025831,4.93182589 31.7218988,5.13910599 C31.714724,5.14048211 31.714724,5.14048211 31.7075489,5.14185807 
C30.6268567,5.34909665 29.9187826,6.39317088 30.1260211,7.47386314 C30.3332597,8.5545554 31.3773339,9.26262952 32.4580262,9.05539095 C32.4652764,9.05400057 32.4652764,9.05400057 32.4725265,9.05261002 Z M39.8723866,6.89476879 C39.877909,6.8935242 39.877909,6.8935242 39.883431,6.89227947 C40.6915794,6.71010471 41.1990314,5.90728916 41.0168566,5.09914075 C40.8346818,4.29099233 40.0318663,3.78354042 39.2237179,3.96571517 C39.218279,3.96694116 39.218279,3.96694116 39.2128397,3.96816703 C38.404681,4.15029588 37.8971834,4.9530826 38.0793123,5.76124136 C38.2614411,6.56940012 39.0642279,7.07689764 39.8723866,6.89476879 L39.8723866,6.89476879 Z M47.4276119,5.00828445 C47.4329844,5.00678549 47.4329844,5.00678549 47.4383562,5.00528637 C48.2362896,4.78258973 48.7026111,3.95520561 48.4799145,3.15727221 C48.2572179,2.35933881 47.4298337,1.89301728 46.6319003,2.11571391 C46.6266645,2.1171751 46.6266645,2.1171751 46.6214279,2.11863616 C45.8234736,2.34125773 45.3570742,3.16859798 45.5796958,3.96655233 C45.8023173,4.76450667 46.6296576,5.23090603 47.4276119,5.00828445 L47.4276119,5.00828445 Z M54.419759,2.30643871 C54.8556933,2.13695884 55.0716973,1.6461737 54.9022174,1.21023941 C54.7327376,0.774305114 54.2419524,0.558301127 53.8060181,0.727780997 C53.8032127,0.728871549 53.8032127,0.728871549 53.8004064,0.729962021 C53.3644359,0.89934874 53.148327,1.39008772 53.3177137,1.82605822 C53.4871005,2.26202871 53.9778394,2.47813756 54.4138099,2.30875084 C54.4167849,2.30759485 54.4167849,2.30759485 54.419759,2.30643871 Z" id="Shape"></path> + <path d="M1.67760355,20.096503 C1.68306395,20.0958524 1.68306395,20.0958524 1.68852642,20.0952015 C2.51113381,19.9971782 3.09852524,19.2508595 3.00050189,18.4282521 C2.90247854,17.6056447 2.15615986,17.0182533 1.33355246,17.1162767 C1.3281205,17.1169239 1.3281205,17.1169239 1.3226906,17.1175709 C0.500081196,17.2155774 -0.0873255124,17.961884 0.0106809923,18.7844934 C0.108687497,19.6071028 0.854994145,20.1945095 1.67760355,20.096503 L1.67760355,20.096503 Z M9.34858965,19.1274206 C9.35409106,19.1266859 9.35409106,19.1266859 9.35959333,19.1259509 C10.1807278,19.0162709 10.7574756,18.2616967 10.6477955,17.4405622 C10.5381155,16.6194278 9.78354128,16.04268 8.96240683,16.15236 C8.95693539,16.1530908 8.95693539,16.1530908 8.95146479,16.1538215 C8.13032806,16.2634845 7.55356464,17.0180467 7.66322765,17.8391835 C7.77289065,18.6603202 8.52745292,19.2370836 9.34858965,19.1274206 L9.34858965,19.1274206 Z M17.1767435,18.6256231 C17.1839958,18.6245412 17.1839958,18.6245412 17.1912486,18.6234592 C18.27963,18.4610765 19.0303002,17.4471319 18.8679175,16.3587506 C18.7055348,15.2703693 17.6915903,14.519699 16.6032089,14.6820817 C16.5960024,14.6831569 16.5960024,14.6831569 16.5887964,14.6842319 C15.5004112,14.846589 14.7497172,15.8605159 14.9120743,16.948901 C15.0744314,18.0372862 16.0883584,18.7879802 17.1767435,18.6256231 L17.1767435,18.6256231 Z M24.8370287,17.4177379 C25.922252,17.2355961 26.6543441,16.2081937 26.4722023,15.1229704 C26.2900605,14.0377471 25.2626581,13.3056549 24.1774348,13.4877968 C24.1702314,13.4890057 24.1702314,13.4890057 24.1630282,13.4902145 C23.0777996,13.6723249 22.3456777,14.6997061 22.5277881,15.7849347 C22.7098984,16.8701632 23.7372796,17.6022852 24.8225082,17.4201748 C24.8297684,17.4189565 24.8297684,17.4189565 24.8370287,17.4177379 Z M32.4725265,16.061085 C33.5532108,15.853199 34.2612448,14.8060455 34.0539647,13.7222022 C33.8466846,12.6383589 32.8025831,11.9282552 31.7218988,12.1361412 C31.714724,12.1375214 31.714724,12.1375214 31.7075489,12.1389013 
C30.6268567,12.3467457 29.9187826,13.3938719 30.1260211,14.4777232 C30.3332597,15.5615745 31.3773339,16.2717185 32.4580262,16.0638741 C32.4652764,16.0624797 32.4652764,16.0624797 32.4725265,16.061085 Z M40.0707225,14.4695476 C40.0780573,14.4678946 40.0780573,14.4678946 40.0853916,14.4662413 C41.158768,14.2242783 41.8327617,13.1579849 41.5907986,12.0846085 C41.3488355,11.011232 40.2825422,10.3372384 39.2091657,10.5792015 C39.2019419,10.5808298 39.2019419,10.5808298 39.1947175,10.582458 C38.1213273,10.8243601 37.447273,11.8906152 37.6891752,12.9640053 C37.9310773,14.0373955 38.9973324,14.7114498 40.0707225,14.4695476 L40.0707225,14.4695476 Z M47.4276119,12.0082845 C47.4329844,12.0067855 47.4329844,12.0067855 47.4383562,12.0052864 C48.2362896,11.7825897 48.7026111,10.9552056 48.4799145,10.1572722 C48.2572179,9.35933881 47.4298337,8.89301728 46.6319003,9.11571391 C46.6266645,9.1171751 46.6266645,9.1171751 46.6214279,9.11863616 C45.8234736,9.34125773 45.3570742,10.168598 45.5796958,10.9665523 C45.8023173,11.7645067 46.6296576,12.230906 47.4276119,12.0082845 L47.4276119,12.0082845 Z M54.8999721,9.57562965 C54.9052414,9.57358217 54.9052414,9.57358217 54.9105092,9.57153441 C55.6826371,9.27135123 56.0652239,8.40207131 55.7650408,7.62994336 C55.4648576,6.85781542 54.5955777,6.4752286 53.8234497,6.77541179 C53.8184808,6.77734338 53.8184808,6.77734338 53.8135101,6.77927482 C53.0413181,7.07929302 52.6585455,7.94849117 52.9585637,8.72068323 C53.2585819,9.4928753 54.12778,9.87564785 54.8999721,9.57562965 L54.8999721,9.57562965 Z" id="Shape"></path> + <path d="M1.45327704,26.6978168 C1.54647464,26.6867112 1.63432439,26.660866 1.7147722,26.6228911 C2.03520341,26.4716332 2.23820252,26.1279362 2.1939679,25.7567213 C2.13862768,25.2923089 1.71728488,24.9606903 1.2528724,25.0160305 C1.24980572,25.0163959 1.24980572,25.0163959 1.24674021,25.0167611 C0.782326598,25.0720918 0.450699376,25.4934278 0.506030077,25.9578415 C0.561360778,26.4222551 0.982696791,26.7538823 1.4471104,26.6985516 C1.45019313,26.6981843 1.45019313,26.6981843 1.45327704,26.6978168 Z M9.34858965,26.1274206 C9.35409106,26.1266859 9.35409106,26.1266859 9.35959333,26.1259509 C10.1807278,26.0162709 10.7574756,25.2616967 10.6477955,24.4405622 C10.5381155,23.6194278 9.78354128,23.04268 8.96240683,23.15236 C8.95693539,23.1530908 8.95693539,23.1530908 8.95146479,23.1538215 C8.13032806,23.2634845 7.55356464,24.0180467 7.66322765,24.8391835 C7.77289065,25.6603202 8.52745292,26.2370836 9.34858965,26.1274206 L9.34858965,26.1274206 Z M17.0066591,25.0462427 C17.0121188,25.0454283 17.0121188,25.0454283 17.0175789,25.0446136 C17.8369369,24.9223683 18.4020584,24.1590479 18.279813,23.3396899 C18.1575677,22.5203319 17.3942473,21.9552104 16.5748893,22.0774558 C16.5694641,22.0782652 16.5694641,22.0782652 16.5640392,22.0790744 C15.7446783,22.2013005 15.1795389,22.9646076 15.301765,23.7839685 C15.4239911,24.6033294 16.1872982,25.1684688 17.0066591,25.0462427 L17.0066591,25.0462427 Z M24.8225082,24.4201748 C24.8297684,24.4189565 24.8297684,24.4189565 24.8370287,24.4177379 C25.922252,24.2355961 26.6543441,23.2081937 26.4722023,22.1229704 C26.2900605,21.0377471 25.2626581,20.3056549 24.1774348,20.4877968 C24.1702314,20.4890057 24.1702314,20.4890057 24.1630282,20.4902145 C23.0777996,20.6723249 22.3456777,21.6997061 22.5277881,22.7849347 C22.7098984,23.8701632 23.7372796,24.6022852 24.8225082,24.4201748 L24.8225082,24.4201748 Z M32.4725265,23.0495399 C33.5532108,22.8422598 34.2612448,21.7981584 34.0539647,20.7174741 C33.8466846,19.6367898 32.8025831,18.9287558 
31.7218988,19.1360359 C31.714724,19.137412 31.714724,19.137412 31.7075489,19.138788 C30.6268567,19.3460265 29.9187826,20.3901008 30.1260211,21.470793 C30.3332597,22.5514853 31.3773339,23.2595594 32.4580262,23.0523208 C32.4652764,23.0509305 32.4652764,23.0509305 32.4725265,23.0495399 Z M39.8723866,20.8947688 C39.877909,20.8935242 39.877909,20.8935242 39.883431,20.8922795 C40.6915794,20.7101047 41.1990314,19.9072892 41.0168566,19.0991407 C40.8346818,18.2909923 40.0318663,17.7835404 39.2237179,17.9657152 C39.218279,17.9669412 39.218279,17.9669412 39.2128397,17.968167 C38.404681,18.1502959 37.8971834,18.9530826 38.0793123,19.7612414 C38.2614411,20.5694001 39.0642279,21.0768976 39.8723866,20.8947688 L39.8723866,20.8947688 Z M47.4276119,19.0082845 C47.4329844,19.0067855 47.4329844,19.0067855 47.4383562,19.0052864 C48.2362896,18.7825897 48.7026111,17.9552056 48.4799145,17.1572722 C48.2572179,16.3593388 47.4298337,15.8930173 46.6319003,16.1157139 C46.6266645,16.1171751 46.6266645,16.1171751 46.6214279,16.1186362 C45.8234736,16.3412577 45.3570742,17.168598 45.5796958,17.9665523 C45.8023173,18.7645067 46.6296576,19.230906 47.4276119,19.0082845 L47.4276119,19.0082845 Z M54.4138099,15.7087505 C54.4167849,15.7075945 54.4167849,15.7075945 54.419759,15.7064383 C54.8556933,15.5369585 55.0716973,15.0461733 54.9022174,14.610239 C54.7327376,14.1743047 54.2419524,13.9583007 53.8060181,14.1277806 C53.8032127,14.1288712 53.8032127,14.1288712 53.8004064,14.1299616 C53.3644359,14.2993484 53.148327,14.7900873 53.3177137,15.2260578 C53.4871005,15.6620283 53.9778394,15.8781372 54.4138099,15.7087505 L54.4138099,15.7087505 Z" id="Shape"></path> + </g> + <g id="middle" transform="translate(0.000000, 33.900002)"> + <path d="M1.4471104,12.2885536 C1.45019313,12.2881864 1.45019313,12.2881864 1.45327704,12.2878189 C1.91768952,12.2324787 2.24930811,11.8111359 2.1939679,11.3467234 C2.13862768,10.8823109 1.71728488,10.5506923 1.2528724,10.6060325 C1.24980572,10.606398 1.24980572,10.606398 1.24674021,10.6067632 C0.782326598,10.6620939 0.450699376,11.0834299 0.506030077,11.5478435 C0.561360778,12.0122571 0.982696791,12.3438844 1.4471104,12.2885536 L1.4471104,12.2885536 Z M9.34858965,12.1304907 C9.35409106,12.129756 9.35409106,12.129756 9.35959333,12.129021 C10.1807278,12.019341 10.7574756,11.2647668 10.6477955,10.4436323 C10.5381155,9.62249789 9.78354128,9.04575011 8.96240683,9.15543014 C8.95693539,9.15616095 8.95693539,9.15616095 8.95146479,9.15689157 C8.13032806,9.26655457 7.55356464,10.0211168 7.66322765,10.8422536 C7.77289065,11.6633903 8.52745292,12.2401537 9.34858965,12.1304907 L9.34858965,12.1304907 Z M17.0066591,11.0493128 C17.0121188,11.0484984 17.0121188,11.0484984 17.0175789,11.0476838 C17.8369369,10.9254384 18.4020584,10.1621181 18.279813,9.34276003 C18.1575677,8.52340201 17.3942473,7.95828052 16.5748893,8.08052587 C16.5694641,8.08133528 16.5694641,8.08133528 16.5640392,8.08214454 C15.7446783,8.20437064 15.1795389,8.96767771 15.301765,9.78703861 C15.4239911,10.6063995 16.1872982,11.1715389 17.0066591,11.0493128 L17.0066591,11.0493128 Z M24.8225082,10.4232459 C24.8297684,10.4220275 24.8297684,10.4220275 24.8370287,10.420809 C25.922252,10.2386672 26.6543441,9.21126476 26.4722023,8.12604147 C26.2900605,7.04081818 25.2626581,6.30872601 24.1774348,6.49086783 C24.1702314,6.4920768 24.1702314,6.4920768 24.1630282,6.49328559 C23.0777996,6.67539597 22.3456777,7.70277717 22.5277881,8.78800574 C22.7098984,9.87323431 23.7372796,10.6053563 24.8225082,10.4232459 L24.8225082,10.4232459 Z M32.4725265,9.05261002 
C33.5532108,8.84532993 34.2612448,7.80122849 34.0539647,6.72054419 C33.8466846,5.63985989 32.8025831,4.93182589 31.7218988,5.13910599 C31.714724,5.14048211 31.714724,5.14048211 31.7075489,5.14185807 C30.6268567,5.34909665 29.9187826,6.39317088 30.1260211,7.47386314 C30.3332597,8.5545554 31.3773339,9.26262952 32.4580262,9.05539095 C32.4652764,9.05400057 32.4652764,9.05400057 32.4725265,9.05261002 Z M39.8723866,6.89476879 C39.877909,6.8935242 39.877909,6.8935242 39.883431,6.89227947 C40.6915794,6.71010471 41.1990314,5.90728916 41.0168566,5.09914075 C40.8346818,4.29099233 40.0318663,3.78354042 39.2237179,3.96571517 C39.218279,3.96694116 39.218279,3.96694116 39.2128397,3.96816703 C38.404681,4.15029588 37.8971834,4.9530826 38.0793123,5.76124136 C38.2614411,6.56940012 39.0642279,7.07689764 39.8723866,6.89476879 L39.8723866,6.89476879 Z M47.4276119,5.00828445 C47.4329844,5.00678549 47.4329844,5.00678549 47.4383562,5.00528637 C48.2362896,4.78258973 48.7026111,3.95520561 48.4799145,3.15727221 C48.2572179,2.35933881 47.4298337,1.89301728 46.6319003,2.11571391 C46.6266645,2.1171751 46.6266645,2.1171751 46.6214279,2.11863616 C45.8234736,2.34125773 45.3570742,3.16859798 45.5796958,3.96655233 C45.8023173,4.76450667 46.6296576,5.23090603 47.4276119,5.00828445 L47.4276119,5.00828445 Z M54.419759,2.30643871 C54.8556933,2.13695884 55.0716973,1.6461737 54.9022174,1.21023941 C54.7327376,0.774305114 54.2419524,0.558301127 53.8060181,0.727780997 C53.8032127,0.728871549 53.8032127,0.728871549 53.8004064,0.729962021 C53.3644359,0.89934874 53.148327,1.39008772 53.3177137,1.82605822 C53.4871005,2.26202871 53.9778394,2.47813756 54.4138099,2.30875084 C54.4167849,2.30759485 54.4167849,2.30759485 54.419759,2.30643871 Z" id="Shape"></path> + <path d="M1.67760355,20.096503 C1.68306395,20.0958524 1.68306395,20.0958524 1.68852642,20.0952015 C2.51113381,19.9971782 3.09852524,19.2508595 3.00050189,18.4282521 C2.90247854,17.6056447 2.15615986,17.0182533 1.33355246,17.1162767 C1.3281205,17.1169239 1.3281205,17.1169239 1.3226906,17.1175709 C0.500081196,17.2155774 -0.0873255124,17.961884 0.0106809923,18.7844934 C0.108687497,19.6071028 0.854994145,20.1945095 1.67760355,20.096503 L1.67760355,20.096503 Z M9.34858965,19.1274206 C9.35409106,19.1266859 9.35409106,19.1266859 9.35959333,19.1259509 C10.1807278,19.0162709 10.7574756,18.2616967 10.6477955,17.4405622 C10.5381155,16.6194278 9.78354128,16.04268 8.96240683,16.15236 C8.95693539,16.1530908 8.95693539,16.1530908 8.95146479,16.1538215 C8.13032806,16.2634845 7.55356464,17.0180467 7.66322765,17.8391835 C7.77289065,18.6603202 8.52745292,19.2370836 9.34858965,19.1274206 L9.34858965,19.1274206 Z M17.1767435,18.6256231 C17.1839958,18.6245412 17.1839958,18.6245412 17.1912486,18.6234592 C18.27963,18.4610765 19.0303002,17.4471319 18.8679175,16.3587506 C18.7055348,15.2703693 17.6915903,14.519699 16.6032089,14.6820817 C16.5960024,14.6831569 16.5960024,14.6831569 16.5887964,14.6842319 C15.5004112,14.846589 14.7497172,15.8605159 14.9120743,16.948901 C15.0744314,18.0372862 16.0883584,18.7879802 17.1767435,18.6256231 L17.1767435,18.6256231 Z M24.8370287,17.4177379 C25.922252,17.2355961 26.6543441,16.2081937 26.4722023,15.1229704 C26.2900605,14.0377471 25.2626581,13.3056549 24.1774348,13.4877968 C24.1702314,13.4890057 24.1702314,13.4890057 24.1630282,13.4902145 C23.0777996,13.6723249 22.3456777,14.6997061 22.5277881,15.7849347 C22.7098984,16.8701632 23.7372796,17.6022852 24.8225082,17.4201748 C24.8297684,17.4189565 24.8297684,17.4189565 24.8370287,17.4177379 Z M32.4725265,16.061085 
C33.5532108,15.853199 34.2612448,14.8060455 34.0539647,13.7222022 C33.8466846,12.6383589 32.8025831,11.9282552 31.7218988,12.1361412 C31.714724,12.1375214 31.714724,12.1375214 31.7075489,12.1389013 C30.6268567,12.3467457 29.9187826,13.3938719 30.1260211,14.4777232 C30.3332597,15.5615745 31.3773339,16.2717185 32.4580262,16.0638741 C32.4652764,16.0624797 32.4652764,16.0624797 32.4725265,16.061085 Z M40.0707225,14.4695476 C40.0780573,14.4678946 40.0780573,14.4678946 40.0853916,14.4662413 C41.158768,14.2242783 41.8327617,13.1579849 41.5907986,12.0846085 C41.3488355,11.011232 40.2825422,10.3372384 39.2091657,10.5792015 C39.2019419,10.5808298 39.2019419,10.5808298 39.1947175,10.582458 C38.1213273,10.8243601 37.447273,11.8906152 37.6891752,12.9640053 C37.9310773,14.0373955 38.9973324,14.7114498 40.0707225,14.4695476 L40.0707225,14.4695476 Z M47.4276119,12.0082845 C47.4329844,12.0067855 47.4329844,12.0067855 47.4383562,12.0052864 C48.2362896,11.7825897 48.7026111,10.9552056 48.4799145,10.1572722 C48.2572179,9.35933881 47.4298337,8.89301728 46.6319003,9.11571391 C46.6266645,9.1171751 46.6266645,9.1171751 46.6214279,9.11863616 C45.8234736,9.34125773 45.3570742,10.168598 45.5796958,10.9665523 C45.8023173,11.7645067 46.6296576,12.230906 47.4276119,12.0082845 L47.4276119,12.0082845 Z M54.8999721,9.57562965 C54.9052414,9.57358217 54.9052414,9.57358217 54.9105092,9.57153441 C55.6826371,9.27135123 56.0652239,8.40207131 55.7650408,7.62994336 C55.4648576,6.85781542 54.5955777,6.4752286 53.8234497,6.77541179 C53.8184808,6.77734338 53.8184808,6.77734338 53.8135101,6.77927482 C53.0413181,7.07929302 52.6585455,7.94849117 52.9585637,8.72068323 C53.2585819,9.4928753 54.12778,9.87564785 54.8999721,9.57562965 L54.8999721,9.57562965 Z" id="Shape"></path> + <path d="M1.45327704,26.6978168 C1.54647464,26.6867112 1.63432439,26.660866 1.7147722,26.6228911 C2.03520341,26.4716332 2.23820252,26.1279362 2.1939679,25.7567213 C2.13862768,25.2923089 1.71728488,24.9606903 1.2528724,25.0160305 C1.24980572,25.0163959 1.24980572,25.0163959 1.24674021,25.0167611 C0.782326598,25.0720918 0.450699376,25.4934278 0.506030077,25.9578415 C0.561360778,26.4222551 0.982696791,26.7538823 1.4471104,26.6985516 C1.45019313,26.6981843 1.45019313,26.6981843 1.45327704,26.6978168 Z M9.34858965,26.1274206 C9.35409106,26.1266859 9.35409106,26.1266859 9.35959333,26.1259509 C10.1807278,26.0162709 10.7574756,25.2616967 10.6477955,24.4405622 C10.5381155,23.6194278 9.78354128,23.04268 8.96240683,23.15236 C8.95693539,23.1530908 8.95693539,23.1530908 8.95146479,23.1538215 C8.13032806,23.2634845 7.55356464,24.0180467 7.66322765,24.8391835 C7.77289065,25.6603202 8.52745292,26.2370836 9.34858965,26.1274206 L9.34858965,26.1274206 Z M17.0066591,25.0462427 C17.0121188,25.0454283 17.0121188,25.0454283 17.0175789,25.0446136 C17.8369369,24.9223683 18.4020584,24.1590479 18.279813,23.3396899 C18.1575677,22.5203319 17.3942473,21.9552104 16.5748893,22.0774558 C16.5694641,22.0782652 16.5694641,22.0782652 16.5640392,22.0790744 C15.7446783,22.2013005 15.1795389,22.9646076 15.301765,23.7839685 C15.4239911,24.6033294 16.1872982,25.1684688 17.0066591,25.0462427 L17.0066591,25.0462427 Z M24.8225082,24.4201748 C24.8297684,24.4189565 24.8297684,24.4189565 24.8370287,24.4177379 C25.922252,24.2355961 26.6543441,23.2081937 26.4722023,22.1229704 C26.2900605,21.0377471 25.2626581,20.3056549 24.1774348,20.4877968 C24.1702314,20.4890057 24.1702314,20.4890057 24.1630282,20.4902145 C23.0777996,20.6723249 22.3456777,21.6997061 22.5277881,22.7849347 C22.7098984,23.8701632 
23.7372796,24.6022852 24.8225082,24.4201748 L24.8225082,24.4201748 Z M32.4725265,23.0495399 C33.5532108,22.8422598 34.2612448,21.7981584 34.0539647,20.7174741 C33.8466846,19.6367898 32.8025831,18.9287558 31.7218988,19.1360359 C31.714724,19.137412 31.714724,19.137412 31.7075489,19.138788 C30.6268567,19.3460265 29.9187826,20.3901008 30.1260211,21.470793 C30.3332597,22.5514853 31.3773339,23.2595594 32.4580262,23.0523208 C32.4652764,23.0509305 32.4652764,23.0509305 32.4725265,23.0495399 Z M39.8723866,20.8947688 C39.877909,20.8935242 39.877909,20.8935242 39.883431,20.8922795 C40.6915794,20.7101047 41.1990314,19.9072892 41.0168566,19.0991407 C40.8346818,18.2909923 40.0318663,17.7835404 39.2237179,17.9657152 C39.218279,17.9669412 39.218279,17.9669412 39.2128397,17.968167 C38.404681,18.1502959 37.8971834,18.9530826 38.0793123,19.7612414 C38.2614411,20.5694001 39.0642279,21.0768976 39.8723866,20.8947688 L39.8723866,20.8947688 Z M47.4276119,19.0082845 C47.4329844,19.0067855 47.4329844,19.0067855 47.4383562,19.0052864 C48.2362896,18.7825897 48.7026111,17.9552056 48.4799145,17.1572722 C48.2572179,16.3593388 47.4298337,15.8930173 46.6319003,16.1157139 C46.6266645,16.1171751 46.6266645,16.1171751 46.6214279,16.1186362 C45.8234736,16.3412577 45.3570742,17.168598 45.5796958,17.9665523 C45.8023173,18.7645067 46.6296576,19.230906 47.4276119,19.0082845 L47.4276119,19.0082845 Z M54.4138099,15.7087505 C54.4167849,15.7075945 54.4167849,15.7075945 54.419759,15.7064383 C54.8556933,15.5369585 55.0716973,15.0461733 54.9022174,14.610239 C54.7327376,14.1743047 54.2419524,13.9583007 53.8060181,14.1277806 C53.8032127,14.1288712 53.8032127,14.1288712 53.8004064,14.1299616 C53.3644359,14.2993484 53.148327,14.7900873 53.3177137,15.2260578 C53.4871005,15.6620283 53.9778394,15.8781372 54.4138099,15.7087505 L54.4138099,15.7087505 Z" id="Shape"></path> + </g> + <g id="top"> + <path d="M1.4471104,12.2885536 C1.45019313,12.2881864 1.45019313,12.2881864 1.45327704,12.2878189 C1.91768952,12.2324787 2.24930811,11.8111359 2.1939679,11.3467234 C2.13862768,10.8823109 1.71728488,10.5506923 1.2528724,10.6060325 C1.24980572,10.606398 1.24980572,10.606398 1.24674021,10.6067632 C0.782326598,10.6620939 0.450699376,11.0834299 0.506030077,11.5478435 C0.561360778,12.0122571 0.982696791,12.3438844 1.4471104,12.2885536 L1.4471104,12.2885536 Z M9.34858965,12.1304907 C9.35409106,12.129756 9.35409106,12.129756 9.35959333,12.129021 C10.1807278,12.019341 10.7574756,11.2647668 10.6477955,10.4436323 C10.5381155,9.62249789 9.78354128,9.04575011 8.96240683,9.15543014 C8.95693539,9.15616095 8.95693539,9.15616095 8.95146479,9.15689157 C8.13032806,9.26655457 7.55356464,10.0211168 7.66322765,10.8422536 C7.77289065,11.6633903 8.52745292,12.2401537 9.34858965,12.1304907 L9.34858965,12.1304907 Z M17.0066591,11.0493128 C17.0121188,11.0484984 17.0121188,11.0484984 17.0175789,11.0476838 C17.8369369,10.9254384 18.4020584,10.1621181 18.279813,9.34276003 C18.1575677,8.52340201 17.3942473,7.95828052 16.5748893,8.08052587 C16.5694641,8.08133528 16.5694641,8.08133528 16.5640392,8.08214454 C15.7446783,8.20437064 15.1795389,8.96767771 15.301765,9.78703861 C15.4239911,10.6063995 16.1872982,11.1715389 17.0066591,11.0493128 L17.0066591,11.0493128 Z M24.8225082,10.4232459 C24.8297684,10.4220275 24.8297684,10.4220275 24.8370287,10.420809 C25.922252,10.2386672 26.6543441,9.21126476 26.4722023,8.12604147 C26.2900605,7.04081818 25.2626581,6.30872601 24.1774348,6.49086783 C24.1702314,6.4920768 24.1702314,6.4920768 24.1630282,6.49328559 C23.0777996,6.67539597 22.3456777,7.70277717 
22.5277881,8.78800574 C22.7098984,9.87323431 23.7372796,10.6053563 24.8225082,10.4232459 L24.8225082,10.4232459 Z M32.4725265,9.05261002 C33.5532108,8.84532993 34.2612448,7.80122849 34.0539647,6.72054419 C33.8466846,5.63985989 32.8025831,4.93182589 31.7218988,5.13910599 C31.714724,5.14048211 31.714724,5.14048211 31.7075489,5.14185807 C30.6268567,5.34909665 29.9187826,6.39317088 30.1260211,7.47386314 C30.3332597,8.5545554 31.3773339,9.26262952 32.4580262,9.05539095 C32.4652764,9.05400057 32.4652764,9.05400057 32.4725265,9.05261002 Z M39.8723866,6.89476879 C39.877909,6.8935242 39.877909,6.8935242 39.883431,6.89227947 C40.6915794,6.71010471 41.1990314,5.90728916 41.0168566,5.09914075 C40.8346818,4.29099233 40.0318663,3.78354042 39.2237179,3.96571517 C39.218279,3.96694116 39.218279,3.96694116 39.2128397,3.96816703 C38.404681,4.15029588 37.8971834,4.9530826 38.0793123,5.76124136 C38.2614411,6.56940012 39.0642279,7.07689764 39.8723866,6.89476879 L39.8723866,6.89476879 Z M47.4276119,5.00828445 C47.4329844,5.00678549 47.4329844,5.00678549 47.4383562,5.00528637 C48.2362896,4.78258973 48.7026111,3.95520561 48.4799145,3.15727221 C48.2572179,2.35933881 47.4298337,1.89301728 46.6319003,2.11571391 C46.6266645,2.1171751 46.6266645,2.1171751 46.6214279,2.11863616 C45.8234736,2.34125773 45.3570742,3.16859798 45.5796958,3.96655233 C45.8023173,4.76450667 46.6296576,5.23090603 47.4276119,5.00828445 L47.4276119,5.00828445 Z M54.419759,2.30643871 C54.8556933,2.13695884 55.0716973,1.6461737 54.9022174,1.21023941 C54.7327376,0.774305114 54.2419524,0.558301127 53.8060181,0.727780997 C53.8032127,0.728871549 53.8032127,0.728871549 53.8004064,0.729962021 C53.3644359,0.89934874 53.148327,1.39008772 53.3177137,1.82605822 C53.4871005,2.26202871 53.9778394,2.47813756 54.4138099,2.30875084 C54.4167849,2.30759485 54.4167849,2.30759485 54.419759,2.30643871 Z" id="Shape"></path> + <path d="M1.67760355,20.096503 C1.68306395,20.0958524 1.68306395,20.0958524 1.68852642,20.0952015 C2.51113381,19.9971782 3.09852524,19.2508595 3.00050189,18.4282521 C2.90247854,17.6056447 2.15615986,17.0182533 1.33355246,17.1162767 C1.3281205,17.1169239 1.3281205,17.1169239 1.3226906,17.1175709 C0.500081196,17.2155774 -0.0873255124,17.961884 0.0106809923,18.7844934 C0.108687497,19.6071028 0.854994145,20.1945095 1.67760355,20.096503 L1.67760355,20.096503 Z M9.34858965,19.1274206 C9.35409106,19.1266859 9.35409106,19.1266859 9.35959333,19.1259509 C10.1807278,19.0162709 10.7574756,18.2616967 10.6477955,17.4405622 C10.5381155,16.6194278 9.78354128,16.04268 8.96240683,16.15236 C8.95693539,16.1530908 8.95693539,16.1530908 8.95146479,16.1538215 C8.13032806,16.2634845 7.55356464,17.0180467 7.66322765,17.8391835 C7.77289065,18.6603202 8.52745292,19.2370836 9.34858965,19.1274206 L9.34858965,19.1274206 Z M17.1767435,18.6256231 C17.1839958,18.6245412 17.1839958,18.6245412 17.1912486,18.6234592 C18.27963,18.4610765 19.0303002,17.4471319 18.8679175,16.3587506 C18.7055348,15.2703693 17.6915903,14.519699 16.6032089,14.6820817 C16.5960024,14.6831569 16.5960024,14.6831569 16.5887964,14.6842319 C15.5004112,14.846589 14.7497172,15.8605159 14.9120743,16.948901 C15.0744314,18.0372862 16.0883584,18.7879802 17.1767435,18.6256231 L17.1767435,18.6256231 Z M24.8370287,17.4177379 C25.922252,17.2355961 26.6543441,16.2081937 26.4722023,15.1229704 C26.2900605,14.0377471 25.2626581,13.3056549 24.1774348,13.4877968 C24.1702314,13.4890057 24.1702314,13.4890057 24.1630282,13.4902145 C23.0777996,13.6723249 22.3456777,14.6997061 22.5277881,15.7849347 C22.7098984,16.8701632 
23.7372796,17.6022852 24.8225082,17.4201748 C24.8297684,17.4189565 24.8297684,17.4189565 24.8370287,17.4177379 Z M32.4725265,16.061085 C33.5532108,15.853199 34.2612448,14.8060455 34.0539647,13.7222022 C33.8466846,12.6383589 32.8025831,11.9282552 31.7218988,12.1361412 C31.714724,12.1375214 31.714724,12.1375214 31.7075489,12.1389013 C30.6268567,12.3467457 29.9187826,13.3938719 30.1260211,14.4777232 C30.3332597,15.5615745 31.3773339,16.2717185 32.4580262,16.0638741 C32.4652764,16.0624797 32.4652764,16.0624797 32.4725265,16.061085 Z M40.0707225,14.4695476 C40.0780573,14.4678946 40.0780573,14.4678946 40.0853916,14.4662413 C41.158768,14.2242783 41.8327617,13.1579849 41.5907986,12.0846085 C41.3488355,11.011232 40.2825422,10.3372384 39.2091657,10.5792015 C39.2019419,10.5808298 39.2019419,10.5808298 39.1947175,10.582458 C38.1213273,10.8243601 37.447273,11.8906152 37.6891752,12.9640053 C37.9310773,14.0373955 38.9973324,14.7114498 40.0707225,14.4695476 L40.0707225,14.4695476 Z M47.4276119,12.0082845 C47.4329844,12.0067855 47.4329844,12.0067855 47.4383562,12.0052864 C48.2362896,11.7825897 48.7026111,10.9552056 48.4799145,10.1572722 C48.2572179,9.35933881 47.4298337,8.89301728 46.6319003,9.11571391 C46.6266645,9.1171751 46.6266645,9.1171751 46.6214279,9.11863616 C45.8234736,9.34125773 45.3570742,10.168598 45.5796958,10.9665523 C45.8023173,11.7645067 46.6296576,12.230906 47.4276119,12.0082845 L47.4276119,12.0082845 Z M54.8999721,9.57562965 C54.9052414,9.57358217 54.9052414,9.57358217 54.9105092,9.57153441 C55.6826371,9.27135123 56.0652239,8.40207131 55.7650408,7.62994336 C55.4648576,6.85781542 54.5955777,6.4752286 53.8234497,6.77541179 C53.8184808,6.77734338 53.8184808,6.77734338 53.8135101,6.77927482 C53.0413181,7.07929302 52.6585455,7.94849117 52.9585637,8.72068323 C53.2585819,9.4928753 54.12778,9.87564785 54.8999721,9.57562965 L54.8999721,9.57562965 Z" id="Shape"></path> + <path d="M1.45327704,26.6978168 C1.54647464,26.6867112 1.63432439,26.660866 1.7147722,26.6228911 C2.03520341,26.4716332 2.23820252,26.1279362 2.1939679,25.7567213 C2.13862768,25.2923089 1.71728488,24.9606903 1.2528724,25.0160305 C1.24980572,25.0163959 1.24980572,25.0163959 1.24674021,25.0167611 C0.782326598,25.0720918 0.450699376,25.4934278 0.506030077,25.9578415 C0.561360778,26.4222551 0.982696791,26.7538823 1.4471104,26.6985516 C1.45019313,26.6981843 1.45019313,26.6981843 1.45327704,26.6978168 Z M9.34858965,26.1274206 C9.35409106,26.1266859 9.35409106,26.1266859 9.35959333,26.1259509 C10.1807278,26.0162709 10.7574756,25.2616967 10.6477955,24.4405622 C10.5381155,23.6194278 9.78354128,23.04268 8.96240683,23.15236 C8.95693539,23.1530908 8.95693539,23.1530908 8.95146479,23.1538215 C8.13032806,23.2634845 7.55356464,24.0180467 7.66322765,24.8391835 C7.77289065,25.6603202 8.52745292,26.2370836 9.34858965,26.1274206 L9.34858965,26.1274206 Z M17.0066591,25.0462427 C17.0121188,25.0454283 17.0121188,25.0454283 17.0175789,25.0446136 C17.8369369,24.9223683 18.4020584,24.1590479 18.279813,23.3396899 C18.1575677,22.5203319 17.3942473,21.9552104 16.5748893,22.0774558 C16.5694641,22.0782652 16.5694641,22.0782652 16.5640392,22.0790744 C15.7446783,22.2013005 15.1795389,22.9646076 15.301765,23.7839685 C15.4239911,24.6033294 16.1872982,25.1684688 17.0066591,25.0462427 L17.0066591,25.0462427 Z M24.8225082,24.4201748 C24.8297684,24.4189565 24.8297684,24.4189565 24.8370287,24.4177379 C25.922252,24.2355961 26.6543441,23.2081937 26.4722023,22.1229704 C26.2900605,21.0377471 25.2626581,20.3056549 24.1774348,20.4877968 C24.1702314,20.4890057 
24.1702314,20.4890057 24.1630282,20.4902145 C23.0777996,20.6723249 22.3456777,21.6997061 22.5277881,22.7849347 C22.7098984,23.8701632 23.7372796,24.6022852 24.8225082,24.4201748 L24.8225082,24.4201748 Z M32.4725265,23.0495399 C33.5532108,22.8422598 34.2612448,21.7981584 34.0539647,20.7174741 C33.8466846,19.6367898 32.8025831,18.9287558 31.7218988,19.1360359 C31.714724,19.137412 31.714724,19.137412 31.7075489,19.138788 C30.6268567,19.3460265 29.9187826,20.3901008 30.1260211,21.470793 C30.3332597,22.5514853 31.3773339,23.2595594 32.4580262,23.0523208 C32.4652764,23.0509305 32.4652764,23.0509305 32.4725265,23.0495399 Z M39.8723866,20.8947688 C39.877909,20.8935242 39.877909,20.8935242 39.883431,20.8922795 C40.6915794,20.7101047 41.1990314,19.9072892 41.0168566,19.0991407 C40.8346818,18.2909923 40.0318663,17.7835404 39.2237179,17.9657152 C39.218279,17.9669412 39.218279,17.9669412 39.2128397,17.968167 C38.404681,18.1502959 37.8971834,18.9530826 38.0793123,19.7612414 C38.2614411,20.5694001 39.0642279,21.0768976 39.8723866,20.8947688 L39.8723866,20.8947688 Z M47.4276119,19.0082845 C47.4329844,19.0067855 47.4329844,19.0067855 47.4383562,19.0052864 C48.2362896,18.7825897 48.7026111,17.9552056 48.4799145,17.1572722 C48.2572179,16.3593388 47.4298337,15.8930173 46.6319003,16.1157139 C46.6266645,16.1171751 46.6266645,16.1171751 46.6214279,16.1186362 C45.8234736,16.3412577 45.3570742,17.168598 45.5796958,17.9665523 C45.8023173,18.7645067 46.6296576,19.230906 47.4276119,19.0082845 L47.4276119,19.0082845 Z M54.4138099,15.7087505 C54.4167849,15.7075945 54.4167849,15.7075945 54.419759,15.7064383 C54.8556933,15.5369585 55.0716973,15.0461733 54.9022174,14.610239 C54.7327376,14.1743047 54.2419524,13.9583007 53.8060181,14.1277806 C53.8032127,14.1288712 53.8032127,14.1288712 53.8004064,14.1299616 C53.3644359,14.2993484 53.148327,14.7900873 53.3177137,15.2260578 C53.4871005,15.6620283 53.9778394,15.8781372 54.4138099,15.7087505 L54.4138099,15.7087505 Z" id="Shape"></path> + </g> + </g> + </g> +</svg> diff --git a/scala3doc/dotty-docs/docs/images/explicit-nulls/explicit-nulls-type-hierarchy.png b/scala3doc/dotty-docs/docs/images/explicit-nulls/explicit-nulls-type-hierarchy.png new file mode 100644 index 000000000000..65179260c246 Binary files /dev/null and b/scala3doc/dotty-docs/docs/images/explicit-nulls/explicit-nulls-type-hierarchy.png differ diff --git a/scala3doc/dotty-docs/docs/images/favicon.png b/scala3doc/dotty-docs/docs/images/favicon.png new file mode 100644 index 000000000000..ecd0e98fb123 Binary files /dev/null and b/scala3doc/dotty-docs/docs/images/favicon.png differ diff --git a/scala3doc/dotty-docs/docs/images/felix.jpg b/scala3doc/dotty-docs/docs/images/felix.jpg new file mode 100644 index 000000000000..a2ecfe933100 Binary files /dev/null and b/scala3doc/dotty-docs/docs/images/felix.jpg differ diff --git a/scala3doc/dotty-docs/docs/images/fengyun.jpg b/scala3doc/dotty-docs/docs/images/fengyun.jpg new file mode 100644 index 000000000000..ed5de3159957 Binary files /dev/null and b/scala3doc/dotty-docs/docs/images/fengyun.jpg differ diff --git a/scala3doc/dotty-docs/docs/images/github-logo.svg b/scala3doc/dotty-docs/docs/images/github-logo.svg new file mode 100644 index 000000000000..aea183a803eb --- /dev/null +++ b/scala3doc/dotty-docs/docs/images/github-logo.svg @@ -0,0 +1,3 @@ +<svg height="1024" width="1024" xmlns="http://www.w3.org/2000/svg"> + <path fill="#ffffff" d="M512 0C229.25 0 0 229.25 0 512c0 226.25 146.688 418.125 350.156 485.812 25.594 4.688 34.938-11.125 34.938-24.625 
0-12.188-0.469-52.562-0.719-95.312C242 908.812 211.906 817.5 211.906 817.5c-23.312-59.125-56.844-74.875-56.844-74.875-46.531-31.75 3.53-31.125 3.53-31.125 51.406 3.562 78.47 52.75 78.47 52.75 45.688 78.25 119.875 55.625 149 42.5 4.654-33 17.904-55.625 32.5-68.375C304.906 725.438 185.344 681.5 185.344 485.312c0-55.938 19.969-101.562 52.656-137.406-5.219-13-22.844-65.094 5.062-135.562 0 0 42.938-13.75 140.812 52.5 40.812-11.406 84.594-17.031 128.125-17.219 43.5 0.188 87.312 5.875 128.188 17.281 97.688-66.312 140.688-52.5 140.688-52.5 28 70.531 10.375 122.562 5.125 135.5 32.812 35.844 52.625 81.469 52.625 137.406 0 196.688-119.75 240-233.812 252.688 18.438 15.875 34.75 47 34.75 94.75 0 68.438-0.688 123.625-0.688 140.5 0 13.625 9.312 29.562 35.25 24.562C877.438 930 1024 738.125 1024 512 1024 229.25 794.75 0 512 0z" /> +</svg> diff --git a/scala3doc/dotty-docs/docs/images/martin.jpg b/scala3doc/dotty-docs/docs/images/martin.jpg new file mode 100644 index 000000000000..88fcde550d02 Binary files /dev/null and b/scala3doc/dotty-docs/docs/images/martin.jpg differ diff --git a/scala3doc/dotty-docs/docs/images/nico.jpg b/scala3doc/dotty-docs/docs/images/nico.jpg new file mode 100644 index 000000000000..aa890470841d Binary files /dev/null and b/scala3doc/dotty-docs/docs/images/nico.jpg differ diff --git a/scala3doc/dotty-docs/docs/images/olivier.jpg b/scala3doc/dotty-docs/docs/images/olivier.jpg new file mode 100644 index 000000000000..62f899d16d42 Binary files /dev/null and b/scala3doc/dotty-docs/docs/images/olivier.jpg differ diff --git a/scala3doc/dotty-docs/docs/images/others/scala-days-logo.png b/scala3doc/dotty-docs/docs/images/others/scala-days-logo.png new file mode 100644 index 000000000000..46b1d32abcf4 Binary files /dev/null and b/scala3doc/dotty-docs/docs/images/others/scala-days-logo.png differ diff --git a/scala3doc/dotty-docs/docs/images/petrashko.jpg b/scala3doc/dotty-docs/docs/images/petrashko.jpg new file mode 100644 index 000000000000..dc402cd61b21 Binary files /dev/null and b/scala3doc/dotty-docs/docs/images/petrashko.jpg differ diff --git a/scala3doc/dotty-docs/docs/images/preview.png b/scala3doc/dotty-docs/docs/images/preview.png new file mode 100644 index 000000000000..695aab44736e Binary files /dev/null and b/scala3doc/dotty-docs/docs/images/preview.png differ diff --git a/scala3doc/dotty-docs/docs/images/scala-logo copy.svg b/scala3doc/dotty-docs/docs/images/scala-logo copy.svg new file mode 100644 index 000000000000..6eb7cb4523f6 --- /dev/null +++ b/scala3doc/dotty-docs/docs/images/scala-logo copy.svg @@ -0,0 +1,13 @@ +<svg width="64px" height="109px" viewBox="0 0 64 109" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"> + <g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd"> + <g id="logo-background" transform="translate(0.000000, 16.000000)" fill="#7F0C1D"> + <path d="M0.5,59.5 C0.5,59.5 63.4,65.8 63.4,76.3 L63.4,51.1 C63.4,51.1 63.4,40.6 0.5,34.3 L0.5,59.5 L0.5,59.5 Z" id="logo-background-bottom"></path> + <path d="M0.5,25.9 C0.5,25.9 63.4,32.2 63.4,42.7 L63.4,17.5 C63.4,17.5 63.4,7 0.5,0.7 L0.5,10.5 L0.5,25.9 L0.5,25.9 Z" id="logo-background-top"></path> + </g> + <g id="logo-foreground" fill="#F11205"> + <path d="M0.5,109 L0.5,83.8 C0.5,83.8 63.4,77.5 63.4,67 L63.4,92.2 C63.5,92.3 63.5,102.7 0.5,109" id="Logo_Foreground_Bottom"></path> + <path d="M0.5,50.3 C0.5,50.3 63.4,44 63.4,33.5 L63.4,58.7 C63.4,58.7 63.4,69.2 0.5,75.5 L0.5,50.3 L0.5,50.3 Z" id="Logo_Foreground_Middle"></path> + <path d="M63.5,0 
L63.5,25.2 C63.5,25.2 63.5,35.7 0.6,42 L0.6,16.7 C0.5,16.7 63.5,10.5 63.5,0" id="logo-foreground-top"></path> + </g> + </g> +</svg> diff --git a/scala3doc/dotty-docs/docs/images/scala-logo-white.svg b/scala3doc/dotty-docs/docs/images/scala-logo-white.svg new file mode 100644 index 000000000000..be4e80b10768 --- /dev/null +++ b/scala3doc/dotty-docs/docs/images/scala-logo-white.svg @@ -0,0 +1,13 @@ +<svg width="64px" height="109px" viewBox="0 0 64 109" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"> + <g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd"> + <g id="logo-background" transform="translate(0.000000, 16.000000)" fill="#CCC"> + <path d="M0.5,59.5 C0.5,59.5 63.4,65.8 63.4,76.3 L63.4,51.1 C63.4,51.1 63.4,40.6 0.5,34.3 L0.5,59.5 L0.5,59.5 Z" id="logo-background-bottom"></path> + <path d="M0.5,25.9 C0.5,25.9 63.4,32.2 63.4,42.7 L63.4,17.5 C63.4,17.5 63.4,7 0.5,0.7 L0.5,10.5 L0.5,25.9 L0.5,25.9 Z" id="logo-background-top"></path> + </g> + <g id="logo-foreground" fill="#FFF"> + <path d="M0.5,109 L0.5,83.8 C0.5,83.8 63.4,77.5 63.4,67 L63.4,92.2 C63.5,92.3 63.5,102.7 0.5,109" id="Logo_Foreground_Bottom"></path> + <path d="M0.5,50.3 C0.5,50.3 63.4,44 63.4,33.5 L63.4,58.7 C63.4,58.7 63.4,69.2 0.5,75.5 L0.5,50.3 L0.5,50.3 Z" id="Logo_Foreground_Middle"></path> + <path d="M63.5,0 L63.5,25.2 C63.5,25.2 63.5,35.7 0.6,42 L0.6,16.7 C0.5,16.7 63.5,10.5 63.5,0" id="logo-foreground-top"></path> + </g> + </g> +</svg> diff --git a/scala3doc/dotty-docs/docs/images/scala-logo.svg b/scala3doc/dotty-docs/docs/images/scala-logo.svg new file mode 100644 index 000000000000..1fb642c8bfa0 --- /dev/null +++ b/scala3doc/dotty-docs/docs/images/scala-logo.svg @@ -0,0 +1,30 @@ +<svg width="64px" height="109px" viewBox="0 0 64 109" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"> + <g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd"> + <g id="logo-background" transform="translate(0.000000, 16.000000)" fill="#ccc"> + <path d="M0.5,59.5 C0.5,59.5 63.4,65.8 63.4,76.3 L63.4,51.1 C63.4,51.1 63.4,40.6 0.5,34.3 L0.5,59.5 L0.5,59.5 Z" id="logo-background-bottom"></path> + <path d="M0.5,25.9 C0.5,25.9 63.4,32.2 63.4,42.7 L63.4,17.5 C63.4,17.5 63.4,7 0.5,0.7 L0.5,10.5 L0.5,25.9 L0.5,25.9 Z" id="logo-background-top"></path> + </g> + <g id="logo-foreground" fill="#ffffff"> + <path d="M0.5,109 L0.5,83.8 C0.5,83.8 63.4,77.5 63.4,67 L63.4,92.2 C63.5,92.3 63.5,102.7 0.5,109" id="Logo_Foreground_Bottom"></path> + <path d="M0.5,50.3 C0.5,50.3 63.4,44 63.4,33.5 L63.4,58.7 C63.4,58.7 63.4,69.2 0.5,75.5 L0.5,50.3 L0.5,50.3 Z" id="Logo_Foreground_Middle"></path> + <path d="M63.5,0 L63.5,25.2 C63.5,25.2 63.5,35.7 0.6,42 L0.6,16.7 C0.5,16.7 63.5,10.5 63.5,0" id="logo-foreground-top"></path> + </g> + <g id="dots" transform="translate(4.000000, 10.000000)" fill="#B6B6B6"> + <g id="bottom" transform="translate(0.000000, 67.500000)"> + <path d="M1.4471104,12.2885536 C1.45019313,12.2881864 1.45019313,12.2881864 1.45327704,12.2878189 C1.91768952,12.2324787 2.24930811,11.8111359 2.1939679,11.3467234 C2.13862768,10.8823109 1.71728488,10.5506923 1.2528724,10.6060325 C1.24980572,10.606398 1.24980572,10.606398 1.24674021,10.6067632 C0.782326598,10.6620939 0.450699376,11.0834299 0.506030077,11.5478435 C0.561360778,12.0122571 0.982696791,12.3438844 1.4471104,12.2885536 L1.4471104,12.2885536 Z M9.34858965,12.1304907 C9.35409106,12.129756 9.35409106,12.129756 9.35959333,12.129021 C10.1807278,12.019341 
10.7574756,11.2647668 10.6477955,10.4436323 C10.5381155,9.62249789 9.78354128,9.04575011 8.96240683,9.15543014 C8.95693539,9.15616095 8.95693539,9.15616095 8.95146479,9.15689157 C8.13032806,9.26655457 7.55356464,10.0211168 7.66322765,10.8422536 C7.77289065,11.6633903 8.52745292,12.2401537 9.34858965,12.1304907 L9.34858965,12.1304907 Z M17.0066591,11.0493128 C17.0121188,11.0484984 17.0121188,11.0484984 17.0175789,11.0476838 C17.8369369,10.9254384 18.4020584,10.1621181 18.279813,9.34276003 C18.1575677,8.52340201 17.3942473,7.95828052 16.5748893,8.08052587 C16.5694641,8.08133528 16.5694641,8.08133528 16.5640392,8.08214454 C15.7446783,8.20437064 15.1795389,8.96767771 15.301765,9.78703861 C15.4239911,10.6063995 16.1872982,11.1715389 17.0066591,11.0493128 L17.0066591,11.0493128 Z M24.8225082,10.4232459 C24.8297684,10.4220275 24.8297684,10.4220275 24.8370287,10.420809 C25.922252,10.2386672 26.6543441,9.21126476 26.4722023,8.12604147 C26.2900605,7.04081818 25.2626581,6.30872601 24.1774348,6.49086783 C24.1702314,6.4920768 24.1702314,6.4920768 24.1630282,6.49328559 C23.0777996,6.67539597 22.3456777,7.70277717 22.5277881,8.78800574 C22.7098984,9.87323431 23.7372796,10.6053563 24.8225082,10.4232459 L24.8225082,10.4232459 Z M32.4725265,9.05261002 C33.5532108,8.84532993 34.2612448,7.80122849 34.0539647,6.72054419 C33.8466846,5.63985989 32.8025831,4.93182589 31.7218988,5.13910599 C31.714724,5.14048211 31.714724,5.14048211 31.7075489,5.14185807 C30.6268567,5.34909665 29.9187826,6.39317088 30.1260211,7.47386314 C30.3332597,8.5545554 31.3773339,9.26262952 32.4580262,9.05539095 C32.4652764,9.05400057 32.4652764,9.05400057 32.4725265,9.05261002 Z M39.8723866,6.89476879 C39.877909,6.8935242 39.877909,6.8935242 39.883431,6.89227947 C40.6915794,6.71010471 41.1990314,5.90728916 41.0168566,5.09914075 C40.8346818,4.29099233 40.0318663,3.78354042 39.2237179,3.96571517 C39.218279,3.96694116 39.218279,3.96694116 39.2128397,3.96816703 C38.404681,4.15029588 37.8971834,4.9530826 38.0793123,5.76124136 C38.2614411,6.56940012 39.0642279,7.07689764 39.8723866,6.89476879 L39.8723866,6.89476879 Z M47.4276119,5.00828445 C47.4329844,5.00678549 47.4329844,5.00678549 47.4383562,5.00528637 C48.2362896,4.78258973 48.7026111,3.95520561 48.4799145,3.15727221 C48.2572179,2.35933881 47.4298337,1.89301728 46.6319003,2.11571391 C46.6266645,2.1171751 46.6266645,2.1171751 46.6214279,2.11863616 C45.8234736,2.34125773 45.3570742,3.16859798 45.5796958,3.96655233 C45.8023173,4.76450667 46.6296576,5.23090603 47.4276119,5.00828445 L47.4276119,5.00828445 Z M54.419759,2.30643871 C54.8556933,2.13695884 55.0716973,1.6461737 54.9022174,1.21023941 C54.7327376,0.774305114 54.2419524,0.558301127 53.8060181,0.727780997 C53.8032127,0.728871549 53.8032127,0.728871549 53.8004064,0.729962021 C53.3644359,0.89934874 53.148327,1.39008772 53.3177137,1.82605822 C53.4871005,2.26202871 53.9778394,2.47813756 54.4138099,2.30875084 C54.4167849,2.30759485 54.4167849,2.30759485 54.419759,2.30643871 Z" id="Shape"></path> + <path d="M1.67760355,20.096503 C1.68306395,20.0958524 1.68306395,20.0958524 1.68852642,20.0952015 C2.51113381,19.9971782 3.09852524,19.2508595 3.00050189,18.4282521 C2.90247854,17.6056447 2.15615986,17.0182533 1.33355246,17.1162767 C1.3281205,17.1169239 1.3281205,17.1169239 1.3226906,17.1175709 C0.500081196,17.2155774 -0.0873255124,17.961884 0.0106809923,18.7844934 C0.108687497,19.6071028 0.854994145,20.1945095 1.67760355,20.096503 L1.67760355,20.096503 Z M9.34858965,19.1274206 C9.35409106,19.1266859 9.35409106,19.1266859 9.35959333,19.1259509 
C10.1807278,19.0162709 10.7574756,18.2616967 10.6477955,17.4405622 C10.5381155,16.6194278 9.78354128,16.04268 8.96240683,16.15236 C8.95693539,16.1530908 8.95693539,16.1530908 8.95146479,16.1538215 C8.13032806,16.2634845 7.55356464,17.0180467 7.66322765,17.8391835 C7.77289065,18.6603202 8.52745292,19.2370836 9.34858965,19.1274206 L9.34858965,19.1274206 Z M17.1767435,18.6256231 C17.1839958,18.6245412 17.1839958,18.6245412 17.1912486,18.6234592 C18.27963,18.4610765 19.0303002,17.4471319 18.8679175,16.3587506 C18.7055348,15.2703693 17.6915903,14.519699 16.6032089,14.6820817 C16.5960024,14.6831569 16.5960024,14.6831569 16.5887964,14.6842319 C15.5004112,14.846589 14.7497172,15.8605159 14.9120743,16.948901 C15.0744314,18.0372862 16.0883584,18.7879802 17.1767435,18.6256231 L17.1767435,18.6256231 Z M24.8370287,17.4177379 C25.922252,17.2355961 26.6543441,16.2081937 26.4722023,15.1229704 C26.2900605,14.0377471 25.2626581,13.3056549 24.1774348,13.4877968 C24.1702314,13.4890057 24.1702314,13.4890057 24.1630282,13.4902145 C23.0777996,13.6723249 22.3456777,14.6997061 22.5277881,15.7849347 C22.7098984,16.8701632 23.7372796,17.6022852 24.8225082,17.4201748 C24.8297684,17.4189565 24.8297684,17.4189565 24.8370287,17.4177379 Z M32.4725265,16.061085 C33.5532108,15.853199 34.2612448,14.8060455 34.0539647,13.7222022 C33.8466846,12.6383589 32.8025831,11.9282552 31.7218988,12.1361412 C31.714724,12.1375214 31.714724,12.1375214 31.7075489,12.1389013 C30.6268567,12.3467457 29.9187826,13.3938719 30.1260211,14.4777232 C30.3332597,15.5615745 31.3773339,16.2717185 32.4580262,16.0638741 C32.4652764,16.0624797 32.4652764,16.0624797 32.4725265,16.061085 Z M40.0707225,14.4695476 C40.0780573,14.4678946 40.0780573,14.4678946 40.0853916,14.4662413 C41.158768,14.2242783 41.8327617,13.1579849 41.5907986,12.0846085 C41.3488355,11.011232 40.2825422,10.3372384 39.2091657,10.5792015 C39.2019419,10.5808298 39.2019419,10.5808298 39.1947175,10.582458 C38.1213273,10.8243601 37.447273,11.8906152 37.6891752,12.9640053 C37.9310773,14.0373955 38.9973324,14.7114498 40.0707225,14.4695476 L40.0707225,14.4695476 Z M47.4276119,12.0082845 C47.4329844,12.0067855 47.4329844,12.0067855 47.4383562,12.0052864 C48.2362896,11.7825897 48.7026111,10.9552056 48.4799145,10.1572722 C48.2572179,9.35933881 47.4298337,8.89301728 46.6319003,9.11571391 C46.6266645,9.1171751 46.6266645,9.1171751 46.6214279,9.11863616 C45.8234736,9.34125773 45.3570742,10.168598 45.5796958,10.9665523 C45.8023173,11.7645067 46.6296576,12.230906 47.4276119,12.0082845 L47.4276119,12.0082845 Z M54.8999721,9.57562965 C54.9052414,9.57358217 54.9052414,9.57358217 54.9105092,9.57153441 C55.6826371,9.27135123 56.0652239,8.40207131 55.7650408,7.62994336 C55.4648576,6.85781542 54.5955777,6.4752286 53.8234497,6.77541179 C53.8184808,6.77734338 53.8184808,6.77734338 53.8135101,6.77927482 C53.0413181,7.07929302 52.6585455,7.94849117 52.9585637,8.72068323 C53.2585819,9.4928753 54.12778,9.87564785 54.8999721,9.57562965 L54.8999721,9.57562965 Z" id="Shape"></path> + <path d="M1.45327704,26.6978168 C1.54647464,26.6867112 1.63432439,26.660866 1.7147722,26.6228911 C2.03520341,26.4716332 2.23820252,26.1279362 2.1939679,25.7567213 C2.13862768,25.2923089 1.71728488,24.9606903 1.2528724,25.0160305 C1.24980572,25.0163959 1.24980572,25.0163959 1.24674021,25.0167611 C0.782326598,25.0720918 0.450699376,25.4934278 0.506030077,25.9578415 C0.561360778,26.4222551 0.982696791,26.7538823 1.4471104,26.6985516 C1.45019313,26.6981843 1.45019313,26.6981843 1.45327704,26.6978168 Z M9.34858965,26.1274206 
C9.35409106,26.1266859 9.35409106,26.1266859 9.35959333,26.1259509 C10.1807278,26.0162709 10.7574756,25.2616967 10.6477955,24.4405622 C10.5381155,23.6194278 9.78354128,23.04268 8.96240683,23.15236 C8.95693539,23.1530908 8.95693539,23.1530908 8.95146479,23.1538215 C8.13032806,23.2634845 7.55356464,24.0180467 7.66322765,24.8391835 C7.77289065,25.6603202 8.52745292,26.2370836 9.34858965,26.1274206 L9.34858965,26.1274206 Z M17.0066591,25.0462427 C17.0121188,25.0454283 17.0121188,25.0454283 17.0175789,25.0446136 C17.8369369,24.9223683 18.4020584,24.1590479 18.279813,23.3396899 C18.1575677,22.5203319 17.3942473,21.9552104 16.5748893,22.0774558 C16.5694641,22.0782652 16.5694641,22.0782652 16.5640392,22.0790744 C15.7446783,22.2013005 15.1795389,22.9646076 15.301765,23.7839685 C15.4239911,24.6033294 16.1872982,25.1684688 17.0066591,25.0462427 L17.0066591,25.0462427 Z M24.8225082,24.4201748 C24.8297684,24.4189565 24.8297684,24.4189565 24.8370287,24.4177379 C25.922252,24.2355961 26.6543441,23.2081937 26.4722023,22.1229704 C26.2900605,21.0377471 25.2626581,20.3056549 24.1774348,20.4877968 C24.1702314,20.4890057 24.1702314,20.4890057 24.1630282,20.4902145 C23.0777996,20.6723249 22.3456777,21.6997061 22.5277881,22.7849347 C22.7098984,23.8701632 23.7372796,24.6022852 24.8225082,24.4201748 L24.8225082,24.4201748 Z M32.4725265,23.0495399 C33.5532108,22.8422598 34.2612448,21.7981584 34.0539647,20.7174741 C33.8466846,19.6367898 32.8025831,18.9287558 31.7218988,19.1360359 C31.714724,19.137412 31.714724,19.137412 31.7075489,19.138788 C30.6268567,19.3460265 29.9187826,20.3901008 30.1260211,21.470793 C30.3332597,22.5514853 31.3773339,23.2595594 32.4580262,23.0523208 C32.4652764,23.0509305 32.4652764,23.0509305 32.4725265,23.0495399 Z M39.8723866,20.8947688 C39.877909,20.8935242 39.877909,20.8935242 39.883431,20.8922795 C40.6915794,20.7101047 41.1990314,19.9072892 41.0168566,19.0991407 C40.8346818,18.2909923 40.0318663,17.7835404 39.2237179,17.9657152 C39.218279,17.9669412 39.218279,17.9669412 39.2128397,17.968167 C38.404681,18.1502959 37.8971834,18.9530826 38.0793123,19.7612414 C38.2614411,20.5694001 39.0642279,21.0768976 39.8723866,20.8947688 L39.8723866,20.8947688 Z M47.4276119,19.0082845 C47.4329844,19.0067855 47.4329844,19.0067855 47.4383562,19.0052864 C48.2362896,18.7825897 48.7026111,17.9552056 48.4799145,17.1572722 C48.2572179,16.3593388 47.4298337,15.8930173 46.6319003,16.1157139 C46.6266645,16.1171751 46.6266645,16.1171751 46.6214279,16.1186362 C45.8234736,16.3412577 45.3570742,17.168598 45.5796958,17.9665523 C45.8023173,18.7645067 46.6296576,19.230906 47.4276119,19.0082845 L47.4276119,19.0082845 Z M54.4138099,15.7087505 C54.4167849,15.7075945 54.4167849,15.7075945 54.419759,15.7064383 C54.8556933,15.5369585 55.0716973,15.0461733 54.9022174,14.610239 C54.7327376,14.1743047 54.2419524,13.9583007 53.8060181,14.1277806 C53.8032127,14.1288712 53.8032127,14.1288712 53.8004064,14.1299616 C53.3644359,14.2993484 53.148327,14.7900873 53.3177137,15.2260578 C53.4871005,15.6620283 53.9778394,15.8781372 54.4138099,15.7087505 L54.4138099,15.7087505 Z" id="Shape"></path> + </g> + <g id="middle" transform="translate(0.000000, 33.900002)"> + <path d="M1.4471104,12.2885536 C1.45019313,12.2881864 1.45019313,12.2881864 1.45327704,12.2878189 C1.91768952,12.2324787 2.24930811,11.8111359 2.1939679,11.3467234 C2.13862768,10.8823109 1.71728488,10.5506923 1.2528724,10.6060325 C1.24980572,10.606398 1.24980572,10.606398 1.24674021,10.6067632 C0.782326598,10.6620939 0.450699376,11.0834299 0.506030077,11.5478435 
C0.561360778,12.0122571 0.982696791,12.3438844 1.4471104,12.2885536 L1.4471104,12.2885536 Z M9.34858965,12.1304907 C9.35409106,12.129756 9.35409106,12.129756 9.35959333,12.129021 C10.1807278,12.019341 10.7574756,11.2647668 10.6477955,10.4436323 C10.5381155,9.62249789 9.78354128,9.04575011 8.96240683,9.15543014 C8.95693539,9.15616095 8.95693539,9.15616095 8.95146479,9.15689157 C8.13032806,9.26655457 7.55356464,10.0211168 7.66322765,10.8422536 C7.77289065,11.6633903 8.52745292,12.2401537 9.34858965,12.1304907 L9.34858965,12.1304907 Z M17.0066591,11.0493128 C17.0121188,11.0484984 17.0121188,11.0484984 17.0175789,11.0476838 C17.8369369,10.9254384 18.4020584,10.1621181 18.279813,9.34276003 C18.1575677,8.52340201 17.3942473,7.95828052 16.5748893,8.08052587 C16.5694641,8.08133528 16.5694641,8.08133528 16.5640392,8.08214454 C15.7446783,8.20437064 15.1795389,8.96767771 15.301765,9.78703861 C15.4239911,10.6063995 16.1872982,11.1715389 17.0066591,11.0493128 L17.0066591,11.0493128 Z M24.8225082,10.4232459 C24.8297684,10.4220275 24.8297684,10.4220275 24.8370287,10.420809 C25.922252,10.2386672 26.6543441,9.21126476 26.4722023,8.12604147 C26.2900605,7.04081818 25.2626581,6.30872601 24.1774348,6.49086783 C24.1702314,6.4920768 24.1702314,6.4920768 24.1630282,6.49328559 C23.0777996,6.67539597 22.3456777,7.70277717 22.5277881,8.78800574 C22.7098984,9.87323431 23.7372796,10.6053563 24.8225082,10.4232459 L24.8225082,10.4232459 Z M32.4725265,9.05261002 C33.5532108,8.84532993 34.2612448,7.80122849 34.0539647,6.72054419 C33.8466846,5.63985989 32.8025831,4.93182589 31.7218988,5.13910599 C31.714724,5.14048211 31.714724,5.14048211 31.7075489,5.14185807 C30.6268567,5.34909665 29.9187826,6.39317088 30.1260211,7.47386314 C30.3332597,8.5545554 31.3773339,9.26262952 32.4580262,9.05539095 C32.4652764,9.05400057 32.4652764,9.05400057 32.4725265,9.05261002 Z M39.8723866,6.89476879 C39.877909,6.8935242 39.877909,6.8935242 39.883431,6.89227947 C40.6915794,6.71010471 41.1990314,5.90728916 41.0168566,5.09914075 C40.8346818,4.29099233 40.0318663,3.78354042 39.2237179,3.96571517 C39.218279,3.96694116 39.218279,3.96694116 39.2128397,3.96816703 C38.404681,4.15029588 37.8971834,4.9530826 38.0793123,5.76124136 C38.2614411,6.56940012 39.0642279,7.07689764 39.8723866,6.89476879 L39.8723866,6.89476879 Z M47.4276119,5.00828445 C47.4329844,5.00678549 47.4329844,5.00678549 47.4383562,5.00528637 C48.2362896,4.78258973 48.7026111,3.95520561 48.4799145,3.15727221 C48.2572179,2.35933881 47.4298337,1.89301728 46.6319003,2.11571391 C46.6266645,2.1171751 46.6266645,2.1171751 46.6214279,2.11863616 C45.8234736,2.34125773 45.3570742,3.16859798 45.5796958,3.96655233 C45.8023173,4.76450667 46.6296576,5.23090603 47.4276119,5.00828445 L47.4276119,5.00828445 Z M54.419759,2.30643871 C54.8556933,2.13695884 55.0716973,1.6461737 54.9022174,1.21023941 C54.7327376,0.774305114 54.2419524,0.558301127 53.8060181,0.727780997 C53.8032127,0.728871549 53.8032127,0.728871549 53.8004064,0.729962021 C53.3644359,0.89934874 53.148327,1.39008772 53.3177137,1.82605822 C53.4871005,2.26202871 53.9778394,2.47813756 54.4138099,2.30875084 C54.4167849,2.30759485 54.4167849,2.30759485 54.419759,2.30643871 Z" id="Shape"></path> + <path d="M1.67760355,20.096503 C1.68306395,20.0958524 1.68306395,20.0958524 1.68852642,20.0952015 C2.51113381,19.9971782 3.09852524,19.2508595 3.00050189,18.4282521 C2.90247854,17.6056447 2.15615986,17.0182533 1.33355246,17.1162767 C1.3281205,17.1169239 1.3281205,17.1169239 1.3226906,17.1175709 C0.500081196,17.2155774 -0.0873255124,17.961884 
0.0106809923,18.7844934 C0.108687497,19.6071028 0.854994145,20.1945095 1.67760355,20.096503 L1.67760355,20.096503 Z M9.34858965,19.1274206 C9.35409106,19.1266859 9.35409106,19.1266859 9.35959333,19.1259509 C10.1807278,19.0162709 10.7574756,18.2616967 10.6477955,17.4405622 C10.5381155,16.6194278 9.78354128,16.04268 8.96240683,16.15236 C8.95693539,16.1530908 8.95693539,16.1530908 8.95146479,16.1538215 C8.13032806,16.2634845 7.55356464,17.0180467 7.66322765,17.8391835 C7.77289065,18.6603202 8.52745292,19.2370836 9.34858965,19.1274206 L9.34858965,19.1274206 Z M17.1767435,18.6256231 C17.1839958,18.6245412 17.1839958,18.6245412 17.1912486,18.6234592 C18.27963,18.4610765 19.0303002,17.4471319 18.8679175,16.3587506 C18.7055348,15.2703693 17.6915903,14.519699 16.6032089,14.6820817 C16.5960024,14.6831569 16.5960024,14.6831569 16.5887964,14.6842319 C15.5004112,14.846589 14.7497172,15.8605159 14.9120743,16.948901 C15.0744314,18.0372862 16.0883584,18.7879802 17.1767435,18.6256231 L17.1767435,18.6256231 Z M24.8370287,17.4177379 C25.922252,17.2355961 26.6543441,16.2081937 26.4722023,15.1229704 C26.2900605,14.0377471 25.2626581,13.3056549 24.1774348,13.4877968 C24.1702314,13.4890057 24.1702314,13.4890057 24.1630282,13.4902145 C23.0777996,13.6723249 22.3456777,14.6997061 22.5277881,15.7849347 C22.7098984,16.8701632 23.7372796,17.6022852 24.8225082,17.4201748 C24.8297684,17.4189565 24.8297684,17.4189565 24.8370287,17.4177379 Z M32.4725265,16.061085 C33.5532108,15.853199 34.2612448,14.8060455 34.0539647,13.7222022 C33.8466846,12.6383589 32.8025831,11.9282552 31.7218988,12.1361412 C31.714724,12.1375214 31.714724,12.1375214 31.7075489,12.1389013 C30.6268567,12.3467457 29.9187826,13.3938719 30.1260211,14.4777232 C30.3332597,15.5615745 31.3773339,16.2717185 32.4580262,16.0638741 C32.4652764,16.0624797 32.4652764,16.0624797 32.4725265,16.061085 Z M40.0707225,14.4695476 C40.0780573,14.4678946 40.0780573,14.4678946 40.0853916,14.4662413 C41.158768,14.2242783 41.8327617,13.1579849 41.5907986,12.0846085 C41.3488355,11.011232 40.2825422,10.3372384 39.2091657,10.5792015 C39.2019419,10.5808298 39.2019419,10.5808298 39.1947175,10.582458 C38.1213273,10.8243601 37.447273,11.8906152 37.6891752,12.9640053 C37.9310773,14.0373955 38.9973324,14.7114498 40.0707225,14.4695476 L40.0707225,14.4695476 Z M47.4276119,12.0082845 C47.4329844,12.0067855 47.4329844,12.0067855 47.4383562,12.0052864 C48.2362896,11.7825897 48.7026111,10.9552056 48.4799145,10.1572722 C48.2572179,9.35933881 47.4298337,8.89301728 46.6319003,9.11571391 C46.6266645,9.1171751 46.6266645,9.1171751 46.6214279,9.11863616 C45.8234736,9.34125773 45.3570742,10.168598 45.5796958,10.9665523 C45.8023173,11.7645067 46.6296576,12.230906 47.4276119,12.0082845 L47.4276119,12.0082845 Z M54.8999721,9.57562965 C54.9052414,9.57358217 54.9052414,9.57358217 54.9105092,9.57153441 C55.6826371,9.27135123 56.0652239,8.40207131 55.7650408,7.62994336 C55.4648576,6.85781542 54.5955777,6.4752286 53.8234497,6.77541179 C53.8184808,6.77734338 53.8184808,6.77734338 53.8135101,6.77927482 C53.0413181,7.07929302 52.6585455,7.94849117 52.9585637,8.72068323 C53.2585819,9.4928753 54.12778,9.87564785 54.8999721,9.57562965 L54.8999721,9.57562965 Z" id="Shape"></path> + <path d="M1.45327704,26.6978168 C1.54647464,26.6867112 1.63432439,26.660866 1.7147722,26.6228911 C2.03520341,26.4716332 2.23820252,26.1279362 2.1939679,25.7567213 C2.13862768,25.2923089 1.71728488,24.9606903 1.2528724,25.0160305 C1.24980572,25.0163959 1.24980572,25.0163959 1.24674021,25.0167611 C0.782326598,25.0720918 
0.450699376,25.4934278 0.506030077,25.9578415 C0.561360778,26.4222551 0.982696791,26.7538823 1.4471104,26.6985516 C1.45019313,26.6981843 1.45019313,26.6981843 1.45327704,26.6978168 Z M9.34858965,26.1274206 C9.35409106,26.1266859 9.35409106,26.1266859 9.35959333,26.1259509 C10.1807278,26.0162709 10.7574756,25.2616967 10.6477955,24.4405622 C10.5381155,23.6194278 9.78354128,23.04268 8.96240683,23.15236 C8.95693539,23.1530908 8.95693539,23.1530908 8.95146479,23.1538215 C8.13032806,23.2634845 7.55356464,24.0180467 7.66322765,24.8391835 C7.77289065,25.6603202 8.52745292,26.2370836 9.34858965,26.1274206 L9.34858965,26.1274206 Z M17.0066591,25.0462427 C17.0121188,25.0454283 17.0121188,25.0454283 17.0175789,25.0446136 C17.8369369,24.9223683 18.4020584,24.1590479 18.279813,23.3396899 C18.1575677,22.5203319 17.3942473,21.9552104 16.5748893,22.0774558 C16.5694641,22.0782652 16.5694641,22.0782652 16.5640392,22.0790744 C15.7446783,22.2013005 15.1795389,22.9646076 15.301765,23.7839685 C15.4239911,24.6033294 16.1872982,25.1684688 17.0066591,25.0462427 L17.0066591,25.0462427 Z M24.8225082,24.4201748 C24.8297684,24.4189565 24.8297684,24.4189565 24.8370287,24.4177379 C25.922252,24.2355961 26.6543441,23.2081937 26.4722023,22.1229704 C26.2900605,21.0377471 25.2626581,20.3056549 24.1774348,20.4877968 C24.1702314,20.4890057 24.1702314,20.4890057 24.1630282,20.4902145 C23.0777996,20.6723249 22.3456777,21.6997061 22.5277881,22.7849347 C22.7098984,23.8701632 23.7372796,24.6022852 24.8225082,24.4201748 L24.8225082,24.4201748 Z M32.4725265,23.0495399 C33.5532108,22.8422598 34.2612448,21.7981584 34.0539647,20.7174741 C33.8466846,19.6367898 32.8025831,18.9287558 31.7218988,19.1360359 C31.714724,19.137412 31.714724,19.137412 31.7075489,19.138788 C30.6268567,19.3460265 29.9187826,20.3901008 30.1260211,21.470793 C30.3332597,22.5514853 31.3773339,23.2595594 32.4580262,23.0523208 C32.4652764,23.0509305 32.4652764,23.0509305 32.4725265,23.0495399 Z M39.8723866,20.8947688 C39.877909,20.8935242 39.877909,20.8935242 39.883431,20.8922795 C40.6915794,20.7101047 41.1990314,19.9072892 41.0168566,19.0991407 C40.8346818,18.2909923 40.0318663,17.7835404 39.2237179,17.9657152 C39.218279,17.9669412 39.218279,17.9669412 39.2128397,17.968167 C38.404681,18.1502959 37.8971834,18.9530826 38.0793123,19.7612414 C38.2614411,20.5694001 39.0642279,21.0768976 39.8723866,20.8947688 L39.8723866,20.8947688 Z M47.4276119,19.0082845 C47.4329844,19.0067855 47.4329844,19.0067855 47.4383562,19.0052864 C48.2362896,18.7825897 48.7026111,17.9552056 48.4799145,17.1572722 C48.2572179,16.3593388 47.4298337,15.8930173 46.6319003,16.1157139 C46.6266645,16.1171751 46.6266645,16.1171751 46.6214279,16.1186362 C45.8234736,16.3412577 45.3570742,17.168598 45.5796958,17.9665523 C45.8023173,18.7645067 46.6296576,19.230906 47.4276119,19.0082845 L47.4276119,19.0082845 Z M54.4138099,15.7087505 C54.4167849,15.7075945 54.4167849,15.7075945 54.419759,15.7064383 C54.8556933,15.5369585 55.0716973,15.0461733 54.9022174,14.610239 C54.7327376,14.1743047 54.2419524,13.9583007 53.8060181,14.1277806 C53.8032127,14.1288712 53.8032127,14.1288712 53.8004064,14.1299616 C53.3644359,14.2993484 53.148327,14.7900873 53.3177137,15.2260578 C53.4871005,15.6620283 53.9778394,15.8781372 54.4138099,15.7087505 L54.4138099,15.7087505 Z" id="Shape"></path> + </g> + <g id="top"> + <path d="M1.4471104,12.2885536 C1.45019313,12.2881864 1.45019313,12.2881864 1.45327704,12.2878189 C1.91768952,12.2324787 2.24930811,11.8111359 2.1939679,11.3467234 C2.13862768,10.8823109 1.71728488,10.5506923 
1.2528724,10.6060325 C1.24980572,10.606398 1.24980572,10.606398 1.24674021,10.6067632 C0.782326598,10.6620939 0.450699376,11.0834299 0.506030077,11.5478435 C0.561360778,12.0122571 0.982696791,12.3438844 1.4471104,12.2885536 L1.4471104,12.2885536 Z M9.34858965,12.1304907 C9.35409106,12.129756 9.35409106,12.129756 9.35959333,12.129021 C10.1807278,12.019341 10.7574756,11.2647668 10.6477955,10.4436323 C10.5381155,9.62249789 9.78354128,9.04575011 8.96240683,9.15543014 C8.95693539,9.15616095 8.95693539,9.15616095 8.95146479,9.15689157 C8.13032806,9.26655457 7.55356464,10.0211168 7.66322765,10.8422536 C7.77289065,11.6633903 8.52745292,12.2401537 9.34858965,12.1304907 L9.34858965,12.1304907 Z M17.0066591,11.0493128 C17.0121188,11.0484984 17.0121188,11.0484984 17.0175789,11.0476838 C17.8369369,10.9254384 18.4020584,10.1621181 18.279813,9.34276003 C18.1575677,8.52340201 17.3942473,7.95828052 16.5748893,8.08052587 C16.5694641,8.08133528 16.5694641,8.08133528 16.5640392,8.08214454 C15.7446783,8.20437064 15.1795389,8.96767771 15.301765,9.78703861 C15.4239911,10.6063995 16.1872982,11.1715389 17.0066591,11.0493128 L17.0066591,11.0493128 Z M24.8225082,10.4232459 C24.8297684,10.4220275 24.8297684,10.4220275 24.8370287,10.420809 C25.922252,10.2386672 26.6543441,9.21126476 26.4722023,8.12604147 C26.2900605,7.04081818 25.2626581,6.30872601 24.1774348,6.49086783 C24.1702314,6.4920768 24.1702314,6.4920768 24.1630282,6.49328559 C23.0777996,6.67539597 22.3456777,7.70277717 22.5277881,8.78800574 C22.7098984,9.87323431 23.7372796,10.6053563 24.8225082,10.4232459 L24.8225082,10.4232459 Z M32.4725265,9.05261002 C33.5532108,8.84532993 34.2612448,7.80122849 34.0539647,6.72054419 C33.8466846,5.63985989 32.8025831,4.93182589 31.7218988,5.13910599 C31.714724,5.14048211 31.714724,5.14048211 31.7075489,5.14185807 C30.6268567,5.34909665 29.9187826,6.39317088 30.1260211,7.47386314 C30.3332597,8.5545554 31.3773339,9.26262952 32.4580262,9.05539095 C32.4652764,9.05400057 32.4652764,9.05400057 32.4725265,9.05261002 Z M39.8723866,6.89476879 C39.877909,6.8935242 39.877909,6.8935242 39.883431,6.89227947 C40.6915794,6.71010471 41.1990314,5.90728916 41.0168566,5.09914075 C40.8346818,4.29099233 40.0318663,3.78354042 39.2237179,3.96571517 C39.218279,3.96694116 39.218279,3.96694116 39.2128397,3.96816703 C38.404681,4.15029588 37.8971834,4.9530826 38.0793123,5.76124136 C38.2614411,6.56940012 39.0642279,7.07689764 39.8723866,6.89476879 L39.8723866,6.89476879 Z M47.4276119,5.00828445 C47.4329844,5.00678549 47.4329844,5.00678549 47.4383562,5.00528637 C48.2362896,4.78258973 48.7026111,3.95520561 48.4799145,3.15727221 C48.2572179,2.35933881 47.4298337,1.89301728 46.6319003,2.11571391 C46.6266645,2.1171751 46.6266645,2.1171751 46.6214279,2.11863616 C45.8234736,2.34125773 45.3570742,3.16859798 45.5796958,3.96655233 C45.8023173,4.76450667 46.6296576,5.23090603 47.4276119,5.00828445 L47.4276119,5.00828445 Z M54.419759,2.30643871 C54.8556933,2.13695884 55.0716973,1.6461737 54.9022174,1.21023941 C54.7327376,0.774305114 54.2419524,0.558301127 53.8060181,0.727780997 C53.8032127,0.728871549 53.8032127,0.728871549 53.8004064,0.729962021 C53.3644359,0.89934874 53.148327,1.39008772 53.3177137,1.82605822 C53.4871005,2.26202871 53.9778394,2.47813756 54.4138099,2.30875084 C54.4167849,2.30759485 54.4167849,2.30759485 54.419759,2.30643871 Z" id="Shape"></path> + <path d="M1.67760355,20.096503 C1.68306395,20.0958524 1.68306395,20.0958524 1.68852642,20.0952015 C2.51113381,19.9971782 3.09852524,19.2508595 3.00050189,18.4282521 C2.90247854,17.6056447 
2.15615986,17.0182533 1.33355246,17.1162767 C1.3281205,17.1169239 1.3281205,17.1169239 1.3226906,17.1175709 C0.500081196,17.2155774 -0.0873255124,17.961884 0.0106809923,18.7844934 C0.108687497,19.6071028 0.854994145,20.1945095 1.67760355,20.096503 L1.67760355,20.096503 Z M9.34858965,19.1274206 C9.35409106,19.1266859 9.35409106,19.1266859 9.35959333,19.1259509 C10.1807278,19.0162709 10.7574756,18.2616967 10.6477955,17.4405622 C10.5381155,16.6194278 9.78354128,16.04268 8.96240683,16.15236 C8.95693539,16.1530908 8.95693539,16.1530908 8.95146479,16.1538215 C8.13032806,16.2634845 7.55356464,17.0180467 7.66322765,17.8391835 C7.77289065,18.6603202 8.52745292,19.2370836 9.34858965,19.1274206 L9.34858965,19.1274206 Z M17.1767435,18.6256231 C17.1839958,18.6245412 17.1839958,18.6245412 17.1912486,18.6234592 C18.27963,18.4610765 19.0303002,17.4471319 18.8679175,16.3587506 C18.7055348,15.2703693 17.6915903,14.519699 16.6032089,14.6820817 C16.5960024,14.6831569 16.5960024,14.6831569 16.5887964,14.6842319 C15.5004112,14.846589 14.7497172,15.8605159 14.9120743,16.948901 C15.0744314,18.0372862 16.0883584,18.7879802 17.1767435,18.6256231 L17.1767435,18.6256231 Z M24.8370287,17.4177379 C25.922252,17.2355961 26.6543441,16.2081937 26.4722023,15.1229704 C26.2900605,14.0377471 25.2626581,13.3056549 24.1774348,13.4877968 C24.1702314,13.4890057 24.1702314,13.4890057 24.1630282,13.4902145 C23.0777996,13.6723249 22.3456777,14.6997061 22.5277881,15.7849347 C22.7098984,16.8701632 23.7372796,17.6022852 24.8225082,17.4201748 C24.8297684,17.4189565 24.8297684,17.4189565 24.8370287,17.4177379 Z M32.4725265,16.061085 C33.5532108,15.853199 34.2612448,14.8060455 34.0539647,13.7222022 C33.8466846,12.6383589 32.8025831,11.9282552 31.7218988,12.1361412 C31.714724,12.1375214 31.714724,12.1375214 31.7075489,12.1389013 C30.6268567,12.3467457 29.9187826,13.3938719 30.1260211,14.4777232 C30.3332597,15.5615745 31.3773339,16.2717185 32.4580262,16.0638741 C32.4652764,16.0624797 32.4652764,16.0624797 32.4725265,16.061085 Z M40.0707225,14.4695476 C40.0780573,14.4678946 40.0780573,14.4678946 40.0853916,14.4662413 C41.158768,14.2242783 41.8327617,13.1579849 41.5907986,12.0846085 C41.3488355,11.011232 40.2825422,10.3372384 39.2091657,10.5792015 C39.2019419,10.5808298 39.2019419,10.5808298 39.1947175,10.582458 C38.1213273,10.8243601 37.447273,11.8906152 37.6891752,12.9640053 C37.9310773,14.0373955 38.9973324,14.7114498 40.0707225,14.4695476 L40.0707225,14.4695476 Z M47.4276119,12.0082845 C47.4329844,12.0067855 47.4329844,12.0067855 47.4383562,12.0052864 C48.2362896,11.7825897 48.7026111,10.9552056 48.4799145,10.1572722 C48.2572179,9.35933881 47.4298337,8.89301728 46.6319003,9.11571391 C46.6266645,9.1171751 46.6266645,9.1171751 46.6214279,9.11863616 C45.8234736,9.34125773 45.3570742,10.168598 45.5796958,10.9665523 C45.8023173,11.7645067 46.6296576,12.230906 47.4276119,12.0082845 L47.4276119,12.0082845 Z M54.8999721,9.57562965 C54.9052414,9.57358217 54.9052414,9.57358217 54.9105092,9.57153441 C55.6826371,9.27135123 56.0652239,8.40207131 55.7650408,7.62994336 C55.4648576,6.85781542 54.5955777,6.4752286 53.8234497,6.77541179 C53.8184808,6.77734338 53.8184808,6.77734338 53.8135101,6.77927482 C53.0413181,7.07929302 52.6585455,7.94849117 52.9585637,8.72068323 C53.2585819,9.4928753 54.12778,9.87564785 54.8999721,9.57562965 L54.8999721,9.57562965 Z" id="Shape"></path> + <path d="M1.45327704,26.6978168 C1.54647464,26.6867112 1.63432439,26.660866 1.7147722,26.6228911 C2.03520341,26.4716332 2.23820252,26.1279362 2.1939679,25.7567213 
C2.13862768,25.2923089 1.71728488,24.9606903 1.2528724,25.0160305 C1.24980572,25.0163959 1.24980572,25.0163959 1.24674021,25.0167611 C0.782326598,25.0720918 0.450699376,25.4934278 0.506030077,25.9578415 C0.561360778,26.4222551 0.982696791,26.7538823 1.4471104,26.6985516 C1.45019313,26.6981843 1.45019313,26.6981843 1.45327704,26.6978168 Z M9.34858965,26.1274206 C9.35409106,26.1266859 9.35409106,26.1266859 9.35959333,26.1259509 C10.1807278,26.0162709 10.7574756,25.2616967 10.6477955,24.4405622 C10.5381155,23.6194278 9.78354128,23.04268 8.96240683,23.15236 C8.95693539,23.1530908 8.95693539,23.1530908 8.95146479,23.1538215 C8.13032806,23.2634845 7.55356464,24.0180467 7.66322765,24.8391835 C7.77289065,25.6603202 8.52745292,26.2370836 9.34858965,26.1274206 L9.34858965,26.1274206 Z M17.0066591,25.0462427 C17.0121188,25.0454283 17.0121188,25.0454283 17.0175789,25.0446136 C17.8369369,24.9223683 18.4020584,24.1590479 18.279813,23.3396899 C18.1575677,22.5203319 17.3942473,21.9552104 16.5748893,22.0774558 C16.5694641,22.0782652 16.5694641,22.0782652 16.5640392,22.0790744 C15.7446783,22.2013005 15.1795389,22.9646076 15.301765,23.7839685 C15.4239911,24.6033294 16.1872982,25.1684688 17.0066591,25.0462427 L17.0066591,25.0462427 Z M24.8225082,24.4201748 C24.8297684,24.4189565 24.8297684,24.4189565 24.8370287,24.4177379 C25.922252,24.2355961 26.6543441,23.2081937 26.4722023,22.1229704 C26.2900605,21.0377471 25.2626581,20.3056549 24.1774348,20.4877968 C24.1702314,20.4890057 24.1702314,20.4890057 24.1630282,20.4902145 C23.0777996,20.6723249 22.3456777,21.6997061 22.5277881,22.7849347 C22.7098984,23.8701632 23.7372796,24.6022852 24.8225082,24.4201748 L24.8225082,24.4201748 Z M32.4725265,23.0495399 C33.5532108,22.8422598 34.2612448,21.7981584 34.0539647,20.7174741 C33.8466846,19.6367898 32.8025831,18.9287558 31.7218988,19.1360359 C31.714724,19.137412 31.714724,19.137412 31.7075489,19.138788 C30.6268567,19.3460265 29.9187826,20.3901008 30.1260211,21.470793 C30.3332597,22.5514853 31.3773339,23.2595594 32.4580262,23.0523208 C32.4652764,23.0509305 32.4652764,23.0509305 32.4725265,23.0495399 Z M39.8723866,20.8947688 C39.877909,20.8935242 39.877909,20.8935242 39.883431,20.8922795 C40.6915794,20.7101047 41.1990314,19.9072892 41.0168566,19.0991407 C40.8346818,18.2909923 40.0318663,17.7835404 39.2237179,17.9657152 C39.218279,17.9669412 39.218279,17.9669412 39.2128397,17.968167 C38.404681,18.1502959 37.8971834,18.9530826 38.0793123,19.7612414 C38.2614411,20.5694001 39.0642279,21.0768976 39.8723866,20.8947688 L39.8723866,20.8947688 Z M47.4276119,19.0082845 C47.4329844,19.0067855 47.4329844,19.0067855 47.4383562,19.0052864 C48.2362896,18.7825897 48.7026111,17.9552056 48.4799145,17.1572722 C48.2572179,16.3593388 47.4298337,15.8930173 46.6319003,16.1157139 C46.6266645,16.1171751 46.6266645,16.1171751 46.6214279,16.1186362 C45.8234736,16.3412577 45.3570742,17.168598 45.5796958,17.9665523 C45.8023173,18.7645067 46.6296576,19.230906 47.4276119,19.0082845 L47.4276119,19.0082845 Z M54.4138099,15.7087505 C54.4167849,15.7075945 54.4167849,15.7075945 54.419759,15.7064383 C54.8556933,15.5369585 55.0716973,15.0461733 54.9022174,14.610239 C54.7327376,14.1743047 54.2419524,13.9583007 53.8060181,14.1277806 C53.8032127,14.1288712 53.8032127,14.1288712 53.8004064,14.1299616 C53.3644359,14.2993484 53.148327,14.7900873 53.3177137,15.2260578 C53.4871005,15.6620283 53.9778394,15.8781372 54.4138099,15.7087505 L54.4138099,15.7087505 Z" id="Shape"></path> + </g> + </g> + </g> +</svg> diff --git a/scala3doc/dotty-docs/docs/images/smarter.jpg 
b/scala3doc/dotty-docs/docs/images/smarter.jpg new file mode 100644 index 000000000000..5e5a0cf5a1b8 Binary files /dev/null and b/scala3doc/dotty-docs/docs/images/smarter.jpg differ diff --git a/scala3doc/dotty-docs/docs/images/worksheets/config-autorun.png b/scala3doc/dotty-docs/docs/images/worksheets/config-autorun.png new file mode 100644 index 000000000000..510bb3f0f86b Binary files /dev/null and b/scala3doc/dotty-docs/docs/images/worksheets/config-autorun.png differ diff --git a/scala3doc/dotty-docs/docs/images/worksheets/worksheet-demo.gif b/scala3doc/dotty-docs/docs/images/worksheets/worksheet-demo.gif new file mode 100644 index 000000000000..a03d22f7f93b Binary files /dev/null and b/scala3doc/dotty-docs/docs/images/worksheets/worksheet-demo.gif differ diff --git a/scala3doc/dotty-docs/docs/images/worksheets/worksheet-help.png b/scala3doc/dotty-docs/docs/images/worksheets/worksheet-help.png new file mode 100644 index 000000000000..1aee216e7c11 Binary files /dev/null and b/scala3doc/dotty-docs/docs/images/worksheets/worksheet-help.png differ diff --git a/scala3doc/dotty-docs/docs/images/worksheets/worksheet-run.png b/scala3doc/dotty-docs/docs/images/worksheets/worksheet-run.png new file mode 100644 index 000000000000..d1aa99bab2a8 Binary files /dev/null and b/scala3doc/dotty-docs/docs/images/worksheets/worksheet-run.png differ diff --git a/scala3doc/dotty-docs/docs/index.html b/scala3doc/dotty-docs/docs/index.html new file mode 100644 index 000000000000..f31017f8e264 --- /dev/null +++ b/scala3doc/dotty-docs/docs/index.html @@ -0,0 +1,206 @@ +--- +title: Dotty +layout: main +hasFrame: false +extraCSS: + - css/frontpage.css +--- + +<section class="page bg-red bg-dark"> + <header> + <nav class="navbar navbar-expand-md navbar-dark"> + <button class="navbar-toggler" type="button" data-toggle="collapse" data-target="#navbarContent"> + <i class="fas fa-bars"></i> + </button> + <div class="collapse navbar-collapse" id="navbarContent"> + <ul class="navbar-nav ml-auto mr-0"> + <li class="nav-item"> + <a class="nav-link" href="#getting-started"> + Try it now + </a> + </li> + <li class="nav-item"> + <a class="nav-link" href="{{ site.baseurl }}/blog/index.html"> + Blog + </a> + </li> + <li class="nav-item"> + <a class="nav-link" href="docs/index.html"> + Docs + </a> + </li> + <li class="nav-item"> + <a class="nav-link" href="https://github.com/lampepfl/dotty"> + <i class="fab fa-github"></i> + </a> + </li> + </ul> + </div> + </nav> + </header> + <div class="container"> + <div class="scala-logo-container"> + <img src="images/dotty-logo-white.svg" alt="logo" /> + <div class="subtitle text-center"> + <h1 id="dotty" class="above-byline">Dotty</h1> + <p>A next-generation compiler for Scala</p> + <p>(scroll down for more info)</p> + </div> + </div> + </div> +</section> + + +<section class="page bg-blue bg-dark"> + <div class="container"> + + <h1 id="getting-started">Try Dotty</h1> + <p>If you are a Mac user, you can install Dotty with <a href="https://brew.sh/">brew</a>:</p> + <pre><code>brew install lampepfl/brew/dotty</code></pre> + + <p>If you are a Linux or Windows user, download the <a href="https://github.com/lampepfl/dotty/releases">latest release</a>. Optionally add path of the folder <code>bin/</code> to the system environment variable <code>PATH</code>. 
</p> + + <p>Now you can compile Scala source code:</p> + <pre><code>dotc hello.scala</code></pre> + + <p>To start the REPL, run: <code>dotr</code>.</p> + + <p>Or, you can try Dotty in your browser with <a href="https://scastie.scala-lang.org/?target=dotty">Scastie</a>.</p> + + <h1 id="getting-started-with-a-project">Create a Dotty Project</h1> + <p>The fastest way to create a new project in Dotty is using <a href="http://www.scala-sbt.org/">sbt (1.1.4+)</a>.</p> + + <p>Create a Dotty project:</p> + <pre><code>sbt new <a href="https://github.com/lampepfl/dotty.g8">lampepfl/dotty.g8</a></code></pre> + + <p>Or a Dotty project that cross compiles with Scala 2:</p> + <pre><code>sbt new <a href="https://github.com/lampepfl/dotty-cross.g8">lampepfl/dotty-cross.g8</a></code></pre> + + <p>For documentation see the <a href="https://github.com/lampepfl/dotty-example-project">Dotty Example Project</a>.</p> + </div> +</section> + +<section class="page bg-teal bg-dark"> + <div class="container"> + <h1 id="so-features">So, features?</h1> + <div class="centered-table"> + <table> + <colgroup> + <col width="82%" /> + <col width="17%" /> + </colgroup> + <tbody> + <tr> + <td><a href="https://dotty.epfl.ch/docs/reference/new-types/intersection-types.html">Intersection Types</a></td> + <td>Implemented</td> + </tr> + <tr> + <td><a href="https://dotty.epfl.ch/docs/reference/new-types/union-types.html">Union Types</a></td> + <td>Implemented</td> + </tr> + <tr> + <td><a href="https://dotty.epfl.ch/docs/reference/new-types/type-lambdas.html">Type lambdas</a></td> + <td>Implemented</td> + </tr> + <tr> + <td><a href="https://dotty.epfl.ch/docs/reference/contextual/implicit-function-types.html">Context query</a></td> + <td>Implemented</td> + </tr> + <tr> + <td><a href="https://dotty.epfl.ch/docs/reference/other-new-features/trait-parameters.html">Trait parameters</a></td> + <td>Implemented</td> + </tr> + <tr> + <td><a href="https://dotty.epfl.ch/docs/reference/contextual/delegates.html">Implied Instances</a></td> + <td>Implemented</td> + </tr> + <tr> + <td><a href="https://dotty.epfl.ch/docs/reference/contextual/given-clauses.html">Inferable parameters</a></td> + <td>Implemented</td> + </tr> + <tr> + <td><a href="https://dotty.epfl.ch/docs/reference/contextual/extension-methods.html">Extension Methods</a></td> + <td>Implemented</td> + </tr> + <tr> + <td><a href="https://dotty.epfl.ch/docs/reference/other-new-features/opaques.html">Opaque Type Aliases</a></td> + <td>Implemented</td> + </tr> + <tr> + <td><a href="https://dotty.epfl.ch/docs/reference/dropped-features/package-objects.html">Toplevel definitions</a></td> + <td>Implemented</td> + </tr> + <tr> + <td><a href="https://dotty.epfl.ch/docs/reference/other-new-features/export.html">Export clauses</a></td> + <td>Implemented</td> + </tr> + <tr> + <td><a href="https://dotty.epfl.ch/docs/reference/changed-features/vararg-patterns.html">Vararg patterns</a></td> + <td>Implemented</td> + </tr> + <tr> + <td><a href="https://dotty.epfl.ch/docs/reference/other-new-features/creator-applications.html">Creator applications</a></td> + <td>Implemented</td> + </tr> + <tr> + <td><a href="https://github.com/scala/scala.github.com/pull/491">@static methods and fields</a></td> + <td>Implemented</td> + </tr> + <tr> + <td><a href="https://dotty.epfl.ch/#getting-started">SBT incremental build</a></td> + <td>Implemented</td> + </tr> + <tr> + <td><a href="https://dotty.epfl.ch/docs/reference/changed-features/pattern-matching.html">Option-less pattern matching</a></td> + 
<td>Implemented</td> + </tr> + <tr> + <td><a href="https://dotty.epfl.ch/docs/reference/contextual/multiversal-equality.html">Multiversal equality</a></td> + <td>Implemented</td> + </tr> + <tr> + <td><a href="https://dotty.epfl.ch/docs/reference/metaprogramming/erased-terms.html">Erased Terms</a></td> + <td>Implemented</td> + </tr> + <tr> + <td><a href="https://github.com/dotty-linker/dotty">Auto-Specialization</a></td> + <td>In progress</td> + </tr> + <tr> + <td><a href="https://github.com/lampepfl/dotty/pull/1840">Whole program optimizer</a></td> + <td>In progress</td> + </tr> + <tr> + <td><a href="https://github.com/lampepfl/dotty/pull/2199">HList & HMaps/Record types</a></td> + <td>In progress</td> + </tr> + <tr> + <td></td> + <td></td> + </tr> + <tr> + <td>Effects</td> + <td>Considered</td> + </tr> + <tr> + <td>…and many more, check the <a href="https://dotty.epfl.ch/docs/reference/overview.html">overview page</a> for a comprehensive list</td> + <td></td> + </tr> + </tbody> + </table> + </div> + <h1 id="talks-on-dotty">Talks on Dotty?</h1> + <ul> + <li><a href="https://www.youtube.com/watch?v=GHzWqJKFCk4">Scala's Road Ahead</a> by Martin Odersky (<a href="http://www.slideshare.net/Odersky/scala-days-nyc-2016">slides</a>)</li> + <li><a href="https://www.youtube.com/watch?v=WxyyJyB_Ssc">Compilers are Databases</a> by Martin Odersky (<a href="http://www.slideshare.net/Odersky/compilers-are-databases">slides</a>)</li> + <li><a href="https://www.youtube.com/watch?v=aftdOFuVU1o">Exploring the future of Scala</a> by Dmitry Petrashko (<a href="https://d-d.me/scalaworld2015/#/">slides</a>)</li> + <li><a href="https://dotty.epfl.ch/docs/resources/talks.html">Deep Dive with Dotty</a></li> + </ul> + <h1 id="i-have-more-questions">I have more questions!</h1> + <div class="text-center"> + <p>That’s great! We have more details on the <a href="{{ site.baseurl }}/docs">docs</a> and please join our <a href="https://gitter.im/lampepfl/dotty">Gitter channel</a>!</p> + </div> + <br/> + </div> +</section> + diff --git a/scala3doc/dotty-docs/docs/js/api-search.js b/scala3doc/dotty-docs/docs/js/api-search.js new file mode 100644 index 000000000000..4950d2067ffe --- /dev/null +++ b/scala3doc/dotty-docs/docs/js/api-search.js @@ -0,0 +1,93 @@ +/** This Webworker performs search on the API structure + * + * It can be used as follows: + * + * ```javascript + * var apiSearch = new Worker("<path to this file>"); + * apiSearch.postMessage({ + * "type": "setup", + * "search": "<search term>", + * "docs": <docs API> + * }); + * ``` + * + * It posts a few different messages to its parent: + * + * ```json + * { + * "type": "entityResult", + * "package": <parent package>, + * "entity": <entity> + * } + * + * { + * "type": "memberResult", + * "package": <parent package>, + * "parent": <parent entity>, + * "member": <entity> + * } + * ``` + */ +onmessage = function(e) { + var docs = e.data.docs; + var searchTerm = e.data.search; + + var regexForTerm = function(query) { + var escaped = query.replace(/([\.\*\+\?\|\(\)\[\]\\])/g, '\\$1'); + if (query.toLowerCase() != query) { + // Regexp that matches CamelCase subbits: "BiSe" is + // "[a-z]*Bi[a-z]*Se" and matches "BitSet", "ABitSet", ... 
+ return new RegExp(escaped.replace(/([A-Z])/g,"[a-z]*$1")); + } + else { // if query is all lower case make a normal case insensitive search + return new RegExp(escaped, "i"); + } + }; + + var searchRegex = regexForTerm(searchTerm); + + var filterPackages = function(entity) { + switch(entity.kind) { + case "val": + case "def": + case "type": + case "package": + return false; + default: + return true; + } + }; + + // look at this higher order function, such syntax: + var messageParentIfMatches = function(parent) { + return function(entity) { + var fullName = entity.path.join('.'); + + if (searchRegex.test(fullName)) { + postMessage({ + "type": "entityResult", + "package": parent, + "entity": entity + }); + } + + var searchChild = function(member) { + if (searchRegex.test(member.name)) { + postMessage({ + "type": "memberResult", + "package": parent, + "parent": entity, + "member": member, + }); + } + }; + entity.members.forEach(searchChild); + }; + }; + + docs.forEach(function(pack) { + pack.members + .filter(filterPackages) + .forEach(messageParentIfMatches(pack)); + }); +} diff --git a/scala3doc/dotty-docs/docs/js/bootstrap.min.js b/scala3doc/dotty-docs/docs/js/bootstrap.min.js new file mode 100644 index 000000000000..c4c0d1f95cd3 --- /dev/null +++ b/scala3doc/dotty-docs/docs/js/bootstrap.min.js @@ -0,0 +1,7 @@ +/*! + * Bootstrap v4.3.1 (https://getbootstrap.com/) + * Copyright 2011-2019 The Bootstrap Authors (https://github.com/twbs/bootstrap/graphs/contributors) + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + */ +!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports,require("jquery"),require("popper.js")):"function"==typeof define&&define.amd?define(["exports","jquery","popper.js"],e):e((t=t||self).bootstrap={},t.jQuery,t.Popper)}(this,function(t,g,u){"use strict";function i(t,e){for(var n=0;n<e.length;n++){var i=e[n];i.enumerable=i.enumerable||!1,i.configurable=!0,"value"in i&&(i.writable=!0),Object.defineProperty(t,i.key,i)}}function s(t,e,n){return e&&i(t.prototype,e),n&&i(t,n),t}function l(o){for(var t=1;t<arguments.length;t++){var r=null!=arguments[t]?arguments[t]:{},e=Object.keys(r);"function"==typeof Object.getOwnPropertySymbols&&(e=e.concat(Object.getOwnPropertySymbols(r).filter(function(t){return Object.getOwnPropertyDescriptor(r,t).enumerable}))),e.forEach(function(t){var e,n,i;e=o,i=r[n=t],n in e?Object.defineProperty(e,n,{value:i,enumerable:!0,configurable:!0,writable:!0}):e[n]=i})}return o}g=g&&g.hasOwnProperty("default")?g.default:g,u=u&&u.hasOwnProperty("default")?u.default:u;var e="transitionend";function n(t){var e=this,n=!1;return g(this).one(_.TRANSITION_END,function(){n=!0}),setTimeout(function(){n||_.triggerTransitionEnd(e)},t),this}var _={TRANSITION_END:"bsTransitionEnd",getUID:function(t){for(;t+=~~(1e6*Math.random()),document.getElementById(t););return t},getSelectorFromElement:function(t){var e=t.getAttribute("data-target");if(!e||"#"===e){var n=t.getAttribute("href");e=n&&"#"!==n?n.trim():""}try{return document.querySelector(e)?e:null}catch(t){return null}},getTransitionDurationFromElement:function(t){if(!t)return 0;var e=g(t).css("transition-duration"),n=g(t).css("transition-delay"),i=parseFloat(e),o=parseFloat(n);return i||o?(e=e.split(",")[0],n=n.split(",")[0],1e3*(parseFloat(e)+parseFloat(n))):0},reflow:function(t){return t.offsetHeight},triggerTransitionEnd:function(t){g(t).trigger(e)},supportsTransitionEnd:function(){return 
Boolean(e)},isElement:function(t){return(t[0]||t).nodeType},typeCheckConfig:function(t,e,n){for(var i in n)if(Object.prototype.hasOwnProperty.call(n,i)){var o=n[i],r=e[i],s=r&&_.isElement(r)?"element":(a=r,{}.toString.call(a).match(/\s([a-z]+)/i)[1].toLowerCase());if(!new RegExp(o).test(s))throw new Error(t.toUpperCase()+': Option "'+i+'" provided type "'+s+'" but expected type "'+o+'".')}var a},findShadowRoot:function(t){if(!document.documentElement.attachShadow)return null;if("function"!=typeof t.getRootNode)return t instanceof ShadowRoot?t:t.parentNode?_.findShadowRoot(t.parentNode):null;var e=t.getRootNode();return e instanceof ShadowRoot?e:null}};g.fn.emulateTransitionEnd=n,g.event.special[_.TRANSITION_END]={bindType:e,delegateType:e,handle:function(t){if(g(t.target).is(this))return t.handleObj.handler.apply(this,arguments)}};var o="alert",r="bs.alert",a="."+r,c=g.fn[o],h={CLOSE:"close"+a,CLOSED:"closed"+a,CLICK_DATA_API:"click"+a+".data-api"},f="alert",d="fade",m="show",p=function(){function i(t){this._element=t}var t=i.prototype;return t.close=function(t){var e=this._element;t&&(e=this._getRootElement(t)),this._triggerCloseEvent(e).isDefaultPrevented()||this._removeElement(e)},t.dispose=function(){g.removeData(this._element,r),this._element=null},t._getRootElement=function(t){var e=_.getSelectorFromElement(t),n=!1;return e&&(n=document.querySelector(e)),n||(n=g(t).closest("."+f)[0]),n},t._triggerCloseEvent=function(t){var e=g.Event(h.CLOSE);return g(t).trigger(e),e},t._removeElement=function(e){var n=this;if(g(e).removeClass(m),g(e).hasClass(d)){var t=_.getTransitionDurationFromElement(e);g(e).one(_.TRANSITION_END,function(t){return n._destroyElement(e,t)}).emulateTransitionEnd(t)}else this._destroyElement(e)},t._destroyElement=function(t){g(t).detach().trigger(h.CLOSED).remove()},i._jQueryInterface=function(n){return this.each(function(){var t=g(this),e=t.data(r);e||(e=new i(this),t.data(r,e)),"close"===n&&e[n](this)})},i._handleDismiss=function(e){return function(t){t&&t.preventDefault(),e.close(this)}},s(i,null,[{key:"VERSION",get:function(){return"4.3.1"}}]),i}();g(document).on(h.CLICK_DATA_API,'[data-dismiss="alert"]',p._handleDismiss(new p)),g.fn[o]=p._jQueryInterface,g.fn[o].Constructor=p,g.fn[o].noConflict=function(){return g.fn[o]=c,p._jQueryInterface};var v="button",y="bs.button",E="."+y,C=".data-api",T=g.fn[v],S="active",b="btn",I="focus",D='[data-toggle^="button"]',w='[data-toggle="buttons"]',A='input:not([type="hidden"])',N=".active",O=".btn",k={CLICK_DATA_API:"click"+E+C,FOCUS_BLUR_DATA_API:"focus"+E+C+" blur"+E+C},P=function(){function n(t){this._element=t}var t=n.prototype;return t.toggle=function(){var t=!0,e=!0,n=g(this._element).closest(w)[0];if(n){var i=this._element.querySelector(A);if(i){if("radio"===i.type)if(i.checked&&this._element.classList.contains(S))t=!1;else{var o=n.querySelector(N);o&&g(o).removeClass(S)}if(t){if(i.hasAttribute("disabled")||n.hasAttribute("disabled")||i.classList.contains("disabled")||n.classList.contains("disabled"))return;i.checked=!this._element.classList.contains(S),g(i).trigger("change")}i.focus(),e=!1}}e&&this._element.setAttribute("aria-pressed",!this._element.classList.contains(S)),t&&g(this._element).toggleClass(S)},t.dispose=function(){g.removeData(this._element,y),this._element=null},n._jQueryInterface=function(e){return this.each(function(){var t=g(this).data(y);t||(t=new 
n(this),g(this).data(y,t)),"toggle"===e&&t[e]()})},s(n,null,[{key:"VERSION",get:function(){return"4.3.1"}}]),n}();g(document).on(k.CLICK_DATA_API,D,function(t){t.preventDefault();var e=t.target;g(e).hasClass(b)||(e=g(e).closest(O)),P._jQueryInterface.call(g(e),"toggle")}).on(k.FOCUS_BLUR_DATA_API,D,function(t){var e=g(t.target).closest(O)[0];g(e).toggleClass(I,/^focus(in)?$/.test(t.type))}),g.fn[v]=P._jQueryInterface,g.fn[v].Constructor=P,g.fn[v].noConflict=function(){return g.fn[v]=T,P._jQueryInterface};var L="carousel",j="bs.carousel",H="."+j,R=".data-api",x=g.fn[L],F={interval:5e3,keyboard:!0,slide:!1,pause:"hover",wrap:!0,touch:!0},U={interval:"(number|boolean)",keyboard:"boolean",slide:"(boolean|string)",pause:"(string|boolean)",wrap:"boolean",touch:"boolean"},W="next",q="prev",M="left",K="right",Q={SLIDE:"slide"+H,SLID:"slid"+H,KEYDOWN:"keydown"+H,MOUSEENTER:"mouseenter"+H,MOUSELEAVE:"mouseleave"+H,TOUCHSTART:"touchstart"+H,TOUCHMOVE:"touchmove"+H,TOUCHEND:"touchend"+H,POINTERDOWN:"pointerdown"+H,POINTERUP:"pointerup"+H,DRAG_START:"dragstart"+H,LOAD_DATA_API:"load"+H+R,CLICK_DATA_API:"click"+H+R},B="carousel",V="active",Y="slide",z="carousel-item-right",X="carousel-item-left",$="carousel-item-next",G="carousel-item-prev",J="pointer-event",Z=".active",tt=".active.carousel-item",et=".carousel-item",nt=".carousel-item img",it=".carousel-item-next, .carousel-item-prev",ot=".carousel-indicators",rt="[data-slide], [data-slide-to]",st='[data-ride="carousel"]',at={TOUCH:"touch",PEN:"pen"},lt=function(){function r(t,e){this._items=null,this._interval=null,this._activeElement=null,this._isPaused=!1,this._isSliding=!1,this.touchTimeout=null,this.touchStartX=0,this.touchDeltaX=0,this._config=this._getConfig(e),this._element=t,this._indicatorsElement=this._element.querySelector(ot),this._touchSupported="ontouchstart"in document.documentElement||0<navigator.maxTouchPoints,this._pointerEvent=Boolean(window.PointerEvent||window.MSPointerEvent),this._addEventListeners()}var t=r.prototype;return t.next=function(){this._isSliding||this._slide(W)},t.nextWhenVisible=function(){!document.hidden&&g(this._element).is(":visible")&&"hidden"!==g(this._element).css("visibility")&&this.next()},t.prev=function(){this._isSliding||this._slide(q)},t.pause=function(t){t||(this._isPaused=!0),this._element.querySelector(it)&&(_.triggerTransitionEnd(this._element),this.cycle(!0)),clearInterval(this._interval),this._interval=null},t.cycle=function(t){t||(this._isPaused=!1),this._interval&&(clearInterval(this._interval),this._interval=null),this._config.interval&&!this._isPaused&&(this._interval=setInterval((document.visibilityState?this.nextWhenVisible:this.next).bind(this),this._config.interval))},t.to=function(t){var e=this;this._activeElement=this._element.querySelector(tt);var n=this._getItemIndex(this._activeElement);if(!(t>this._items.length-1||t<0))if(this._isSliding)g(this._element).one(Q.SLID,function(){return e.to(t)});else{if(n===t)return this.pause(),void this.cycle();var i=n<t?W:q;this._slide(i,this._items[t])}},t.dispose=function(){g(this._element).off(H),g.removeData(this._element,j),this._items=null,this._config=null,this._element=null,this._interval=null,this._isPaused=null,this._isSliding=null,this._activeElement=null,this._indicatorsElement=null},t._getConfig=function(t){return t=l({},F,t),_.typeCheckConfig(L,t,U),t},t._handleSwipe=function(){var t=Math.abs(this.touchDeltaX);if(!(t<=40)){var e=t/this.touchDeltaX;0<e&&this.prev(),e<0&&this.next()}},t._addEventListeners=function(){var 
e=this;this._config.keyboard&&g(this._element).on(Q.KEYDOWN,function(t){return e._keydown(t)}),"hover"===this._config.pause&&g(this._element).on(Q.MOUSEENTER,function(t){return e.pause(t)}).on(Q.MOUSELEAVE,function(t){return e.cycle(t)}),this._config.touch&&this._addTouchEventListeners()},t._addTouchEventListeners=function(){var n=this;if(this._touchSupported){var e=function(t){n._pointerEvent&&at[t.originalEvent.pointerType.toUpperCase()]?n.touchStartX=t.originalEvent.clientX:n._pointerEvent||(n.touchStartX=t.originalEvent.touches[0].clientX)},i=function(t){n._pointerEvent&&at[t.originalEvent.pointerType.toUpperCase()]&&(n.touchDeltaX=t.originalEvent.clientX-n.touchStartX),n._handleSwipe(),"hover"===n._config.pause&&(n.pause(),n.touchTimeout&&clearTimeout(n.touchTimeout),n.touchTimeout=setTimeout(function(t){return n.cycle(t)},500+n._config.interval))};g(this._element.querySelectorAll(nt)).on(Q.DRAG_START,function(t){return t.preventDefault()}),this._pointerEvent?(g(this._element).on(Q.POINTERDOWN,function(t){return e(t)}),g(this._element).on(Q.POINTERUP,function(t){return i(t)}),this._element.classList.add(J)):(g(this._element).on(Q.TOUCHSTART,function(t){return e(t)}),g(this._element).on(Q.TOUCHMOVE,function(t){var e;(e=t).originalEvent.touches&&1<e.originalEvent.touches.length?n.touchDeltaX=0:n.touchDeltaX=e.originalEvent.touches[0].clientX-n.touchStartX}),g(this._element).on(Q.TOUCHEND,function(t){return i(t)}))}},t._keydown=function(t){if(!/input|textarea/i.test(t.target.tagName))switch(t.which){case 37:t.preventDefault(),this.prev();break;case 39:t.preventDefault(),this.next()}},t._getItemIndex=function(t){return this._items=t&&t.parentNode?[].slice.call(t.parentNode.querySelectorAll(et)):[],this._items.indexOf(t)},t._getItemByDirection=function(t,e){var n=t===W,i=t===q,o=this._getItemIndex(e),r=this._items.length-1;if((i&&0===o||n&&o===r)&&!this._config.wrap)return e;var s=(o+(t===q?-1:1))%this._items.length;return-1===s?this._items[this._items.length-1]:this._items[s]},t._triggerSlideEvent=function(t,e){var n=this._getItemIndex(t),i=this._getItemIndex(this._element.querySelector(tt)),o=g.Event(Q.SLIDE,{relatedTarget:t,direction:e,from:i,to:n});return g(this._element).trigger(o),o},t._setActiveIndicatorElement=function(t){if(this._indicatorsElement){var e=[].slice.call(this._indicatorsElement.querySelectorAll(Z));g(e).removeClass(V);var n=this._indicatorsElement.children[this._getItemIndex(t)];n&&g(n).addClass(V)}},t._slide=function(t,e){var n,i,o,r=this,s=this._element.querySelector(tt),a=this._getItemIndex(s),l=e||s&&this._getItemByDirection(t,s),c=this._getItemIndex(l),h=Boolean(this._interval);if(o=t===W?(n=X,i=$,M):(n=z,i=G,K),l&&g(l).hasClass(V))this._isSliding=!1;else if(!this._triggerSlideEvent(l,o).isDefaultPrevented()&&s&&l){this._isSliding=!0,h&&this.pause(),this._setActiveIndicatorElement(l);var u=g.Event(Q.SLID,{relatedTarget:l,direction:o,from:a,to:c});if(g(this._element).hasClass(Y)){g(l).addClass(i),_.reflow(l),g(s).addClass(n),g(l).addClass(n);var f=parseInt(l.getAttribute("data-interval"),10);this._config.interval=f?(this._config.defaultInterval=this._config.defaultInterval||this._config.interval,f):this._config.defaultInterval||this._config.interval;var d=_.getTransitionDurationFromElement(s);g(s).one(_.TRANSITION_END,function(){g(l).removeClass(n+" "+i).addClass(V),g(s).removeClass(V+" "+i+" "+n),r._isSliding=!1,setTimeout(function(){return g(r._element).trigger(u)},0)}).emulateTransitionEnd(d)}else 
g(s).removeClass(V),g(l).addClass(V),this._isSliding=!1,g(this._element).trigger(u);h&&this.cycle()}},r._jQueryInterface=function(i){return this.each(function(){var t=g(this).data(j),e=l({},F,g(this).data());"object"==typeof i&&(e=l({},e,i));var n="string"==typeof i?i:e.slide;if(t||(t=new r(this,e),g(this).data(j,t)),"number"==typeof i)t.to(i);else if("string"==typeof n){if("undefined"==typeof t[n])throw new TypeError('No method named "'+n+'"');t[n]()}else e.interval&&e.ride&&(t.pause(),t.cycle())})},r._dataApiClickHandler=function(t){var e=_.getSelectorFromElement(this);if(e){var n=g(e)[0];if(n&&g(n).hasClass(B)){var i=l({},g(n).data(),g(this).data()),o=this.getAttribute("data-slide-to");o&&(i.interval=!1),r._jQueryInterface.call(g(n),i),o&&g(n).data(j).to(o),t.preventDefault()}}},s(r,null,[{key:"VERSION",get:function(){return"4.3.1"}},{key:"Default",get:function(){return F}}]),r}();g(document).on(Q.CLICK_DATA_API,rt,lt._dataApiClickHandler),g(window).on(Q.LOAD_DATA_API,function(){for(var t=[].slice.call(document.querySelectorAll(st)),e=0,n=t.length;e<n;e++){var i=g(t[e]);lt._jQueryInterface.call(i,i.data())}}),g.fn[L]=lt._jQueryInterface,g.fn[L].Constructor=lt,g.fn[L].noConflict=function(){return g.fn[L]=x,lt._jQueryInterface};var ct="collapse",ht="bs.collapse",ut="."+ht,ft=g.fn[ct],dt={toggle:!0,parent:""},gt={toggle:"boolean",parent:"(string|element)"},_t={SHOW:"show"+ut,SHOWN:"shown"+ut,HIDE:"hide"+ut,HIDDEN:"hidden"+ut,CLICK_DATA_API:"click"+ut+".data-api"},mt="show",pt="collapse",vt="collapsing",yt="collapsed",Et="width",Ct="height",Tt=".show, .collapsing",St='[data-toggle="collapse"]',bt=function(){function a(e,t){this._isTransitioning=!1,this._element=e,this._config=this._getConfig(t),this._triggerArray=[].slice.call(document.querySelectorAll('[data-toggle="collapse"][href="#'+e.id+'"],[data-toggle="collapse"][data-target="#'+e.id+'"]'));for(var n=[].slice.call(document.querySelectorAll(St)),i=0,o=n.length;i<o;i++){var r=n[i],s=_.getSelectorFromElement(r),a=[].slice.call(document.querySelectorAll(s)).filter(function(t){return t===e});null!==s&&0<a.length&&(this._selector=s,this._triggerArray.push(r))}this._parent=this._config.parent?this._getParent():null,this._config.parent||this._addAriaAndCollapsedClass(this._element,this._triggerArray),this._config.toggle&&this.toggle()}var t=a.prototype;return t.toggle=function(){g(this._element).hasClass(mt)?this.hide():this.show()},t.show=function(){var t,e,n=this;if(!this._isTransitioning&&!g(this._element).hasClass(mt)&&(this._parent&&0===(t=[].slice.call(this._parent.querySelectorAll(Tt)).filter(function(t){return"string"==typeof n._config.parent?t.getAttribute("data-parent")===n._config.parent:t.classList.contains(pt)})).length&&(t=null),!(t&&(e=g(t).not(this._selector).data(ht))&&e._isTransitioning))){var i=g.Event(_t.SHOW);if(g(this._element).trigger(i),!i.isDefaultPrevented()){t&&(a._jQueryInterface.call(g(t).not(this._selector),"hide"),e||g(t).data(ht,null));var o=this._getDimension();g(this._element).removeClass(pt).addClass(vt),this._element.style[o]=0,this._triggerArray.length&&g(this._triggerArray).removeClass(yt).attr("aria-expanded",!0),this.setTransitioning(!0);var 
r="scroll"+(o[0].toUpperCase()+o.slice(1)),s=_.getTransitionDurationFromElement(this._element);g(this._element).one(_.TRANSITION_END,function(){g(n._element).removeClass(vt).addClass(pt).addClass(mt),n._element.style[o]="",n.setTransitioning(!1),g(n._element).trigger(_t.SHOWN)}).emulateTransitionEnd(s),this._element.style[o]=this._element[r]+"px"}}},t.hide=function(){var t=this;if(!this._isTransitioning&&g(this._element).hasClass(mt)){var e=g.Event(_t.HIDE);if(g(this._element).trigger(e),!e.isDefaultPrevented()){var n=this._getDimension();this._element.style[n]=this._element.getBoundingClientRect()[n]+"px",_.reflow(this._element),g(this._element).addClass(vt).removeClass(pt).removeClass(mt);var i=this._triggerArray.length;if(0<i)for(var o=0;o<i;o++){var r=this._triggerArray[o],s=_.getSelectorFromElement(r);if(null!==s)g([].slice.call(document.querySelectorAll(s))).hasClass(mt)||g(r).addClass(yt).attr("aria-expanded",!1)}this.setTransitioning(!0);this._element.style[n]="";var a=_.getTransitionDurationFromElement(this._element);g(this._element).one(_.TRANSITION_END,function(){t.setTransitioning(!1),g(t._element).removeClass(vt).addClass(pt).trigger(_t.HIDDEN)}).emulateTransitionEnd(a)}}},t.setTransitioning=function(t){this._isTransitioning=t},t.dispose=function(){g.removeData(this._element,ht),this._config=null,this._parent=null,this._element=null,this._triggerArray=null,this._isTransitioning=null},t._getConfig=function(t){return(t=l({},dt,t)).toggle=Boolean(t.toggle),_.typeCheckConfig(ct,t,gt),t},t._getDimension=function(){return g(this._element).hasClass(Et)?Et:Ct},t._getParent=function(){var t,n=this;_.isElement(this._config.parent)?(t=this._config.parent,"undefined"!=typeof this._config.parent.jquery&&(t=this._config.parent[0])):t=document.querySelector(this._config.parent);var e='[data-toggle="collapse"][data-parent="'+this._config.parent+'"]',i=[].slice.call(t.querySelectorAll(e));return g(i).each(function(t,e){n._addAriaAndCollapsedClass(a._getTargetFromElement(e),[e])}),t},t._addAriaAndCollapsedClass=function(t,e){var n=g(t).hasClass(mt);e.length&&g(e).toggleClass(yt,!n).attr("aria-expanded",n)},a._getTargetFromElement=function(t){var e=_.getSelectorFromElement(t);return e?document.querySelector(e):null},a._jQueryInterface=function(i){return this.each(function(){var t=g(this),e=t.data(ht),n=l({},dt,t.data(),"object"==typeof i&&i?i:{});if(!e&&n.toggle&&/show|hide/.test(i)&&(n.toggle=!1),e||(e=new a(this,n),t.data(ht,e)),"string"==typeof i){if("undefined"==typeof e[i])throw new TypeError('No method named "'+i+'"');e[i]()}})},s(a,null,[{key:"VERSION",get:function(){return"4.3.1"}},{key:"Default",get:function(){return dt}}]),a}();g(document).on(_t.CLICK_DATA_API,St,function(t){"A"===t.currentTarget.tagName&&t.preventDefault();var n=g(this),e=_.getSelectorFromElement(this),i=[].slice.call(document.querySelectorAll(e));g(i).each(function(){var t=g(this),e=t.data(ht)?"toggle":n.data();bt._jQueryInterface.call(t,e)})}),g.fn[ct]=bt._jQueryInterface,g.fn[ct].Constructor=bt,g.fn[ct].noConflict=function(){return g.fn[ct]=ft,bt._jQueryInterface};var It="dropdown",Dt="bs.dropdown",wt="."+Dt,At=".data-api",Nt=g.fn[It],Ot=new RegExp("38|40|27"),kt={HIDE:"hide"+wt,HIDDEN:"hidden"+wt,SHOW:"show"+wt,SHOWN:"shown"+wt,CLICK:"click"+wt,CLICK_DATA_API:"click"+wt+At,KEYDOWN_DATA_API:"keydown"+wt+At,KEYUP_DATA_API:"keyup"+wt+At},Pt="disabled",Lt="show",jt="dropup",Ht="dropright",Rt="dropleft",xt="dropdown-menu-right",Ft="position-static",Ut='[data-toggle="dropdown"]',Wt=".dropdown 
form",qt=".dropdown-menu",Mt=".navbar-nav",Kt=".dropdown-menu .dropdown-item:not(.disabled):not(:disabled)",Qt="top-start",Bt="top-end",Vt="bottom-start",Yt="bottom-end",zt="right-start",Xt="left-start",$t={offset:0,flip:!0,boundary:"scrollParent",reference:"toggle",display:"dynamic"},Gt={offset:"(number|string|function)",flip:"boolean",boundary:"(string|element)",reference:"(string|element)",display:"string"},Jt=function(){function c(t,e){this._element=t,this._popper=null,this._config=this._getConfig(e),this._menu=this._getMenuElement(),this._inNavbar=this._detectNavbar(),this._addEventListeners()}var t=c.prototype;return t.toggle=function(){if(!this._element.disabled&&!g(this._element).hasClass(Pt)){var t=c._getParentFromElement(this._element),e=g(this._menu).hasClass(Lt);if(c._clearMenus(),!e){var n={relatedTarget:this._element},i=g.Event(kt.SHOW,n);if(g(t).trigger(i),!i.isDefaultPrevented()){if(!this._inNavbar){if("undefined"==typeof u)throw new TypeError("Bootstrap's dropdowns require Popper.js (https://popper.js.org/)");var o=this._element;"parent"===this._config.reference?o=t:_.isElement(this._config.reference)&&(o=this._config.reference,"undefined"!=typeof this._config.reference.jquery&&(o=this._config.reference[0])),"scrollParent"!==this._config.boundary&&g(t).addClass(Ft),this._popper=new u(o,this._menu,this._getPopperConfig())}"ontouchstart"in document.documentElement&&0===g(t).closest(Mt).length&&g(document.body).children().on("mouseover",null,g.noop),this._element.focus(),this._element.setAttribute("aria-expanded",!0),g(this._menu).toggleClass(Lt),g(t).toggleClass(Lt).trigger(g.Event(kt.SHOWN,n))}}}},t.show=function(){if(!(this._element.disabled||g(this._element).hasClass(Pt)||g(this._menu).hasClass(Lt))){var t={relatedTarget:this._element},e=g.Event(kt.SHOW,t),n=c._getParentFromElement(this._element);g(n).trigger(e),e.isDefaultPrevented()||(g(this._menu).toggleClass(Lt),g(n).toggleClass(Lt).trigger(g.Event(kt.SHOWN,t)))}},t.hide=function(){if(!this._element.disabled&&!g(this._element).hasClass(Pt)&&g(this._menu).hasClass(Lt)){var t={relatedTarget:this._element},e=g.Event(kt.HIDE,t),n=c._getParentFromElement(this._element);g(n).trigger(e),e.isDefaultPrevented()||(g(this._menu).toggleClass(Lt),g(n).toggleClass(Lt).trigger(g.Event(kt.HIDDEN,t)))}},t.dispose=function(){g.removeData(this._element,Dt),g(this._element).off(wt),this._element=null,(this._menu=null)!==this._popper&&(this._popper.destroy(),this._popper=null)},t.update=function(){this._inNavbar=this._detectNavbar(),null!==this._popper&&this._popper.scheduleUpdate()},t._addEventListeners=function(){var e=this;g(this._element).on(kt.CLICK,function(t){t.preventDefault(),t.stopPropagation(),e.toggle()})},t._getConfig=function(t){return t=l({},this.constructor.Default,g(this._element).data(),t),_.typeCheckConfig(It,t,this.constructor.DefaultType),t},t._getMenuElement=function(){if(!this._menu){var t=c._getParentFromElement(this._element);t&&(this._menu=t.querySelector(qt))}return this._menu},t._getPlacement=function(){var t=g(this._element.parentNode),e=Vt;return t.hasClass(jt)?(e=Qt,g(this._menu).hasClass(xt)&&(e=Bt)):t.hasClass(Ht)?e=zt:t.hasClass(Rt)?e=Xt:g(this._menu).hasClass(xt)&&(e=Yt),e},t._detectNavbar=function(){return 0<g(this._element).closest(".navbar").length},t._getOffset=function(){var e=this,t={};return"function"==typeof this._config.offset?t.fn=function(t){return t.offsets=l({},t.offsets,e._config.offset(t.offsets,e._element)||{}),t}:t.offset=this._config.offset,t},t._getPopperConfig=function(){var 
t={placement:this._getPlacement(),modifiers:{offset:this._getOffset(),flip:{enabled:this._config.flip},preventOverflow:{boundariesElement:this._config.boundary}}};return"static"===this._config.display&&(t.modifiers.applyStyle={enabled:!1}),t},c._jQueryInterface=function(e){return this.each(function(){var t=g(this).data(Dt);if(t||(t=new c(this,"object"==typeof e?e:null),g(this).data(Dt,t)),"string"==typeof e){if("undefined"==typeof t[e])throw new TypeError('No method named "'+e+'"');t[e]()}})},c._clearMenus=function(t){if(!t||3!==t.which&&("keyup"!==t.type||9===t.which))for(var e=[].slice.call(document.querySelectorAll(Ut)),n=0,i=e.length;n<i;n++){var o=c._getParentFromElement(e[n]),r=g(e[n]).data(Dt),s={relatedTarget:e[n]};if(t&&"click"===t.type&&(s.clickEvent=t),r){var a=r._menu;if(g(o).hasClass(Lt)&&!(t&&("click"===t.type&&/input|textarea/i.test(t.target.tagName)||"keyup"===t.type&&9===t.which)&&g.contains(o,t.target))){var l=g.Event(kt.HIDE,s);g(o).trigger(l),l.isDefaultPrevented()||("ontouchstart"in document.documentElement&&g(document.body).children().off("mouseover",null,g.noop),e[n].setAttribute("aria-expanded","false"),g(a).removeClass(Lt),g(o).removeClass(Lt).trigger(g.Event(kt.HIDDEN,s)))}}}},c._getParentFromElement=function(t){var e,n=_.getSelectorFromElement(t);return n&&(e=document.querySelector(n)),e||t.parentNode},c._dataApiKeydownHandler=function(t){if((/input|textarea/i.test(t.target.tagName)?!(32===t.which||27!==t.which&&(40!==t.which&&38!==t.which||g(t.target).closest(qt).length)):Ot.test(t.which))&&(t.preventDefault(),t.stopPropagation(),!this.disabled&&!g(this).hasClass(Pt))){var e=c._getParentFromElement(this),n=g(e).hasClass(Lt);if(n&&(!n||27!==t.which&&32!==t.which)){var i=[].slice.call(e.querySelectorAll(Kt));if(0!==i.length){var o=i.indexOf(t.target);38===t.which&&0<o&&o--,40===t.which&&o<i.length-1&&o++,o<0&&(o=0),i[o].focus()}}else{if(27===t.which){var r=e.querySelector(Ut);g(r).trigger("focus")}g(this).trigger("click")}}},s(c,null,[{key:"VERSION",get:function(){return"4.3.1"}},{key:"Default",get:function(){return $t}},{key:"DefaultType",get:function(){return Gt}}]),c}();g(document).on(kt.KEYDOWN_DATA_API,Ut,Jt._dataApiKeydownHandler).on(kt.KEYDOWN_DATA_API,qt,Jt._dataApiKeydownHandler).on(kt.CLICK_DATA_API+" "+kt.KEYUP_DATA_API,Jt._clearMenus).on(kt.CLICK_DATA_API,Ut,function(t){t.preventDefault(),t.stopPropagation(),Jt._jQueryInterface.call(g(this),"toggle")}).on(kt.CLICK_DATA_API,Wt,function(t){t.stopPropagation()}),g.fn[It]=Jt._jQueryInterface,g.fn[It].Constructor=Jt,g.fn[It].noConflict=function(){return g.fn[It]=Nt,Jt._jQueryInterface};var Zt="modal",te="bs.modal",ee="."+te,ne=g.fn[Zt],ie={backdrop:!0,keyboard:!0,focus:!0,show:!0},oe={backdrop:"(boolean|string)",keyboard:"boolean",focus:"boolean",show:"boolean"},re={HIDE:"hide"+ee,HIDDEN:"hidden"+ee,SHOW:"show"+ee,SHOWN:"shown"+ee,FOCUSIN:"focusin"+ee,RESIZE:"resize"+ee,CLICK_DISMISS:"click.dismiss"+ee,KEYDOWN_DISMISS:"keydown.dismiss"+ee,MOUSEUP_DISMISS:"mouseup.dismiss"+ee,MOUSEDOWN_DISMISS:"mousedown.dismiss"+ee,CLICK_DATA_API:"click"+ee+".data-api"},se="modal-dialog-scrollable",ae="modal-scrollbar-measure",le="modal-backdrop",ce="modal-open",he="fade",ue="show",fe=".modal-dialog",de=".modal-body",ge='[data-toggle="modal"]',_e='[data-dismiss="modal"]',me=".fixed-top, .fixed-bottom, .is-fixed, .sticky-top",pe=".sticky-top",ve=function(){function 
o(t,e){this._config=this._getConfig(e),this._element=t,this._dialog=t.querySelector(fe),this._backdrop=null,this._isShown=!1,this._isBodyOverflowing=!1,this._ignoreBackdropClick=!1,this._isTransitioning=!1,this._scrollbarWidth=0}var t=o.prototype;return t.toggle=function(t){return this._isShown?this.hide():this.show(t)},t.show=function(t){var e=this;if(!this._isShown&&!this._isTransitioning){g(this._element).hasClass(he)&&(this._isTransitioning=!0);var n=g.Event(re.SHOW,{relatedTarget:t});g(this._element).trigger(n),this._isShown||n.isDefaultPrevented()||(this._isShown=!0,this._checkScrollbar(),this._setScrollbar(),this._adjustDialog(),this._setEscapeEvent(),this._setResizeEvent(),g(this._element).on(re.CLICK_DISMISS,_e,function(t){return e.hide(t)}),g(this._dialog).on(re.MOUSEDOWN_DISMISS,function(){g(e._element).one(re.MOUSEUP_DISMISS,function(t){g(t.target).is(e._element)&&(e._ignoreBackdropClick=!0)})}),this._showBackdrop(function(){return e._showElement(t)}))}},t.hide=function(t){var e=this;if(t&&t.preventDefault(),this._isShown&&!this._isTransitioning){var n=g.Event(re.HIDE);if(g(this._element).trigger(n),this._isShown&&!n.isDefaultPrevented()){this._isShown=!1;var i=g(this._element).hasClass(he);if(i&&(this._isTransitioning=!0),this._setEscapeEvent(),this._setResizeEvent(),g(document).off(re.FOCUSIN),g(this._element).removeClass(ue),g(this._element).off(re.CLICK_DISMISS),g(this._dialog).off(re.MOUSEDOWN_DISMISS),i){var o=_.getTransitionDurationFromElement(this._element);g(this._element).one(_.TRANSITION_END,function(t){return e._hideModal(t)}).emulateTransitionEnd(o)}else this._hideModal()}}},t.dispose=function(){[window,this._element,this._dialog].forEach(function(t){return g(t).off(ee)}),g(document).off(re.FOCUSIN),g.removeData(this._element,te),this._config=null,this._element=null,this._dialog=null,this._backdrop=null,this._isShown=null,this._isBodyOverflowing=null,this._ignoreBackdropClick=null,this._isTransitioning=null,this._scrollbarWidth=null},t.handleUpdate=function(){this._adjustDialog()},t._getConfig=function(t){return t=l({},ie,t),_.typeCheckConfig(Zt,t,oe),t},t._showElement=function(t){var e=this,n=g(this._element).hasClass(he);this._element.parentNode&&this._element.parentNode.nodeType===Node.ELEMENT_NODE||document.body.appendChild(this._element),this._element.style.display="block",this._element.removeAttribute("aria-hidden"),this._element.setAttribute("aria-modal",!0),g(this._dialog).hasClass(se)?this._dialog.querySelector(de).scrollTop=0:this._element.scrollTop=0,n&&_.reflow(this._element),g(this._element).addClass(ue),this._config.focus&&this._enforceFocus();var i=g.Event(re.SHOWN,{relatedTarget:t}),o=function(){e._config.focus&&e._element.focus(),e._isTransitioning=!1,g(e._element).trigger(i)};if(n){var r=_.getTransitionDurationFromElement(this._dialog);g(this._dialog).one(_.TRANSITION_END,o).emulateTransitionEnd(r)}else o()},t._enforceFocus=function(){var e=this;g(document).off(re.FOCUSIN).on(re.FOCUSIN,function(t){document!==t.target&&e._element!==t.target&&0===g(e._element).has(t.target).length&&e._element.focus()})},t._setEscapeEvent=function(){var e=this;this._isShown&&this._config.keyboard?g(this._element).on(re.KEYDOWN_DISMISS,function(t){27===t.which&&(t.preventDefault(),e.hide())}):this._isShown||g(this._element).off(re.KEYDOWN_DISMISS)},t._setResizeEvent=function(){var e=this;this._isShown?g(window).on(re.RESIZE,function(t){return e.handleUpdate(t)}):g(window).off(re.RESIZE)},t._hideModal=function(){var 
t=this;this._element.style.display="none",this._element.setAttribute("aria-hidden",!0),this._element.removeAttribute("aria-modal"),this._isTransitioning=!1,this._showBackdrop(function(){g(document.body).removeClass(ce),t._resetAdjustments(),t._resetScrollbar(),g(t._element).trigger(re.HIDDEN)})},t._removeBackdrop=function(){this._backdrop&&(g(this._backdrop).remove(),this._backdrop=null)},t._showBackdrop=function(t){var e=this,n=g(this._element).hasClass(he)?he:"";if(this._isShown&&this._config.backdrop){if(this._backdrop=document.createElement("div"),this._backdrop.className=le,n&&this._backdrop.classList.add(n),g(this._backdrop).appendTo(document.body),g(this._element).on(re.CLICK_DISMISS,function(t){e._ignoreBackdropClick?e._ignoreBackdropClick=!1:t.target===t.currentTarget&&("static"===e._config.backdrop?e._element.focus():e.hide())}),n&&_.reflow(this._backdrop),g(this._backdrop).addClass(ue),!t)return;if(!n)return void t();var i=_.getTransitionDurationFromElement(this._backdrop);g(this._backdrop).one(_.TRANSITION_END,t).emulateTransitionEnd(i)}else if(!this._isShown&&this._backdrop){g(this._backdrop).removeClass(ue);var o=function(){e._removeBackdrop(),t&&t()};if(g(this._element).hasClass(he)){var r=_.getTransitionDurationFromElement(this._backdrop);g(this._backdrop).one(_.TRANSITION_END,o).emulateTransitionEnd(r)}else o()}else t&&t()},t._adjustDialog=function(){var t=this._element.scrollHeight>document.documentElement.clientHeight;!this._isBodyOverflowing&&t&&(this._element.style.paddingLeft=this._scrollbarWidth+"px"),this._isBodyOverflowing&&!t&&(this._element.style.paddingRight=this._scrollbarWidth+"px")},t._resetAdjustments=function(){this._element.style.paddingLeft="",this._element.style.paddingRight=""},t._checkScrollbar=function(){var t=document.body.getBoundingClientRect();this._isBodyOverflowing=t.left+t.right<window.innerWidth,this._scrollbarWidth=this._getScrollbarWidth()},t._setScrollbar=function(){var o=this;if(this._isBodyOverflowing){var t=[].slice.call(document.querySelectorAll(me)),e=[].slice.call(document.querySelectorAll(pe));g(t).each(function(t,e){var n=e.style.paddingRight,i=g(e).css("padding-right");g(e).data("padding-right",n).css("padding-right",parseFloat(i)+o._scrollbarWidth+"px")}),g(e).each(function(t,e){var n=e.style.marginRight,i=g(e).css("margin-right");g(e).data("margin-right",n).css("margin-right",parseFloat(i)-o._scrollbarWidth+"px")});var n=document.body.style.paddingRight,i=g(document.body).css("padding-right");g(document.body).data("padding-right",n).css("padding-right",parseFloat(i)+this._scrollbarWidth+"px")}g(document.body).addClass(ce)},t._resetScrollbar=function(){var t=[].slice.call(document.querySelectorAll(me));g(t).each(function(t,e){var n=g(e).data("padding-right");g(e).removeData("padding-right"),e.style.paddingRight=n||""});var e=[].slice.call(document.querySelectorAll(""+pe));g(e).each(function(t,e){var n=g(e).data("margin-right");"undefined"!=typeof n&&g(e).css("margin-right",n).removeData("margin-right")});var n=g(document.body).data("padding-right");g(document.body).removeData("padding-right"),document.body.style.paddingRight=n||""},t._getScrollbarWidth=function(){var t=document.createElement("div");t.className=ae,document.body.appendChild(t);var e=t.getBoundingClientRect().width-t.clientWidth;return document.body.removeChild(t),e},o._jQueryInterface=function(n,i){return this.each(function(){var t=g(this).data(te),e=l({},ie,g(this).data(),"object"==typeof n&&n?n:{});if(t||(t=new o(this,e),g(this).data(te,t)),"string"==typeof 
n){if("undefined"==typeof t[n])throw new TypeError('No method named "'+n+'"');t[n](i)}else e.show&&t.show(i)})},s(o,null,[{key:"VERSION",get:function(){return"4.3.1"}},{key:"Default",get:function(){return ie}}]),o}();g(document).on(re.CLICK_DATA_API,ge,function(t){var e,n=this,i=_.getSelectorFromElement(this);i&&(e=document.querySelector(i));var o=g(e).data(te)?"toggle":l({},g(e).data(),g(this).data());"A"!==this.tagName&&"AREA"!==this.tagName||t.preventDefault();var r=g(e).one(re.SHOW,function(t){t.isDefaultPrevented()||r.one(re.HIDDEN,function(){g(n).is(":visible")&&n.focus()})});ve._jQueryInterface.call(g(e),o,this)}),g.fn[Zt]=ve._jQueryInterface,g.fn[Zt].Constructor=ve,g.fn[Zt].noConflict=function(){return g.fn[Zt]=ne,ve._jQueryInterface};var ye=["background","cite","href","itemtype","longdesc","poster","src","xlink:href"],Ee={"*":["class","dir","id","lang","role",/^aria-[\w-]*$/i],a:["target","href","title","rel"],area:[],b:[],br:[],col:[],code:[],div:[],em:[],hr:[],h1:[],h2:[],h3:[],h4:[],h5:[],h6:[],i:[],img:["src","alt","title","width","height"],li:[],ol:[],p:[],pre:[],s:[],small:[],span:[],sub:[],sup:[],strong:[],u:[],ul:[]},Ce=/^(?:(?:https?|mailto|ftp|tel|file):|[^&:/?#]*(?:[/?#]|$))/gi,Te=/^data:(?:image\/(?:bmp|gif|jpeg|jpg|png|tiff|webp)|video\/(?:mpeg|mp4|ogg|webm)|audio\/(?:mp3|oga|ogg|opus));base64,[a-z0-9+/]+=*$/i;function Se(t,s,e){if(0===t.length)return t;if(e&&"function"==typeof e)return e(t);for(var n=(new window.DOMParser).parseFromString(t,"text/html"),a=Object.keys(s),l=[].slice.call(n.body.querySelectorAll("*")),i=function(t,e){var n=l[t],i=n.nodeName.toLowerCase();if(-1===a.indexOf(n.nodeName.toLowerCase()))return n.parentNode.removeChild(n),"continue";var o=[].slice.call(n.attributes),r=[].concat(s["*"]||[],s[i]||[]);o.forEach(function(t){(function(t,e){var n=t.nodeName.toLowerCase();if(-1!==e.indexOf(n))return-1===ye.indexOf(n)||Boolean(t.nodeValue.match(Ce)||t.nodeValue.match(Te));for(var i=e.filter(function(t){return t instanceof RegExp}),o=0,r=i.length;o<r;o++)if(n.match(i[o]))return!0;return!1})(t,r)||n.removeAttribute(t.nodeName)})},o=0,r=l.length;o<r;o++)i(o);return n.body.innerHTML}var be="tooltip",Ie="bs.tooltip",De="."+Ie,we=g.fn[be],Ae="bs-tooltip",Ne=new RegExp("(^|\\s)"+Ae+"\\S+","g"),Oe=["sanitize","whiteList","sanitizeFn"],ke={animation:"boolean",template:"string",title:"(string|element|function)",trigger:"string",delay:"(number|object)",html:"boolean",selector:"(string|boolean)",placement:"(string|function)",offset:"(number|string|function)",container:"(string|element|boolean)",fallbackPlacement:"(string|array)",boundary:"(string|element)",sanitize:"boolean",sanitizeFn:"(null|function)",whiteList:"object"},Pe={AUTO:"auto",TOP:"top",RIGHT:"right",BOTTOM:"bottom",LEFT:"left"},Le={animation:!0,template:'<div class="tooltip" role="tooltip"><div class="arrow"></div><div class="tooltip-inner"></div></div>',trigger:"hover focus",title:"",delay:0,html:!1,selector:!1,placement:"top",offset:0,container:!1,fallbackPlacement:"flip",boundary:"scrollParent",sanitize:!0,sanitizeFn:null,whiteList:Ee},je="show",He="out",Re={HIDE:"hide"+De,HIDDEN:"hidden"+De,SHOW:"show"+De,SHOWN:"shown"+De,INSERTED:"inserted"+De,CLICK:"click"+De,FOCUSIN:"focusin"+De,FOCUSOUT:"focusout"+De,MOUSEENTER:"mouseenter"+De,MOUSELEAVE:"mouseleave"+De},xe="fade",Fe="show",Ue=".tooltip-inner",We=".arrow",qe="hover",Me="focus",Ke="click",Qe="manual",Be=function(){function i(t,e){if("undefined"==typeof u)throw new TypeError("Bootstrap's tooltips require Popper.js 
(https://popper.js.org/)");this._isEnabled=!0,this._timeout=0,this._hoverState="",this._activeTrigger={},this._popper=null,this.element=t,this.config=this._getConfig(e),this.tip=null,this._setListeners()}var t=i.prototype;return t.enable=function(){this._isEnabled=!0},t.disable=function(){this._isEnabled=!1},t.toggleEnabled=function(){this._isEnabled=!this._isEnabled},t.toggle=function(t){if(this._isEnabled)if(t){var e=this.constructor.DATA_KEY,n=g(t.currentTarget).data(e);n||(n=new this.constructor(t.currentTarget,this._getDelegateConfig()),g(t.currentTarget).data(e,n)),n._activeTrigger.click=!n._activeTrigger.click,n._isWithActiveTrigger()?n._enter(null,n):n._leave(null,n)}else{if(g(this.getTipElement()).hasClass(Fe))return void this._leave(null,this);this._enter(null,this)}},t.dispose=function(){clearTimeout(this._timeout),g.removeData(this.element,this.constructor.DATA_KEY),g(this.element).off(this.constructor.EVENT_KEY),g(this.element).closest(".modal").off("hide.bs.modal"),this.tip&&g(this.tip).remove(),this._isEnabled=null,this._timeout=null,this._hoverState=null,(this._activeTrigger=null)!==this._popper&&this._popper.destroy(),this._popper=null,this.element=null,this.config=null,this.tip=null},t.show=function(){var e=this;if("none"===g(this.element).css("display"))throw new Error("Please use show on visible elements");var t=g.Event(this.constructor.Event.SHOW);if(this.isWithContent()&&this._isEnabled){g(this.element).trigger(t);var n=_.findShadowRoot(this.element),i=g.contains(null!==n?n:this.element.ownerDocument.documentElement,this.element);if(t.isDefaultPrevented()||!i)return;var o=this.getTipElement(),r=_.getUID(this.constructor.NAME);o.setAttribute("id",r),this.element.setAttribute("aria-describedby",r),this.setContent(),this.config.animation&&g(o).addClass(xe);var s="function"==typeof this.config.placement?this.config.placement.call(this,o,this.element):this.config.placement,a=this._getAttachment(s);this.addAttachmentClass(a);var l=this._getContainer();g(o).data(this.constructor.DATA_KEY,this),g.contains(this.element.ownerDocument.documentElement,this.tip)||g(o).appendTo(l),g(this.element).trigger(this.constructor.Event.INSERTED),this._popper=new u(this.element,o,{placement:a,modifiers:{offset:this._getOffset(),flip:{behavior:this.config.fallbackPlacement},arrow:{element:We},preventOverflow:{boundariesElement:this.config.boundary}},onCreate:function(t){t.originalPlacement!==t.placement&&e._handlePopperPlacementChange(t)},onUpdate:function(t){return e._handlePopperPlacementChange(t)}}),g(o).addClass(Fe),"ontouchstart"in document.documentElement&&g(document.body).children().on("mouseover",null,g.noop);var c=function(){e.config.animation&&e._fixTransition();var t=e._hoverState;e._hoverState=null,g(e.element).trigger(e.constructor.Event.SHOWN),t===He&&e._leave(null,e)};if(g(this.tip).hasClass(xe)){var h=_.getTransitionDurationFromElement(this.tip);g(this.tip).one(_.TRANSITION_END,c).emulateTransitionEnd(h)}else c()}},t.hide=function(t){var e=this,n=this.getTipElement(),i=g.Event(this.constructor.Event.HIDE),o=function(){e._hoverState!==je&&n.parentNode&&n.parentNode.removeChild(n),e._cleanTipClass(),e.element.removeAttribute("aria-describedby"),g(e.element).trigger(e.constructor.Event.HIDDEN),null!==e._popper&&e._popper.destroy(),t&&t()};if(g(this.element).trigger(i),!i.isDefaultPrevented()){if(g(n).removeClass(Fe),"ontouchstart"in 
document.documentElement&&g(document.body).children().off("mouseover",null,g.noop),this._activeTrigger[Ke]=!1,this._activeTrigger[Me]=!1,this._activeTrigger[qe]=!1,g(this.tip).hasClass(xe)){var r=_.getTransitionDurationFromElement(n);g(n).one(_.TRANSITION_END,o).emulateTransitionEnd(r)}else o();this._hoverState=""}},t.update=function(){null!==this._popper&&this._popper.scheduleUpdate()},t.isWithContent=function(){return Boolean(this.getTitle())},t.addAttachmentClass=function(t){g(this.getTipElement()).addClass(Ae+"-"+t)},t.getTipElement=function(){return this.tip=this.tip||g(this.config.template)[0],this.tip},t.setContent=function(){var t=this.getTipElement();this.setElementContent(g(t.querySelectorAll(Ue)),this.getTitle()),g(t).removeClass(xe+" "+Fe)},t.setElementContent=function(t,e){"object"!=typeof e||!e.nodeType&&!e.jquery?this.config.html?(this.config.sanitize&&(e=Se(e,this.config.whiteList,this.config.sanitizeFn)),t.html(e)):t.text(e):this.config.html?g(e).parent().is(t)||t.empty().append(e):t.text(g(e).text())},t.getTitle=function(){var t=this.element.getAttribute("data-original-title");return t||(t="function"==typeof this.config.title?this.config.title.call(this.element):this.config.title),t},t._getOffset=function(){var e=this,t={};return"function"==typeof this.config.offset?t.fn=function(t){return t.offsets=l({},t.offsets,e.config.offset(t.offsets,e.element)||{}),t}:t.offset=this.config.offset,t},t._getContainer=function(){return!1===this.config.container?document.body:_.isElement(this.config.container)?g(this.config.container):g(document).find(this.config.container)},t._getAttachment=function(t){return Pe[t.toUpperCase()]},t._setListeners=function(){var i=this;this.config.trigger.split(" ").forEach(function(t){if("click"===t)g(i.element).on(i.constructor.Event.CLICK,i.config.selector,function(t){return i.toggle(t)});else if(t!==Qe){var e=t===qe?i.constructor.Event.MOUSEENTER:i.constructor.Event.FOCUSIN,n=t===qe?i.constructor.Event.MOUSELEAVE:i.constructor.Event.FOCUSOUT;g(i.element).on(e,i.config.selector,function(t){return i._enter(t)}).on(n,i.config.selector,function(t){return i._leave(t)})}}),g(this.element).closest(".modal").on("hide.bs.modal",function(){i.element&&i.hide()}),this.config.selector?this.config=l({},this.config,{trigger:"manual",selector:""}):this._fixTitle()},t._fixTitle=function(){var t=typeof this.element.getAttribute("data-original-title");(this.element.getAttribute("title")||"string"!==t)&&(this.element.setAttribute("data-original-title",this.element.getAttribute("title")||""),this.element.setAttribute("title",""))},t._enter=function(t,e){var n=this.constructor.DATA_KEY;(e=e||g(t.currentTarget).data(n))||(e=new this.constructor(t.currentTarget,this._getDelegateConfig()),g(t.currentTarget).data(n,e)),t&&(e._activeTrigger["focusin"===t.type?Me:qe]=!0),g(e.getTipElement()).hasClass(Fe)||e._hoverState===je?e._hoverState=je:(clearTimeout(e._timeout),e._hoverState=je,e.config.delay&&e.config.delay.show?e._timeout=setTimeout(function(){e._hoverState===je&&e.show()},e.config.delay.show):e.show())},t._leave=function(t,e){var n=this.constructor.DATA_KEY;(e=e||g(t.currentTarget).data(n))||(e=new 
this.constructor(t.currentTarget,this._getDelegateConfig()),g(t.currentTarget).data(n,e)),t&&(e._activeTrigger["focusout"===t.type?Me:qe]=!1),e._isWithActiveTrigger()||(clearTimeout(e._timeout),e._hoverState=He,e.config.delay&&e.config.delay.hide?e._timeout=setTimeout(function(){e._hoverState===He&&e.hide()},e.config.delay.hide):e.hide())},t._isWithActiveTrigger=function(){for(var t in this._activeTrigger)if(this._activeTrigger[t])return!0;return!1},t._getConfig=function(t){var e=g(this.element).data();return Object.keys(e).forEach(function(t){-1!==Oe.indexOf(t)&&delete e[t]}),"number"==typeof(t=l({},this.constructor.Default,e,"object"==typeof t&&t?t:{})).delay&&(t.delay={show:t.delay,hide:t.delay}),"number"==typeof t.title&&(t.title=t.title.toString()),"number"==typeof t.content&&(t.content=t.content.toString()),_.typeCheckConfig(be,t,this.constructor.DefaultType),t.sanitize&&(t.template=Se(t.template,t.whiteList,t.sanitizeFn)),t},t._getDelegateConfig=function(){var t={};if(this.config)for(var e in this.config)this.constructor.Default[e]!==this.config[e]&&(t[e]=this.config[e]);return t},t._cleanTipClass=function(){var t=g(this.getTipElement()),e=t.attr("class").match(Ne);null!==e&&e.length&&t.removeClass(e.join(""))},t._handlePopperPlacementChange=function(t){var e=t.instance;this.tip=e.popper,this._cleanTipClass(),this.addAttachmentClass(this._getAttachment(t.placement))},t._fixTransition=function(){var t=this.getTipElement(),e=this.config.animation;null===t.getAttribute("x-placement")&&(g(t).removeClass(xe),this.config.animation=!1,this.hide(),this.show(),this.config.animation=e)},i._jQueryInterface=function(n){return this.each(function(){var t=g(this).data(Ie),e="object"==typeof n&&n;if((t||!/dispose|hide/.test(n))&&(t||(t=new i(this,e),g(this).data(Ie,t)),"string"==typeof n)){if("undefined"==typeof t[n])throw new TypeError('No method named "'+n+'"');t[n]()}})},s(i,null,[{key:"VERSION",get:function(){return"4.3.1"}},{key:"Default",get:function(){return Le}},{key:"NAME",get:function(){return be}},{key:"DATA_KEY",get:function(){return Ie}},{key:"Event",get:function(){return Re}},{key:"EVENT_KEY",get:function(){return De}},{key:"DefaultType",get:function(){return ke}}]),i}();g.fn[be]=Be._jQueryInterface,g.fn[be].Constructor=Be,g.fn[be].noConflict=function(){return g.fn[be]=we,Be._jQueryInterface};var Ve="popover",Ye="bs.popover",ze="."+Ye,Xe=g.fn[Ve],$e="bs-popover",Ge=new RegExp("(^|\\s)"+$e+"\\S+","g"),Je=l({},Be.Default,{placement:"right",trigger:"click",content:"",template:'<div class="popover" role="tooltip"><div class="arrow"></div><h3 class="popover-header"></h3><div class="popover-body"></div></div>'}),Ze=l({},Be.DefaultType,{content:"(string|element|function)"}),tn="fade",en="show",nn=".popover-header",on=".popover-body",rn={HIDE:"hide"+ze,HIDDEN:"hidden"+ze,SHOW:"show"+ze,SHOWN:"shown"+ze,INSERTED:"inserted"+ze,CLICK:"click"+ze,FOCUSIN:"focusin"+ze,FOCUSOUT:"focusout"+ze,MOUSEENTER:"mouseenter"+ze,MOUSELEAVE:"mouseleave"+ze},sn=function(t){var e,n;function i(){return t.apply(this,arguments)||this}n=t,(e=i).prototype=Object.create(n.prototype),(e.prototype.constructor=e).__proto__=n;var o=i.prototype;return o.isWithContent=function(){return this.getTitle()||this._getContent()},o.addAttachmentClass=function(t){g(this.getTipElement()).addClass($e+"-"+t)},o.getTipElement=function(){return this.tip=this.tip||g(this.config.template)[0],this.tip},o.setContent=function(){var t=g(this.getTipElement());this.setElementContent(t.find(nn),this.getTitle());var 
e=this._getContent();"function"==typeof e&&(e=e.call(this.element)),this.setElementContent(t.find(on),e),t.removeClass(tn+" "+en)},o._getContent=function(){return this.element.getAttribute("data-content")||this.config.content},o._cleanTipClass=function(){var t=g(this.getTipElement()),e=t.attr("class").match(Ge);null!==e&&0<e.length&&t.removeClass(e.join(""))},i._jQueryInterface=function(n){return this.each(function(){var t=g(this).data(Ye),e="object"==typeof n?n:null;if((t||!/dispose|hide/.test(n))&&(t||(t=new i(this,e),g(this).data(Ye,t)),"string"==typeof n)){if("undefined"==typeof t[n])throw new TypeError('No method named "'+n+'"');t[n]()}})},s(i,null,[{key:"VERSION",get:function(){return"4.3.1"}},{key:"Default",get:function(){return Je}},{key:"NAME",get:function(){return Ve}},{key:"DATA_KEY",get:function(){return Ye}},{key:"Event",get:function(){return rn}},{key:"EVENT_KEY",get:function(){return ze}},{key:"DefaultType",get:function(){return Ze}}]),i}(Be);g.fn[Ve]=sn._jQueryInterface,g.fn[Ve].Constructor=sn,g.fn[Ve].noConflict=function(){return g.fn[Ve]=Xe,sn._jQueryInterface};var an="scrollspy",ln="bs.scrollspy",cn="."+ln,hn=g.fn[an],un={offset:10,method:"auto",target:""},fn={offset:"number",method:"string",target:"(string|element)"},dn={ACTIVATE:"activate"+cn,SCROLL:"scroll"+cn,LOAD_DATA_API:"load"+cn+".data-api"},gn="dropdown-item",_n="active",mn='[data-spy="scroll"]',pn=".nav, .list-group",vn=".nav-link",yn=".nav-item",En=".list-group-item",Cn=".dropdown",Tn=".dropdown-item",Sn=".dropdown-toggle",bn="offset",In="position",Dn=function(){function n(t,e){var n=this;this._element=t,this._scrollElement="BODY"===t.tagName?window:t,this._config=this._getConfig(e),this._selector=this._config.target+" "+vn+","+this._config.target+" "+En+","+this._config.target+" "+Tn,this._offsets=[],this._targets=[],this._activeTarget=null,this._scrollHeight=0,g(this._scrollElement).on(dn.SCROLL,function(t){return n._process(t)}),this.refresh(),this._process()}var t=n.prototype;return t.refresh=function(){var e=this,t=this._scrollElement===this._scrollElement.window?bn:In,o="auto"===this._config.method?t:this._config.method,r=o===In?this._getScrollTop():0;this._offsets=[],this._targets=[],this._scrollHeight=this._getScrollHeight(),[].slice.call(document.querySelectorAll(this._selector)).map(function(t){var e,n=_.getSelectorFromElement(t);if(n&&(e=document.querySelector(n)),e){var i=e.getBoundingClientRect();if(i.width||i.height)return[g(e)[o]().top+r,n]}return null}).filter(function(t){return t}).sort(function(t,e){return t[0]-e[0]}).forEach(function(t){e._offsets.push(t[0]),e._targets.push(t[1])})},t.dispose=function(){g.removeData(this._element,ln),g(this._scrollElement).off(cn),this._element=null,this._scrollElement=null,this._config=null,this._selector=null,this._offsets=null,this._targets=null,this._activeTarget=null,this._scrollHeight=null},t._getConfig=function(t){if("string"!=typeof(t=l({},un,"object"==typeof t&&t?t:{})).target){var e=g(t.target).attr("id");e||(e=_.getUID(an),g(t.target).attr("id",e)),t.target="#"+e}return _.typeCheckConfig(an,t,fn),t},t._getScrollTop=function(){return this._scrollElement===window?this._scrollElement.pageYOffset:this._scrollElement.scrollTop},t._getScrollHeight=function(){return this._scrollElement.scrollHeight||Math.max(document.body.scrollHeight,document.documentElement.scrollHeight)},t._getOffsetHeight=function(){return this._scrollElement===window?window.innerHeight:this._scrollElement.getBoundingClientRect().height},t._process=function(){var 
t=this._getScrollTop()+this._config.offset,e=this._getScrollHeight(),n=this._config.offset+e-this._getOffsetHeight();if(this._scrollHeight!==e&&this.refresh(),n<=t){var i=this._targets[this._targets.length-1];this._activeTarget!==i&&this._activate(i)}else{if(this._activeTarget&&t<this._offsets[0]&&0<this._offsets[0])return this._activeTarget=null,void this._clear();for(var o=this._offsets.length;o--;){this._activeTarget!==this._targets[o]&&t>=this._offsets[o]&&("undefined"==typeof this._offsets[o+1]||t<this._offsets[o+1])&&this._activate(this._targets[o])}}},t._activate=function(e){this._activeTarget=e,this._clear();var t=this._selector.split(",").map(function(t){return t+'[data-target="'+e+'"],'+t+'[href="'+e+'"]'}),n=g([].slice.call(document.querySelectorAll(t.join(","))));n.hasClass(gn)?(n.closest(Cn).find(Sn).addClass(_n),n.addClass(_n)):(n.addClass(_n),n.parents(pn).prev(vn+", "+En).addClass(_n),n.parents(pn).prev(yn).children(vn).addClass(_n)),g(this._scrollElement).trigger(dn.ACTIVATE,{relatedTarget:e})},t._clear=function(){[].slice.call(document.querySelectorAll(this._selector)).filter(function(t){return t.classList.contains(_n)}).forEach(function(t){return t.classList.remove(_n)})},n._jQueryInterface=function(e){return this.each(function(){var t=g(this).data(ln);if(t||(t=new n(this,"object"==typeof e&&e),g(this).data(ln,t)),"string"==typeof e){if("undefined"==typeof t[e])throw new TypeError('No method named "'+e+'"');t[e]()}})},s(n,null,[{key:"VERSION",get:function(){return"4.3.1"}},{key:"Default",get:function(){return un}}]),n}();g(window).on(dn.LOAD_DATA_API,function(){for(var t=[].slice.call(document.querySelectorAll(mn)),e=t.length;e--;){var n=g(t[e]);Dn._jQueryInterface.call(n,n.data())}}),g.fn[an]=Dn._jQueryInterface,g.fn[an].Constructor=Dn,g.fn[an].noConflict=function(){return g.fn[an]=hn,Dn._jQueryInterface};var wn="bs.tab",An="."+wn,Nn=g.fn.tab,On={HIDE:"hide"+An,HIDDEN:"hidden"+An,SHOW:"show"+An,SHOWN:"shown"+An,CLICK_DATA_API:"click"+An+".data-api"},kn="dropdown-menu",Pn="active",Ln="disabled",jn="fade",Hn="show",Rn=".dropdown",xn=".nav, .list-group",Fn=".active",Un="> li > .active",Wn='[data-toggle="tab"], [data-toggle="pill"], [data-toggle="list"]',qn=".dropdown-toggle",Mn="> .dropdown-menu .active",Kn=function(){function i(t){this._element=t}var t=i.prototype;return t.show=function(){var n=this;if(!(this._element.parentNode&&this._element.parentNode.nodeType===Node.ELEMENT_NODE&&g(this._element).hasClass(Pn)||g(this._element).hasClass(Ln))){var t,i,e=g(this._element).closest(xn)[0],o=_.getSelectorFromElement(this._element);if(e){var r="UL"===e.nodeName||"OL"===e.nodeName?Un:Fn;i=(i=g.makeArray(g(e).find(r)))[i.length-1]}var s=g.Event(On.HIDE,{relatedTarget:this._element}),a=g.Event(On.SHOW,{relatedTarget:i});if(i&&g(i).trigger(s),g(this._element).trigger(a),!a.isDefaultPrevented()&&!s.isDefaultPrevented()){o&&(t=document.querySelector(o)),this._activate(this._element,e);var l=function(){var t=g.Event(On.HIDDEN,{relatedTarget:n._element}),e=g.Event(On.SHOWN,{relatedTarget:i});g(i).trigger(t),g(n._element).trigger(e)};t?this._activate(t,t.parentNode,l):l()}}},t.dispose=function(){g.removeData(this._element,wn),this._element=null},t._activate=function(t,e,n){var i=this,o=(!e||"UL"!==e.nodeName&&"OL"!==e.nodeName?g(e).children(Fn):g(e).find(Un))[0],r=n&&o&&g(o).hasClass(jn),s=function(){return i._transitionComplete(t,o,n)};if(o&&r){var a=_.getTransitionDurationFromElement(o);g(o).removeClass(Hn).one(_.TRANSITION_END,s).emulateTransitionEnd(a)}else 
s()},t._transitionComplete=function(t,e,n){if(e){g(e).removeClass(Pn);var i=g(e.parentNode).find(Mn)[0];i&&g(i).removeClass(Pn),"tab"===e.getAttribute("role")&&e.setAttribute("aria-selected",!1)}if(g(t).addClass(Pn),"tab"===t.getAttribute("role")&&t.setAttribute("aria-selected",!0),_.reflow(t),t.classList.contains(jn)&&t.classList.add(Hn),t.parentNode&&g(t.parentNode).hasClass(kn)){var o=g(t).closest(Rn)[0];if(o){var r=[].slice.call(o.querySelectorAll(qn));g(r).addClass(Pn)}t.setAttribute("aria-expanded",!0)}n&&n()},i._jQueryInterface=function(n){return this.each(function(){var t=g(this),e=t.data(wn);if(e||(e=new i(this),t.data(wn,e)),"string"==typeof n){if("undefined"==typeof e[n])throw new TypeError('No method named "'+n+'"');e[n]()}})},s(i,null,[{key:"VERSION",get:function(){return"4.3.1"}}]),i}();g(document).on(On.CLICK_DATA_API,Wn,function(t){t.preventDefault(),Kn._jQueryInterface.call(g(this),"show")}),g.fn.tab=Kn._jQueryInterface,g.fn.tab.Constructor=Kn,g.fn.tab.noConflict=function(){return g.fn.tab=Nn,Kn._jQueryInterface};var Qn="toast",Bn="bs.toast",Vn="."+Bn,Yn=g.fn[Qn],zn={CLICK_DISMISS:"click.dismiss"+Vn,HIDE:"hide"+Vn,HIDDEN:"hidden"+Vn,SHOW:"show"+Vn,SHOWN:"shown"+Vn},Xn="fade",$n="hide",Gn="show",Jn="showing",Zn={animation:"boolean",autohide:"boolean",delay:"number"},ti={animation:!0,autohide:!0,delay:500},ei='[data-dismiss="toast"]',ni=function(){function i(t,e){this._element=t,this._config=this._getConfig(e),this._timeout=null,this._setListeners()}var t=i.prototype;return t.show=function(){var t=this;g(this._element).trigger(zn.SHOW),this._config.animation&&this._element.classList.add(Xn);var e=function(){t._element.classList.remove(Jn),t._element.classList.add(Gn),g(t._element).trigger(zn.SHOWN),t._config.autohide&&t.hide()};if(this._element.classList.remove($n),this._element.classList.add(Jn),this._config.animation){var n=_.getTransitionDurationFromElement(this._element);g(this._element).one(_.TRANSITION_END,e).emulateTransitionEnd(n)}else e()},t.hide=function(t){var e=this;this._element.classList.contains(Gn)&&(g(this._element).trigger(zn.HIDE),t?this._close():this._timeout=setTimeout(function(){e._close()},this._config.delay))},t.dispose=function(){clearTimeout(this._timeout),this._timeout=null,this._element.classList.contains(Gn)&&this._element.classList.remove(Gn),g(this._element).off(zn.CLICK_DISMISS),g.removeData(this._element,Bn),this._element=null,this._config=null},t._getConfig=function(t){return t=l({},ti,g(this._element).data(),"object"==typeof t&&t?t:{}),_.typeCheckConfig(Qn,t,this.constructor.DefaultType),t},t._setListeners=function(){var t=this;g(this._element).on(zn.CLICK_DISMISS,ei,function(){return t.hide(!0)})},t._close=function(){var t=this,e=function(){t._element.classList.add($n),g(t._element).trigger(zn.HIDDEN)};if(this._element.classList.remove(Gn),this._config.animation){var n=_.getTransitionDurationFromElement(this._element);g(this._element).one(_.TRANSITION_END,e).emulateTransitionEnd(n)}else e()},i._jQueryInterface=function(n){return this.each(function(){var t=g(this),e=t.data(Bn);if(e||(e=new i(this,"object"==typeof n&&n),t.data(Bn,e)),"string"==typeof n){if("undefined"==typeof e[n])throw new TypeError('No method named "'+n+'"');e[n](this)}})},s(i,null,[{key:"VERSION",get:function(){return"4.3.1"}},{key:"DefaultType",get:function(){return Zn}},{key:"Default",get:function(){return ti}}]),i}();g.fn[Qn]=ni._jQueryInterface,g.fn[Qn].Constructor=ni,g.fn[Qn].noConflict=function(){return 
g.fn[Qn]=Yn,ni._jQueryInterface},function(){if("undefined"==typeof g)throw new TypeError("Bootstrap's JavaScript requires jQuery. jQuery must be included before Bootstrap's JavaScript.");var t=g.fn.jquery.split(" ")[0].split(".");if(t[0]<2&&t[1]<9||1===t[0]&&9===t[1]&&t[2]<1||4<=t[0])throw new Error("Bootstrap's JavaScript requires at least jQuery v1.9.1 but less than v4.0.0")}(),t.Util=_,t.Alert=p,t.Button=P,t.Carousel=lt,t.Collapse=bt,t.Dropdown=Jt,t.Modal=ve,t.Popover=sn,t.Scrollspy=Dn,t.Tab=Kn,t.Toast=ni,t.Tooltip=Be,Object.defineProperty(t,"__esModule",{value:!0})}); +//# sourceMappingURL=bootstrap.min.js.map \ No newline at end of file diff --git a/scala3doc/dotty-docs/docs/js/highlight.pack.js b/scala3doc/dotty-docs/docs/js/highlight.pack.js new file mode 100644 index 000000000000..134c2336e173 --- /dev/null +++ b/scala3doc/dotty-docs/docs/js/highlight.pack.js @@ -0,0 +1,2 @@ +/*! highlight.js v9.15.9 | BSD3 License | git.io/hljslicense */ +!function(e){var n="object"==typeof window&&window||"object"==typeof self&&self;"undefined"==typeof exports||exports.nodeType?n&&(n.hljs=e({}),"function"==typeof define&&define.amd&&define([],function(){return n.hljs})):e(exports)}(function(a){var f=[],u=Object.keys,N={},c={},n=/^(no-?highlight|plain|text)$/i,s=/\blang(?:uage)?-([\w-]+)\b/i,t=/((^(<[^>]+>|\t|)+|(?:\n)))/gm,r={case_insensitive:"cI",lexemes:"l",contains:"c",keywords:"k",subLanguage:"sL",className:"cN",begin:"b",beginKeywords:"bK",end:"e",endsWithParent:"eW",illegal:"i",excludeBegin:"eB",excludeEnd:"eE",returnBegin:"rB",returnEnd:"rE",relevance:"r",variants:"v",IDENT_RE:"IR",UNDERSCORE_IDENT_RE:"UIR",NUMBER_RE:"NR",C_NUMBER_RE:"CNR",BINARY_NUMBER_RE:"BNR",RE_STARTERS_RE:"RSR",BACKSLASH_ESCAPE:"BE",APOS_STRING_MODE:"ASM",QUOTE_STRING_MODE:"QSM",PHRASAL_WORDS_MODE:"PWM",C_LINE_COMMENT_MODE:"CLCM",C_BLOCK_COMMENT_MODE:"CBCM",HASH_COMMENT_MODE:"HCM",NUMBER_MODE:"NM",C_NUMBER_MODE:"CNM",BINARY_NUMBER_MODE:"BNM",CSS_NUMBER_MODE:"CSSNM",REGEXP_MODE:"RM",TITLE_MODE:"TM",UNDERSCORE_TITLE_MODE:"UTM",COMMENT:"C",beginRe:"bR",endRe:"eR",illegalRe:"iR",lexemesRe:"lR",terminators:"t",terminator_end:"tE"},b="</span>",h={classPrefix:"hljs-",tabReplace:null,useBR:!1,languages:void 0};function _(e){return e.replace(/&/g,"&amp;").replace(/</g,"&lt;").replace(/>/g,"&gt;")}function E(e){return e.nodeName.toLowerCase()}function v(e,n){var t=e&&e.exec(n);return t&&0===t.index}function l(e){return n.test(e)}function g(e){var n,t={},r=Array.prototype.slice.call(arguments,1);for(n in e)t[n]=e[n];return r.forEach(function(e){for(n in e)t[n]=e[n]}),t}function R(e){var a=[];return function e(n,t){for(var r=n.firstChild;r;r=r.nextSibling)3===r.nodeType?t+=r.nodeValue.length:1===r.nodeType&&(a.push({event:"start",offset:t,node:r}),t=e(r,t),E(r).match(/br|hr|img|input/)||a.push({event:"stop",offset:t,node:r}));return t}(e,0),a}function i(e){if(r&&!e.langApiRestored){for(var n in e.langApiRestored=!0,r)e[n]&&(e[r[n]]=e[n]);(e.c||[]).concat(e.v||[]).forEach(i)}}function m(o){function s(e){return e&&e.source||e}function c(e,n){return new RegExp(s(e),"m"+(o.cI?"i":"")+(n?"g":""))}!function n(t,e){if(!t.compiled){if(t.compiled=!0,t.k=t.k||t.bK,t.k){function r(t,e){o.cI&&(e=e.toLowerCase()),e.split(" ").forEach(function(e){var n=e.split("|");a[n[0]]=[t,n[1]?Number(n[1]):1]})}var a={};"string"==typeof t.k?r("keyword",t.k):u(t.k).forEach(function(e){r(e,t.k[e])}),t.k=a}t.lR=c(t.l||/\w+/,!0),e&&(t.bK&&(t.b="\\b("+t.bK.split(" 
").join("|")+")\\b"),t.b||(t.b=/\B|\b/),t.bR=c(t.b),t.endSameAsBegin&&(t.e=t.b),t.e||t.eW||(t.e=/\B|\b/),t.e&&(t.eR=c(t.e)),t.tE=s(t.e)||"",t.eW&&e.tE&&(t.tE+=(t.e?"|":"")+e.tE)),t.i&&(t.iR=c(t.i)),null==t.r&&(t.r=1),t.c||(t.c=[]),t.c=Array.prototype.concat.apply([],t.c.map(function(e){return function(n){return n.v&&!n.cached_variants&&(n.cached_variants=n.v.map(function(e){return g(n,{v:null},e)})),n.cached_variants||n.eW&&[g(n)]||[n]}("self"===e?t:e)})),t.c.forEach(function(e){n(e,t)}),t.starts&&n(t.starts,e);var i=t.c.map(function(e){return e.bK?"\\.?(?:"+e.b+")\\.?":e.b}).concat([t.tE,t.i]).map(s).filter(Boolean);t.t=i.length?c(function(e,n){for(var t=/\[(?:[^\\\]]|\\.)*\]|\(\??|\\([1-9][0-9]*)|\\./,r=0,a="",i=0;i<e.length;i++){var o=r,c=s(e[i]);for(0<i&&(a+=n);0<c.length;){var u=t.exec(c);if(null==u){a+=c;break}a+=c.substring(0,u.index),c=c.substring(u.index+u[0].length),"\\"==u[0][0]&&u[1]?a+="\\"+String(Number(u[1])+o):(a+=u[0],"("==u[0]&&r++)}}return a}(i,"|"),!0):{exec:function(){return null}}}}(o)}function C(e,n,i,t){function c(e,n,t,r){var a='<span class="'+(r?"":h.classPrefix);return e?(a+=e+'">')+n+(t?"":b):n}function o(){E+=null!=l.sL?function(){var e="string"==typeof l.sL;if(e&&!N[l.sL])return _(g);var n=e?C(l.sL,g,!0,f[l.sL]):O(g,l.sL.length?l.sL:void 0);return 0<l.r&&(R+=n.r),e&&(f[l.sL]=n.top),c(n.language,n.value,!1,!0)}():function(){var e,n,t,r,a,i,o;if(!l.k)return _(g);for(r="",n=0,l.lR.lastIndex=0,t=l.lR.exec(g);t;)r+=_(g.substring(n,t.index)),a=l,i=t,void 0,o=s.cI?i[0].toLowerCase():i[0],(e=a.k.hasOwnProperty(o)&&a.k[o])?(R+=e[1],r+=c(e[0],_(t[0]))):r+=_(t[0]),n=l.lR.lastIndex,t=l.lR.exec(g);return r+_(g.substr(n))}(),g=""}function u(e){E+=e.cN?c(e.cN,"",!0):"",l=Object.create(e,{parent:{value:l}})}function r(e,n){if(g+=e,null==n)return o(),0;var t=function(e,n){var t,r,a;for(t=0,r=n.c.length;t<r;t++)if(v(n.c[t].bR,e))return n.c[t].endSameAsBegin&&(n.c[t].eR=(a=n.c[t].bR.exec(e)[0],new RegExp(a.replace(/[-\/\\^$*+?.()|[\]{}]/g,"\\$&"),"m"))),n.c[t]}(n,l);if(t)return t.skip?g+=n:(t.eB&&(g+=n),o(),t.rB||t.eB||(g=n)),u(t),t.rB?0:n.length;var r=function e(n,t){if(v(n.eR,t)){for(;n.endsParent&&n.parent;)n=n.parent;return n}if(n.eW)return e(n.parent,t)}(l,n);if(r){var a=l;for(a.skip?g+=n:(a.rE||a.eE||(g+=n),o(),a.eE&&(g=n));l.cN&&(E+=b),l.skip||l.sL||(R+=l.r),(l=l.parent)!==r.parent;);return r.starts&&(r.endSameAsBegin&&(r.starts.eR=r.eR),u(r.starts)),a.rE?0:n.length}if(function(e,n){return!i&&v(n.iR,e)}(n,l))throw new Error('Illegal lexeme "'+n+'" for mode "'+(l.cN||"<unnamed>")+'"');return g+=n,n.length||1}var s=B(e);if(!s)throw new Error('Unknown language: "'+e+'"');m(s);var a,l=t||s,f={},E="";for(a=l;a!==s;a=a.parent)a.cN&&(E=c(a.cN,"",!0)+E);var g="",R=0;try{for(var d,p,M=0;l.t.lastIndex=M,d=l.t.exec(n);)p=r(n.substring(M,d.index),d[0]),M=d.index+p;for(r(n.substr(M)),a=l;a.parent;a=a.parent)a.cN&&(E+=b);return{r:R,value:E,language:e,top:l}}catch(e){if(e.message&&-1!==e.message.indexOf("Illegal"))return{r:0,value:_(n)};throw e}}function O(t,e){e=e||h.languages||u(N);var r={r:0,value:_(t)},a=r;return e.filter(B).filter(M).forEach(function(e){var n=C(e,t,!1);n.language=e,n.r>a.r&&(a=n),n.r>r.r&&(a=r,r=n)}),a.language&&(r.second_best=a),r}function d(e){return h.tabReplace||h.useBR?e.replace(t,function(e,n){return h.useBR&&"\n"===e?"<br>":h.tabReplace?n.replace(/\t/g,h.tabReplace):""}):e}function o(e){var n,t,r,a,i,o=function(e){var n,t,r,a,i=e.className+" ";if(i+=e.parentNode?e.parentNode.className:"",t=s.exec(i))return 
B(t[1])?t[1]:"no-highlight";for(n=0,r=(i=i.split(/\s+/)).length;n<r;n++)if(l(a=i[n])||B(a))return a}(e);l(o)||(h.useBR?(n=document.createElementNS("http://www.w3.org/1999/xhtml","div")).innerHTML=e.innerHTML.replace(/\n/g,"").replace(/<br[ \/]*>/g,"\n"):n=e,i=n.textContent,r=o?C(o,i,!0):O(i),(t=R(n)).length&&((a=document.createElementNS("http://www.w3.org/1999/xhtml","div")).innerHTML=r.value,r.value=function(e,n,t){var r=0,a="",i=[];function o(){return e.length&&n.length?e[0].offset!==n[0].offset?e[0].offset<n[0].offset?e:n:"start"===n[0].event?e:n:e.length?e:n}function c(e){a+="<"+E(e)+f.map.call(e.attributes,function(e){return" "+e.nodeName+'="'+_(e.value).replace('"',"&quot;")+'"'}).join("")+">"}function u(e){a+="</"+E(e)+">"}function s(e){("start"===e.event?c:u)(e.node)}for(;e.length||n.length;){var l=o();if(a+=_(t.substring(r,l[0].offset)),r=l[0].offset,l===e){for(i.reverse().forEach(u);s(l.splice(0,1)[0]),(l=o())===e&&l.length&&l[0].offset===r;);i.reverse().forEach(c)}else"start"===l[0].event?i.push(l[0].node):i.pop(),s(l.splice(0,1)[0])}return a+_(t.substr(r))}(t,R(a),i)),r.value=d(r.value),e.innerHTML=r.value,e.className=function(e,n,t){var r=n?c[n]:t,a=[e.trim()];return e.match(/\bhljs\b/)||a.push("hljs"),-1===e.indexOf(r)&&a.push(r),a.join(" ").trim()}(e.className,o,r.language),e.result={language:r.language,re:r.r},r.second_best&&(e.second_best={language:r.second_best.language,re:r.second_best.r}))}function p(){if(!p.called){p.called=!0;var e=document.querySelectorAll("pre code");f.forEach.call(e,o)}}function B(e){return e=(e||"").toLowerCase(),N[e]||N[c[e]]}function M(e){var n=B(e);return n&&!n.disableAutodetect}return a.highlight=C,a.highlightAuto=O,a.fixMarkup=d,a.highlightBlock=o,a.configure=function(e){h=g(h,e)},a.initHighlighting=p,a.initHighlightingOnLoad=function(){addEventListener("DOMContentLoaded",p,!1),addEventListener("load",p,!1)},a.registerLanguage=function(n,e){var t=N[n]=e(a);i(t),t.aliases&&t.aliases.forEach(function(e){c[e]=n})},a.listLanguages=function(){return u(N)},a.getLanguage=B,a.autoDetection=M,a.inherit=g,a.IR=a.IDENT_RE="[a-zA-Z]\\w*",a.UIR=a.UNDERSCORE_IDENT_RE="[a-zA-Z_]\\w*",a.NR=a.NUMBER_RE="\\b\\d+(\\.\\d+)?",a.CNR=a.C_NUMBER_RE="(-?)(\\b0[xX][a-fA-F0-9]+|(\\b\\d+(\\.\\d*)?|\\.\\d+)([eE][-+]?\\d+)?)",a.BNR=a.BINARY_NUMBER_RE="\\b(0b[01]+)",a.RSR=a.RE_STARTERS_RE="!|!=|!==|%|%=|&|&&|&=|\\*|\\*=|\\+|\\+=|,|-|-=|/=|/|:|;|<<|<<=|<=|<|===|==|=|>>>=|>>=|>=|>>>|>>|>|\\?|\\[|\\{|\\(|\\^|\\^=|\\||\\|=|\\|\\||~",a.BE=a.BACKSLASH_ESCAPE={b:"\\\\[\\s\\S]",r:0},a.ASM=a.APOS_STRING_MODE={cN:"string",b:"'",e:"'",i:"\\n",c:[a.BE]},a.QSM=a.QUOTE_STRING_MODE={cN:"string",b:'"',e:'"',i:"\\n",c:[a.BE]},a.PWM=a.PHRASAL_WORDS_MODE={b:/\b(a|an|the|are|I'm|isn't|don't|doesn't|won't|but|just|should|pretty|simply|enough|gonna|going|wtf|so|such|will|you|your|they|like|more)\b/},a.C=a.COMMENT=function(e,n,t){var r=a.inherit({cN:"comment",b:e,e:n,c:[]},t||{});return 
r.c.push(a.PWM),r.c.push({cN:"doctag",b:"(?:TODO|FIXME|NOTE|BUG|XXX):",r:0}),r},a.CLCM=a.C_LINE_COMMENT_MODE=a.C("//","$"),a.CBCM=a.C_BLOCK_COMMENT_MODE=a.C("/\\*","\\*/"),a.HCM=a.HASH_COMMENT_MODE=a.C("#","$"),a.NM=a.NUMBER_MODE={cN:"number",b:a.NR,r:0},a.CNM=a.C_NUMBER_MODE={cN:"number",b:a.CNR,r:0},a.BNM=a.BINARY_NUMBER_MODE={cN:"number",b:a.BNR,r:0},a.CSSNM=a.CSS_NUMBER_MODE={cN:"number",b:a.NR+"(%|em|ex|ch|rem|vw|vh|vmin|vmax|cm|mm|in|pt|pc|px|deg|grad|rad|turn|s|ms|Hz|kHz|dpi|dpcm|dppx)?",r:0},a.RM=a.REGEXP_MODE={cN:"regexp",b:/\//,e:/\/[gimuy]*/,i:/\n/,c:[a.BE,{b:/\[/,e:/\]/,r:0,c:[a.BE]}]},a.TM=a.TITLE_MODE={cN:"title",b:a.IR,r:0},a.UTM=a.UNDERSCORE_TITLE_MODE={cN:"title",b:a.UIR,r:0},a.METHOD_GUARD={b:"\\.\\s*"+a.UIR,r:0},a});hljs.registerLanguage("json",function(e){var i={literal:"true false null"},n=[e.QSM,e.CNM],r={e:",",eW:!0,eE:!0,c:n,k:i},t={b:"{",e:"}",c:[{cN:"attr",b:/"/,e:/"/,c:[e.BE],i:"\\n"},e.inherit(r,{b:/:/})],i:"\\S"},c={b:"\\[",e:"\\]",c:[e.inherit(r)],i:"\\S"};return n.splice(n.length,0,t,c),{c:n,k:i,i:"\\S"}});hljs.registerLanguage("ocaml",function(e){return{aliases:["ml"],k:{keyword:"and as assert asr begin class constraint do done downto else end exception external for fun function functor if in include inherit! inherit initializer land lazy let lor lsl lsr lxor match method!|10 method mod module mutable new object of open! open or private rec sig struct then to try type val! val virtual when while with parser value",built_in:"array bool bytes char exn|5 float int int32 int64 list lazy_t|5 nativeint|5 string unit in_channel out_channel ref",literal:"true false"},i:/\/\/|>>/,l:"[a-z_]\\w*!?",c:[{cN:"literal",b:"\\[(\\|\\|)?\\]|\\(\\)",r:0},e.C("\\(\\*","\\*\\)",{c:["self"]}),{cN:"symbol",b:"'[A-Za-z_](?!')[\\w']*"},{cN:"type",b:"`[A-Z][\\w']*"},{cN:"type",b:"\\b[A-Z][\\w']*",r:0},{b:"[a-z_]\\w*'[\\w']*",r:0},e.inherit(e.ASM,{cN:"string",r:0}),e.inherit(e.QSM,{i:null}),{cN:"number",b:"\\b(0[xX][a-fA-F0-9_]+[Lln]?|0[oO][0-7_]+[Lln]?|0[bB][01_]+[Lln]?|[0-9][0-9_]*([Lln]|(\\.[0-9_]*)?([eE][-+]?[0-9_]+)?)?)",r:0},{b:/[-=]>/}]}});hljs.registerLanguage("java",function(e){var a="false synchronized int abstract float private char boolean var static null if const for true while long strictfp finally protected import native final void enum else break transient catch instanceof byte super volatile case assert short package default double public try this switch continue throws protected public private module requires exports do",t={cN:"number",b:"\\b(0[bB]([01]+[01_]+[01]+|[01]+)|0[xX]([a-fA-F0-9]+[a-fA-F0-9_]+[a-fA-F0-9]+|[a-fA-F0-9]+)|(([\\d]+[\\d_]+[\\d]+|[\\d]+)(\\.([\\d]+[\\d_]+[\\d]+|[\\d]+))?|\\.([\\d]+[\\d_]+[\\d]+|[\\d]+))([eE][-+]?\\d+)?)[lLfF]?",r:0};return{aliases:["jsp"],k:a,i:/<\/|#/,c:[e.C("/\\*\\*","\\*/",{r:0,c:[{b:/\w+@/,r:0},{cN:"doctag",b:"@[A-Za-z]+"}]}),e.CLCM,e.CBCM,e.ASM,e.QSM,{cN:"class",bK:"class interface",e:/[{;=]/,eE:!0,k:"class interface",i:/[:"\[\]]/,c:[{bK:"extends implements"},e.UTM]},{bK:"new throw return else",r:0},{cN:"function",b:"([À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*(<[À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*(\\s*,\\s*[À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*)*>)?\\s+)+"+e.UIR+"\\s*\\(",rB:!0,e:/[{;=]/,eE:!0,k:a,c:[{b:e.UIR+"\\s*\\(",rB:!0,r:0,c:[e.UTM]},{cN:"params",b:/\(/,e:/\)/,k:a,r:0,c:[e.ASM,e.QSM,e.CNM,e.CBCM]},e.CLCM,e.CBCM]},t,{cN:"meta",b:"@[A-Za-z]+"}]}});hljs.registerLanguage("ini",function(e){var 
b={cN:"string",c:[e.BE],v:[{b:"'''",e:"'''",r:10},{b:'"""',e:'"""',r:10},{b:'"',e:'"'},{b:"'",e:"'"}]};return{aliases:["toml"],cI:!0,i:/\S/,c:[e.C(";","$"),e.HCM,{cN:"section",b:/^\s*\[+/,e:/\]+/},{b:/^[a-z0-9\[\]_\.-]+\s*=\s*/,e:"$",rB:!0,c:[{cN:"attr",b:/[a-z0-9\[\]_\.-]+/},{b:/=/,eW:!0,r:0,c:[e.C(";","$"),e.HCM,{cN:"literal",b:/\bon|off|true|false|yes|no\b/},{cN:"variable",v:[{b:/\$[\w\d"][\w\d_]*/},{b:/\$\{(.*?)}/}]},b,{cN:"number",b:/([\+\-]+)?[\d]+_[\d_]+/},e.NM]}]}]}});hljs.registerLanguage("xml",function(s){var e={eW:!0,i:/</,r:0,c:[{cN:"attr",b:"[A-Za-z0-9\\._:-]+",r:0},{b:/=\s*/,r:0,c:[{cN:"string",endsParent:!0,v:[{b:/"/,e:/"/},{b:/'/,e:/'/},{b:/[^\s"'=<>`]+/}]}]}]};return{aliases:["html","xhtml","rss","atom","xjb","xsd","xsl","plist"],cI:!0,c:[{cN:"meta",b:"<!DOCTYPE",e:">",r:10,c:[{b:"\\[",e:"\\]"}]},s.C("\x3c!--","--\x3e",{r:10}),{b:"<\\!\\[CDATA\\[",e:"\\]\\]>",r:10},{cN:"meta",b:/<\?xml/,e:/\?>/,r:10},{b:/<\?(php)?/,e:/\?>/,sL:"php",c:[{b:"/\\*",e:"\\*/",skip:!0},{b:'b"',e:'"',skip:!0},{b:"b'",e:"'",skip:!0},s.inherit(s.ASM,{i:null,cN:null,c:null,skip:!0}),s.inherit(s.QSM,{i:null,cN:null,c:null,skip:!0})]},{cN:"tag",b:"<style(?=\\s|>|$)",e:">",k:{name:"style"},c:[e],starts:{e:"</style>",rE:!0,sL:["css","xml"]}},{cN:"tag",b:"<script(?=\\s|>|$)",e:">",k:{name:"script"},c:[e],starts:{e:"<\/script>",rE:!0,sL:["actionscript","javascript","handlebars","xml"]}},{cN:"tag",b:"</?",e:"/?>",c:[{cN:"name",b:/[^\/><\s]+/,r:0},e]}]}});hljs.registerLanguage("markdown",function(e){return{aliases:["md","mkdown","mkd"],c:[{cN:"section",v:[{b:"^#{1,6}",e:"$"},{b:"^.+?\\n[=-]{2,}$"}]},{b:"<",e:">",sL:"xml",r:0},{cN:"bullet",b:"^\\s*([*+-]|(\\d+\\.))\\s+"},{cN:"strong",b:"[*_]{2}.+?[*_]{2}"},{cN:"emphasis",v:[{b:"\\*.+?\\*"},{b:"_.+?_",r:0}]},{cN:"quote",b:"^>\\s+",e:"$"},{cN:"code",v:[{b:"^```w*s*$",e:"^```s*$"},{b:"`.+?`"},{b:"^( {4}|\t)",e:"$",r:0}]},{b:"^[-\\*]{3,}",e:"$"},{b:"\\[.+?\\][\\(\\[].*?[\\)\\]]",rB:!0,c:[{cN:"string",b:"\\[",e:"\\]",eB:!0,rE:!0,r:0},{cN:"link",b:"\\]\\(",e:"\\)",eB:!0,eE:!0},{cN:"symbol",b:"\\]\\[",e:"\\]",eB:!0,eE:!0}],r:10},{b:/^\[[^\n]+\]:/,rB:!0,c:[{cN:"symbol",b:/\[/,e:/\]/,eB:!0,eE:!0},{cN:"link",b:/:\s*/,e:/$/,eB:!0}]}]}});hljs.registerLanguage("properties",function(r){var t="[ \\t\\f]*",e="("+t+"[:=]"+t+"|[ \\t\\f]+)",s="([^\\\\\\W:= \\t\\f\\n]|\\\\.)+",n="([^\\\\:= \\t\\f\\n]|\\\\.)+",a={e:e,r:0,starts:{cN:"string",e:/$/,r:0,c:[{b:"\\\\\\n"}]}};return{cI:!0,i:/\S/,c:[r.C("^\\s*[!#]","$"),{b:s+e,rB:!0,c:[{cN:"attr",b:s,endsParent:!0,r:0}],starts:a},{b:n+e,rB:!0,r:0,c:[{cN:"meta",b:n,endsParent:!0,r:0}],starts:a},{cN:"attr",r:0,b:n+t+"$"}]}});hljs.registerLanguage("ruby",function(e){var b="[a-zA-Z_]\\w*[!?=]?|[-+~]\\@|<<|>>|=~|===?|<=>|[<>]=?|\\*\\*|[-/+%^&*~`|]|\\[\\]=?",r={keyword:"and then defined module in return redo if BEGIN retry end for self when next until do begin unless END rescue else break undef not super class case require yield alias while ensure elsif or include attr_reader attr_writer attr_accessor",literal:"true false 
nil"},c={cN:"doctag",b:"@[A-Za-z]+"},a={b:"#<",e:">"},s=[e.C("#","$",{c:[c]}),e.C("^\\=begin","^\\=end",{c:[c],r:10}),e.C("^__END__","\\n$")],n={cN:"subst",b:"#\\{",e:"}",k:r},t={cN:"string",c:[e.BE,n],v:[{b:/'/,e:/'/},{b:/"/,e:/"/},{b:/`/,e:/`/},{b:"%[qQwWx]?\\(",e:"\\)"},{b:"%[qQwWx]?\\[",e:"\\]"},{b:"%[qQwWx]?{",e:"}"},{b:"%[qQwWx]?<",e:">"},{b:"%[qQwWx]?/",e:"/"},{b:"%[qQwWx]?%",e:"%"},{b:"%[qQwWx]?-",e:"-"},{b:"%[qQwWx]?\\|",e:"\\|"},{b:/\B\?(\\\d{1,3}|\\x[A-Fa-f0-9]{1,2}|\\u[A-Fa-f0-9]{4}|\\?\S)\b/},{b:/<<(-?)\w+$/,e:/^\s*\w+$/}]},i={cN:"params",b:"\\(",e:"\\)",endsParent:!0,k:r},d=[t,a,{cN:"class",bK:"class module",e:"$|;",i:/=/,c:[e.inherit(e.TM,{b:"[A-Za-z_]\\w*(::\\w+)*(\\?|\\!)?"}),{b:"<\\s*",c:[{b:"("+e.IR+"::)?"+e.IR}]}].concat(s)},{cN:"function",bK:"def",e:"$|;",c:[e.inherit(e.TM,{b:b}),i].concat(s)},{b:e.IR+"::"},{cN:"symbol",b:e.UIR+"(\\!|\\?)?:",r:0},{cN:"symbol",b:":(?!\\s)",c:[t,{b:b}],r:0},{cN:"number",b:"(\\b0[0-7_]+)|(\\b0x[0-9a-fA-F_]+)|(\\b[1-9][0-9_]*(\\.[0-9_]+)?)|[0_]\\b",r:0},{b:"(\\$\\W)|((\\$|\\@\\@?)(\\w+))"},{cN:"params",b:/\|/,e:/\|/,k:r},{b:"("+e.RSR+"|unless)\\s*",k:"unless",c:[a,{cN:"regexp",c:[e.BE,n],i:/\n/,v:[{b:"/",e:"/[a-z]*"},{b:"%r{",e:"}[a-z]*"},{b:"%r\\(",e:"\\)[a-z]*"},{b:"%r!",e:"![a-z]*"},{b:"%r\\[",e:"\\][a-z]*"}]}].concat(s),r:0}].concat(s);n.c=d;var l=[{b:/^\s*=>/,starts:{e:"$",c:i.c=d}},{cN:"meta",b:"^([>?]>|[\\w#]+\\(\\w+\\):\\d+:\\d+>|(\\w+-)?\\d+\\.\\d+\\.\\d(p\\d+)?[^>]+>)",starts:{e:"$",c:d}}];return{aliases:["rb","gemspec","podspec","thor","irb"],k:r,i:/\/\*/,c:s.concat(l).concat(d)}});hljs.registerLanguage("yaml",function(e){var b="true false yes no null",a="^[ \\-]*",r="[a-zA-Z_][\\w\\-]*",t={cN:"attr",v:[{b:a+r+":"},{b:a+'"'+r+'":'},{b:a+"'"+r+"':"}]},c={cN:"string",r:0,v:[{b:/'/,e:/'/},{b:/"/,e:/"/},{b:/\S+/}],c:[e.BE,{cN:"template-variable",v:[{b:"{{",e:"}}"},{b:"%{",e:"}"}]}]};return{cI:!0,aliases:["yml","YAML","yaml"],c:[t,{cN:"meta",b:"^---s*$",r:10},{cN:"string",b:"[\\|>] *$",rE:!0,c:c.c,e:t.v[0].b},{b:"<%[%=-]?",e:"[%-]?%>",sL:"ruby",eB:!0,eE:!0,r:0},{cN:"type",b:"!"+e.UIR},{cN:"type",b:"!!"+e.UIR},{cN:"meta",b:"&"+e.UIR+"$"},{cN:"meta",b:"\\*"+e.UIR+"$"},{cN:"bullet",b:"^ *-",r:0},e.HCM,{bK:b,k:{literal:b}},e.CNM,c]}});hljs.registerLanguage("bash",function(e){var t={cN:"variable",v:[{b:/\$[\w\d#@][\w\d_]*/},{b:/\$\{(.*?)}/}]},s={cN:"string",b:/"/,e:/"/,c:[e.BE,t,{cN:"variable",b:/\$\(/,e:/\)/,c:[e.BE]}]};return{aliases:["sh","zsh"],l:/\b-?[a-z\._]+\b/,k:{keyword:"if then else elif fi for while in do done case esac function",literal:"true false",built_in:"break cd continue eval exec exit export getopts hash pwd readonly return shift test times trap umask unset alias bind builtin caller command declare echo enable help let local logout mapfile printf read readarray source type typeset ulimit unalias set shopt autoload bg bindkey bye cap chdir clone comparguments compcall compctl compdescribe compfiles compgroups compquote comptags comptry compvalues dirs disable disown echotc echoti emulate fc fg float functions getcap getln history integer jobs kill limit log noglob popd print pushd pushln rehash sched setcap setopt stat suspend ttyctl unfunction unhash unlimit unsetopt vared wait whence where which zcompile zformat zftp zle zmodload zparseopts zprof zpty zregexparse zsocket zstyle ztcp",_:"-ne -eq -lt -gt -f -d -e -s -l 
-a"},c:[{cN:"meta",b:/^#![^\n]+sh\s*$/,r:10},{cN:"function",b:/\w[\w\d_]*\s*\(\s*\)\s*\{/,rB:!0,c:[e.inherit(e.TM,{b:/\w[\w\d_]*/})],r:0},e.HCM,s,{cN:"",b:/\\"/},{cN:"string",b:/'/,e:/'/},t]}});hljs.registerLanguage("scala",function(e){var t={cN:"subst",v:[{b:"\\$[A-Za-z0-9_]+"},{b:"\\${",e:"}"}]},a={cN:"string",v:[{b:'"',e:'"',i:"\\n",c:[e.BE]},{b:'"""',e:'"""',r:10},{b:'[a-z]+"',e:'"',i:"\\n",c:[e.BE,t]},{cN:"string",b:'[a-z]+"""',e:'"""',c:[t],r:10}]},r={cN:"type",b:"\\b[A-Z][A-Za-z0-9_]*",r:0},c={cN:"title",b:/[^0-9\n\t "'(),.`{}\[\]:;][^\n\t "'(),.`{}\[\]:;]+|[^0-9\n\t "'(),.`{}\[\]:;=]/,r:0},i={cN:"class",bK:"class object trait type",e:/[:={\[\n;]/,eE:!0,c:[{bK:"extends with",r:10},{b:/\[/,e:/\]/,eB:!0,eE:!0,r:0,c:[r]},{cN:"params",b:/\(/,e:/\)/,eB:!0,eE:!0,r:0,c:[r]},c]},s={cN:"function",bK:"def",e:/[:={\[(\n;]/,eE:!0,c:[c]};return{k:{literal:"true false null",keyword:"type yield lazy override def with val var sealed abstract private trait object if forSome for while throw finally protected extends import final return else break new catch super class case package default try this match continue throws implicit"},c:[e.CLCM,e.CBCM,a,{cN:"symbol",b:"'\\w[\\w\\d_]*(?!')"},r,s,i,e.CNM,{cN:"meta",b:"@[A-Za-z]+"}]}}); \ No newline at end of file diff --git a/scala3doc/dotty-docs/docs/js/jquery.min.js b/scala3doc/dotty-docs/docs/js/jquery.min.js new file mode 100644 index 000000000000..a1c07fd803b5 --- /dev/null +++ b/scala3doc/dotty-docs/docs/js/jquery.min.js @@ -0,0 +1,2 @@ +/*! jQuery v3.4.1 | (c) JS Foundation and other contributors | jquery.org/license */ +!function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(C,e){"use strict";var t=[],E=C.document,r=Object.getPrototypeOf,s=t.slice,g=t.concat,u=t.push,i=t.indexOf,n={},o=n.toString,v=n.hasOwnProperty,a=v.toString,l=a.call(Object),y={},m=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType},x=function(e){return null!=e&&e===e.window},c={type:!0,src:!0,nonce:!0,noModule:!0};function b(e,t,n){var r,i,o=(n=n||E).createElement("script");if(o.text=e,t)for(r in c)(i=t[r]||t.getAttribute&&t.getAttribute(r))&&o.setAttribute(r,i);n.head.appendChild(o).parentNode.removeChild(o)}function w(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?n[o.call(e)]||"object":typeof e}var f="3.4.1",k=function(e,t){return new k.fn.init(e,t)},p=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g;function d(e){var t=!!e&&"length"in e&&e.length,n=w(e);return!m(e)&&!x(e)&&("array"===n||0===t||"number"==typeof t&&0<t&&t-1 in e)}k.fn=k.prototype={jquery:f,constructor:k,length:0,toArray:function(){return s.call(this)},get:function(e){return null==e?s.call(this):e<0?this[e+this.length]:this[e]},pushStack:function(e){var t=k.merge(this.constructor(),e);return t.prevObject=this,t},each:function(e){return k.each(this,e)},map:function(n){return this.pushStack(k.map(this,function(e,t){return n.call(e,t,e)}))},slice:function(){return this.pushStack(s.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(e){var t=this.length,n=+e+(e<0?t:0);return this.pushStack(0<=n&&n<t?[this[n]]:[])},end:function(){return this.prevObject||this.constructor()},push:u,sort:t.sort,splice:t.splice},k.extend=k.fn.extend=function(){var e,t,n,r,i,o,a=arguments[0]||{},s=1,u=arguments.length,l=!1;for("boolean"==typeof 
a&&(l=a,a=arguments[s]||{},s++),"object"==typeof a||m(a)||(a={}),s===u&&(a=this,s--);s<u;s++)if(null!=(e=arguments[s]))for(t in e)r=e[t],"__proto__"!==t&&a!==r&&(l&&r&&(k.isPlainObject(r)||(i=Array.isArray(r)))?(n=a[t],o=i&&!Array.isArray(n)?[]:i||k.isPlainObject(n)?n:{},i=!1,a[t]=k.extend(l,o,r)):void 0!==r&&(a[t]=r));return a},k.extend({expando:"jQuery"+(f+Math.random()).replace(/\D/g,""),isReady:!0,error:function(e){throw new Error(e)},noop:function(){},isPlainObject:function(e){var t,n;return!(!e||"[object Object]"!==o.call(e))&&(!(t=r(e))||"function"==typeof(n=v.call(t,"constructor")&&t.constructor)&&a.call(n)===l)},isEmptyObject:function(e){var t;for(t in e)return!1;return!0},globalEval:function(e,t){b(e,{nonce:t&&t.nonce})},each:function(e,t){var n,r=0;if(d(e)){for(n=e.length;r<n;r++)if(!1===t.call(e[r],r,e[r]))break}else for(r in e)if(!1===t.call(e[r],r,e[r]))break;return e},trim:function(e){return null==e?"":(e+"").replace(p,"")},makeArray:function(e,t){var n=t||[];return null!=e&&(d(Object(e))?k.merge(n,"string"==typeof e?[e]:e):u.call(n,e)),n},inArray:function(e,t,n){return null==t?-1:i.call(t,e,n)},merge:function(e,t){for(var n=+t.length,r=0,i=e.length;r<n;r++)e[i++]=t[r];return e.length=i,e},grep:function(e,t,n){for(var r=[],i=0,o=e.length,a=!n;i<o;i++)!t(e[i],i)!==a&&r.push(e[i]);return r},map:function(e,t,n){var r,i,o=0,a=[];if(d(e))for(r=e.length;o<r;o++)null!=(i=t(e[o],o,n))&&a.push(i);else for(o in e)null!=(i=t(e[o],o,n))&&a.push(i);return g.apply([],a)},guid:1,support:y}),"function"==typeof Symbol&&(k.fn[Symbol.iterator]=t[Symbol.iterator]),k.each("Boolean Number String Function Array Date RegExp Object Error Symbol".split(" "),function(e,t){n["[object "+t+"]"]=t.toLowerCase()});var h=function(n){var e,d,b,o,i,h,f,g,w,u,l,T,C,a,E,v,s,c,y,k="sizzle"+1*new Date,m=n.document,S=0,r=0,p=ue(),x=ue(),N=ue(),A=ue(),D=function(e,t){return e===t&&(l=!0),0},j={}.hasOwnProperty,t=[],q=t.pop,L=t.push,H=t.push,O=t.slice,P=function(e,t){for(var n=0,r=e.length;n<r;n++)if(e[n]===t)return n;return-1},R="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",M="[\\x20\\t\\r\\n\\f]",I="(?:\\\\.|[\\w-]|[^\0-\\xa0])+",W="\\["+M+"*("+I+")(?:"+M+"*([*^$|!~]?=)"+M+"*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|("+I+"))|)"+M+"*\\]",$=":("+I+")(?:\\((('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|((?:\\\\.|[^\\\\()[\\]]|"+W+")*)|.*)\\)|)",F=new RegExp(M+"+","g"),B=new RegExp("^"+M+"+|((?:^|[^\\\\])(?:\\\\.)*)"+M+"+$","g"),_=new RegExp("^"+M+"*,"+M+"*"),z=new RegExp("^"+M+"*([>+~]|"+M+")"+M+"*"),U=new RegExp(M+"|>"),X=new RegExp($),V=new RegExp("^"+I+"$"),G={ID:new RegExp("^#("+I+")"),CLASS:new RegExp("^\\.("+I+")"),TAG:new RegExp("^("+I+"|[*])"),ATTR:new RegExp("^"+W),PSEUDO:new RegExp("^"+$),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+M+"*(even|odd|(([+-]|)(\\d*)n|)"+M+"*(?:([+-]|)"+M+"*(\\d+)|))"+M+"*\\)|)","i"),bool:new RegExp("^(?:"+R+")$","i"),needsContext:new RegExp("^"+M+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+M+"*((?:-\\d)?\\d*)"+M+"*\\)|)(?=[^-]|$)","i")},Y=/HTML$/i,Q=/^(?:input|select|textarea|button)$/i,J=/^h\d$/i,K=/^[^{]+\{\s*\[native \w/,Z=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,ee=/[+~]/,te=new RegExp("\\\\([\\da-f]{1,6}"+M+"?|("+M+")|.)","ig"),ne=function(e,t,n){var r="0x"+t-65536;return r!=r||n?t:r<0?String.fromCharCode(r+65536):String.fromCharCode(r>>10|55296,1023&r|56320)},re=/([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g,ie=function(e,t){return 
t?"\0"===e?"\ufffd":e.slice(0,-1)+"\\"+e.charCodeAt(e.length-1).toString(16)+" ":"\\"+e},oe=function(){T()},ae=be(function(e){return!0===e.disabled&&"fieldset"===e.nodeName.toLowerCase()},{dir:"parentNode",next:"legend"});try{H.apply(t=O.call(m.childNodes),m.childNodes),t[m.childNodes.length].nodeType}catch(e){H={apply:t.length?function(e,t){L.apply(e,O.call(t))}:function(e,t){var n=e.length,r=0;while(e[n++]=t[r++]);e.length=n-1}}}function se(t,e,n,r){var i,o,a,s,u,l,c,f=e&&e.ownerDocument,p=e?e.nodeType:9;if(n=n||[],"string"!=typeof t||!t||1!==p&&9!==p&&11!==p)return n;if(!r&&((e?e.ownerDocument||e:m)!==C&&T(e),e=e||C,E)){if(11!==p&&(u=Z.exec(t)))if(i=u[1]){if(9===p){if(!(a=e.getElementById(i)))return n;if(a.id===i)return n.push(a),n}else if(f&&(a=f.getElementById(i))&&y(e,a)&&a.id===i)return n.push(a),n}else{if(u[2])return H.apply(n,e.getElementsByTagName(t)),n;if((i=u[3])&&d.getElementsByClassName&&e.getElementsByClassName)return H.apply(n,e.getElementsByClassName(i)),n}if(d.qsa&&!A[t+" "]&&(!v||!v.test(t))&&(1!==p||"object"!==e.nodeName.toLowerCase())){if(c=t,f=e,1===p&&U.test(t)){(s=e.getAttribute("id"))?s=s.replace(re,ie):e.setAttribute("id",s=k),o=(l=h(t)).length;while(o--)l[o]="#"+s+" "+xe(l[o]);c=l.join(","),f=ee.test(t)&&ye(e.parentNode)||e}try{return H.apply(n,f.querySelectorAll(c)),n}catch(e){A(t,!0)}finally{s===k&&e.removeAttribute("id")}}}return g(t.replace(B,"$1"),e,n,r)}function ue(){var r=[];return function e(t,n){return r.push(t+" ")>b.cacheLength&&delete e[r.shift()],e[t+" "]=n}}function le(e){return e[k]=!0,e}function ce(e){var t=C.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function fe(e,t){var n=e.split("|"),r=n.length;while(r--)b.attrHandle[n[r]]=t}function pe(e,t){var n=t&&e,r=n&&1===e.nodeType&&1===t.nodeType&&e.sourceIndex-t.sourceIndex;if(r)return r;if(n)while(n=n.nextSibling)if(n===t)return-1;return e?1:-1}function de(t){return function(e){return"input"===e.nodeName.toLowerCase()&&e.type===t}}function he(n){return function(e){var t=e.nodeName.toLowerCase();return("input"===t||"button"===t)&&e.type===n}}function ge(t){return function(e){return"form"in e?e.parentNode&&!1===e.disabled?"label"in e?"label"in e.parentNode?e.parentNode.disabled===t:e.disabled===t:e.isDisabled===t||e.isDisabled!==!t&&ae(e)===t:e.disabled===t:"label"in e&&e.disabled===t}}function ve(a){return le(function(o){return o=+o,le(function(e,t){var n,r=a([],e.length,o),i=r.length;while(i--)e[n=r[i]]&&(e[n]=!(t[n]=e[n]))})})}function ye(e){return e&&"undefined"!=typeof e.getElementsByTagName&&e}for(e in d=se.support={},i=se.isXML=function(e){var t=e.namespaceURI,n=(e.ownerDocument||e).documentElement;return!Y.test(t||n&&n.nodeName||"HTML")},T=se.setDocument=function(e){var t,n,r=e?e.ownerDocument||e:m;return r!==C&&9===r.nodeType&&r.documentElement&&(a=(C=r).documentElement,E=!i(C),m!==C&&(n=C.defaultView)&&n.top!==n&&(n.addEventListener?n.addEventListener("unload",oe,!1):n.attachEvent&&n.attachEvent("onunload",oe)),d.attributes=ce(function(e){return e.className="i",!e.getAttribute("className")}),d.getElementsByTagName=ce(function(e){return e.appendChild(C.createComment("")),!e.getElementsByTagName("*").length}),d.getElementsByClassName=K.test(C.getElementsByClassName),d.getById=ce(function(e){return a.appendChild(e).id=k,!C.getElementsByName||!C.getElementsByName(k).length}),d.getById?(b.filter.ID=function(e){var t=e.replace(te,ne);return function(e){return 
e.getAttribute("id")===t}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n=t.getElementById(e);return n?[n]:[]}}):(b.filter.ID=function(e){var n=e.replace(te,ne);return function(e){var t="undefined"!=typeof e.getAttributeNode&&e.getAttributeNode("id");return t&&t.value===n}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n,r,i,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];i=t.getElementsByName(e),r=0;while(o=i[r++])if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),b.find.TAG=d.getElementsByTagName?function(e,t){return"undefined"!=typeof t.getElementsByTagName?t.getElementsByTagName(e):d.qsa?t.querySelectorAll(e):void 0}:function(e,t){var n,r=[],i=0,o=t.getElementsByTagName(e);if("*"===e){while(n=o[i++])1===n.nodeType&&r.push(n);return r}return o},b.find.CLASS=d.getElementsByClassName&&function(e,t){if("undefined"!=typeof t.getElementsByClassName&&E)return t.getElementsByClassName(e)},s=[],v=[],(d.qsa=K.test(C.querySelectorAll))&&(ce(function(e){a.appendChild(e).innerHTML="<a id='"+k+"'></a><select id='"+k+"-\r\\' msallowcapture=''><option selected=''></option></select>",e.querySelectorAll("[msallowcapture^='']").length&&v.push("[*^$]="+M+"*(?:''|\"\")"),e.querySelectorAll("[selected]").length||v.push("\\["+M+"*(?:value|"+R+")"),e.querySelectorAll("[id~="+k+"-]").length||v.push("~="),e.querySelectorAll(":checked").length||v.push(":checked"),e.querySelectorAll("a#"+k+"+*").length||v.push(".#.+[+~]")}),ce(function(e){e.innerHTML="<a href='' disabled='disabled'></a><select disabled='disabled'><option/></select>";var t=C.createElement("input");t.setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),e.querySelectorAll("[name=d]").length&&v.push("name"+M+"*[*^$|!~]?="),2!==e.querySelectorAll(":enabled").length&&v.push(":enabled",":disabled"),a.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&v.push(":enabled",":disabled"),e.querySelectorAll("*,:x"),v.push(",.*:")})),(d.matchesSelector=K.test(c=a.matches||a.webkitMatchesSelector||a.mozMatchesSelector||a.oMatchesSelector||a.msMatchesSelector))&&ce(function(e){d.disconnectedMatch=c.call(e,"*"),c.call(e,"[s!='']:x"),s.push("!=",$)}),v=v.length&&new RegExp(v.join("|")),s=s.length&&new RegExp(s.join("|")),t=K.test(a.compareDocumentPosition),y=t||K.test(a.contains)?function(e,t){var n=9===e.nodeType?e.documentElement:e,r=t&&t.parentNode;return e===r||!(!r||1!==r.nodeType||!(n.contains?n.contains(r):e.compareDocumentPosition&&16&e.compareDocumentPosition(r)))}:function(e,t){if(t)while(t=t.parentNode)if(t===e)return!0;return!1},D=t?function(e,t){if(e===t)return l=!0,0;var n=!e.compareDocumentPosition-!t.compareDocumentPosition;return n||(1&(n=(e.ownerDocument||e)===(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!d.sortDetached&&t.compareDocumentPosition(e)===n?e===C||e.ownerDocument===m&&y(m,e)?-1:t===C||t.ownerDocument===m&&y(m,t)?1:u?P(u,e)-P(u,t):0:4&n?-1:1)}:function(e,t){if(e===t)return l=!0,0;var n,r=0,i=e.parentNode,o=t.parentNode,a=[e],s=[t];if(!i||!o)return e===C?-1:t===C?1:i?-1:o?1:u?P(u,e)-P(u,t):0;if(i===o)return pe(e,t);n=e;while(n=n.parentNode)a.unshift(n);n=t;while(n=n.parentNode)s.unshift(n);while(a[r]===s[r])r++;return r?pe(a[r],s[r]):a[r]===m?-1:s[r]===m?1:0}),C},se.matches=function(e,t){return se(e,null,null,t)},se.matchesSelector=function(e,t){if((e.ownerDocument||e)!==C&&T(e),d.matchesSelector&&E&&!A[t+" "]&&(!s||!s.test(t))&&(!v||!v.test(t)))try{var 
n=c.call(e,t);if(n||d.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(e){A(t,!0)}return 0<se(t,C,null,[e]).length},se.contains=function(e,t){return(e.ownerDocument||e)!==C&&T(e),y(e,t)},se.attr=function(e,t){(e.ownerDocument||e)!==C&&T(e);var n=b.attrHandle[t.toLowerCase()],r=n&&j.call(b.attrHandle,t.toLowerCase())?n(e,t,!E):void 0;return void 0!==r?r:d.attributes||!E?e.getAttribute(t):(r=e.getAttributeNode(t))&&r.specified?r.value:null},se.escape=function(e){return(e+"").replace(re,ie)},se.error=function(e){throw new Error("Syntax error, unrecognized expression: "+e)},se.uniqueSort=function(e){var t,n=[],r=0,i=0;if(l=!d.detectDuplicates,u=!d.sortStable&&e.slice(0),e.sort(D),l){while(t=e[i++])t===e[i]&&(r=n.push(i));while(r--)e.splice(n[r],1)}return u=null,e},o=se.getText=function(e){var t,n="",r=0,i=e.nodeType;if(i){if(1===i||9===i||11===i){if("string"==typeof e.textContent)return e.textContent;for(e=e.firstChild;e;e=e.nextSibling)n+=o(e)}else if(3===i||4===i)return e.nodeValue}else while(t=e[r++])n+=o(t);return n},(b=se.selectors={cacheLength:50,createPseudo:le,match:G,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(te,ne),e[3]=(e[3]||e[4]||e[5]||"").replace(te,ne),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||se.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&se.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return G.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&X.test(n)&&(t=h(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(te,ne).toLowerCase();return"*"===e?function(){return!0}:function(e){return e.nodeName&&e.nodeName.toLowerCase()===t}},CLASS:function(e){var t=p[e+" "];return t||(t=new RegExp("(^|"+M+")"+e+"("+M+"|$)"))&&p(e,function(e){return t.test("string"==typeof e.className&&e.className||"undefined"!=typeof e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(n,r,i){return function(e){var t=se.attr(e,n);return null==t?"!="===r:!r||(t+="","="===r?t===i:"!="===r?t!==i:"^="===r?i&&0===t.indexOf(i):"*="===r?i&&-1<t.indexOf(i):"$="===r?i&&t.slice(-i.length)===i:"~="===r?-1<(" "+t.replace(F," ")+" ").indexOf(i):"|="===r&&(t===i||t.slice(0,i.length+1)===i+"-"))}},CHILD:function(h,e,t,g,v){var y="nth"!==h.slice(0,3),m="last"!==h.slice(-4),x="of-type"===e;return 1===g&&0===v?function(e){return!!e.parentNode}:function(e,t,n){var r,i,o,a,s,u,l=y!==m?"nextSibling":"previousSibling",c=e.parentNode,f=x&&e.nodeName.toLowerCase(),p=!n&&!x,d=!1;if(c){if(y){while(l){a=e;while(a=a[l])if(x?a.nodeName.toLowerCase()===f:1===a.nodeType)return!1;u=l="only"===h&&!u&&"nextSibling"}return!0}if(u=[m?c.firstChild:c.lastChild],m&&p){d=(s=(r=(i=(o=(a=c)[k]||(a[k]={}))[a.uniqueID]||(o[a.uniqueID]={}))[h]||[])[0]===S&&r[1])&&r[2],a=s&&c.childNodes[s];while(a=++s&&a&&a[l]||(d=s=0)||u.pop())if(1===a.nodeType&&++d&&a===e){i[h]=[S,s,d];break}}else 
if(p&&(d=s=(r=(i=(o=(a=e)[k]||(a[k]={}))[a.uniqueID]||(o[a.uniqueID]={}))[h]||[])[0]===S&&r[1]),!1===d)while(a=++s&&a&&a[l]||(d=s=0)||u.pop())if((x?a.nodeName.toLowerCase()===f:1===a.nodeType)&&++d&&(p&&((i=(o=a[k]||(a[k]={}))[a.uniqueID]||(o[a.uniqueID]={}))[h]=[S,d]),a===e))break;return(d-=v)===g||d%g==0&&0<=d/g}}},PSEUDO:function(e,o){var t,a=b.pseudos[e]||b.setFilters[e.toLowerCase()]||se.error("unsupported pseudo: "+e);return a[k]?a(o):1<a.length?(t=[e,e,"",o],b.setFilters.hasOwnProperty(e.toLowerCase())?le(function(e,t){var n,r=a(e,o),i=r.length;while(i--)e[n=P(e,r[i])]=!(t[n]=r[i])}):function(e){return a(e,0,t)}):a}},pseudos:{not:le(function(e){var r=[],i=[],s=f(e.replace(B,"$1"));return s[k]?le(function(e,t,n,r){var i,o=s(e,null,r,[]),a=e.length;while(a--)(i=o[a])&&(e[a]=!(t[a]=i))}):function(e,t,n){return r[0]=e,s(r,null,n,i),r[0]=null,!i.pop()}}),has:le(function(t){return function(e){return 0<se(t,e).length}}),contains:le(function(t){return t=t.replace(te,ne),function(e){return-1<(e.textContent||o(e)).indexOf(t)}}),lang:le(function(n){return V.test(n||"")||se.error("unsupported lang: "+n),n=n.replace(te,ne).toLowerCase(),function(e){var t;do{if(t=E?e.lang:e.getAttribute("xml:lang")||e.getAttribute("lang"))return(t=t.toLowerCase())===n||0===t.indexOf(n+"-")}while((e=e.parentNode)&&1===e.nodeType);return!1}}),target:function(e){var t=n.location&&n.location.hash;return t&&t.slice(1)===e.id},root:function(e){return e===a},focus:function(e){return e===C.activeElement&&(!C.hasFocus||C.hasFocus())&&!!(e.type||e.href||~e.tabIndex)},enabled:ge(!1),disabled:ge(!0),checked:function(e){var t=e.nodeName.toLowerCase();return"input"===t&&!!e.checked||"option"===t&&!!e.selected},selected:function(e){return e.parentNode&&e.parentNode.selectedIndex,!0===e.selected},empty:function(e){for(e=e.firstChild;e;e=e.nextSibling)if(e.nodeType<6)return!1;return!0},parent:function(e){return!b.pseudos.empty(e)},header:function(e){return J.test(e.nodeName)},input:function(e){return Q.test(e.nodeName)},button:function(e){var t=e.nodeName.toLowerCase();return"input"===t&&"button"===e.type||"button"===t},text:function(e){var t;return"input"===e.nodeName.toLowerCase()&&"text"===e.type&&(null==(t=e.getAttribute("type"))||"text"===t.toLowerCase())},first:ve(function(){return[0]}),last:ve(function(e,t){return[t-1]}),eq:ve(function(e,t,n){return[n<0?n+t:n]}),even:ve(function(e,t){for(var n=0;n<t;n+=2)e.push(n);return e}),odd:ve(function(e,t){for(var n=1;n<t;n+=2)e.push(n);return e}),lt:ve(function(e,t,n){for(var r=n<0?n+t:t<n?t:n;0<=--r;)e.push(r);return e}),gt:ve(function(e,t,n){for(var r=n<0?n+t:n;++r<t;)e.push(r);return e})}}).pseudos.nth=b.pseudos.eq,{radio:!0,checkbox:!0,file:!0,password:!0,image:!0})b.pseudos[e]=de(e);for(e in{submit:!0,reset:!0})b.pseudos[e]=he(e);function me(){}function xe(e){for(var t=0,n=e.length,r="";t<n;t++)r+=e[t].value;return r}function be(s,e,t){var u=e.dir,l=e.next,c=l||u,f=t&&"parentNode"===c,p=r++;return e.first?function(e,t,n){while(e=e[u])if(1===e.nodeType||f)return s(e,t,n);return!1}:function(e,t,n){var r,i,o,a=[S,p];if(n){while(e=e[u])if((1===e.nodeType||f)&&s(e,t,n))return!0}else while(e=e[u])if(1===e.nodeType||f)if(i=(o=e[k]||(e[k]={}))[e.uniqueID]||(o[e.uniqueID]={}),l&&l===e.nodeName.toLowerCase())e=e[u]||e;else{if((r=i[c])&&r[0]===S&&r[1]===p)return a[2]=r[2];if((i[c]=a)[2]=s(e,t,n))return!0}return!1}}function we(i){return 1<i.length?function(e,t,n){var r=i.length;while(r--)if(!i[r](e,t,n))return!1;return!0}:i[0]}function Te(e,t,n,r,i){for(var 
o,a=[],s=0,u=e.length,l=null!=t;s<u;s++)(o=e[s])&&(n&&!n(o,r,i)||(a.push(o),l&&t.push(s)));return a}function Ce(d,h,g,v,y,e){return v&&!v[k]&&(v=Ce(v)),y&&!y[k]&&(y=Ce(y,e)),le(function(e,t,n,r){var i,o,a,s=[],u=[],l=t.length,c=e||function(e,t,n){for(var r=0,i=t.length;r<i;r++)se(e,t[r],n);return n}(h||"*",n.nodeType?[n]:n,[]),f=!d||!e&&h?c:Te(c,s,d,n,r),p=g?y||(e?d:l||v)?[]:t:f;if(g&&g(f,p,n,r),v){i=Te(p,u),v(i,[],n,r),o=i.length;while(o--)(a=i[o])&&(p[u[o]]=!(f[u[o]]=a))}if(e){if(y||d){if(y){i=[],o=p.length;while(o--)(a=p[o])&&i.push(f[o]=a);y(null,p=[],i,r)}o=p.length;while(o--)(a=p[o])&&-1<(i=y?P(e,a):s[o])&&(e[i]=!(t[i]=a))}}else p=Te(p===t?p.splice(l,p.length):p),y?y(null,t,p,r):H.apply(t,p)})}function Ee(e){for(var i,t,n,r=e.length,o=b.relative[e[0].type],a=o||b.relative[" "],s=o?1:0,u=be(function(e){return e===i},a,!0),l=be(function(e){return-1<P(i,e)},a,!0),c=[function(e,t,n){var r=!o&&(n||t!==w)||((i=t).nodeType?u(e,t,n):l(e,t,n));return i=null,r}];s<r;s++)if(t=b.relative[e[s].type])c=[be(we(c),t)];else{if((t=b.filter[e[s].type].apply(null,e[s].matches))[k]){for(n=++s;n<r;n++)if(b.relative[e[n].type])break;return Ce(1<s&&we(c),1<s&&xe(e.slice(0,s-1).concat({value:" "===e[s-2].type?"*":""})).replace(B,"$1"),t,s<n&&Ee(e.slice(s,n)),n<r&&Ee(e=e.slice(n)),n<r&&xe(e))}c.push(t)}return we(c)}return me.prototype=b.filters=b.pseudos,b.setFilters=new me,h=se.tokenize=function(e,t){var n,r,i,o,a,s,u,l=x[e+" "];if(l)return t?0:l.slice(0);a=e,s=[],u=b.preFilter;while(a){for(o in n&&!(r=_.exec(a))||(r&&(a=a.slice(r[0].length)||a),s.push(i=[])),n=!1,(r=z.exec(a))&&(n=r.shift(),i.push({value:n,type:r[0].replace(B," ")}),a=a.slice(n.length)),b.filter)!(r=G[o].exec(a))||u[o]&&!(r=u[o](r))||(n=r.shift(),i.push({value:n,type:o,matches:r}),a=a.slice(n.length));if(!n)break}return t?a.length:a?se.error(e):x(e,s).slice(0)},f=se.compile=function(e,t){var n,v,y,m,x,r,i=[],o=[],a=N[e+" "];if(!a){t||(t=h(e)),n=t.length;while(n--)(a=Ee(t[n]))[k]?i.push(a):o.push(a);(a=N(e,(v=o,m=0<(y=i).length,x=0<v.length,r=function(e,t,n,r,i){var o,a,s,u=0,l="0",c=e&&[],f=[],p=w,d=e||x&&b.find.TAG("*",i),h=S+=null==p?1:Math.random()||.1,g=d.length;for(i&&(w=t===C||t||i);l!==g&&null!=(o=d[l]);l++){if(x&&o){a=0,t||o.ownerDocument===C||(T(o),n=!E);while(s=v[a++])if(s(o,t||C,n)){r.push(o);break}i&&(S=h)}m&&((o=!s&&o)&&u--,e&&c.push(o))}if(u+=l,m&&l!==u){a=0;while(s=y[a++])s(c,f,t,n);if(e){if(0<u)while(l--)c[l]||f[l]||(f[l]=q.call(r));f=Te(f)}H.apply(r,f),i&&!e&&0<f.length&&1<u+y.length&&se.uniqueSort(r)}return i&&(S=h,w=p),c},m?le(r):r))).selector=e}return a},g=se.select=function(e,t,n,r){var i,o,a,s,u,l="function"==typeof e&&e,c=!r&&h(e=l.selector||e);if(n=n||[],1===c.length){if(2<(o=c[0]=c[0].slice(0)).length&&"ID"===(a=o[0]).type&&9===t.nodeType&&E&&b.relative[o[1].type]){if(!(t=(b.find.ID(a.matches[0].replace(te,ne),t)||[])[0]))return n;l&&(t=t.parentNode),e=e.slice(o.shift().value.length)}i=G.needsContext.test(e)?0:o.length;while(i--){if(a=o[i],b.relative[s=a.type])break;if((u=b.find[s])&&(r=u(a.matches[0].replace(te,ne),ee.test(o[0].type)&&ye(t.parentNode)||t))){if(o.splice(i,1),!(e=r.length&&xe(o)))return H.apply(n,r),n;break}}}return(l||f(e,c))(r,t,!E,n,!t||ee.test(e)&&ye(t.parentNode)||t),n},d.sortStable=k.split("").sort(D).join("")===k,d.detectDuplicates=!!l,T(),d.sortDetached=ce(function(e){return 1&e.compareDocumentPosition(C.createElement("fieldset"))}),ce(function(e){return e.innerHTML="<a href='#'></a>","#"===e.firstChild.getAttribute("href")})||fe("type|href|height|width",function(e,t,n){if(!n)return 
e.getAttribute(t,"type"===t.toLowerCase()?1:2)}),d.attributes&&ce(function(e){return e.innerHTML="<input/>",e.firstChild.setAttribute("value",""),""===e.firstChild.getAttribute("value")})||fe("value",function(e,t,n){if(!n&&"input"===e.nodeName.toLowerCase())return e.defaultValue}),ce(function(e){return null==e.getAttribute("disabled")})||fe(R,function(e,t,n){var r;if(!n)return!0===e[t]?t.toLowerCase():(r=e.getAttributeNode(t))&&r.specified?r.value:null}),se}(C);k.find=h,k.expr=h.selectors,k.expr[":"]=k.expr.pseudos,k.uniqueSort=k.unique=h.uniqueSort,k.text=h.getText,k.isXMLDoc=h.isXML,k.contains=h.contains,k.escapeSelector=h.escape;var T=function(e,t,n){var r=[],i=void 0!==n;while((e=e[t])&&9!==e.nodeType)if(1===e.nodeType){if(i&&k(e).is(n))break;r.push(e)}return r},S=function(e,t){for(var n=[];e;e=e.nextSibling)1===e.nodeType&&e!==t&&n.push(e);return n},N=k.expr.match.needsContext;function A(e,t){return e.nodeName&&e.nodeName.toLowerCase()===t.toLowerCase()}var D=/^<([a-z][^\/\0>:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function j(e,n,r){return m(n)?k.grep(e,function(e,t){return!!n.call(e,t,e)!==r}):n.nodeType?k.grep(e,function(e){return e===n!==r}):"string"!=typeof n?k.grep(e,function(e){return-1<i.call(n,e)!==r}):k.filter(n,e,r)}k.filter=function(e,t,n){var r=t[0];return n&&(e=":not("+e+")"),1===t.length&&1===r.nodeType?k.find.matchesSelector(r,e)?[r]:[]:k.find.matches(e,k.grep(t,function(e){return 1===e.nodeType}))},k.fn.extend({find:function(e){var t,n,r=this.length,i=this;if("string"!=typeof e)return this.pushStack(k(e).filter(function(){for(t=0;t<r;t++)if(k.contains(i[t],this))return!0}));for(n=this.pushStack([]),t=0;t<r;t++)k.find(e,i[t],n);return 1<r?k.uniqueSort(n):n},filter:function(e){return this.pushStack(j(this,e||[],!1))},not:function(e){return this.pushStack(j(this,e||[],!0))},is:function(e){return!!j(this,"string"==typeof e&&N.test(e)?k(e):e||[],!1).length}});var q,L=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]+))$/;(k.fn.init=function(e,t,n){var r,i;if(!e)return this;if(n=n||q,"string"==typeof e){if(!(r="<"===e[0]&&">"===e[e.length-1]&&3<=e.length?[null,e,null]:L.exec(e))||!r[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(r[1]){if(t=t instanceof k?t[0]:t,k.merge(this,k.parseHTML(r[1],t&&t.nodeType?t.ownerDocument||t:E,!0)),D.test(r[1])&&k.isPlainObject(t))for(r in t)m(this[r])?this[r](t[r]):this.attr(r,t[r]);return this}return(i=E.getElementById(r[2]))&&(this[0]=i,this.length=1),this}return e.nodeType?(this[0]=e,this.length=1,this):m(e)?void 0!==n.ready?n.ready(e):e(k):k.makeArray(e,this)}).prototype=k.fn,q=k(E);var H=/^(?:parents|prev(?:Until|All))/,O={children:!0,contents:!0,next:!0,prev:!0};function P(e,t){while((e=e[t])&&1!==e.nodeType);return e}k.fn.extend({has:function(e){var t=k(e,this),n=t.length;return this.filter(function(){for(var e=0;e<n;e++)if(k.contains(this,t[e]))return!0})},closest:function(e,t){var n,r=0,i=this.length,o=[],a="string"!=typeof e&&k(e);if(!N.test(e))for(;r<i;r++)for(n=this[r];n&&n!==t;n=n.parentNode)if(n.nodeType<11&&(a?-1<a.index(n):1===n.nodeType&&k.find.matchesSelector(n,e))){o.push(n);break}return this.pushStack(1<o.length?k.uniqueSort(o):o)},index:function(e){return e?"string"==typeof e?i.call(k(e),this[0]):i.call(this,e.jquery?e[0]:e):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(e,t){return this.pushStack(k.uniqueSort(k.merge(this.get(),k(e,t))))},addBack:function(e){return this.add(null==e?this.prevObject:this.prevObject.filter(e))}}),k.each({parent:function(e){var 
t=e.parentNode;return t&&11!==t.nodeType?t:null},parents:function(e){return T(e,"parentNode")},parentsUntil:function(e,t,n){return T(e,"parentNode",n)},next:function(e){return P(e,"nextSibling")},prev:function(e){return P(e,"previousSibling")},nextAll:function(e){return T(e,"nextSibling")},prevAll:function(e){return T(e,"previousSibling")},nextUntil:function(e,t,n){return T(e,"nextSibling",n)},prevUntil:function(e,t,n){return T(e,"previousSibling",n)},siblings:function(e){return S((e.parentNode||{}).firstChild,e)},children:function(e){return S(e.firstChild)},contents:function(e){return"undefined"!=typeof e.contentDocument?e.contentDocument:(A(e,"template")&&(e=e.content||e),k.merge([],e.childNodes))}},function(r,i){k.fn[r]=function(e,t){var n=k.map(this,i,e);return"Until"!==r.slice(-5)&&(t=e),t&&"string"==typeof t&&(n=k.filter(t,n)),1<this.length&&(O[r]||k.uniqueSort(n),H.test(r)&&n.reverse()),this.pushStack(n)}});var R=/[^\x20\t\r\n\f]+/g;function M(e){return e}function I(e){throw e}function W(e,t,n,r){var i;try{e&&m(i=e.promise)?i.call(e).done(t).fail(n):e&&m(i=e.then)?i.call(e,t,n):t.apply(void 0,[e].slice(r))}catch(e){n.apply(void 0,[e])}}k.Callbacks=function(r){var e,n;r="string"==typeof r?(e=r,n={},k.each(e.match(R)||[],function(e,t){n[t]=!0}),n):k.extend({},r);var i,t,o,a,s=[],u=[],l=-1,c=function(){for(a=a||r.once,o=i=!0;u.length;l=-1){t=u.shift();while(++l<s.length)!1===s[l].apply(t[0],t[1])&&r.stopOnFalse&&(l=s.length,t=!1)}r.memory||(t=!1),i=!1,a&&(s=t?[]:"")},f={add:function(){return s&&(t&&!i&&(l=s.length-1,u.push(t)),function n(e){k.each(e,function(e,t){m(t)?r.unique&&f.has(t)||s.push(t):t&&t.length&&"string"!==w(t)&&n(t)})}(arguments),t&&!i&&c()),this},remove:function(){return k.each(arguments,function(e,t){var n;while(-1<(n=k.inArray(t,s,n)))s.splice(n,1),n<=l&&l--}),this},has:function(e){return e?-1<k.inArray(e,s):0<s.length},empty:function(){return s&&(s=[]),this},disable:function(){return a=u=[],s=t="",this},disabled:function(){return!s},lock:function(){return a=u=[],t||i||(s=t=""),this},locked:function(){return!!a},fireWith:function(e,t){return a||(t=[e,(t=t||[]).slice?t.slice():t],u.push(t),i||c()),this},fire:function(){return f.fireWith(this,arguments),this},fired:function(){return!!o}};return f},k.extend({Deferred:function(e){var o=[["notify","progress",k.Callbacks("memory"),k.Callbacks("memory"),2],["resolve","done",k.Callbacks("once memory"),k.Callbacks("once memory"),0,"resolved"],["reject","fail",k.Callbacks("once memory"),k.Callbacks("once memory"),1,"rejected"]],i="pending",a={state:function(){return i},always:function(){return s.done(arguments).fail(arguments),this},"catch":function(e){return a.then(null,e)},pipe:function(){var i=arguments;return k.Deferred(function(r){k.each(o,function(e,t){var n=m(i[t[4]])&&i[t[4]];s[t[1]](function(){var e=n&&n.apply(this,arguments);e&&m(e.promise)?e.promise().progress(r.notify).done(r.resolve).fail(r.reject):r[t[0]+"With"](this,n?[e]:arguments)})}),i=null}).promise()},then:function(t,n,r){var u=0;function l(i,o,a,s){return function(){var n=this,r=arguments,e=function(){var e,t;if(!(i<u)){if((e=a.apply(n,r))===o.promise())throw new TypeError("Thenable self-resolution");t=e&&("object"==typeof e||"function"==typeof e)&&e.then,m(t)?s?t.call(e,l(u,o,M,s),l(u,o,I,s)):(u++,t.call(e,l(u,o,M,s),l(u,o,I,s),l(u,o,M,o.notifyWith))):(a!==M&&(n=void 0,r=[e]),(s||o.resolveWith)(n,r))}},t=s?e:function(){try{e()}catch(e){k.Deferred.exceptionHook&&k.Deferred.exceptionHook(e,t.stackTrace),u<=i+1&&(a!==I&&(n=void 
0,r=[e]),o.rejectWith(n,r))}};i?t():(k.Deferred.getStackHook&&(t.stackTrace=k.Deferred.getStackHook()),C.setTimeout(t))}}return k.Deferred(function(e){o[0][3].add(l(0,e,m(r)?r:M,e.notifyWith)),o[1][3].add(l(0,e,m(t)?t:M)),o[2][3].add(l(0,e,m(n)?n:I))}).promise()},promise:function(e){return null!=e?k.extend(e,a):a}},s={};return k.each(o,function(e,t){var n=t[2],r=t[5];a[t[1]]=n.add,r&&n.add(function(){i=r},o[3-e][2].disable,o[3-e][3].disable,o[0][2].lock,o[0][3].lock),n.add(t[3].fire),s[t[0]]=function(){return s[t[0]+"With"](this===s?void 0:this,arguments),this},s[t[0]+"With"]=n.fireWith}),a.promise(s),e&&e.call(s,s),s},when:function(e){var n=arguments.length,t=n,r=Array(t),i=s.call(arguments),o=k.Deferred(),a=function(t){return function(e){r[t]=this,i[t]=1<arguments.length?s.call(arguments):e,--n||o.resolveWith(r,i)}};if(n<=1&&(W(e,o.done(a(t)).resolve,o.reject,!n),"pending"===o.state()||m(i[t]&&i[t].then)))return o.then();while(t--)W(i[t],a(t),o.reject);return o.promise()}});var $=/^(Eval|Internal|Range|Reference|Syntax|Type|URI)Error$/;k.Deferred.exceptionHook=function(e,t){C.console&&C.console.warn&&e&&$.test(e.name)&&C.console.warn("jQuery.Deferred exception: "+e.message,e.stack,t)},k.readyException=function(e){C.setTimeout(function(){throw e})};var F=k.Deferred();function B(){E.removeEventListener("DOMContentLoaded",B),C.removeEventListener("load",B),k.ready()}k.fn.ready=function(e){return F.then(e)["catch"](function(e){k.readyException(e)}),this},k.extend({isReady:!1,readyWait:1,ready:function(e){(!0===e?--k.readyWait:k.isReady)||(k.isReady=!0)!==e&&0<--k.readyWait||F.resolveWith(E,[k])}}),k.ready.then=F.then,"complete"===E.readyState||"loading"!==E.readyState&&!E.documentElement.doScroll?C.setTimeout(k.ready):(E.addEventListener("DOMContentLoaded",B),C.addEventListener("load",B));var _=function(e,t,n,r,i,o,a){var s=0,u=e.length,l=null==n;if("object"===w(n))for(s in i=!0,n)_(e,t,s,n[s],!0,o,a);else if(void 0!==r&&(i=!0,m(r)||(a=!0),l&&(a?(t.call(e,r),t=null):(l=t,t=function(e,t,n){return l.call(k(e),n)})),t))for(;s<u;s++)t(e[s],n,a?r:r.call(e[s],s,t(e[s],n)));return i?e:l?t.call(e):u?t(e[0],n):o},z=/^-ms-/,U=/-([a-z])/g;function X(e,t){return t.toUpperCase()}function V(e){return e.replace(z,"ms-").replace(U,X)}var G=function(e){return 1===e.nodeType||9===e.nodeType||!+e.nodeType};function Y(){this.expando=k.expando+Y.uid++}Y.uid=1,Y.prototype={cache:function(e){var t=e[this.expando];return t||(t={},G(e)&&(e.nodeType?e[this.expando]=t:Object.defineProperty(e,this.expando,{value:t,configurable:!0}))),t},set:function(e,t,n){var r,i=this.cache(e);if("string"==typeof t)i[V(t)]=n;else for(r in t)i[V(r)]=t[r];return i},get:function(e,t){return void 0===t?this.cache(e):e[this.expando]&&e[this.expando][V(t)]},access:function(e,t,n){return void 0===t||t&&"string"==typeof t&&void 0===n?this.get(e,t):(this.set(e,t,n),void 0!==n?n:t)},remove:function(e,t){var n,r=e[this.expando];if(void 0!==r){if(void 0!==t){n=(t=Array.isArray(t)?t.map(V):(t=V(t))in r?[t]:t.match(R)||[]).length;while(n--)delete r[t[n]]}(void 0===t||k.isEmptyObject(r))&&(e.nodeType?e[this.expando]=void 0:delete e[this.expando])}},hasData:function(e){var t=e[this.expando];return void 0!==t&&!k.isEmptyObject(t)}};var Q=new Y,J=new Y,K=/^(?:\{[\w\W]*\}|\[[\w\W]*\])$/,Z=/[A-Z]/g;function ee(e,t,n){var r,i;if(void 
0===n&&1===e.nodeType)if(r="data-"+t.replace(Z,"-$&").toLowerCase(),"string"==typeof(n=e.getAttribute(r))){try{n="true"===(i=n)||"false"!==i&&("null"===i?null:i===+i+""?+i:K.test(i)?JSON.parse(i):i)}catch(e){}J.set(e,t,n)}else n=void 0;return n}k.extend({hasData:function(e){return J.hasData(e)||Q.hasData(e)},data:function(e,t,n){return J.access(e,t,n)},removeData:function(e,t){J.remove(e,t)},_data:function(e,t,n){return Q.access(e,t,n)},_removeData:function(e,t){Q.remove(e,t)}}),k.fn.extend({data:function(n,e){var t,r,i,o=this[0],a=o&&o.attributes;if(void 0===n){if(this.length&&(i=J.get(o),1===o.nodeType&&!Q.get(o,"hasDataAttrs"))){t=a.length;while(t--)a[t]&&0===(r=a[t].name).indexOf("data-")&&(r=V(r.slice(5)),ee(o,r,i[r]));Q.set(o,"hasDataAttrs",!0)}return i}return"object"==typeof n?this.each(function(){J.set(this,n)}):_(this,function(e){var t;if(o&&void 0===e)return void 0!==(t=J.get(o,n))?t:void 0!==(t=ee(o,n))?t:void 0;this.each(function(){J.set(this,n,e)})},null,e,1<arguments.length,null,!0)},removeData:function(e){return this.each(function(){J.remove(this,e)})}}),k.extend({queue:function(e,t,n){var r;if(e)return t=(t||"fx")+"queue",r=Q.get(e,t),n&&(!r||Array.isArray(n)?r=Q.access(e,t,k.makeArray(n)):r.push(n)),r||[]},dequeue:function(e,t){t=t||"fx";var n=k.queue(e,t),r=n.length,i=n.shift(),o=k._queueHooks(e,t);"inprogress"===i&&(i=n.shift(),r--),i&&("fx"===t&&n.unshift("inprogress"),delete o.stop,i.call(e,function(){k.dequeue(e,t)},o)),!r&&o&&o.empty.fire()},_queueHooks:function(e,t){var n=t+"queueHooks";return Q.get(e,n)||Q.access(e,n,{empty:k.Callbacks("once memory").add(function(){Q.remove(e,[t+"queue",n])})})}}),k.fn.extend({queue:function(t,n){var e=2;return"string"!=typeof t&&(n=t,t="fx",e--),arguments.length<e?k.queue(this[0],t):void 0===n?this:this.each(function(){var e=k.queue(this,t,n);k._queueHooks(this,t),"fx"===t&&"inprogress"!==e[0]&&k.dequeue(this,t)})},dequeue:function(e){return this.each(function(){k.dequeue(this,e)})},clearQueue:function(e){return this.queue(e||"fx",[])},promise:function(e,t){var n,r=1,i=k.Deferred(),o=this,a=this.length,s=function(){--r||i.resolveWith(o,[o])};"string"!=typeof e&&(t=e,e=void 0),e=e||"fx";while(a--)(n=Q.get(o[a],e+"queueHooks"))&&n.empty&&(r++,n.empty.add(s));return s(),i.promise(t)}});var te=/[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/.source,ne=new RegExp("^(?:([+-])=|)("+te+")([a-z%]*)$","i"),re=["Top","Right","Bottom","Left"],ie=E.documentElement,oe=function(e){return k.contains(e.ownerDocument,e)},ae={composed:!0};ie.getRootNode&&(oe=function(e){return k.contains(e.ownerDocument,e)||e.getRootNode(ae)===e.ownerDocument});var se=function(e,t){return"none"===(e=t||e).style.display||""===e.style.display&&oe(e)&&"none"===k.css(e,"display")},ue=function(e,t,n,r){var i,o,a={};for(o in t)a[o]=e.style[o],e.style[o]=t[o];for(o in i=n.apply(e,r||[]),t)e.style[o]=a[o];return i};function le(e,t,n,r){var i,o,a=20,s=r?function(){return r.cur()}:function(){return k.css(e,t,"")},u=s(),l=n&&n[3]||(k.cssNumber[t]?"":"px"),c=e.nodeType&&(k.cssNumber[t]||"px"!==l&&+u)&&ne.exec(k.css(e,t));if(c&&c[3]!==l){u/=2,l=l||c[3],c=+u||1;while(a--)k.style(e,t,c+l),(1-o)*(1-(o=s()/u||.5))<=0&&(a=0),c/=o;c*=2,k.style(e,t,c+l),n=n||[]}return n&&(c=+c||+u||0,i=n[1]?c+(n[1]+1)*n[2]:+n[2],r&&(r.unit=l,r.start=c,r.end=i)),i}var ce={};function fe(e,t){for(var n,r,i,o,a,s,u,l=[],c=0,f=e.length;c<f;c++)(r=e[c]).style&&(n=r.style.display,t?("none"===n&&(l[c]=Q.get(r,"display")||null,l[c]||(r.style.display="")),""===r.style.display&&se(r)&&(l[c]=(u=a=o=void 
0,a=(i=r).ownerDocument,s=i.nodeName,(u=ce[s])||(o=a.body.appendChild(a.createElement(s)),u=k.css(o,"display"),o.parentNode.removeChild(o),"none"===u&&(u="block"),ce[s]=u)))):"none"!==n&&(l[c]="none",Q.set(r,"display",n)));for(c=0;c<f;c++)null!=l[c]&&(e[c].style.display=l[c]);return e}k.fn.extend({show:function(){return fe(this,!0)},hide:function(){return fe(this)},toggle:function(e){return"boolean"==typeof e?e?this.show():this.hide():this.each(function(){se(this)?k(this).show():k(this).hide()})}});var pe=/^(?:checkbox|radio)$/i,de=/<([a-z][^\/\0>\x20\t\r\n\f]*)/i,he=/^$|^module$|\/(?:java|ecma)script/i,ge={option:[1,"<select multiple='multiple'>","</select>"],thead:[1,"<table>","</table>"],col:[2,"<table><colgroup>","</colgroup></table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],_default:[0,"",""]};function ve(e,t){var n;return n="undefined"!=typeof e.getElementsByTagName?e.getElementsByTagName(t||"*"):"undefined"!=typeof e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&A(e,t)?k.merge([e],n):n}function ye(e,t){for(var n=0,r=e.length;n<r;n++)Q.set(e[n],"globalEval",!t||Q.get(t[n],"globalEval"))}ge.optgroup=ge.option,ge.tbody=ge.tfoot=ge.colgroup=ge.caption=ge.thead,ge.th=ge.td;var me,xe,be=/<|&#?\w+;/;function we(e,t,n,r,i){for(var o,a,s,u,l,c,f=t.createDocumentFragment(),p=[],d=0,h=e.length;d<h;d++)if((o=e[d])||0===o)if("object"===w(o))k.merge(p,o.nodeType?[o]:o);else if(be.test(o)){a=a||f.appendChild(t.createElement("div")),s=(de.exec(o)||["",""])[1].toLowerCase(),u=ge[s]||ge._default,a.innerHTML=u[1]+k.htmlPrefilter(o)+u[2],c=u[0];while(c--)a=a.lastChild;k.merge(p,a.childNodes),(a=f.firstChild).textContent=""}else p.push(t.createTextNode(o));f.textContent="",d=0;while(o=p[d++])if(r&&-1<k.inArray(o,r))i&&i.push(o);else if(l=oe(o),a=ve(f.appendChild(o),"script"),l&&ye(a),n){c=0;while(o=a[c++])he.test(o.type||"")&&n.push(o)}return f}me=E.createDocumentFragment().appendChild(E.createElement("div")),(xe=E.createElement("input")).setAttribute("type","radio"),xe.setAttribute("checked","checked"),xe.setAttribute("name","t"),me.appendChild(xe),y.checkClone=me.cloneNode(!0).cloneNode(!0).lastChild.checked,me.innerHTML="<textarea>x</textarea>",y.noCloneChecked=!!me.cloneNode(!0).lastChild.defaultValue;var Te=/^key/,Ce=/^(?:mouse|pointer|contextmenu|drag|drop)|click/,Ee=/^([^.]*)(?:\.(.+)|)/;function ke(){return!0}function Se(){return!1}function Ne(e,t){return e===function(){try{return E.activeElement}catch(e){}}()==("focus"===t)}function Ae(e,t,n,r,i,o){var a,s;if("object"==typeof t){for(s in"string"!=typeof n&&(r=r||n,n=void 0),t)Ae(e,s,n,r,t[s],o);return e}if(null==r&&null==i?(i=n,r=n=void 0):null==i&&("string"==typeof n?(i=r,r=void 0):(i=r,r=n,n=void 0)),!1===i)i=Se;else if(!i)return e;return 1===o&&(a=i,(i=function(e){return k().off(e),a.apply(this,arguments)}).guid=a.guid||(a.guid=k.guid++)),e.each(function(){k.event.add(this,t,i,r,n)})}function De(e,i,o){o?(Q.set(e,i,!1),k.event.add(e,i,{namespace:!1,handler:function(e){var t,n,r=Q.get(this,i);if(1&e.isTrigger&&this[i]){if(r.length)(k.event.special[i]||{}).delegateType&&e.stopPropagation();else if(r=s.call(arguments),Q.set(this,i,r),t=o(this,i),this[i](),r!==(n=Q.get(this,i))||t?Q.set(this,i,!1):n={},r!==n)return e.stopImmediatePropagation(),e.preventDefault(),n.value}else r.length&&(Q.set(this,i,{value:k.event.trigger(k.extend(r[0],k.Event.prototype),r.slice(1),this)}),e.stopImmediatePropagation())}})):void 
0===Q.get(e,i)&&k.event.add(e,i,ke)}k.event={global:{},add:function(t,e,n,r,i){var o,a,s,u,l,c,f,p,d,h,g,v=Q.get(t);if(v){n.handler&&(n=(o=n).handler,i=o.selector),i&&k.find.matchesSelector(ie,i),n.guid||(n.guid=k.guid++),(u=v.events)||(u=v.events={}),(a=v.handle)||(a=v.handle=function(e){return"undefined"!=typeof k&&k.event.triggered!==e.type?k.event.dispatch.apply(t,arguments):void 0}),l=(e=(e||"").match(R)||[""]).length;while(l--)d=g=(s=Ee.exec(e[l])||[])[1],h=(s[2]||"").split(".").sort(),d&&(f=k.event.special[d]||{},d=(i?f.delegateType:f.bindType)||d,f=k.event.special[d]||{},c=k.extend({type:d,origType:g,data:r,handler:n,guid:n.guid,selector:i,needsContext:i&&k.expr.match.needsContext.test(i),namespace:h.join(".")},o),(p=u[d])||((p=u[d]=[]).delegateCount=0,f.setup&&!1!==f.setup.call(t,r,h,a)||t.addEventListener&&t.addEventListener(d,a)),f.add&&(f.add.call(t,c),c.handler.guid||(c.handler.guid=n.guid)),i?p.splice(p.delegateCount++,0,c):p.push(c),k.event.global[d]=!0)}},remove:function(e,t,n,r,i){var o,a,s,u,l,c,f,p,d,h,g,v=Q.hasData(e)&&Q.get(e);if(v&&(u=v.events)){l=(t=(t||"").match(R)||[""]).length;while(l--)if(d=g=(s=Ee.exec(t[l])||[])[1],h=(s[2]||"").split(".").sort(),d){f=k.event.special[d]||{},p=u[d=(r?f.delegateType:f.bindType)||d]||[],s=s[2]&&new RegExp("(^|\\.)"+h.join("\\.(?:.*\\.|)")+"(\\.|$)"),a=o=p.length;while(o--)c=p[o],!i&&g!==c.origType||n&&n.guid!==c.guid||s&&!s.test(c.namespace)||r&&r!==c.selector&&("**"!==r||!c.selector)||(p.splice(o,1),c.selector&&p.delegateCount--,f.remove&&f.remove.call(e,c));a&&!p.length&&(f.teardown&&!1!==f.teardown.call(e,h,v.handle)||k.removeEvent(e,d,v.handle),delete u[d])}else for(d in u)k.event.remove(e,d+t[l],n,r,!0);k.isEmptyObject(u)&&Q.remove(e,"handle events")}},dispatch:function(e){var t,n,r,i,o,a,s=k.event.fix(e),u=new Array(arguments.length),l=(Q.get(this,"events")||{})[s.type]||[],c=k.event.special[s.type]||{};for(u[0]=s,t=1;t<arguments.length;t++)u[t]=arguments[t];if(s.delegateTarget=this,!c.preDispatch||!1!==c.preDispatch.call(this,s)){a=k.event.handlers.call(this,s,l),t=0;while((i=a[t++])&&!s.isPropagationStopped()){s.currentTarget=i.elem,n=0;while((o=i.handlers[n++])&&!s.isImmediatePropagationStopped())s.rnamespace&&!1!==o.namespace&&!s.rnamespace.test(o.namespace)||(s.handleObj=o,s.data=o.data,void 0!==(r=((k.event.special[o.origType]||{}).handle||o.handler).apply(i.elem,u))&&!1===(s.result=r)&&(s.preventDefault(),s.stopPropagation()))}return c.postDispatch&&c.postDispatch.call(this,s),s.result}},handlers:function(e,t){var n,r,i,o,a,s=[],u=t.delegateCount,l=e.target;if(u&&l.nodeType&&!("click"===e.type&&1<=e.button))for(;l!==this;l=l.parentNode||this)if(1===l.nodeType&&("click"!==e.type||!0!==l.disabled)){for(o=[],a={},n=0;n<u;n++)void 0===a[i=(r=t[n]).selector+" "]&&(a[i]=r.needsContext?-1<k(i,this).index(l):k.find(i,this,null,[l]).length),a[i]&&o.push(r);o.length&&s.push({elem:l,handlers:o})}return l=this,u<t.length&&s.push({elem:l,handlers:t.slice(u)}),s},addProp:function(t,e){Object.defineProperty(k.Event.prototype,t,{enumerable:!0,configurable:!0,get:m(e)?function(){if(this.originalEvent)return e(this.originalEvent)}:function(){if(this.originalEvent)return this.originalEvent[t]},set:function(e){Object.defineProperty(this,t,{enumerable:!0,configurable:!0,writable:!0,value:e})}})},fix:function(e){return e[k.expando]?e:new k.Event(e)},special:{load:{noBubble:!0},click:{setup:function(e){var t=this||e;return pe.test(t.type)&&t.click&&A(t,"input")&&De(t,"click",ke),!1},trigger:function(e){var t=this||e;return 
pe.test(t.type)&&t.click&&A(t,"input")&&De(t,"click"),!0},_default:function(e){var t=e.target;return pe.test(t.type)&&t.click&&A(t,"input")&&Q.get(t,"click")||A(t,"a")}},beforeunload:{postDispatch:function(e){void 0!==e.result&&e.originalEvent&&(e.originalEvent.returnValue=e.result)}}}},k.removeEvent=function(e,t,n){e.removeEventListener&&e.removeEventListener(t,n)},k.Event=function(e,t){if(!(this instanceof k.Event))return new k.Event(e,t);e&&e.type?(this.originalEvent=e,this.type=e.type,this.isDefaultPrevented=e.defaultPrevented||void 0===e.defaultPrevented&&!1===e.returnValue?ke:Se,this.target=e.target&&3===e.target.nodeType?e.target.parentNode:e.target,this.currentTarget=e.currentTarget,this.relatedTarget=e.relatedTarget):this.type=e,t&&k.extend(this,t),this.timeStamp=e&&e.timeStamp||Date.now(),this[k.expando]=!0},k.Event.prototype={constructor:k.Event,isDefaultPrevented:Se,isPropagationStopped:Se,isImmediatePropagationStopped:Se,isSimulated:!1,preventDefault:function(){var e=this.originalEvent;this.isDefaultPrevented=ke,e&&!this.isSimulated&&e.preventDefault()},stopPropagation:function(){var e=this.originalEvent;this.isPropagationStopped=ke,e&&!this.isSimulated&&e.stopPropagation()},stopImmediatePropagation:function(){var e=this.originalEvent;this.isImmediatePropagationStopped=ke,e&&!this.isSimulated&&e.stopImmediatePropagation(),this.stopPropagation()}},k.each({altKey:!0,bubbles:!0,cancelable:!0,changedTouches:!0,ctrlKey:!0,detail:!0,eventPhase:!0,metaKey:!0,pageX:!0,pageY:!0,shiftKey:!0,view:!0,"char":!0,code:!0,charCode:!0,key:!0,keyCode:!0,button:!0,buttons:!0,clientX:!0,clientY:!0,offsetX:!0,offsetY:!0,pointerId:!0,pointerType:!0,screenX:!0,screenY:!0,targetTouches:!0,toElement:!0,touches:!0,which:function(e){var t=e.button;return null==e.which&&Te.test(e.type)?null!=e.charCode?e.charCode:e.keyCode:!e.which&&void 0!==t&&Ce.test(e.type)?1&t?1:2&t?3:4&t?2:0:e.which}},k.event.addProp),k.each({focus:"focusin",blur:"focusout"},function(e,t){k.event.special[e]={setup:function(){return De(this,e,Ne),!1},trigger:function(){return De(this,e),!0},delegateType:t}}),k.each({mouseenter:"mouseover",mouseleave:"mouseout",pointerenter:"pointerover",pointerleave:"pointerout"},function(e,i){k.event.special[e]={delegateType:i,bindType:i,handle:function(e){var t,n=e.relatedTarget,r=e.handleObj;return n&&(n===this||k.contains(this,n))||(e.type=r.origType,t=r.handler.apply(this,arguments),e.type=i),t}}}),k.fn.extend({on:function(e,t,n,r){return Ae(this,e,t,n,r)},one:function(e,t,n,r){return Ae(this,e,t,n,r,1)},off:function(e,t,n){var r,i;if(e&&e.preventDefault&&e.handleObj)return r=e.handleObj,k(e.delegateTarget).off(r.namespace?r.origType+"."+r.namespace:r.origType,r.selector,r.handler),this;if("object"==typeof e){for(i in e)this.off(i,t,e[i]);return this}return!1!==t&&"function"!=typeof t||(n=t,t=void 0),!1===n&&(n=Se),this.each(function(){k.event.remove(this,e,n,t)})}});var je=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([a-z][^\/\0>\x20\t\r\n\f]*)[^>]*)\/>/gi,qe=/<script|<style|<link/i,Le=/checked\s*(?:[^=]|=\s*.checked.)/i,He=/^\s*<!(?:\[CDATA\[|--)|(?:\]\]|--)>\s*$/g;function Oe(e,t){return A(e,"table")&&A(11!==t.nodeType?t:t.firstChild,"tr")&&k(e).children("tbody")[0]||e}function Pe(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function Re(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function Me(e,t){var n,r,i,o,a,s,u,l;if(1===t.nodeType){if(Q.hasData(e)&&(o=Q.access(e),a=Q.set(t,o),l=o.events))for(i in delete 
a.handle,a.events={},l)for(n=0,r=l[i].length;n<r;n++)k.event.add(t,i,l[i][n]);J.hasData(e)&&(s=J.access(e),u=k.extend({},s),J.set(t,u))}}function Ie(n,r,i,o){r=g.apply([],r);var e,t,a,s,u,l,c=0,f=n.length,p=f-1,d=r[0],h=m(d);if(h||1<f&&"string"==typeof d&&!y.checkClone&&Le.test(d))return n.each(function(e){var t=n.eq(e);h&&(r[0]=d.call(this,e,t.html())),Ie(t,r,i,o)});if(f&&(t=(e=we(r,n[0].ownerDocument,!1,n,o)).firstChild,1===e.childNodes.length&&(e=t),t||o)){for(s=(a=k.map(ve(e,"script"),Pe)).length;c<f;c++)u=e,c!==p&&(u=k.clone(u,!0,!0),s&&k.merge(a,ve(u,"script"))),i.call(n[c],u,c);if(s)for(l=a[a.length-1].ownerDocument,k.map(a,Re),c=0;c<s;c++)u=a[c],he.test(u.type||"")&&!Q.access(u,"globalEval")&&k.contains(l,u)&&(u.src&&"module"!==(u.type||"").toLowerCase()?k._evalUrl&&!u.noModule&&k._evalUrl(u.src,{nonce:u.nonce||u.getAttribute("nonce")}):b(u.textContent.replace(He,""),u,l))}return n}function We(e,t,n){for(var r,i=t?k.filter(t,e):e,o=0;null!=(r=i[o]);o++)n||1!==r.nodeType||k.cleanData(ve(r)),r.parentNode&&(n&&oe(r)&&ye(ve(r,"script")),r.parentNode.removeChild(r));return e}k.extend({htmlPrefilter:function(e){return e.replace(je,"<$1></$2>")},clone:function(e,t,n){var r,i,o,a,s,u,l,c=e.cloneNode(!0),f=oe(e);if(!(y.noCloneChecked||1!==e.nodeType&&11!==e.nodeType||k.isXMLDoc(e)))for(a=ve(c),r=0,i=(o=ve(e)).length;r<i;r++)s=o[r],u=a[r],void 0,"input"===(l=u.nodeName.toLowerCase())&&pe.test(s.type)?u.checked=s.checked:"input"!==l&&"textarea"!==l||(u.defaultValue=s.defaultValue);if(t)if(n)for(o=o||ve(e),a=a||ve(c),r=0,i=o.length;r<i;r++)Me(o[r],a[r]);else Me(e,c);return 0<(a=ve(c,"script")).length&&ye(a,!f&&ve(e,"script")),c},cleanData:function(e){for(var t,n,r,i=k.event.special,o=0;void 0!==(n=e[o]);o++)if(G(n)){if(t=n[Q.expando]){if(t.events)for(r in t.events)i[r]?k.event.remove(n,r):k.removeEvent(n,r,t.handle);n[Q.expando]=void 0}n[J.expando]&&(n[J.expando]=void 0)}}}),k.fn.extend({detach:function(e){return We(this,e,!0)},remove:function(e){return We(this,e)},text:function(e){return _(this,function(e){return void 0===e?k.text(this):this.empty().each(function(){1!==this.nodeType&&11!==this.nodeType&&9!==this.nodeType||(this.textContent=e)})},null,e,arguments.length)},append:function(){return Ie(this,arguments,function(e){1!==this.nodeType&&11!==this.nodeType&&9!==this.nodeType||Oe(this,e).appendChild(e)})},prepend:function(){return Ie(this,arguments,function(e){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var t=Oe(this,e);t.insertBefore(e,t.firstChild)}})},before:function(){return Ie(this,arguments,function(e){this.parentNode&&this.parentNode.insertBefore(e,this)})},after:function(){return Ie(this,arguments,function(e){this.parentNode&&this.parentNode.insertBefore(e,this.nextSibling)})},empty:function(){for(var e,t=0;null!=(e=this[t]);t++)1===e.nodeType&&(k.cleanData(ve(e,!1)),e.textContent="");return this},clone:function(e,t){return e=null!=e&&e,t=null==t?e:t,this.map(function(){return k.clone(this,e,t)})},html:function(e){return _(this,function(e){var t=this[0]||{},n=0,r=this.length;if(void 0===e&&1===t.nodeType)return t.innerHTML;if("string"==typeof e&&!qe.test(e)&&!ge[(de.exec(e)||["",""])[1].toLowerCase()]){e=k.htmlPrefilter(e);try{for(;n<r;n++)1===(t=this[n]||{}).nodeType&&(k.cleanData(ve(t,!1)),t.innerHTML=e);t=0}catch(e){}}t&&this.empty().append(e)},null,e,arguments.length)},replaceWith:function(){var n=[];return Ie(this,arguments,function(e){var 
t=this.parentNode;k.inArray(this,n)<0&&(k.cleanData(ve(this)),t&&t.replaceChild(e,this))},n)}}),k.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(e,a){k.fn[e]=function(e){for(var t,n=[],r=k(e),i=r.length-1,o=0;o<=i;o++)t=o===i?this:this.clone(!0),k(r[o])[a](t),u.apply(n,t.get());return this.pushStack(n)}});var $e=new RegExp("^("+te+")(?!px)[a-z%]+$","i"),Fe=function(e){var t=e.ownerDocument.defaultView;return t&&t.opener||(t=C),t.getComputedStyle(e)},Be=new RegExp(re.join("|"),"i");function _e(e,t,n){var r,i,o,a,s=e.style;return(n=n||Fe(e))&&(""!==(a=n.getPropertyValue(t)||n[t])||oe(e)||(a=k.style(e,t)),!y.pixelBoxStyles()&&$e.test(a)&&Be.test(t)&&(r=s.width,i=s.minWidth,o=s.maxWidth,s.minWidth=s.maxWidth=s.width=a,a=n.width,s.width=r,s.minWidth=i,s.maxWidth=o)),void 0!==a?a+"":a}function ze(e,t){return{get:function(){if(!e())return(this.get=t).apply(this,arguments);delete this.get}}}!function(){function e(){if(u){s.style.cssText="position:absolute;left:-11111px;width:60px;margin-top:1px;padding:0;border:0",u.style.cssText="position:relative;display:block;box-sizing:border-box;overflow:scroll;margin:auto;border:1px;padding:1px;width:60%;top:1%",ie.appendChild(s).appendChild(u);var e=C.getComputedStyle(u);n="1%"!==e.top,a=12===t(e.marginLeft),u.style.right="60%",o=36===t(e.right),r=36===t(e.width),u.style.position="absolute",i=12===t(u.offsetWidth/3),ie.removeChild(s),u=null}}function t(e){return Math.round(parseFloat(e))}var n,r,i,o,a,s=E.createElement("div"),u=E.createElement("div");u.style&&(u.style.backgroundClip="content-box",u.cloneNode(!0).style.backgroundClip="",y.clearCloneStyle="content-box"===u.style.backgroundClip,k.extend(y,{boxSizingReliable:function(){return e(),r},pixelBoxStyles:function(){return e(),o},pixelPosition:function(){return e(),n},reliableMarginLeft:function(){return e(),a},scrollboxSize:function(){return e(),i}}))}();var Ue=["Webkit","Moz","ms"],Xe=E.createElement("div").style,Ve={};function Ge(e){var t=k.cssProps[e]||Ve[e];return t||(e in Xe?e:Ve[e]=function(e){var t=e[0].toUpperCase()+e.slice(1),n=Ue.length;while(n--)if((e=Ue[n]+t)in Xe)return e}(e)||e)}var Ye=/^(none|table(?!-c[ea]).+)/,Qe=/^--/,Je={position:"absolute",visibility:"hidden",display:"block"},Ke={letterSpacing:"0",fontWeight:"400"};function Ze(e,t,n){var r=ne.exec(t);return r?Math.max(0,r[2]-(n||0))+(r[3]||"px"):t}function et(e,t,n,r,i,o){var a="width"===t?1:0,s=0,u=0;if(n===(r?"border":"content"))return 0;for(;a<4;a+=2)"margin"===n&&(u+=k.css(e,n+re[a],!0,i)),r?("content"===n&&(u-=k.css(e,"padding"+re[a],!0,i)),"margin"!==n&&(u-=k.css(e,"border"+re[a]+"Width",!0,i))):(u+=k.css(e,"padding"+re[a],!0,i),"padding"!==n?u+=k.css(e,"border"+re[a]+"Width",!0,i):s+=k.css(e,"border"+re[a]+"Width",!0,i));return!r&&0<=o&&(u+=Math.max(0,Math.ceil(e["offset"+t[0].toUpperCase()+t.slice(1)]-o-u-s-.5))||0),u}function tt(e,t,n){var r=Fe(e),i=(!y.boxSizingReliable()||n)&&"border-box"===k.css(e,"boxSizing",!1,r),o=i,a=_e(e,t,r),s="offset"+t[0].toUpperCase()+t.slice(1);if($e.test(a)){if(!n)return a;a="auto"}return(!y.boxSizingReliable()&&i||"auto"===a||!parseFloat(a)&&"inline"===k.css(e,"display",!1,r))&&e.getClientRects().length&&(i="border-box"===k.css(e,"boxSizing",!1,r),(o=s in e)&&(a=e[s])),(a=parseFloat(a)||0)+et(e,t,n||(i?"border":"content"),o,r,a)+"px"}function nt(e,t,n,r,i){return new nt.prototype.init(e,t,n,r,i)}k.extend({cssHooks:{opacity:{get:function(e,t){if(t){var 
n=_e(e,"opacity");return""===n?"1":n}}}},cssNumber:{animationIterationCount:!0,columnCount:!0,fillOpacity:!0,flexGrow:!0,flexShrink:!0,fontWeight:!0,gridArea:!0,gridColumn:!0,gridColumnEnd:!0,gridColumnStart:!0,gridRow:!0,gridRowEnd:!0,gridRowStart:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{},style:function(e,t,n,r){if(e&&3!==e.nodeType&&8!==e.nodeType&&e.style){var i,o,a,s=V(t),u=Qe.test(t),l=e.style;if(u||(t=Ge(s)),a=k.cssHooks[t]||k.cssHooks[s],void 0===n)return a&&"get"in a&&void 0!==(i=a.get(e,!1,r))?i:l[t];"string"===(o=typeof n)&&(i=ne.exec(n))&&i[1]&&(n=le(e,t,i),o="number"),null!=n&&n==n&&("number"!==o||u||(n+=i&&i[3]||(k.cssNumber[s]?"":"px")),y.clearCloneStyle||""!==n||0!==t.indexOf("background")||(l[t]="inherit"),a&&"set"in a&&void 0===(n=a.set(e,n,r))||(u?l.setProperty(t,n):l[t]=n))}},css:function(e,t,n,r){var i,o,a,s=V(t);return Qe.test(t)||(t=Ge(s)),(a=k.cssHooks[t]||k.cssHooks[s])&&"get"in a&&(i=a.get(e,!0,n)),void 0===i&&(i=_e(e,t,r)),"normal"===i&&t in Ke&&(i=Ke[t]),""===n||n?(o=parseFloat(i),!0===n||isFinite(o)?o||0:i):i}}),k.each(["height","width"],function(e,u){k.cssHooks[u]={get:function(e,t,n){if(t)return!Ye.test(k.css(e,"display"))||e.getClientRects().length&&e.getBoundingClientRect().width?tt(e,u,n):ue(e,Je,function(){return tt(e,u,n)})},set:function(e,t,n){var r,i=Fe(e),o=!y.scrollboxSize()&&"absolute"===i.position,a=(o||n)&&"border-box"===k.css(e,"boxSizing",!1,i),s=n?et(e,u,n,a,i):0;return a&&o&&(s-=Math.ceil(e["offset"+u[0].toUpperCase()+u.slice(1)]-parseFloat(i[u])-et(e,u,"border",!1,i)-.5)),s&&(r=ne.exec(t))&&"px"!==(r[3]||"px")&&(e.style[u]=t,t=k.css(e,u)),Ze(0,t,s)}}}),k.cssHooks.marginLeft=ze(y.reliableMarginLeft,function(e,t){if(t)return(parseFloat(_e(e,"marginLeft"))||e.getBoundingClientRect().left-ue(e,{marginLeft:0},function(){return e.getBoundingClientRect().left}))+"px"}),k.each({margin:"",padding:"",border:"Width"},function(i,o){k.cssHooks[i+o]={expand:function(e){for(var t=0,n={},r="string"==typeof e?e.split(" "):[e];t<4;t++)n[i+re[t]+o]=r[t]||r[t-2]||r[0];return n}},"margin"!==i&&(k.cssHooks[i+o].set=Ze)}),k.fn.extend({css:function(e,t){return _(this,function(e,t,n){var r,i,o={},a=0;if(Array.isArray(t)){for(r=Fe(e),i=t.length;a<i;a++)o[t[a]]=k.css(e,t[a],!1,r);return o}return void 0!==n?k.style(e,t,n):k.css(e,t)},e,t,1<arguments.length)}}),((k.Tween=nt).prototype={constructor:nt,init:function(e,t,n,r,i,o){this.elem=e,this.prop=n,this.easing=i||k.easing._default,this.options=t,this.start=this.now=this.cur(),this.end=r,this.unit=o||(k.cssNumber[n]?"":"px")},cur:function(){var e=nt.propHooks[this.prop];return e&&e.get?e.get(this):nt.propHooks._default.get(this)},run:function(e){var t,n=nt.propHooks[this.prop];return this.options.duration?this.pos=t=k.easing[this.easing](e,this.options.duration*e,0,1,this.options.duration):this.pos=t=e,this.now=(this.end-this.start)*t+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),n&&n.set?n.set(this):nt.propHooks._default.set(this),this}}).init.prototype=nt.prototype,(nt.propHooks={_default:{get:function(e){var t;return 
1!==e.elem.nodeType||null!=e.elem[e.prop]&&null==e.elem.style[e.prop]?e.elem[e.prop]:(t=k.css(e.elem,e.prop,""))&&"auto"!==t?t:0},set:function(e){k.fx.step[e.prop]?k.fx.step[e.prop](e):1!==e.elem.nodeType||!k.cssHooks[e.prop]&&null==e.elem.style[Ge(e.prop)]?e.elem[e.prop]=e.now:k.style(e.elem,e.prop,e.now+e.unit)}}}).scrollTop=nt.propHooks.scrollLeft={set:function(e){e.elem.nodeType&&e.elem.parentNode&&(e.elem[e.prop]=e.now)}},k.easing={linear:function(e){return e},swing:function(e){return.5-Math.cos(e*Math.PI)/2},_default:"swing"},k.fx=nt.prototype.init,k.fx.step={};var rt,it,ot,at,st=/^(?:toggle|show|hide)$/,ut=/queueHooks$/;function lt(){it&&(!1===E.hidden&&C.requestAnimationFrame?C.requestAnimationFrame(lt):C.setTimeout(lt,k.fx.interval),k.fx.tick())}function ct(){return C.setTimeout(function(){rt=void 0}),rt=Date.now()}function ft(e,t){var n,r=0,i={height:e};for(t=t?1:0;r<4;r+=2-t)i["margin"+(n=re[r])]=i["padding"+n]=e;return t&&(i.opacity=i.width=e),i}function pt(e,t,n){for(var r,i=(dt.tweeners[t]||[]).concat(dt.tweeners["*"]),o=0,a=i.length;o<a;o++)if(r=i[o].call(n,t,e))return r}function dt(o,e,t){var n,a,r=0,i=dt.prefilters.length,s=k.Deferred().always(function(){delete u.elem}),u=function(){if(a)return!1;for(var e=rt||ct(),t=Math.max(0,l.startTime+l.duration-e),n=1-(t/l.duration||0),r=0,i=l.tweens.length;r<i;r++)l.tweens[r].run(n);return s.notifyWith(o,[l,n,t]),n<1&&i?t:(i||s.notifyWith(o,[l,1,0]),s.resolveWith(o,[l]),!1)},l=s.promise({elem:o,props:k.extend({},e),opts:k.extend(!0,{specialEasing:{},easing:k.easing._default},t),originalProperties:e,originalOptions:t,startTime:rt||ct(),duration:t.duration,tweens:[],createTween:function(e,t){var n=k.Tween(o,l.opts,e,t,l.opts.specialEasing[e]||l.opts.easing);return l.tweens.push(n),n},stop:function(e){var t=0,n=e?l.tweens.length:0;if(a)return this;for(a=!0;t<n;t++)l.tweens[t].run(1);return e?(s.notifyWith(o,[l,1,0]),s.resolveWith(o,[l,e])):s.rejectWith(o,[l,e]),this}}),c=l.props;for(!function(e,t){var n,r,i,o,a;for(n in e)if(i=t[r=V(n)],o=e[n],Array.isArray(o)&&(i=o[1],o=e[n]=o[0]),n!==r&&(e[r]=o,delete e[n]),(a=k.cssHooks[r])&&"expand"in a)for(n in o=a.expand(o),delete e[r],o)n in e||(e[n]=o[n],t[n]=i);else t[r]=i}(c,l.opts.specialEasing);r<i;r++)if(n=dt.prefilters[r].call(l,o,c,l.opts))return m(n.stop)&&(k._queueHooks(l.elem,l.opts.queue).stop=n.stop.bind(n)),n;return k.map(c,pt,l),m(l.opts.start)&&l.opts.start.call(o,l),l.progress(l.opts.progress).done(l.opts.done,l.opts.complete).fail(l.opts.fail).always(l.opts.always),k.fx.timer(k.extend(u,{elem:o,anim:l,queue:l.opts.queue})),l}k.Animation=k.extend(dt,{tweeners:{"*":[function(e,t){var n=this.createTween(e,t);return le(n.elem,e,ne.exec(t),n),n}]},tweener:function(e,t){m(e)?(t=e,e=["*"]):e=e.match(R);for(var n,r=0,i=e.length;r<i;r++)n=e[r],dt.tweeners[n]=dt.tweeners[n]||[],dt.tweeners[n].unshift(t)},prefilters:[function(e,t,n){var r,i,o,a,s,u,l,c,f="width"in t||"height"in t,p=this,d={},h=e.style,g=e.nodeType&&se(e),v=Q.get(e,"fxshow");for(r in n.queue||(null==(a=k._queueHooks(e,"fx")).unqueued&&(a.unqueued=0,s=a.empty.fire,a.empty.fire=function(){a.unqueued||s()}),a.unqueued++,p.always(function(){p.always(function(){a.unqueued--,k.queue(e,"fx").length||a.empty.fire()})})),t)if(i=t[r],st.test(i)){if(delete t[r],o=o||"toggle"===i,i===(g?"hide":"show")){if("show"!==i||!v||void 0===v[r])continue;g=!0}d[r]=v&&v[r]||k.style(e,r)}if((u=!k.isEmptyObject(t))||!k.isEmptyObject(d))for(r in 
f&&1===e.nodeType&&(n.overflow=[h.overflow,h.overflowX,h.overflowY],null==(l=v&&v.display)&&(l=Q.get(e,"display")),"none"===(c=k.css(e,"display"))&&(l?c=l:(fe([e],!0),l=e.style.display||l,c=k.css(e,"display"),fe([e]))),("inline"===c||"inline-block"===c&&null!=l)&&"none"===k.css(e,"float")&&(u||(p.done(function(){h.display=l}),null==l&&(c=h.display,l="none"===c?"":c)),h.display="inline-block")),n.overflow&&(h.overflow="hidden",p.always(function(){h.overflow=n.overflow[0],h.overflowX=n.overflow[1],h.overflowY=n.overflow[2]})),u=!1,d)u||(v?"hidden"in v&&(g=v.hidden):v=Q.access(e,"fxshow",{display:l}),o&&(v.hidden=!g),g&&fe([e],!0),p.done(function(){for(r in g||fe([e]),Q.remove(e,"fxshow"),d)k.style(e,r,d[r])})),u=pt(g?v[r]:0,r,p),r in v||(v[r]=u.start,g&&(u.end=u.start,u.start=0))}],prefilter:function(e,t){t?dt.prefilters.unshift(e):dt.prefilters.push(e)}}),k.speed=function(e,t,n){var r=e&&"object"==typeof e?k.extend({},e):{complete:n||!n&&t||m(e)&&e,duration:e,easing:n&&t||t&&!m(t)&&t};return k.fx.off?r.duration=0:"number"!=typeof r.duration&&(r.duration in k.fx.speeds?r.duration=k.fx.speeds[r.duration]:r.duration=k.fx.speeds._default),null!=r.queue&&!0!==r.queue||(r.queue="fx"),r.old=r.complete,r.complete=function(){m(r.old)&&r.old.call(this),r.queue&&k.dequeue(this,r.queue)},r},k.fn.extend({fadeTo:function(e,t,n,r){return this.filter(se).css("opacity",0).show().end().animate({opacity:t},e,n,r)},animate:function(t,e,n,r){var i=k.isEmptyObject(t),o=k.speed(e,n,r),a=function(){var e=dt(this,k.extend({},t),o);(i||Q.get(this,"finish"))&&e.stop(!0)};return a.finish=a,i||!1===o.queue?this.each(a):this.queue(o.queue,a)},stop:function(i,e,o){var a=function(e){var t=e.stop;delete e.stop,t(o)};return"string"!=typeof i&&(o=e,e=i,i=void 0),e&&!1!==i&&this.queue(i||"fx",[]),this.each(function(){var e=!0,t=null!=i&&i+"queueHooks",n=k.timers,r=Q.get(this);if(t)r[t]&&r[t].stop&&a(r[t]);else for(t in r)r[t]&&r[t].stop&&ut.test(t)&&a(r[t]);for(t=n.length;t--;)n[t].elem!==this||null!=i&&n[t].queue!==i||(n[t].anim.stop(o),e=!1,n.splice(t,1));!e&&o||k.dequeue(this,i)})},finish:function(a){return!1!==a&&(a=a||"fx"),this.each(function(){var e,t=Q.get(this),n=t[a+"queue"],r=t[a+"queueHooks"],i=k.timers,o=n?n.length:0;for(t.finish=!0,k.queue(this,a,[]),r&&r.stop&&r.stop.call(this,!0),e=i.length;e--;)i[e].elem===this&&i[e].queue===a&&(i[e].anim.stop(!0),i.splice(e,1));for(e=0;e<o;e++)n[e]&&n[e].finish&&n[e].finish.call(this);delete t.finish})}}),k.each(["toggle","show","hide"],function(e,r){var i=k.fn[r];k.fn[r]=function(e,t,n){return null==e||"boolean"==typeof e?i.apply(this,arguments):this.animate(ft(r,!0),e,t,n)}}),k.each({slideDown:ft("show"),slideUp:ft("hide"),slideToggle:ft("toggle"),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(e,r){k.fn[e]=function(e,t,n){return this.animate(r,e,t,n)}}),k.timers=[],k.fx.tick=function(){var e,t=0,n=k.timers;for(rt=Date.now();t<n.length;t++)(e=n[t])()||n[t]!==e||n.splice(t--,1);n.length||k.fx.stop(),rt=void 0},k.fx.timer=function(e){k.timers.push(e),k.fx.start()},k.fx.interval=13,k.fx.start=function(){it||(it=!0,lt())},k.fx.stop=function(){it=null},k.fx.speeds={slow:600,fast:200,_default:400},k.fn.delay=function(r,e){return r=k.fx&&k.fx.speeds[r]||r,e=e||"fx",this.queue(e,function(e,t){var 
n=C.setTimeout(e,r);t.stop=function(){C.clearTimeout(n)}})},ot=E.createElement("input"),at=E.createElement("select").appendChild(E.createElement("option")),ot.type="checkbox",y.checkOn=""!==ot.value,y.optSelected=at.selected,(ot=E.createElement("input")).value="t",ot.type="radio",y.radioValue="t"===ot.value;var ht,gt=k.expr.attrHandle;k.fn.extend({attr:function(e,t){return _(this,k.attr,e,t,1<arguments.length)},removeAttr:function(e){return this.each(function(){k.removeAttr(this,e)})}}),k.extend({attr:function(e,t,n){var r,i,o=e.nodeType;if(3!==o&&8!==o&&2!==o)return"undefined"==typeof e.getAttribute?k.prop(e,t,n):(1===o&&k.isXMLDoc(e)||(i=k.attrHooks[t.toLowerCase()]||(k.expr.match.bool.test(t)?ht:void 0)),void 0!==n?null===n?void k.removeAttr(e,t):i&&"set"in i&&void 0!==(r=i.set(e,n,t))?r:(e.setAttribute(t,n+""),n):i&&"get"in i&&null!==(r=i.get(e,t))?r:null==(r=k.find.attr(e,t))?void 0:r)},attrHooks:{type:{set:function(e,t){if(!y.radioValue&&"radio"===t&&A(e,"input")){var n=e.value;return e.setAttribute("type",t),n&&(e.value=n),t}}}},removeAttr:function(e,t){var n,r=0,i=t&&t.match(R);if(i&&1===e.nodeType)while(n=i[r++])e.removeAttribute(n)}}),ht={set:function(e,t,n){return!1===t?k.removeAttr(e,n):e.setAttribute(n,n),n}},k.each(k.expr.match.bool.source.match(/\w+/g),function(e,t){var a=gt[t]||k.find.attr;gt[t]=function(e,t,n){var r,i,o=t.toLowerCase();return n||(i=gt[o],gt[o]=r,r=null!=a(e,t,n)?o:null,gt[o]=i),r}});var vt=/^(?:input|select|textarea|button)$/i,yt=/^(?:a|area)$/i;function mt(e){return(e.match(R)||[]).join(" ")}function xt(e){return e.getAttribute&&e.getAttribute("class")||""}function bt(e){return Array.isArray(e)?e:"string"==typeof e&&e.match(R)||[]}k.fn.extend({prop:function(e,t){return _(this,k.prop,e,t,1<arguments.length)},removeProp:function(e){return this.each(function(){delete this[k.propFix[e]||e]})}}),k.extend({prop:function(e,t,n){var r,i,o=e.nodeType;if(3!==o&&8!==o&&2!==o)return 1===o&&k.isXMLDoc(e)||(t=k.propFix[t]||t,i=k.propHooks[t]),void 0!==n?i&&"set"in i&&void 0!==(r=i.set(e,n,t))?r:e[t]=n:i&&"get"in i&&null!==(r=i.get(e,t))?r:e[t]},propHooks:{tabIndex:{get:function(e){var t=k.find.attr(e,"tabindex");return t?parseInt(t,10):vt.test(e.nodeName)||yt.test(e.nodeName)&&e.href?0:-1}}},propFix:{"for":"htmlFor","class":"className"}}),y.optSelected||(k.propHooks.selected={get:function(e){var t=e.parentNode;return t&&t.parentNode&&t.parentNode.selectedIndex,null},set:function(e){var t=e.parentNode;t&&(t.selectedIndex,t.parentNode&&t.parentNode.selectedIndex)}}),k.each(["tabIndex","readOnly","maxLength","cellSpacing","cellPadding","rowSpan","colSpan","useMap","frameBorder","contentEditable"],function(){k.propFix[this.toLowerCase()]=this}),k.fn.extend({addClass:function(t){var e,n,r,i,o,a,s,u=0;if(m(t))return this.each(function(e){k(this).addClass(t.call(this,e,xt(this)))});if((e=bt(t)).length)while(n=this[u++])if(i=xt(n),r=1===n.nodeType&&" "+mt(i)+" "){a=0;while(o=e[a++])r.indexOf(" "+o+" ")<0&&(r+=o+" ");i!==(s=mt(r))&&n.setAttribute("class",s)}return this},removeClass:function(t){var e,n,r,i,o,a,s,u=0;if(m(t))return this.each(function(e){k(this).removeClass(t.call(this,e,xt(this)))});if(!arguments.length)return this.attr("class","");if((e=bt(t)).length)while(n=this[u++])if(i=xt(n),r=1===n.nodeType&&" "+mt(i)+" "){a=0;while(o=e[a++])while(-1<r.indexOf(" "+o+" "))r=r.replace(" "+o+" "," ");i!==(s=mt(r))&&n.setAttribute("class",s)}return this},toggleClass:function(i,t){var o=typeof i,a="string"===o||Array.isArray(i);return"boolean"==typeof 
t&&a?t?this.addClass(i):this.removeClass(i):m(i)?this.each(function(e){k(this).toggleClass(i.call(this,e,xt(this),t),t)}):this.each(function(){var e,t,n,r;if(a){t=0,n=k(this),r=bt(i);while(e=r[t++])n.hasClass(e)?n.removeClass(e):n.addClass(e)}else void 0!==i&&"boolean"!==o||((e=xt(this))&&Q.set(this,"__className__",e),this.setAttribute&&this.setAttribute("class",e||!1===i?"":Q.get(this,"__className__")||""))})},hasClass:function(e){var t,n,r=0;t=" "+e+" ";while(n=this[r++])if(1===n.nodeType&&-1<(" "+mt(xt(n))+" ").indexOf(t))return!0;return!1}});var wt=/\r/g;k.fn.extend({val:function(n){var r,e,i,t=this[0];return arguments.length?(i=m(n),this.each(function(e){var t;1===this.nodeType&&(null==(t=i?n.call(this,e,k(this).val()):n)?t="":"number"==typeof t?t+="":Array.isArray(t)&&(t=k.map(t,function(e){return null==e?"":e+""})),(r=k.valHooks[this.type]||k.valHooks[this.nodeName.toLowerCase()])&&"set"in r&&void 0!==r.set(this,t,"value")||(this.value=t))})):t?(r=k.valHooks[t.type]||k.valHooks[t.nodeName.toLowerCase()])&&"get"in r&&void 0!==(e=r.get(t,"value"))?e:"string"==typeof(e=t.value)?e.replace(wt,""):null==e?"":e:void 0}}),k.extend({valHooks:{option:{get:function(e){var t=k.find.attr(e,"value");return null!=t?t:mt(k.text(e))}},select:{get:function(e){var t,n,r,i=e.options,o=e.selectedIndex,a="select-one"===e.type,s=a?null:[],u=a?o+1:i.length;for(r=o<0?u:a?o:0;r<u;r++)if(((n=i[r]).selected||r===o)&&!n.disabled&&(!n.parentNode.disabled||!A(n.parentNode,"optgroup"))){if(t=k(n).val(),a)return t;s.push(t)}return s},set:function(e,t){var n,r,i=e.options,o=k.makeArray(t),a=i.length;while(a--)((r=i[a]).selected=-1<k.inArray(k.valHooks.option.get(r),o))&&(n=!0);return n||(e.selectedIndex=-1),o}}}}),k.each(["radio","checkbox"],function(){k.valHooks[this]={set:function(e,t){if(Array.isArray(t))return e.checked=-1<k.inArray(k(e).val(),t)}},y.checkOn||(k.valHooks[this].get=function(e){return null===e.getAttribute("value")?"on":e.value})}),y.focusin="onfocusin"in C;var Tt=/^(?:focusinfocus|focusoutblur)$/,Ct=function(e){e.stopPropagation()};k.extend(k.event,{trigger:function(e,t,n,r){var i,o,a,s,u,l,c,f,p=[n||E],d=v.call(e,"type")?e.type:e,h=v.call(e,"namespace")?e.namespace.split("."):[];if(o=f=a=n=n||E,3!==n.nodeType&&8!==n.nodeType&&!Tt.test(d+k.event.triggered)&&(-1<d.indexOf(".")&&(d=(h=d.split(".")).shift(),h.sort()),u=d.indexOf(":")<0&&"on"+d,(e=e[k.expando]?e:new k.Event(d,"object"==typeof e&&e)).isTrigger=r?2:3,e.namespace=h.join("."),e.rnamespace=e.namespace?new RegExp("(^|\\.)"+h.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,e.result=void 0,e.target||(e.target=n),t=null==t?[e]:k.makeArray(t,[e]),c=k.event.special[d]||{},r||!c.trigger||!1!==c.trigger.apply(n,t))){if(!r&&!c.noBubble&&!x(n)){for(s=c.delegateType||d,Tt.test(s+d)||(o=o.parentNode);o;o=o.parentNode)p.push(o),a=o;a===(n.ownerDocument||E)&&p.push(a.defaultView||a.parentWindow||C)}i=0;while((o=p[i++])&&!e.isPropagationStopped())f=o,e.type=1<i?s:c.bindType||d,(l=(Q.get(o,"events")||{})[e.type]&&Q.get(o,"handle"))&&l.apply(o,t),(l=u&&o[u])&&l.apply&&G(o)&&(e.result=l.apply(o,t),!1===e.result&&e.preventDefault());return e.type=d,r||e.isDefaultPrevented()||c._default&&!1!==c._default.apply(p.pop(),t)||!G(n)||u&&m(n[d])&&!x(n)&&((a=n[u])&&(n[u]=null),k.event.triggered=d,e.isPropagationStopped()&&f.addEventListener(d,Ct),n[d](),e.isPropagationStopped()&&f.removeEventListener(d,Ct),k.event.triggered=void 0,a&&(n[u]=a)),e.result}},simulate:function(e,t,n){var r=k.extend(new 
k.Event,n,{type:e,isSimulated:!0});k.event.trigger(r,null,t)}}),k.fn.extend({trigger:function(e,t){return this.each(function(){k.event.trigger(e,t,this)})},triggerHandler:function(e,t){var n=this[0];if(n)return k.event.trigger(e,t,n,!0)}}),y.focusin||k.each({focus:"focusin",blur:"focusout"},function(n,r){var i=function(e){k.event.simulate(r,e.target,k.event.fix(e))};k.event.special[r]={setup:function(){var e=this.ownerDocument||this,t=Q.access(e,r);t||e.addEventListener(n,i,!0),Q.access(e,r,(t||0)+1)},teardown:function(){var e=this.ownerDocument||this,t=Q.access(e,r)-1;t?Q.access(e,r,t):(e.removeEventListener(n,i,!0),Q.remove(e,r))}}});var Et=C.location,kt=Date.now(),St=/\?/;k.parseXML=function(e){var t;if(!e||"string"!=typeof e)return null;try{t=(new C.DOMParser).parseFromString(e,"text/xml")}catch(e){t=void 0}return t&&!t.getElementsByTagName("parsererror").length||k.error("Invalid XML: "+e),t};var Nt=/\[\]$/,At=/\r?\n/g,Dt=/^(?:submit|button|image|reset|file)$/i,jt=/^(?:input|select|textarea|keygen)/i;function qt(n,e,r,i){var t;if(Array.isArray(e))k.each(e,function(e,t){r||Nt.test(n)?i(n,t):qt(n+"["+("object"==typeof t&&null!=t?e:"")+"]",t,r,i)});else if(r||"object"!==w(e))i(n,e);else for(t in e)qt(n+"["+t+"]",e[t],r,i)}k.param=function(e,t){var n,r=[],i=function(e,t){var n=m(t)?t():t;r[r.length]=encodeURIComponent(e)+"="+encodeURIComponent(null==n?"":n)};if(null==e)return"";if(Array.isArray(e)||e.jquery&&!k.isPlainObject(e))k.each(e,function(){i(this.name,this.value)});else for(n in e)qt(n,e[n],t,i);return r.join("&")},k.fn.extend({serialize:function(){return k.param(this.serializeArray())},serializeArray:function(){return this.map(function(){var e=k.prop(this,"elements");return e?k.makeArray(e):this}).filter(function(){var e=this.type;return this.name&&!k(this).is(":disabled")&&jt.test(this.nodeName)&&!Dt.test(e)&&(this.checked||!pe.test(e))}).map(function(e,t){var n=k(this).val();return null==n?null:Array.isArray(n)?k.map(n,function(e){return{name:t.name,value:e.replace(At,"\r\n")}}):{name:t.name,value:n.replace(At,"\r\n")}}).get()}});var Lt=/%20/g,Ht=/#.*$/,Ot=/([?&])_=[^&]*/,Pt=/^(.*?):[ \t]*([^\r\n]*)$/gm,Rt=/^(?:GET|HEAD)$/,Mt=/^\/\//,It={},Wt={},$t="*/".concat("*"),Ft=E.createElement("a");function Bt(o){return function(e,t){"string"!=typeof e&&(t=e,e="*");var n,r=0,i=e.toLowerCase().match(R)||[];if(m(t))while(n=i[r++])"+"===n[0]?(n=n.slice(1)||"*",(o[n]=o[n]||[]).unshift(t)):(o[n]=o[n]||[]).push(t)}}function _t(t,i,o,a){var s={},u=t===Wt;function l(e){var r;return s[e]=!0,k.each(t[e]||[],function(e,t){var n=t(i,o,a);return"string"!=typeof n||u||s[n]?u?!(r=n):void 0:(i.dataTypes.unshift(n),l(n),!1)}),r}return l(i.dataTypes[0])||!s["*"]&&l("*")}function zt(e,t){var n,r,i=k.ajaxSettings.flatOptions||{};for(n in t)void 0!==t[n]&&((i[n]?e:r||(r={}))[n]=t[n]);return r&&k.extend(!0,e,r),e}Ft.href=Et.href,k.extend({active:0,lastModified:{},etag:{},ajaxSettings:{url:Et.href,type:"GET",isLocal:/^(?:about|app|app-storage|.+-extension|file|res|widget):$/.test(Et.protocol),global:!0,processData:!0,async:!0,contentType:"application/x-www-form-urlencoded; charset=UTF-8",accepts:{"*":$t,text:"text/plain",html:"text/html",xml:"application/xml, text/xml",json:"application/json, text/javascript"},contents:{xml:/\bxml\b/,html:/\bhtml/,json:/\bjson\b/},responseFields:{xml:"responseXML",text:"responseText",json:"responseJSON"},converters:{"* text":String,"text html":!0,"text json":JSON.parse,"text xml":k.parseXML},flatOptions:{url:!0,context:!0}},ajaxSetup:function(e,t){return 
t?zt(zt(e,k.ajaxSettings),t):zt(k.ajaxSettings,e)},ajaxPrefilter:Bt(It),ajaxTransport:Bt(Wt),ajax:function(e,t){"object"==typeof e&&(t=e,e=void 0),t=t||{};var c,f,p,n,d,r,h,g,i,o,v=k.ajaxSetup({},t),y=v.context||v,m=v.context&&(y.nodeType||y.jquery)?k(y):k.event,x=k.Deferred(),b=k.Callbacks("once memory"),w=v.statusCode||{},a={},s={},u="canceled",T={readyState:0,getResponseHeader:function(e){var t;if(h){if(!n){n={};while(t=Pt.exec(p))n[t[1].toLowerCase()+" "]=(n[t[1].toLowerCase()+" "]||[]).concat(t[2])}t=n[e.toLowerCase()+" "]}return null==t?null:t.join(", ")},getAllResponseHeaders:function(){return h?p:null},setRequestHeader:function(e,t){return null==h&&(e=s[e.toLowerCase()]=s[e.toLowerCase()]||e,a[e]=t),this},overrideMimeType:function(e){return null==h&&(v.mimeType=e),this},statusCode:function(e){var t;if(e)if(h)T.always(e[T.status]);else for(t in e)w[t]=[w[t],e[t]];return this},abort:function(e){var t=e||u;return c&&c.abort(t),l(0,t),this}};if(x.promise(T),v.url=((e||v.url||Et.href)+"").replace(Mt,Et.protocol+"//"),v.type=t.method||t.type||v.method||v.type,v.dataTypes=(v.dataType||"*").toLowerCase().match(R)||[""],null==v.crossDomain){r=E.createElement("a");try{r.href=v.url,r.href=r.href,v.crossDomain=Ft.protocol+"//"+Ft.host!=r.protocol+"//"+r.host}catch(e){v.crossDomain=!0}}if(v.data&&v.processData&&"string"!=typeof v.data&&(v.data=k.param(v.data,v.traditional)),_t(It,v,t,T),h)return T;for(i in(g=k.event&&v.global)&&0==k.active++&&k.event.trigger("ajaxStart"),v.type=v.type.toUpperCase(),v.hasContent=!Rt.test(v.type),f=v.url.replace(Ht,""),v.hasContent?v.data&&v.processData&&0===(v.contentType||"").indexOf("application/x-www-form-urlencoded")&&(v.data=v.data.replace(Lt,"+")):(o=v.url.slice(f.length),v.data&&(v.processData||"string"==typeof v.data)&&(f+=(St.test(f)?"&":"?")+v.data,delete v.data),!1===v.cache&&(f=f.replace(Ot,"$1"),o=(St.test(f)?"&":"?")+"_="+kt+++o),v.url=f+o),v.ifModified&&(k.lastModified[f]&&T.setRequestHeader("If-Modified-Since",k.lastModified[f]),k.etag[f]&&T.setRequestHeader("If-None-Match",k.etag[f])),(v.data&&v.hasContent&&!1!==v.contentType||t.contentType)&&T.setRequestHeader("Content-Type",v.contentType),T.setRequestHeader("Accept",v.dataTypes[0]&&v.accepts[v.dataTypes[0]]?v.accepts[v.dataTypes[0]]+("*"!==v.dataTypes[0]?", "+$t+"; q=0.01":""):v.accepts["*"]),v.headers)T.setRequestHeader(i,v.headers[i]);if(v.beforeSend&&(!1===v.beforeSend.call(y,T,v)||h))return T.abort();if(u="abort",b.add(v.complete),T.done(v.success),T.fail(v.error),c=_t(Wt,v,t,T)){if(T.readyState=1,g&&m.trigger("ajaxSend",[T,v]),h)return T;v.async&&0<v.timeout&&(d=C.setTimeout(function(){T.abort("timeout")},v.timeout));try{h=!1,c.send(a,l)}catch(e){if(h)throw e;l(-1,e)}}else l(-1,"No Transport");function l(e,t,n,r){var i,o,a,s,u,l=t;h||(h=!0,d&&C.clearTimeout(d),c=void 0,p=r||"",T.readyState=0<e?4:0,i=200<=e&&e<300||304===e,n&&(s=function(e,t,n){var r,i,o,a,s=e.contents,u=e.dataTypes;while("*"===u[0])u.shift(),void 0===r&&(r=e.mimeType||t.getResponseHeader("Content-Type"));if(r)for(i in s)if(s[i]&&s[i].test(r)){u.unshift(i);break}if(u[0]in n)o=u[0];else{for(i in n){if(!u[0]||e.converters[i+" "+u[0]]){o=i;break}a||(a=i)}o=o||a}if(o)return o!==u[0]&&u.unshift(o),n[o]}(v,T,n)),s=function(e,t,n,r){var i,o,a,s,u,l={},c=e.dataTypes.slice();if(c[1])for(a in e.converters)l[a.toLowerCase()]=e.converters[a];o=c.shift();while(o)if(e.responseFields[o]&&(n[e.responseFields[o]]=t),!u&&r&&e.dataFilter&&(t=e.dataFilter(t,e.dataType)),u=o,o=c.shift())if("*"===o)o=u;else if("*"!==u&&u!==o){if(!(a=l[u+" 
"+o]||l["* "+o]))for(i in l)if((s=i.split(" "))[1]===o&&(a=l[u+" "+s[0]]||l["* "+s[0]])){!0===a?a=l[i]:!0!==l[i]&&(o=s[0],c.unshift(s[1]));break}if(!0!==a)if(a&&e["throws"])t=a(t);else try{t=a(t)}catch(e){return{state:"parsererror",error:a?e:"No conversion from "+u+" to "+o}}}return{state:"success",data:t}}(v,s,T,i),i?(v.ifModified&&((u=T.getResponseHeader("Last-Modified"))&&(k.lastModified[f]=u),(u=T.getResponseHeader("etag"))&&(k.etag[f]=u)),204===e||"HEAD"===v.type?l="nocontent":304===e?l="notmodified":(l=s.state,o=s.data,i=!(a=s.error))):(a=l,!e&&l||(l="error",e<0&&(e=0))),T.status=e,T.statusText=(t||l)+"",i?x.resolveWith(y,[o,l,T]):x.rejectWith(y,[T,l,a]),T.statusCode(w),w=void 0,g&&m.trigger(i?"ajaxSuccess":"ajaxError",[T,v,i?o:a]),b.fireWith(y,[T,l]),g&&(m.trigger("ajaxComplete",[T,v]),--k.active||k.event.trigger("ajaxStop")))}return T},getJSON:function(e,t,n){return k.get(e,t,n,"json")},getScript:function(e,t){return k.get(e,void 0,t,"script")}}),k.each(["get","post"],function(e,i){k[i]=function(e,t,n,r){return m(t)&&(r=r||n,n=t,t=void 0),k.ajax(k.extend({url:e,type:i,dataType:r,data:t,success:n},k.isPlainObject(e)&&e))}}),k._evalUrl=function(e,t){return k.ajax({url:e,type:"GET",dataType:"script",cache:!0,async:!1,global:!1,converters:{"text script":function(){}},dataFilter:function(e){k.globalEval(e,t)}})},k.fn.extend({wrapAll:function(e){var t;return this[0]&&(m(e)&&(e=e.call(this[0])),t=k(e,this[0].ownerDocument).eq(0).clone(!0),this[0].parentNode&&t.insertBefore(this[0]),t.map(function(){var e=this;while(e.firstElementChild)e=e.firstElementChild;return e}).append(this)),this},wrapInner:function(n){return m(n)?this.each(function(e){k(this).wrapInner(n.call(this,e))}):this.each(function(){var e=k(this),t=e.contents();t.length?t.wrapAll(n):e.append(n)})},wrap:function(t){var n=m(t);return this.each(function(e){k(this).wrapAll(n?t.call(this,e):t)})},unwrap:function(e){return this.parent(e).not("body").each(function(){k(this).replaceWith(this.childNodes)}),this}}),k.expr.pseudos.hidden=function(e){return!k.expr.pseudos.visible(e)},k.expr.pseudos.visible=function(e){return!!(e.offsetWidth||e.offsetHeight||e.getClientRects().length)},k.ajaxSettings.xhr=function(){try{return new C.XMLHttpRequest}catch(e){}};var Ut={0:200,1223:204},Xt=k.ajaxSettings.xhr();y.cors=!!Xt&&"withCredentials"in Xt,y.ajax=Xt=!!Xt,k.ajaxTransport(function(i){var o,a;if(y.cors||Xt&&!i.crossDomain)return{send:function(e,t){var n,r=i.xhr();if(r.open(i.type,i.url,i.async,i.username,i.password),i.xhrFields)for(n in i.xhrFields)r[n]=i.xhrFields[n];for(n in i.mimeType&&r.overrideMimeType&&r.overrideMimeType(i.mimeType),i.crossDomain||e["X-Requested-With"]||(e["X-Requested-With"]="XMLHttpRequest"),e)r.setRequestHeader(n,e[n]);o=function(e){return function(){o&&(o=a=r.onload=r.onerror=r.onabort=r.ontimeout=r.onreadystatechange=null,"abort"===e?r.abort():"error"===e?"number"!=typeof r.status?t(0,"error"):t(r.status,r.statusText):t(Ut[r.status]||r.status,r.statusText,"text"!==(r.responseType||"text")||"string"!=typeof r.responseText?{binary:r.response}:{text:r.responseText},r.getAllResponseHeaders()))}},r.onload=o(),a=r.onerror=r.ontimeout=o("error"),void 0!==r.onabort?r.onabort=a:r.onreadystatechange=function(){4===r.readyState&&C.setTimeout(function(){o&&a()})},o=o("abort");try{r.send(i.hasContent&&i.data||null)}catch(e){if(o)throw e}},abort:function(){o&&o()}}}),k.ajaxPrefilter(function(e){e.crossDomain&&(e.contents.script=!1)}),k.ajaxSetup({accepts:{script:"text/javascript, application/javascript, 
application/ecmascript, application/x-ecmascript"},contents:{script:/\b(?:java|ecma)script\b/},converters:{"text script":function(e){return k.globalEval(e),e}}}),k.ajaxPrefilter("script",function(e){void 0===e.cache&&(e.cache=!1),e.crossDomain&&(e.type="GET")}),k.ajaxTransport("script",function(n){var r,i;if(n.crossDomain||n.scriptAttrs)return{send:function(e,t){r=k("<script>").attr(n.scriptAttrs||{}).prop({charset:n.scriptCharset,src:n.url}).on("load error",i=function(e){r.remove(),i=null,e&&t("error"===e.type?404:200,e.type)}),E.head.appendChild(r[0])},abort:function(){i&&i()}}});var Vt,Gt=[],Yt=/(=)\?(?=&|$)|\?\?/;k.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Gt.pop()||k.expando+"_"+kt++;return this[e]=!0,e}}),k.ajaxPrefilter("json jsonp",function(e,t,n){var r,i,o,a=!1!==e.jsonp&&(Yt.test(e.url)?"url":"string"==typeof e.data&&0===(e.contentType||"").indexOf("application/x-www-form-urlencoded")&&Yt.test(e.data)&&"data");if(a||"jsonp"===e.dataTypes[0])return r=e.jsonpCallback=m(e.jsonpCallback)?e.jsonpCallback():e.jsonpCallback,a?e[a]=e[a].replace(Yt,"$1"+r):!1!==e.jsonp&&(e.url+=(St.test(e.url)?"&":"?")+e.jsonp+"="+r),e.converters["script json"]=function(){return o||k.error(r+" was not called"),o[0]},e.dataTypes[0]="json",i=C[r],C[r]=function(){o=arguments},n.always(function(){void 0===i?k(C).removeProp(r):C[r]=i,e[r]&&(e.jsonpCallback=t.jsonpCallback,Gt.push(r)),o&&m(i)&&i(o[0]),o=i=void 0}),"script"}),y.createHTMLDocument=((Vt=E.implementation.createHTMLDocument("").body).innerHTML="<form></form><form></form>",2===Vt.childNodes.length),k.parseHTML=function(e,t,n){return"string"!=typeof e?[]:("boolean"==typeof t&&(n=t,t=!1),t||(y.createHTMLDocument?((r=(t=E.implementation.createHTMLDocument("")).createElement("base")).href=E.location.href,t.head.appendChild(r)):t=E),o=!n&&[],(i=D.exec(e))?[t.createElement(i[1])]:(i=we([e],t,o),o&&o.length&&k(o).remove(),k.merge([],i.childNodes)));var r,i,o},k.fn.load=function(e,t,n){var r,i,o,a=this,s=e.indexOf(" ");return-1<s&&(r=mt(e.slice(s)),e=e.slice(0,s)),m(t)?(n=t,t=void 0):t&&"object"==typeof t&&(i="POST"),0<a.length&&k.ajax({url:e,type:i||"GET",dataType:"html",data:t}).done(function(e){o=arguments,a.html(r?k("<div>").append(k.parseHTML(e)).find(r):e)}).always(n&&function(e,t){a.each(function(){n.apply(this,o||[e.responseText,t,e])})}),this},k.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){k.fn[t]=function(e){return this.on(t,e)}}),k.expr.pseudos.animated=function(t){return k.grep(k.timers,function(e){return t===e.elem}).length},k.offset={setOffset:function(e,t,n){var r,i,o,a,s,u,l=k.css(e,"position"),c=k(e),f={};"static"===l&&(e.style.position="relative"),s=c.offset(),o=k.css(e,"top"),u=k.css(e,"left"),("absolute"===l||"fixed"===l)&&-1<(o+u).indexOf("auto")?(a=(r=c.position()).top,i=r.left):(a=parseFloat(o)||0,i=parseFloat(u)||0),m(t)&&(t=t.call(e,n,k.extend({},s))),null!=t.top&&(f.top=t.top-s.top+a),null!=t.left&&(f.left=t.left-s.left+i),"using"in t?t.using.call(e,f):c.css(f)}},k.fn.extend({offset:function(t){if(arguments.length)return void 0===t?this:this.each(function(e){k.offset.setOffset(this,t,e)});var e,n,r=this[0];return r?r.getClientRects().length?(e=r.getBoundingClientRect(),n=r.ownerDocument.defaultView,{top:e.top+n.pageYOffset,left:e.left+n.pageXOffset}):{top:0,left:0}:void 0},position:function(){if(this[0]){var 
e,t,n,r=this[0],i={top:0,left:0};if("fixed"===k.css(r,"position"))t=r.getBoundingClientRect();else{t=this.offset(),n=r.ownerDocument,e=r.offsetParent||n.documentElement;while(e&&(e===n.body||e===n.documentElement)&&"static"===k.css(e,"position"))e=e.parentNode;e&&e!==r&&1===e.nodeType&&((i=k(e).offset()).top+=k.css(e,"borderTopWidth",!0),i.left+=k.css(e,"borderLeftWidth",!0))}return{top:t.top-i.top-k.css(r,"marginTop",!0),left:t.left-i.left-k.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent;while(e&&"static"===k.css(e,"position"))e=e.offsetParent;return e||ie})}}),k.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(t,i){var o="pageYOffset"===i;k.fn[t]=function(e){return _(this,function(e,t,n){var r;if(x(e)?r=e:9===e.nodeType&&(r=e.defaultView),void 0===n)return r?r[i]:e[t];r?r.scrollTo(o?r.pageXOffset:n,o?n:r.pageYOffset):e[t]=n},t,e,arguments.length)}}),k.each(["top","left"],function(e,n){k.cssHooks[n]=ze(y.pixelPosition,function(e,t){if(t)return t=_e(e,n),$e.test(t)?k(e).position()[n]+"px":t})}),k.each({Height:"height",Width:"width"},function(a,s){k.each({padding:"inner"+a,content:s,"":"outer"+a},function(r,o){k.fn[o]=function(e,t){var n=arguments.length&&(r||"boolean"!=typeof e),i=r||(!0===e||!0===t?"margin":"border");return _(this,function(e,t,n){var r;return x(e)?0===o.indexOf("outer")?e["inner"+a]:e.document.documentElement["client"+a]:9===e.nodeType?(r=e.documentElement,Math.max(e.body["scroll"+a],r["scroll"+a],e.body["offset"+a],r["offset"+a],r["client"+a])):void 0===n?k.css(e,t,i):k.style(e,t,n,i)},s,n?e:void 0,n)}})}),k.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),function(e,n){k.fn[n]=function(e,t){return 0<arguments.length?this.on(n,null,e,t):this.trigger(n)}}),k.fn.extend({hover:function(e,t){return this.mouseenter(e).mouseleave(t||e)}}),k.fn.extend({bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return 1===arguments.length?this.off(e,"**"):this.off(t,e||"**",n)}}),k.proxy=function(e,t){var n,r,i;if("string"==typeof t&&(n=e[t],t=e,e=n),m(e))return r=s.call(arguments,2),(i=function(){return e.apply(t||this,r.concat(s.call(arguments)))}).guid=e.guid=e.guid||k.guid++,i},k.holdReady=function(e){e?k.readyWait++:k.ready(!0)},k.isArray=Array.isArray,k.parseJSON=JSON.parse,k.nodeName=A,k.isFunction=m,k.isWindow=x,k.camelCase=V,k.type=w,k.now=Date.now,k.isNumeric=function(e){var t=k.type(e);return("number"===t||"string"===t)&&!isNaN(e-parseFloat(e))},"function"==typeof define&&define.amd&&define("jquery",[],function(){return k});var Qt=C.jQuery,Jt=C.$;return k.noConflict=function(e){return C.$===k&&(C.$=Jt),e&&C.jQuery===k&&(C.jQuery=Qt),k},e||(C.jQuery=C.$=k),k}); diff --git a/scala3doc/dotty-docs/docs/js/sidebar.js b/scala3doc/dotty-docs/docs/js/sidebar.js new file mode 100644 index 000000000000..aa377ed8aa0e --- /dev/null +++ b/scala3doc/dotty-docs/docs/js/sidebar.js @@ -0,0 +1,6 @@ +// Toggles a sidebar section +function toggleSection(titleElement) { + const title = $(titleElement); + title.siblings("ul").toggleClass("toggled"); + title.children("i.fas").toggleClass("fa-angle-right").toggleClass("fa-angle-down"); +} diff --git a/scala3doc/dotty-docs/docs/js/toolbar.js b/scala3doc/dotty-docs/docs/js/toolbar.js new file mode 100644 index 
000000000000..be132e7db4a9
--- /dev/null
+++ b/scala3doc/dotty-docs/docs/js/toolbar.js
@@ -0,0 +1,20 @@
+$(document).ready(function() {
+ $("#menu-icon").click(() => {
+ $(".sidebar").toggleClass("toggled");
+ })
+ $("#search-icon").click(() => {
+ $("#searchbar").toggleClass("shown");
+ $("#search-api-input").focus();
+ })
+ const searchInput = $("#search-api-input");
+ searchInput.keydown(evt => {
+ if (evt.which == 13) {
+ const baseUrl = $("#baseurl-input").val();
+ window.location = (
+ baseUrl + "/api/search.html?" +
+ "searchTerm=" + searchInput.val() +
+ "&previousUrl=" + encodeURI(window.location)
+ );
+ }
+ })
+})
diff --git a/scala3doc/dotty-docs/docs/sidebar.yml b/scala3doc/dotty-docs/docs/sidebar.yml
new file mode 100644
index 000000000000..b8f627760c8c
--- /dev/null
+++ b/scala3doc/dotty-docs/docs/sidebar.yml
@@ -0,0 +1,239 @@
+sidebar:
+ - title: Blog
+ url: blog/index.html
+ - title: Usage
+ subsection:
+ - title: Getting Started
+ url: docs/usage/getting-started.html
+ - title: sbt-projects
+ url: docs/usage/sbt-projects.html
+ - title: IDE support for Dotty
+ url: docs/usage/ide-support.html
+ - title: Worksheet mode in Dotty IDE
+ url: docs/usage/worksheet-mode.html
+ - title: Language Versions
+ url: docs/usage/language-versions.html
+ - title: cbt-projects
+ url: docs/usage/cbt-projects.html
+ - title: Dottydoc
+ url: docs/usage/dottydoc.html
+ - title: Reference
+ subsection:
+ - title: Overview
+ url: docs/reference/overview.html
+ - title: New Types
+ subsection:
+ - title: Intersection types
+ url: docs/reference/new-types/intersection-types.html
+ - title: Union types
+ url: docs/reference/new-types/union-types.html
+ - title: Type lambdas
+ url: docs/reference/new-types/type-lambdas.html
+ - title: Match types
+ url: docs/reference/new-types/match-types.html
+ - title: Dependent Function Types
+ url: docs/reference/new-types/dependent-function-types.html
+ - title: Enums
+ subsection:
+ - title: Enumerations
+ url: docs/reference/enums/enums.html
+ - title: Algebraic Data Types
+ url: docs/reference/enums/adts.html
+ - title: Translation
+ url: docs/reference/enums/desugarEnums.html
+ - title: Contextual Abstractions
+ subsection:
+ - title: Overview
+ url: docs/reference/contextual/motivation.html
+ - title: Given Instances
+ url: docs/reference/contextual/givens.html
+ - title: Using Clauses
+ url: docs/reference/contextual/using-clauses.html
+ - title: Context Bounds
+ url: docs/reference/contextual/context-bounds.html
+ - title: Given Imports
+ url: docs/reference/contextual/given-imports.html
+ - title: Extension Methods
+ url: docs/reference/contextual/extension-methods.html
+ - title: Implementing Type classes
+ url: docs/reference/contextual/type-classes.html
+ - title: Type class Derivation
+ url: docs/reference/contextual/derivation.html
+ - title: Multiversal Equality
+ url: docs/reference/contextual/multiversal-equality.html
+ - title: Context Functions
+ url: docs/reference/contextual/context-functions.html
+ - title: Implicit Conversions
+ url: docs/reference/contextual/conversions.html
+ - title: By-Name Context Parameters
+ url: docs/reference/contextual/by-name-context-parameters.html
+ - title: Relationship with Scala 2 Implicits
+ url: docs/reference/contextual/relationship-implicits.html
+ - title: Metaprogramming
+ subsection:
+ - title: Overview
+ url: docs/reference/metaprogramming/toc.html
+ - title: Inline
+ url: docs/reference/metaprogramming/inline.html
+ - title: Macros
+ url: docs/reference/metaprogramming/macros.html
+ - title: Staging
+ url: docs/reference/metaprogramming/staging.html
+ - title: TASTy Reflection
+ url: docs/reference/metaprogramming/tasty-reflect.html
+ - title: TASTy Inspection
+ url: docs/reference/metaprogramming/tasty-inspect.html
+ - title: Other New Features
+ subsection:
+ - title: Trait Parameters
+ url: docs/reference/other-new-features/trait-parameters.html
+ - title: Super Traits
+ url: docs/reference/other-new-features/super-traits.html
+ - title: Creator Applications
+ url: docs/reference/other-new-features/creator-applications.html
+ - title: Export Clauses
+ url: docs/reference/other-new-features/export.html
+ - title: Opaque Type Aliases
+ url: docs/reference/other-new-features/opaques.html
+ - title: Open Classes
+ url: docs/reference/other-new-features/open-classes.html
+ - title: Parameter Untupling
+ url: docs/reference/other-new-features/parameter-untupling.html
+ - title: Kind Polymorphism
+ url: docs/reference/other-new-features/kind-polymorphism.html
+ - title: Tupled Function
+ url: docs/reference/other-new-features/tupled-function.html
+ - title: threadUnsafe Annotation
+ url: docs/reference/other-new-features/threadUnsafe-annotation.html
+ - title: New Control Syntax
+ url: docs/reference/other-new-features/control-syntax.html
+ - title: Optional Braces
+ url: docs/reference/other-new-features/indentation.html
+ - title: Explicit Nulls
+ url: docs/reference/other-new-features/explicit-nulls.html
+ - title: Safe Initialization
+ url: docs/reference/other-new-features/safe-initialization.html
+ - title: Other Changed Features
+ subsection:
+ - title: Numeric Literals
+ url: docs/reference/changed-features/numeric-literals.html
+ - title: Structural Types
+ url: docs/reference/changed-features/structural-types.html
+ - title: Operators
+ url: docs/reference/changed-features/operators.html
+ - title: Wildcard Types
+ url: docs/reference/changed-features/wildcards.html
+ - title: Type Checking
+ url: docs/reference/changed-features/type-checking.html
+ - title: Type Inference
+ url: docs/reference/changed-features/type-inference.html
+ - title: Implicit Resolution
+ url: docs/reference/changed-features/implicit-resolution.html
+ - title: Implicit Conversions
+ url: docs/reference/changed-features/implicit-conversions.html
+ - title: Overload Resolution
+ url: docs/reference/changed-features/overload-resolution.html
+ - title: Match Expressions
+ url: docs/reference/changed-features/match-syntax.html
+ - title: Vararg Patterns
+ url: docs/reference/changed-features/vararg-patterns.html
+ - title: Pattern Bindings
+ url: docs/reference/changed-features/pattern-bindings.html
+ - title: Pattern Matching
+ url: docs/reference/changed-features/pattern-matching.html
+ - title: Eta Expansion
+ url: docs/reference/changed-features/eta-expansion.html
+ - title: Compiler Plugins
+ url: docs/reference/changed-features/compiler-plugins.html
+ - title: Lazy Vals initialization
+ url: docs/reference/changed-features/lazy-vals-init.html
+ - title: Main Functions
+ url: docs/reference/changed-features/main-functions.html
+ - title: Dropped Features
+ subsection:
+ - title: DelayedInit
+ url: docs/reference/dropped-features/delayed-init.html
+ - title: Macros
+ url: docs/reference/dropped-features/macros.html
+ - title: Existential Types
+ url: docs/reference/dropped-features/existential-types.html
+ - title: Type Projection
+ url: docs/reference/dropped-features/type-projection.html
+ - title: Do-While
+ url: docs/reference/dropped-features/do-while.html
+ - title: Procedure Syntax
+ url: docs/reference/dropped-features/procedure-syntax.html
+ - title: Package Objects
+ url: docs/reference/dropped-features/package-objects.html
+ - title: Early Initializers
+ url: docs/reference/dropped-features/early-initializers.html
+ - title: Class Shadowing
+ url: docs/reference/dropped-features/class-shadowing.html
+ - title: Limit 22
+ url: docs/reference/dropped-features/limit22.html
+ - title: XML literals
+ url: docs/reference/dropped-features/xml.html
+ - title: Symbol Literals
+ url: docs/reference/dropped-features/symlits.html
+ - title: Auto-Application
+ url: docs/reference/dropped-features/auto-apply.html
+ - title: Weak Conformance
+ url: docs/reference/dropped-features/weak-conformance.html
+ - title: Nonlocal Returns
+ url: docs/reference/dropped-features/nonlocal-returns.html
+ - title: "[this] Qualifier"
+ url: docs/reference/dropped-features/this-qualifier.html
+ - title: Contributing
+ subsection:
+ - title: Contribute Knowledge
+ url: docs/contributing/contribute-knowledge.html
+ - title: Getting Started
+ url: docs/contributing/getting-started.html
+ - title: Workflow
+ url: docs/contributing/workflow.html
+ - title: Testing
+ url: docs/contributing/testing.html
+ - title: Debugging
+ url: docs/contributing/debugging.html
+ - title: IDEs and Tools
+ subsection:
+ - title: Mill
+ url: docs/contributing/tools/mill.html
+ - title: Scalafix
+ url: docs/contributing/tools/scalafix.html
+ - title: Procedures
+ subsection:
+ - title: Release Model
+ url: docs/contributing/procedures/release.html
+ - title: Modifying the Test Framework
+ url: docs/contributing/procedures/vulpix.html
+ - title: Internals
+ subsection:
+ - title: Backend
+ url: docs/internals/backend.html
+ - title: Classpaths
+ url: docs/internals/classpaths.html
+ - title: Core Data Structrues
+ url: docs/internals/core-data-structures.html
+ - title: Contexts
+ url: docs/internals/contexts.html
+ - title: Dotc vs Scalac
+ url: docs/internals/dotc-scalac.html
+ - title: Higher-Kinded Types
+ url: docs/internals/higher-kinded-v2.html
+ - title: Overall Structure
+ url: docs/internals/overall-structure.html
+ - title: Periods
+ url: docs/internals/periods.html
+ - title: Syntax
+ url: docs/internals/syntax.html
+ - title: Type System
+ url: docs/internals/type-system.html
+ - title: "Dotty Internals 1: Trees & Symbols (Meeting Notes)"
+ url: docs/internals/dotty-internals-1-notes.html
+ - title: Debug Macros
+ url: docs/internals/debug-macros.html
+ - title: Resources
+ subsection:
+ - title: Talks
+ url: docs/resources/talks.html
diff --git a/scala3doc/resources/META-INF/services/org.jetbrains.dokka.plugability.DokkaPlugin b/scala3doc/resources/META-INF/services/org.jetbrains.dokka.plugability.DokkaPlugin
new file mode 100644
index 000000000000..1c1de05b36b3
--- /dev/null
+++ b/scala3doc/resources/META-INF/services/org.jetbrains.dokka.plugability.DokkaPlugin
@@ -0,0 +1 @@
+dotty.dokka.DottyDokkaPlugin
\ No newline at end of file
diff --git a/scala3doc/resources/dotty_res/fonts/dotty-icons.ttf b/scala3doc/resources/dotty_res/fonts/dotty-icons.ttf
new file mode 100644
index 000000000000..0b0f38f353f8
Binary files /dev/null and b/scala3doc/resources/dotty_res/fonts/dotty-icons.ttf differ
diff --git a/scala3doc/resources/dotty_res/fonts/dotty-icons.woff b/scala3doc/resources/dotty_res/fonts/dotty-icons.woff
new file mode 100644
index 000000000000..169e35c25675
Binary files /dev/null and b/scala3doc/resources/dotty_res/fonts/dotty-icons.woff differ
diff --git a/scala3doc/resources/dotty_res/hljs/LICENSE
b/scala3doc/resources/dotty_res/hljs/LICENSE new file mode 100644 index 000000000000..2250cc7eca9b --- /dev/null +++ b/scala3doc/resources/dotty_res/hljs/LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2006, Ivan Sagalaev. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/scala3doc/resources/dotty_res/hljs/highlight.pack.js b/scala3doc/resources/dotty_res/hljs/highlight.pack.js new file mode 100644 index 000000000000..8c2e012c1556 --- /dev/null +++ b/scala3doc/resources/dotty_res/hljs/highlight.pack.js @@ -0,0 +1,6 @@ +/* + Highlight.js 10.1.2 (edd73d24) + License: BSD-3-Clause + Copyright (c) 2006-2020, Ivan Sagalaev +*/ +var hljs=function(){"use strict";function e(n){Object.freeze(n);var t="function"==typeof n;return Object.getOwnPropertyNames(n).forEach((function(r){!Object.hasOwnProperty.call(n,r)||null===n[r]||"object"!=typeof n[r]&&"function"!=typeof n[r]||t&&("caller"===r||"callee"===r||"arguments"===r)||Object.isFrozen(n[r])||e(n[r])})),n}class n{constructor(e){void 0===e.data&&(e.data={}),this.data=e.data}ignoreMatch(){this.ignore=!0}}function t(e){return e.replace(/&/g,"&").replace(/</g,"<").replace(/>/g,">").replace(/"/g,""").replace(/'/g,"'")}function r(e,...n){var t={};for(const n in e)t[n]=e[n];return n.forEach((function(e){for(const n in e)t[n]=e[n]})),t}function a(e){return e.nodeName.toLowerCase()}var i=Object.freeze({__proto__:null,escapeHTML:t,inherit:r,nodeStream:function(e){var n=[];return function e(t,r){for(var i=t.firstChild;i;i=i.nextSibling)3===i.nodeType?r+=i.nodeValue.length:1===i.nodeType&&(n.push({event:"start",offset:r,node:i}),r=e(i,r),a(i).match(/br|hr|img|input/)||n.push({event:"stop",offset:r,node:i}));return r}(e,0),n},mergeStreams:function(e,n,r){var i=0,s="",o=[];function l(){return e.length&&n.length?e[0].offset!==n[0].offset?e[0].offset<n[0].offset?e:n:"start"===n[0].event?e:n:e.length?e:n}function c(e){s+="<"+a(e)+[].map.call(e.attributes,(function(e){return" "+e.nodeName+'="'+t(e.value)+'"'})).join("")+">"}function u(e){s+="</"+a(e)+">"}function d(e){("start"===e.event?c:u)(e.node)}for(;e.length||n.length;){var 
g=l();if(s+=t(r.substring(i,g[0].offset)),i=g[0].offset,g===e){o.reverse().forEach(u);do{d(g.splice(0,1)[0]),g=l()}while(g===e&&g.length&&g[0].offset===i);o.reverse().forEach(c)}else"start"===g[0].event?o.push(g[0].node):o.pop(),d(g.splice(0,1)[0])}return s+t(r.substr(i))}});const s="</span>",o=e=>!!e.kind;class l{constructor(e,n){this.buffer="",this.classPrefix=n.classPrefix,e.walk(this)}addText(e){this.buffer+=t(e)}openNode(e){if(!o(e))return;let n=e.kind;e.sublanguage||(n=`${this.classPrefix}${n}`),this.span(n)}closeNode(e){o(e)&&(this.buffer+=s)}value(){return this.buffer}span(e){this.buffer+=`<span class="${e}">`}}class c{constructor(){this.rootNode={children:[]},this.stack=[this.rootNode]}get top(){return this.stack[this.stack.length-1]}get root(){return this.rootNode}add(e){this.top.children.push(e)}openNode(e){const n={kind:e,children:[]};this.add(n),this.stack.push(n)}closeNode(){if(this.stack.length>1)return this.stack.pop()}closeAllNodes(){for(;this.closeNode(););}toJSON(){return JSON.stringify(this.rootNode,null,4)}walk(e){return this.constructor._walk(e,this.rootNode)}static _walk(e,n){return"string"==typeof n?e.addText(n):n.children&&(e.openNode(n),n.children.forEach(n=>this._walk(e,n)),e.closeNode(n)),e}static _collapse(e){"string"!=typeof e&&e.children&&(e.children.every(e=>"string"==typeof e)?e.children=[e.children.join("")]:e.children.forEach(e=>{c._collapse(e)}))}}class u extends c{constructor(e){super(),this.options=e}addKeyword(e,n){""!==e&&(this.openNode(n),this.addText(e),this.closeNode())}addText(e){""!==e&&this.add(e)}addSublanguage(e,n){const t=e.root;t.kind=n,t.sublanguage=!0,this.add(t)}toHTML(){return new l(this,this.options).value()}finalize(){return!0}}function d(e){return e?"string"==typeof e?e:e.source:null}const g="(-?)(\\b0[xX][a-fA-F0-9]+|(\\b\\d+(\\.\\d*)?|\\.\\d+)([eE][-+]?\\d+)?)",h={begin:"\\\\[\\s\\S]",relevance:0},f={className:"string",begin:"'",end:"'",illegal:"\\n",contains:[h]},p={className:"string",begin:'"',end:'"',illegal:"\\n",contains:[h]},b={begin:/\b(a|an|the|are|I'm|isn't|don't|doesn't|won't|but|just|should|pretty|simply|enough|gonna|going|wtf|so|such|will|you|your|they|like|more)\b/},m=function(e,n,t={}){var a=r({className:"comment",begin:e,end:n,contains:[]},t);return a.contains.push(b),a.contains.push({className:"doctag",begin:"(?:TODO|FIXME|NOTE|BUG|OPTIMIZE|HACK|XXX):",relevance:0}),a},v=m("//","$"),x=m("/\\*","\\*/"),E=m("#","$");var _=Object.freeze({__proto__:null,IDENT_RE:"[a-zA-Z]\\w*",UNDERSCORE_IDENT_RE:"[a-zA-Z_]\\w*",NUMBER_RE:"\\b\\d+(\\.\\d+)?",C_NUMBER_RE:g,BINARY_NUMBER_RE:"\\b(0b[01]+)",RE_STARTERS_RE:"!|!=|!==|%|%=|&|&&|&=|\\*|\\*=|\\+|\\+=|,|-|-=|/=|/|:|;|<<|<<=|<=|<|===|==|=|>>>=|>>=|>=|>>>|>>|>|\\?|\\[|\\{|\\(|\\^|\\^=|\\||\\|=|\\|\\||~",SHEBANG:(e={})=>{const n=/^#![ ]*\//;return e.binary&&(e.begin=function(...e){return 
e.map(e=>d(e)).join("")}(n,/.*\b/,e.binary,/\b.*/)),r({className:"meta",begin:n,end:/$/,relevance:0,"on:begin":(e,n)=>{0!==e.index&&n.ignoreMatch()}},e)},BACKSLASH_ESCAPE:h,APOS_STRING_MODE:f,QUOTE_STRING_MODE:p,PHRASAL_WORDS_MODE:b,COMMENT:m,C_LINE_COMMENT_MODE:v,C_BLOCK_COMMENT_MODE:x,HASH_COMMENT_MODE:E,NUMBER_MODE:{className:"number",begin:"\\b\\d+(\\.\\d+)?",relevance:0},C_NUMBER_MODE:{className:"number",begin:g,relevance:0},BINARY_NUMBER_MODE:{className:"number",begin:"\\b(0b[01]+)",relevance:0},CSS_NUMBER_MODE:{className:"number",begin:"\\b\\d+(\\.\\d+)?(%|em|ex|ch|rem|vw|vh|vmin|vmax|cm|mm|in|pt|pc|px|deg|grad|rad|turn|s|ms|Hz|kHz|dpi|dpcm|dppx)?",relevance:0},REGEXP_MODE:{begin:/(?=\/[^/\n]*\/)/,contains:[{className:"regexp",begin:/\//,end:/\/[gimuy]*/,illegal:/\n/,contains:[h,{begin:/\[/,end:/\]/,relevance:0,contains:[h]}]}]},TITLE_MODE:{className:"title",begin:"[a-zA-Z]\\w*",relevance:0},UNDERSCORE_TITLE_MODE:{className:"title",begin:"[a-zA-Z_]\\w*",relevance:0},METHOD_GUARD:{begin:"\\.\\s*[a-zA-Z_]\\w*",relevance:0},END_SAME_AS_BEGIN:function(e){return Object.assign(e,{"on:begin":(e,n)=>{n.data._beginMatch=e[1]},"on:end":(e,n)=>{n.data._beginMatch!==e[1]&&n.ignoreMatch()}})}}),N="of and for in not or if then".split(" ");function w(e,n){return n?+n:function(e){return N.includes(e.toLowerCase())}(e)?0:1}const R=t,y=r,{nodeStream:O,mergeStreams:k}=i,M=Symbol("nomatch");return function(t){var a=[],i=Object.create(null),s=Object.create(null),o=[],l=!0,c=/(^(<[^>]+>|\t|)+|\n)/gm,g="Could not find the language '{}', did you forget to load/include a language module?";const h={disableAutodetect:!0,name:"Plain text",contains:[]};var f={noHighlightRe:/^(no-?highlight)$/i,languageDetectRe:/\blang(?:uage)?-([\w-]+)\b/i,classPrefix:"hljs-",tabReplace:null,useBR:!1,languages:null,__emitter:u};function p(e){return f.noHighlightRe.test(e)}function b(e,n,t,r){var a={code:n,language:e};S("before:highlight",a);var i=a.result?a.result:m(a.language,a.code,t,r);return i.code=a.code,S("after:highlight",i),i}function m(e,t,a,s){var o=t;function c(e,n){var t=E.case_insensitive?n[0].toLowerCase():n[0];return Object.prototype.hasOwnProperty.call(e.keywords,t)&&e.keywords[t]}function u(){null!=y.subLanguage?function(){if(""!==A){var e=null;if("string"==typeof y.subLanguage){if(!i[y.subLanguage])return void k.addText(A);e=m(y.subLanguage,A,!0,O[y.subLanguage]),O[y.subLanguage]=e.top}else e=v(A,y.subLanguage.length?y.subLanguage:null);y.relevance>0&&(I+=e.relevance),k.addSublanguage(e.emitter,e.language)}}():function(){if(!y.keywords)return void k.addText(A);let e=0;y.keywordPatternRe.lastIndex=0;let n=y.keywordPatternRe.exec(A),t="";for(;n;){t+=A.substring(e,n.index);const r=c(y,n);if(r){const[e,a]=r;k.addText(t),t="",I+=a,k.addKeyword(n[0],e)}else t+=n[0];e=y.keywordPatternRe.lastIndex,n=y.keywordPatternRe.exec(A)}t+=A.substr(e),k.addText(t)}(),A=""}function h(e){return e.className&&k.openNode(e.className),y=Object.create(e,{parent:{value:y}})}function p(e){return 0===y.matcher.regexIndex?(A+=e[0],1):(L=!0,0)}var b={};function x(t,r){var i=r&&r[0];if(A+=t,null==i)return u(),0;if("begin"===b.type&&"end"===r.type&&b.index===r.index&&""===i){if(A+=o.slice(r.index,r.index+1),!l){const n=Error("0 width match regex");throw n.languageName=e,n.badRule=b.rule,n}return 1}if(b=r,"begin"===r.type)return function(e){var t=e[0],r=e.rule;const a=new n(r),i=[r.__beforeBegin,r["on:begin"]];for(const n of i)if(n&&(n(e,a),a.ignore))return p(t);return 
r&&r.endSameAsBegin&&(r.endRe=RegExp(t.replace(/[-/\\^$*+?.()|[\]{}]/g,"\\$&"),"m")),r.skip?A+=t:(r.excludeBegin&&(A+=t),u(),r.returnBegin||r.excludeBegin||(A=t)),h(r),r.returnBegin?0:t.length}(r);if("illegal"===r.type&&!a){const e=Error('Illegal lexeme "'+i+'" for mode "'+(y.className||"<unnamed>")+'"');throw e.mode=y,e}if("end"===r.type){var s=function(e){var t=e[0],r=o.substr(e.index),a=function e(t,r,a){let i=function(e,n){var t=e&&e.exec(n);return t&&0===t.index}(t.endRe,a);if(i){if(t["on:end"]){const e=new n(t);t["on:end"](r,e),e.ignore&&(i=!1)}if(i){for(;t.endsParent&&t.parent;)t=t.parent;return t}}if(t.endsWithParent)return e(t.parent,r,a)}(y,e,r);if(!a)return M;var i=y;i.skip?A+=t:(i.returnEnd||i.excludeEnd||(A+=t),u(),i.excludeEnd&&(A=t));do{y.className&&k.closeNode(),y.skip||y.subLanguage||(I+=y.relevance),y=y.parent}while(y!==a.parent);return a.starts&&(a.endSameAsBegin&&(a.starts.endRe=a.endRe),h(a.starts)),i.returnEnd?0:t.length}(r);if(s!==M)return s}if("illegal"===r.type&&""===i)return 1;if(B>1e5&&B>3*r.index)throw Error("potential infinite loop, way more iterations than matches");return A+=i,i.length}var E=T(e);if(!E)throw console.error(g.replace("{}",e)),Error('Unknown language: "'+e+'"');var _=function(e){function n(n,t){return RegExp(d(n),"m"+(e.case_insensitive?"i":"")+(t?"g":""))}class t{constructor(){this.matchIndexes={},this.regexes=[],this.matchAt=1,this.position=0}addRule(e,n){n.position=this.position++,this.matchIndexes[this.matchAt]=n,this.regexes.push([n,e]),this.matchAt+=function(e){return RegExp(e.toString()+"|").exec("").length-1}(e)+1}compile(){0===this.regexes.length&&(this.exec=()=>null);const e=this.regexes.map(e=>e[1]);this.matcherRe=n(function(e,n="|"){for(var t=/\[(?:[^\\\]]|\\.)*\]|\(\??|\\([1-9][0-9]*)|\\./,r=0,a="",i=0;i<e.length;i++){var s=r+=1,o=d(e[i]);for(i>0&&(a+=n),a+="(";o.length>0;){var l=t.exec(o);if(null==l){a+=o;break}a+=o.substring(0,l.index),o=o.substring(l.index+l[0].length),"\\"===l[0][0]&&l[1]?a+="\\"+(+l[1]+s):(a+=l[0],"("===l[0]&&r++)}a+=")"}return a}(e),!0),this.lastIndex=0}exec(e){this.matcherRe.lastIndex=this.lastIndex;const n=this.matcherRe.exec(e);if(!n)return null;const t=n.findIndex((e,n)=>n>0&&void 0!==e),r=this.matchIndexes[t];return n.splice(0,t),Object.assign(n,r)}}class a{constructor(){this.rules=[],this.multiRegexes=[],this.count=0,this.lastIndex=0,this.regexIndex=0}getMatcher(e){if(this.multiRegexes[e])return this.multiRegexes[e];const n=new t;return this.rules.slice(e).forEach(([e,t])=>n.addRule(e,t)),n.compile(),this.multiRegexes[e]=n,n}considerAll(){this.regexIndex=0}addRule(e,n){this.rules.push([e,n]),"begin"===n.type&&this.count++}exec(e){const n=this.getMatcher(this.regexIndex);n.lastIndex=this.lastIndex;const t=n.exec(e);return t&&(this.regexIndex+=t.position+1,this.regexIndex===this.count&&(this.regexIndex=0)),t}}function i(e,n){const t=e.input[e.index-1],r=e.input[e.index+e[0].length];"."!==t&&"."!==r||n.ignoreMatch()}if(e.contains&&e.contains.includes("self"))throw Error("ERR: contains `self` is not supported at the top-level of a language. 
See documentation.");return function t(s,o){const l=s;if(s.compiled)return l;s.compiled=!0,s.__beforeBegin=null,s.keywords=s.keywords||s.beginKeywords;let c=null;if("object"==typeof s.keywords&&(c=s.keywords.$pattern,delete s.keywords.$pattern),s.keywords&&(s.keywords=function(e,n){var t={};return"string"==typeof e?r("keyword",e):Object.keys(e).forEach((function(n){r(n,e[n])})),t;function r(e,r){n&&(r=r.toLowerCase()),r.split(" ").forEach((function(n){var r=n.split("|");t[r[0]]=[e,w(r[0],r[1])]}))}}(s.keywords,e.case_insensitive)),s.lexemes&&c)throw Error("ERR: Prefer `keywords.$pattern` to `mode.lexemes`, BOTH are not allowed. (see mode reference) ");return l.keywordPatternRe=n(s.lexemes||c||/\w+/,!0),o&&(s.beginKeywords&&(s.begin="\\b("+s.beginKeywords.split(" ").join("|")+")(?=\\b|\\s)",s.__beforeBegin=i),s.begin||(s.begin=/\B|\b/),l.beginRe=n(s.begin),s.endSameAsBegin&&(s.end=s.begin),s.end||s.endsWithParent||(s.end=/\B|\b/),s.end&&(l.endRe=n(s.end)),l.terminator_end=d(s.end)||"",s.endsWithParent&&o.terminator_end&&(l.terminator_end+=(s.end?"|":"")+o.terminator_end)),s.illegal&&(l.illegalRe=n(s.illegal)),void 0===s.relevance&&(s.relevance=1),s.contains||(s.contains=[]),s.contains=[].concat(...s.contains.map((function(e){return function(e){return e.variants&&!e.cached_variants&&(e.cached_variants=e.variants.map((function(n){return r(e,{variants:null},n)}))),e.cached_variants?e.cached_variants:function e(n){return!!n&&(n.endsWithParent||e(n.starts))}(e)?r(e,{starts:e.starts?r(e.starts):null}):Object.isFrozen(e)?r(e):e}("self"===e?s:e)}))),s.contains.forEach((function(e){t(e,l)})),s.starts&&t(s.starts,o),l.matcher=function(e){const n=new a;return e.contains.forEach(e=>n.addRule(e.begin,{rule:e,type:"begin"})),e.terminator_end&&n.addRule(e.terminator_end,{type:"end"}),e.illegal&&n.addRule(e.illegal,{type:"illegal"}),n}(l),l}(e)}(E),N="",y=s||_,O={},k=new f.__emitter(f);!function(){for(var e=[],n=y;n!==E;n=n.parent)n.className&&e.unshift(n.className);e.forEach(e=>k.openNode(e))}();var A="",I=0,S=0,B=0,L=!1;try{for(y.matcher.considerAll();;){B++,L?L=!1:(y.matcher.lastIndex=S,y.matcher.considerAll());const e=y.matcher.exec(o);if(!e)break;const n=x(o.substring(S,e.index),e);S=e.index+n}return x(o.substr(S)),k.closeAllNodes(),k.finalize(),N=k.toHTML(),{relevance:I,value:N,language:e,illegal:!1,emitter:k,top:y}}catch(n){if(n.message&&n.message.includes("Illegal"))return{illegal:!0,illegalBy:{msg:n.message,context:o.slice(S-100,S+100),mode:n.mode},sofar:N,relevance:0,value:R(o),emitter:k};if(l)return{illegal:!1,relevance:0,value:R(o),emitter:k,language:e,top:y,errorRaised:n};throw n}}function v(e,n){n=n||f.languages||Object.keys(i);var t=function(e){const n={relevance:0,emitter:new f.__emitter(f),value:R(e),illegal:!1,top:h};return n.emitter.addText(e),n}(e),r=t;return n.filter(T).filter(I).forEach((function(n){var a=m(n,e,!1);a.language=n,a.relevance>r.relevance&&(r=a),a.relevance>t.relevance&&(r=t,t=a)})),r.language&&(t.second_best=r),t}function x(e){return f.tabReplace||f.useBR?e.replace(c,e=>"\n"===e?f.useBR?"<br>":e:f.tabReplace?e.replace(/\t/g,f.tabReplace):e):e}function E(e){let n=null;const t=function(e){var n=e.className+" ";n+=e.parentNode?e.parentNode.className:"";const t=f.languageDetectRe.exec(n);if(t){var r=T(t[1]);return r||(console.warn(g.replace("{}",t[1])),console.warn("Falling back to no-highlight mode for this block.",e)),r?t[1]:"no-highlight"}return 
n.split(/\s+/).find(e=>p(e)||T(e))}(e);if(p(t))return;S("before:highlightBlock",{block:e,language:t}),f.useBR?(n=document.createElement("div")).innerHTML=e.innerHTML.replace(/\n/g,"").replace(/<br[ /]*>/g,"\n"):n=e;const r=n.textContent,a=t?b(t,r,!0):v(r),i=O(n);if(i.length){const e=document.createElement("div");e.innerHTML=a.value,a.value=k(i,O(e),r)}a.value=x(a.value),S("after:highlightBlock",{block:e,result:a}),e.innerHTML=a.value,e.className=function(e,n,t){var r=n?s[n]:t,a=[e.trim()];return e.match(/\bhljs\b/)||a.push("hljs"),e.includes(r)||a.push(r),a.join(" ").trim()}(e.className,t,a.language),e.result={language:a.language,re:a.relevance,relavance:a.relevance},a.second_best&&(e.second_best={language:a.second_best.language,re:a.second_best.relevance,relavance:a.second_best.relevance})}const N=()=>{if(!N.called){N.called=!0;var e=document.querySelectorAll("pre code");a.forEach.call(e,E)}};function T(e){return e=(e||"").toLowerCase(),i[e]||i[s[e]]}function A(e,{languageName:n}){"string"==typeof e&&(e=[e]),e.forEach(e=>{s[e]=n})}function I(e){var n=T(e);return n&&!n.disableAutodetect}function S(e,n){var t=e;o.forEach((function(e){e[t]&&e[t](n)}))}Object.assign(t,{highlight:b,highlightAuto:v,fixMarkup:x,highlightBlock:E,configure:function(e){f=y(f,e)},initHighlighting:N,initHighlightingOnLoad:function(){window.addEventListener("DOMContentLoaded",N,!1)},registerLanguage:function(e,n){var r=null;try{r=n(t)}catch(n){if(console.error("Language definition for '{}' could not be registered.".replace("{}",e)),!l)throw n;console.error(n),r=h}r.name||(r.name=e),i[e]=r,r.rawDefinition=n.bind(null,t),r.aliases&&A(r.aliases,{languageName:e})},listLanguages:function(){return Object.keys(i)},getLanguage:T,registerAliases:A,requireLanguage:function(e){var n=T(e);if(n)return n;throw Error("The '{}' language is required, but not loaded.".replace("{}",e))},autoDetection:I,inherit:y,addPlugin:function(e){o.push(e)}}),t.debugMode=function(){l=!1},t.safeMode=function(){l=!0},t.versionString="10.1.2";for(const n in _)"object"==typeof _[n]&&e(_[n]);return Object.assign(t,_),t}({})}();"object"==typeof exports&&"undefined"!=typeof module&&(module.exports=hljs);hljs.registerLanguage("ebnf",function(){"use strict";return function(a){var e=a.COMMENT(/\(\*/,/\*\)/);return{name:"Extended Backus-Naur Form",illegal:/\S/,contains:[e,{className:"attribute",begin:/^[ ]*[a-zA-Z][a-zA-Z-_]*([\s-_]+[a-zA-Z][a-zA-Z]*)*/},{begin:/=/,end:/[.;]/,contains:[e,{className:"meta",begin:/\?.*\?/},{className:"string",variants:[a.APOS_STRING_MODE,a.QUOTE_STRING_MODE,{begin:"`",end:"`"}]}]}]}}}());hljs.registerLanguage("css",function(){"use strict";return function(e){var 
n={begin:/(?:[A-Z\_\.\-]+|--[a-zA-Z0-9_-]+)\s*:/,returnBegin:!0,end:";",endsWithParent:!0,contains:[{className:"attribute",begin:/\S/,end:":",excludeEnd:!0,starts:{endsWithParent:!0,excludeEnd:!0,contains:[{begin:/[\w-]+\(/,returnBegin:!0,contains:[{className:"built_in",begin:/[\w-]+/},{begin:/\(/,end:/\)/,contains:[e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,e.CSS_NUMBER_MODE]}]},e.CSS_NUMBER_MODE,e.QUOTE_STRING_MODE,e.APOS_STRING_MODE,e.C_BLOCK_COMMENT_MODE,{className:"number",begin:"#[0-9A-Fa-f]+"},{className:"meta",begin:"!important"}]}}]};return{name:"CSS",case_insensitive:!0,illegal:/[=\/|'\$]/,contains:[e.C_BLOCK_COMMENT_MODE,{className:"selector-id",begin:/#[A-Za-z0-9_-]+/},{className:"selector-class",begin:/\.[A-Za-z0-9_-]+/},{className:"selector-attr",begin:/\[/,end:/\]/,illegal:"$",contains:[e.APOS_STRING_MODE,e.QUOTE_STRING_MODE]},{className:"selector-pseudo",begin:/:(:)?[a-zA-Z0-9\_\-\+\(\)"'.]+/},{begin:"@(page|font-face)",lexemes:"@[a-z-]+",keywords:"@page @font-face"},{begin:"@",end:"[{;]",illegal:/:/,returnBegin:!0,contains:[{className:"keyword",begin:/@\-?\w[\w]*(\-\w+)*/},{begin:/\s/,endsWithParent:!0,excludeEnd:!0,relevance:0,keywords:"and or not only",contains:[{begin:/[a-z-]+:/,className:"attribute"},e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,e.CSS_NUMBER_MODE]}]},{className:"selector-tag",begin:"[a-zA-Z-][a-zA-Z0-9_-]*",relevance:0},{begin:"{",end:"}",illegal:/\S/,contains:[e.C_BLOCK_COMMENT_MODE,n]}]}}}());hljs.registerLanguage("java",function(){"use strict";function e(e){return e?"string"==typeof e?e:e.source:null}function n(e){return a("(",e,")?")}function a(...n){return n.map(n=>e(n)).join("")}function s(...n){return"("+n.map(n=>e(n)).join("|")+")"}return function(e){var t="false synchronized int abstract float private char boolean var static null if const for true while long strictfp finally protected import native final void enum else break transient catch instanceof byte super volatile case assert short package default double public try this switch continue throws protected public private module requires exports do",i={className:"meta",begin:"@[À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*",contains:[{begin:/\(/,end:/\)/,contains:["self"]}]},r=e=>a("[",e,"]+([",e,"_]*[",e,"]+)?"),c={className:"number",variants:[{begin:`\\b(0[bB]${r("01")})[lL]?`},{begin:`\\b(0${r("0-7")})[dDfFlL]?`},{begin:a(/\b0[xX]/,s(a(r("a-fA-F0-9"),/\./,r("a-fA-F0-9")),a(r("a-fA-F0-9"),/\.?/),a(/\./,r("a-fA-F0-9"))),/([pP][+-]?(\d+))?/,/[fFdDlL]?/)},{begin:a(/\b/,s(a(/\d*\./,r("\\d")),r("\\d")),/[eE][+-]?[\d]+[dDfF]?/)},{begin:a(/\b/,r(/\d/),n(/\.?/),n(r(/\d/)),/[dDfFlL]?/)}],relevance:0};return{name:"Java",aliases:["jsp"],keywords:t,illegal:/<\/|#/,contains:[e.COMMENT("/\\*\\*","\\*/",{relevance:0,contains:[{begin:/\w+@/,relevance:0},{className:"doctag",begin:"@[A-Za-z]+"}]}),e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,{className:"class",beginKeywords:"class interface",end:/[{;=]/,excludeEnd:!0,keywords:"class interface",illegal:/[:"\[\]]/,contains:[{beginKeywords:"extends implements"},e.UNDERSCORE_TITLE_MODE]},{beginKeywords:"new throw return 
else",relevance:0},{className:"function",begin:"([À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*(<[À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*(\\s*,\\s*[À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*)*>)?\\s+)+"+e.UNDERSCORE_IDENT_RE+"\\s*\\(",returnBegin:!0,end:/[{;=]/,excludeEnd:!0,keywords:t,contains:[{begin:e.UNDERSCORE_IDENT_RE+"\\s*\\(",returnBegin:!0,relevance:0,contains:[e.UNDERSCORE_TITLE_MODE]},{className:"params",begin:/\(/,end:/\)/,keywords:t,relevance:0,contains:[i,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,e.C_NUMBER_MODE,e.C_BLOCK_COMMENT_MODE]},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},c,i]}}}()); \ No newline at end of file diff --git a/scala3doc/resources/dotty_res/images/dotty-logo-white.svg b/scala3doc/resources/dotty_res/images/dotty-logo-white.svg new file mode 100644 index 000000000000..8fd33e25e3aa --- /dev/null +++ b/scala3doc/resources/dotty_res/images/dotty-logo-white.svg @@ -0,0 +1,30 @@ +<svg width="64px" height="109px" viewBox="0 0 64 109" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"> + <g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd"> + <g id="logo-background" transform="translate(0.000000, 16.000000)" fill="#CCC"> + <path d="M0.5,59.5 C0.5,59.5 63.4,65.8 63.4,76.3 L63.4,51.1 C63.4,51.1 63.4,40.6 0.5,34.3 L0.5,59.5 L0.5,59.5 Z" id="logo-background-bottom"></path> + <path d="M0.5,25.9 C0.5,25.9 63.4,32.2 63.4,42.7 L63.4,17.5 C63.4,17.5 63.4,7 0.5,0.7 L0.5,10.5 L0.5,25.9 L0.5,25.9 Z" id="logo-background-top"></path> + </g> + <g id="logo-foreground" fill="#FFF"> + <path d="M0.5,109 L0.5,83.8 C0.5,83.8 63.4,77.5 63.4,67 L63.4,92.2 C63.5,92.3 63.5,102.7 0.5,109" id="Logo_Foreground_Bottom"></path> + <path d="M0.5,50.3 C0.5,50.3 63.4,44 63.4,33.5 L63.4,58.7 C63.4,58.7 63.4,69.2 0.5,75.5 L0.5,50.3 L0.5,50.3 Z" id="Logo_Foreground_Middle"></path> + <path d="M63.5,0 L63.5,25.2 C63.5,25.2 63.5,35.7 0.6,42 L0.6,16.7 C0.5,16.7 63.5,10.5 63.5,0" id="logo-foreground-top"></path> + </g> + <g id="dots" transform="translate(4.000000, 10.000000)" fill="#B6B6B6"> + <g id="bottom" transform="translate(0.000000, 67.500000)"> + <path d="M1.4471104,12.2885536 C1.45019313,12.2881864 1.45019313,12.2881864 1.45327704,12.2878189 C1.91768952,12.2324787 2.24930811,11.8111359 2.1939679,11.3467234 C2.13862768,10.8823109 1.71728488,10.5506923 1.2528724,10.6060325 C1.24980572,10.606398 1.24980572,10.606398 1.24674021,10.6067632 C0.782326598,10.6620939 0.450699376,11.0834299 0.506030077,11.5478435 C0.561360778,12.0122571 0.982696791,12.3438844 1.4471104,12.2885536 L1.4471104,12.2885536 Z M9.34858965,12.1304907 C9.35409106,12.129756 9.35409106,12.129756 9.35959333,12.129021 C10.1807278,12.019341 10.7574756,11.2647668 10.6477955,10.4436323 C10.5381155,9.62249789 9.78354128,9.04575011 8.96240683,9.15543014 C8.95693539,9.15616095 8.95693539,9.15616095 8.95146479,9.15689157 C8.13032806,9.26655457 7.55356464,10.0211168 7.66322765,10.8422536 C7.77289065,11.6633903 8.52745292,12.2401537 9.34858965,12.1304907 L9.34858965,12.1304907 Z M17.0066591,11.0493128 C17.0121188,11.0484984 17.0121188,11.0484984 17.0175789,11.0476838 C17.8369369,10.9254384 18.4020584,10.1621181 18.279813,9.34276003 C18.1575677,8.52340201 17.3942473,7.95828052 16.5748893,8.08052587 C16.5694641,8.08133528 16.5694641,8.08133528 16.5640392,8.08214454 C15.7446783,8.20437064 15.1795389,8.96767771 15.301765,9.78703861 C15.4239911,10.6063995 16.1872982,11.1715389 17.0066591,11.0493128 L17.0066591,11.0493128 Z M24.8225082,10.4232459 C24.8297684,10.4220275 24.8297684,10.4220275 24.8370287,10.420809 
C25.922252,10.2386672 26.6543441,9.21126476 26.4722023,8.12604147 C26.2900605,7.04081818 25.2626581,6.30872601 24.1774348,6.49086783 C24.1702314,6.4920768 24.1702314,6.4920768 24.1630282,6.49328559 C23.0777996,6.67539597 22.3456777,7.70277717 22.5277881,8.78800574 C22.7098984,9.87323431 23.7372796,10.6053563 24.8225082,10.4232459 L24.8225082,10.4232459 Z M32.4725265,9.05261002 C33.5532108,8.84532993 34.2612448,7.80122849 34.0539647,6.72054419 C33.8466846,5.63985989 32.8025831,4.93182589 31.7218988,5.13910599 C31.714724,5.14048211 31.714724,5.14048211 31.7075489,5.14185807 C30.6268567,5.34909665 29.9187826,6.39317088 30.1260211,7.47386314 C30.3332597,8.5545554 31.3773339,9.26262952 32.4580262,9.05539095 C32.4652764,9.05400057 32.4652764,9.05400057 32.4725265,9.05261002 Z M39.8723866,6.89476879 C39.877909,6.8935242 39.877909,6.8935242 39.883431,6.89227947 C40.6915794,6.71010471 41.1990314,5.90728916 41.0168566,5.09914075 C40.8346818,4.29099233 40.0318663,3.78354042 39.2237179,3.96571517 C39.218279,3.96694116 39.218279,3.96694116 39.2128397,3.96816703 C38.404681,4.15029588 37.8971834,4.9530826 38.0793123,5.76124136 C38.2614411,6.56940012 39.0642279,7.07689764 39.8723866,6.89476879 L39.8723866,6.89476879 Z M47.4276119,5.00828445 C47.4329844,5.00678549 47.4329844,5.00678549 47.4383562,5.00528637 C48.2362896,4.78258973 48.7026111,3.95520561 48.4799145,3.15727221 C48.2572179,2.35933881 47.4298337,1.89301728 46.6319003,2.11571391 C46.6266645,2.1171751 46.6266645,2.1171751 46.6214279,2.11863616 C45.8234736,2.34125773 45.3570742,3.16859798 45.5796958,3.96655233 C45.8023173,4.76450667 46.6296576,5.23090603 47.4276119,5.00828445 L47.4276119,5.00828445 Z M54.419759,2.30643871 C54.8556933,2.13695884 55.0716973,1.6461737 54.9022174,1.21023941 C54.7327376,0.774305114 54.2419524,0.558301127 53.8060181,0.727780997 C53.8032127,0.728871549 53.8032127,0.728871549 53.8004064,0.729962021 C53.3644359,0.89934874 53.148327,1.39008772 53.3177137,1.82605822 C53.4871005,2.26202871 53.9778394,2.47813756 54.4138099,2.30875084 C54.4167849,2.30759485 54.4167849,2.30759485 54.419759,2.30643871 Z" id="Shape"></path> + <path d="M1.67760355,20.096503 C1.68306395,20.0958524 1.68306395,20.0958524 1.68852642,20.0952015 C2.51113381,19.9971782 3.09852524,19.2508595 3.00050189,18.4282521 C2.90247854,17.6056447 2.15615986,17.0182533 1.33355246,17.1162767 C1.3281205,17.1169239 1.3281205,17.1169239 1.3226906,17.1175709 C0.500081196,17.2155774 -0.0873255124,17.961884 0.0106809923,18.7844934 C0.108687497,19.6071028 0.854994145,20.1945095 1.67760355,20.096503 L1.67760355,20.096503 Z M9.34858965,19.1274206 C9.35409106,19.1266859 9.35409106,19.1266859 9.35959333,19.1259509 C10.1807278,19.0162709 10.7574756,18.2616967 10.6477955,17.4405622 C10.5381155,16.6194278 9.78354128,16.04268 8.96240683,16.15236 C8.95693539,16.1530908 8.95693539,16.1530908 8.95146479,16.1538215 C8.13032806,16.2634845 7.55356464,17.0180467 7.66322765,17.8391835 C7.77289065,18.6603202 8.52745292,19.2370836 9.34858965,19.1274206 L9.34858965,19.1274206 Z M17.1767435,18.6256231 C17.1839958,18.6245412 17.1839958,18.6245412 17.1912486,18.6234592 C18.27963,18.4610765 19.0303002,17.4471319 18.8679175,16.3587506 C18.7055348,15.2703693 17.6915903,14.519699 16.6032089,14.6820817 C16.5960024,14.6831569 16.5960024,14.6831569 16.5887964,14.6842319 C15.5004112,14.846589 14.7497172,15.8605159 14.9120743,16.948901 C15.0744314,18.0372862 16.0883584,18.7879802 17.1767435,18.6256231 L17.1767435,18.6256231 Z M24.8370287,17.4177379 C25.922252,17.2355961 26.6543441,16.2081937 
26.4722023,15.1229704 C26.2900605,14.0377471 25.2626581,13.3056549 24.1774348,13.4877968 C24.1702314,13.4890057 24.1702314,13.4890057 24.1630282,13.4902145 C23.0777996,13.6723249 22.3456777,14.6997061 22.5277881,15.7849347 C22.7098984,16.8701632 23.7372796,17.6022852 24.8225082,17.4201748 C24.8297684,17.4189565 24.8297684,17.4189565 24.8370287,17.4177379 Z M32.4725265,16.061085 C33.5532108,15.853199 34.2612448,14.8060455 34.0539647,13.7222022 C33.8466846,12.6383589 32.8025831,11.9282552 31.7218988,12.1361412 C31.714724,12.1375214 31.714724,12.1375214 31.7075489,12.1389013 C30.6268567,12.3467457 29.9187826,13.3938719 30.1260211,14.4777232 C30.3332597,15.5615745 31.3773339,16.2717185 32.4580262,16.0638741 C32.4652764,16.0624797 32.4652764,16.0624797 32.4725265,16.061085 Z M40.0707225,14.4695476 C40.0780573,14.4678946 40.0780573,14.4678946 40.0853916,14.4662413 C41.158768,14.2242783 41.8327617,13.1579849 41.5907986,12.0846085 C41.3488355,11.011232 40.2825422,10.3372384 39.2091657,10.5792015 C39.2019419,10.5808298 39.2019419,10.5808298 39.1947175,10.582458 C38.1213273,10.8243601 37.447273,11.8906152 37.6891752,12.9640053 C37.9310773,14.0373955 38.9973324,14.7114498 40.0707225,14.4695476 L40.0707225,14.4695476 Z M47.4276119,12.0082845 C47.4329844,12.0067855 47.4329844,12.0067855 47.4383562,12.0052864 C48.2362896,11.7825897 48.7026111,10.9552056 48.4799145,10.1572722 C48.2572179,9.35933881 47.4298337,8.89301728 46.6319003,9.11571391 C46.6266645,9.1171751 46.6266645,9.1171751 46.6214279,9.11863616 C45.8234736,9.34125773 45.3570742,10.168598 45.5796958,10.9665523 C45.8023173,11.7645067 46.6296576,12.230906 47.4276119,12.0082845 L47.4276119,12.0082845 Z M54.8999721,9.57562965 C54.9052414,9.57358217 54.9052414,9.57358217 54.9105092,9.57153441 C55.6826371,9.27135123 56.0652239,8.40207131 55.7650408,7.62994336 C55.4648576,6.85781542 54.5955777,6.4752286 53.8234497,6.77541179 C53.8184808,6.77734338 53.8184808,6.77734338 53.8135101,6.77927482 C53.0413181,7.07929302 52.6585455,7.94849117 52.9585637,8.72068323 C53.2585819,9.4928753 54.12778,9.87564785 54.8999721,9.57562965 L54.8999721,9.57562965 Z" id="Shape"></path> + <path d="M1.45327704,26.6978168 C1.54647464,26.6867112 1.63432439,26.660866 1.7147722,26.6228911 C2.03520341,26.4716332 2.23820252,26.1279362 2.1939679,25.7567213 C2.13862768,25.2923089 1.71728488,24.9606903 1.2528724,25.0160305 C1.24980572,25.0163959 1.24980572,25.0163959 1.24674021,25.0167611 C0.782326598,25.0720918 0.450699376,25.4934278 0.506030077,25.9578415 C0.561360778,26.4222551 0.982696791,26.7538823 1.4471104,26.6985516 C1.45019313,26.6981843 1.45019313,26.6981843 1.45327704,26.6978168 Z M9.34858965,26.1274206 C9.35409106,26.1266859 9.35409106,26.1266859 9.35959333,26.1259509 C10.1807278,26.0162709 10.7574756,25.2616967 10.6477955,24.4405622 C10.5381155,23.6194278 9.78354128,23.04268 8.96240683,23.15236 C8.95693539,23.1530908 8.95693539,23.1530908 8.95146479,23.1538215 C8.13032806,23.2634845 7.55356464,24.0180467 7.66322765,24.8391835 C7.77289065,25.6603202 8.52745292,26.2370836 9.34858965,26.1274206 L9.34858965,26.1274206 Z M17.0066591,25.0462427 C17.0121188,25.0454283 17.0121188,25.0454283 17.0175789,25.0446136 C17.8369369,24.9223683 18.4020584,24.1590479 18.279813,23.3396899 C18.1575677,22.5203319 17.3942473,21.9552104 16.5748893,22.0774558 C16.5694641,22.0782652 16.5694641,22.0782652 16.5640392,22.0790744 C15.7446783,22.2013005 15.1795389,22.9646076 15.301765,23.7839685 C15.4239911,24.6033294 16.1872982,25.1684688 17.0066591,25.0462427 L17.0066591,25.0462427 Z 
M24.8225082,24.4201748 C24.8297684,24.4189565 24.8297684,24.4189565 24.8370287,24.4177379 C25.922252,24.2355961 26.6543441,23.2081937 26.4722023,22.1229704 C26.2900605,21.0377471 25.2626581,20.3056549 24.1774348,20.4877968 C24.1702314,20.4890057 24.1702314,20.4890057 24.1630282,20.4902145 C23.0777996,20.6723249 22.3456777,21.6997061 22.5277881,22.7849347 C22.7098984,23.8701632 23.7372796,24.6022852 24.8225082,24.4201748 L24.8225082,24.4201748 Z M32.4725265,23.0495399 C33.5532108,22.8422598 34.2612448,21.7981584 34.0539647,20.7174741 C33.8466846,19.6367898 32.8025831,18.9287558 31.7218988,19.1360359 C31.714724,19.137412 31.714724,19.137412 31.7075489,19.138788 C30.6268567,19.3460265 29.9187826,20.3901008 30.1260211,21.470793 C30.3332597,22.5514853 31.3773339,23.2595594 32.4580262,23.0523208 C32.4652764,23.0509305 32.4652764,23.0509305 32.4725265,23.0495399 Z M39.8723866,20.8947688 C39.877909,20.8935242 39.877909,20.8935242 39.883431,20.8922795 C40.6915794,20.7101047 41.1990314,19.9072892 41.0168566,19.0991407 C40.8346818,18.2909923 40.0318663,17.7835404 39.2237179,17.9657152 C39.218279,17.9669412 39.218279,17.9669412 39.2128397,17.968167 C38.404681,18.1502959 37.8971834,18.9530826 38.0793123,19.7612414 C38.2614411,20.5694001 39.0642279,21.0768976 39.8723866,20.8947688 L39.8723866,20.8947688 Z M47.4276119,19.0082845 C47.4329844,19.0067855 47.4329844,19.0067855 47.4383562,19.0052864 C48.2362896,18.7825897 48.7026111,17.9552056 48.4799145,17.1572722 C48.2572179,16.3593388 47.4298337,15.8930173 46.6319003,16.1157139 C46.6266645,16.1171751 46.6266645,16.1171751 46.6214279,16.1186362 C45.8234736,16.3412577 45.3570742,17.168598 45.5796958,17.9665523 C45.8023173,18.7645067 46.6296576,19.230906 47.4276119,19.0082845 L47.4276119,19.0082845 Z M54.4138099,15.7087505 C54.4167849,15.7075945 54.4167849,15.7075945 54.419759,15.7064383 C54.8556933,15.5369585 55.0716973,15.0461733 54.9022174,14.610239 C54.7327376,14.1743047 54.2419524,13.9583007 53.8060181,14.1277806 C53.8032127,14.1288712 53.8032127,14.1288712 53.8004064,14.1299616 C53.3644359,14.2993484 53.148327,14.7900873 53.3177137,15.2260578 C53.4871005,15.6620283 53.9778394,15.8781372 54.4138099,15.7087505 L54.4138099,15.7087505 Z" id="Shape"></path> + </g> + <g id="middle" transform="translate(0.000000, 33.900002)"> + <path d="M1.4471104,12.2885536 C1.45019313,12.2881864 1.45019313,12.2881864 1.45327704,12.2878189 C1.91768952,12.2324787 2.24930811,11.8111359 2.1939679,11.3467234 C2.13862768,10.8823109 1.71728488,10.5506923 1.2528724,10.6060325 C1.24980572,10.606398 1.24980572,10.606398 1.24674021,10.6067632 C0.782326598,10.6620939 0.450699376,11.0834299 0.506030077,11.5478435 C0.561360778,12.0122571 0.982696791,12.3438844 1.4471104,12.2885536 L1.4471104,12.2885536 Z M9.34858965,12.1304907 C9.35409106,12.129756 9.35409106,12.129756 9.35959333,12.129021 C10.1807278,12.019341 10.7574756,11.2647668 10.6477955,10.4436323 C10.5381155,9.62249789 9.78354128,9.04575011 8.96240683,9.15543014 C8.95693539,9.15616095 8.95693539,9.15616095 8.95146479,9.15689157 C8.13032806,9.26655457 7.55356464,10.0211168 7.66322765,10.8422536 C7.77289065,11.6633903 8.52745292,12.2401537 9.34858965,12.1304907 L9.34858965,12.1304907 Z M17.0066591,11.0493128 C17.0121188,11.0484984 17.0121188,11.0484984 17.0175789,11.0476838 C17.8369369,10.9254384 18.4020584,10.1621181 18.279813,9.34276003 C18.1575677,8.52340201 17.3942473,7.95828052 16.5748893,8.08052587 C16.5694641,8.08133528 16.5694641,8.08133528 16.5640392,8.08214454 C15.7446783,8.20437064 15.1795389,8.96767771 
15.301765,9.78703861 C15.4239911,10.6063995 16.1872982,11.1715389 17.0066591,11.0493128 L17.0066591,11.0493128 Z M24.8225082,10.4232459 C24.8297684,10.4220275 24.8297684,10.4220275 24.8370287,10.420809 C25.922252,10.2386672 26.6543441,9.21126476 26.4722023,8.12604147 C26.2900605,7.04081818 25.2626581,6.30872601 24.1774348,6.49086783 C24.1702314,6.4920768 24.1702314,6.4920768 24.1630282,6.49328559 C23.0777996,6.67539597 22.3456777,7.70277717 22.5277881,8.78800574 C22.7098984,9.87323431 23.7372796,10.6053563 24.8225082,10.4232459 L24.8225082,10.4232459 Z M32.4725265,9.05261002 C33.5532108,8.84532993 34.2612448,7.80122849 34.0539647,6.72054419 C33.8466846,5.63985989 32.8025831,4.93182589 31.7218988,5.13910599 C31.714724,5.14048211 31.714724,5.14048211 31.7075489,5.14185807 C30.6268567,5.34909665 29.9187826,6.39317088 30.1260211,7.47386314 C30.3332597,8.5545554 31.3773339,9.26262952 32.4580262,9.05539095 C32.4652764,9.05400057 32.4652764,9.05400057 32.4725265,9.05261002 Z M39.8723866,6.89476879 C39.877909,6.8935242 39.877909,6.8935242 39.883431,6.89227947 C40.6915794,6.71010471 41.1990314,5.90728916 41.0168566,5.09914075 C40.8346818,4.29099233 40.0318663,3.78354042 39.2237179,3.96571517 C39.218279,3.96694116 39.218279,3.96694116 39.2128397,3.96816703 C38.404681,4.15029588 37.8971834,4.9530826 38.0793123,5.76124136 C38.2614411,6.56940012 39.0642279,7.07689764 39.8723866,6.89476879 L39.8723866,6.89476879 Z M47.4276119,5.00828445 C47.4329844,5.00678549 47.4329844,5.00678549 47.4383562,5.00528637 C48.2362896,4.78258973 48.7026111,3.95520561 48.4799145,3.15727221 C48.2572179,2.35933881 47.4298337,1.89301728 46.6319003,2.11571391 C46.6266645,2.1171751 46.6266645,2.1171751 46.6214279,2.11863616 C45.8234736,2.34125773 45.3570742,3.16859798 45.5796958,3.96655233 C45.8023173,4.76450667 46.6296576,5.23090603 47.4276119,5.00828445 L47.4276119,5.00828445 Z M54.419759,2.30643871 C54.8556933,2.13695884 55.0716973,1.6461737 54.9022174,1.21023941 C54.7327376,0.774305114 54.2419524,0.558301127 53.8060181,0.727780997 C53.8032127,0.728871549 53.8032127,0.728871549 53.8004064,0.729962021 C53.3644359,0.89934874 53.148327,1.39008772 53.3177137,1.82605822 C53.4871005,2.26202871 53.9778394,2.47813756 54.4138099,2.30875084 C54.4167849,2.30759485 54.4167849,2.30759485 54.419759,2.30643871 Z" id="Shape"></path> + <path d="M1.67760355,20.096503 C1.68306395,20.0958524 1.68306395,20.0958524 1.68852642,20.0952015 C2.51113381,19.9971782 3.09852524,19.2508595 3.00050189,18.4282521 C2.90247854,17.6056447 2.15615986,17.0182533 1.33355246,17.1162767 C1.3281205,17.1169239 1.3281205,17.1169239 1.3226906,17.1175709 C0.500081196,17.2155774 -0.0873255124,17.961884 0.0106809923,18.7844934 C0.108687497,19.6071028 0.854994145,20.1945095 1.67760355,20.096503 L1.67760355,20.096503 Z M9.34858965,19.1274206 C9.35409106,19.1266859 9.35409106,19.1266859 9.35959333,19.1259509 C10.1807278,19.0162709 10.7574756,18.2616967 10.6477955,17.4405622 C10.5381155,16.6194278 9.78354128,16.04268 8.96240683,16.15236 C8.95693539,16.1530908 8.95693539,16.1530908 8.95146479,16.1538215 C8.13032806,16.2634845 7.55356464,17.0180467 7.66322765,17.8391835 C7.77289065,18.6603202 8.52745292,19.2370836 9.34858965,19.1274206 L9.34858965,19.1274206 Z M17.1767435,18.6256231 C17.1839958,18.6245412 17.1839958,18.6245412 17.1912486,18.6234592 C18.27963,18.4610765 19.0303002,17.4471319 18.8679175,16.3587506 C18.7055348,15.2703693 17.6915903,14.519699 16.6032089,14.6820817 C16.5960024,14.6831569 16.5960024,14.6831569 16.5887964,14.6842319 C15.5004112,14.846589 
14.7497172,15.8605159 14.9120743,16.948901 C15.0744314,18.0372862 16.0883584,18.7879802 17.1767435,18.6256231 L17.1767435,18.6256231 Z M24.8370287,17.4177379 C25.922252,17.2355961 26.6543441,16.2081937 26.4722023,15.1229704 C26.2900605,14.0377471 25.2626581,13.3056549 24.1774348,13.4877968 C24.1702314,13.4890057 24.1702314,13.4890057 24.1630282,13.4902145 C23.0777996,13.6723249 22.3456777,14.6997061 22.5277881,15.7849347 C22.7098984,16.8701632 23.7372796,17.6022852 24.8225082,17.4201748 C24.8297684,17.4189565 24.8297684,17.4189565 24.8370287,17.4177379 Z M32.4725265,16.061085 C33.5532108,15.853199 34.2612448,14.8060455 34.0539647,13.7222022 C33.8466846,12.6383589 32.8025831,11.9282552 31.7218988,12.1361412 C31.714724,12.1375214 31.714724,12.1375214 31.7075489,12.1389013 C30.6268567,12.3467457 29.9187826,13.3938719 30.1260211,14.4777232 C30.3332597,15.5615745 31.3773339,16.2717185 32.4580262,16.0638741 C32.4652764,16.0624797 32.4652764,16.0624797 32.4725265,16.061085 Z M40.0707225,14.4695476 C40.0780573,14.4678946 40.0780573,14.4678946 40.0853916,14.4662413 C41.158768,14.2242783 41.8327617,13.1579849 41.5907986,12.0846085 C41.3488355,11.011232 40.2825422,10.3372384 39.2091657,10.5792015 C39.2019419,10.5808298 39.2019419,10.5808298 39.1947175,10.582458 C38.1213273,10.8243601 37.447273,11.8906152 37.6891752,12.9640053 C37.9310773,14.0373955 38.9973324,14.7114498 40.0707225,14.4695476 L40.0707225,14.4695476 Z M47.4276119,12.0082845 C47.4329844,12.0067855 47.4329844,12.0067855 47.4383562,12.0052864 C48.2362896,11.7825897 48.7026111,10.9552056 48.4799145,10.1572722 C48.2572179,9.35933881 47.4298337,8.89301728 46.6319003,9.11571391 C46.6266645,9.1171751 46.6266645,9.1171751 46.6214279,9.11863616 C45.8234736,9.34125773 45.3570742,10.168598 45.5796958,10.9665523 C45.8023173,11.7645067 46.6296576,12.230906 47.4276119,12.0082845 L47.4276119,12.0082845 Z M54.8999721,9.57562965 C54.9052414,9.57358217 54.9052414,9.57358217 54.9105092,9.57153441 C55.6826371,9.27135123 56.0652239,8.40207131 55.7650408,7.62994336 C55.4648576,6.85781542 54.5955777,6.4752286 53.8234497,6.77541179 C53.8184808,6.77734338 53.8184808,6.77734338 53.8135101,6.77927482 C53.0413181,7.07929302 52.6585455,7.94849117 52.9585637,8.72068323 C53.2585819,9.4928753 54.12778,9.87564785 54.8999721,9.57562965 L54.8999721,9.57562965 Z" id="Shape"></path> + <path d="M1.45327704,26.6978168 C1.54647464,26.6867112 1.63432439,26.660866 1.7147722,26.6228911 C2.03520341,26.4716332 2.23820252,26.1279362 2.1939679,25.7567213 C2.13862768,25.2923089 1.71728488,24.9606903 1.2528724,25.0160305 C1.24980572,25.0163959 1.24980572,25.0163959 1.24674021,25.0167611 C0.782326598,25.0720918 0.450699376,25.4934278 0.506030077,25.9578415 C0.561360778,26.4222551 0.982696791,26.7538823 1.4471104,26.6985516 C1.45019313,26.6981843 1.45019313,26.6981843 1.45327704,26.6978168 Z M9.34858965,26.1274206 C9.35409106,26.1266859 9.35409106,26.1266859 9.35959333,26.1259509 C10.1807278,26.0162709 10.7574756,25.2616967 10.6477955,24.4405622 C10.5381155,23.6194278 9.78354128,23.04268 8.96240683,23.15236 C8.95693539,23.1530908 8.95693539,23.1530908 8.95146479,23.1538215 C8.13032806,23.2634845 7.55356464,24.0180467 7.66322765,24.8391835 C7.77289065,25.6603202 8.52745292,26.2370836 9.34858965,26.1274206 L9.34858965,26.1274206 Z M17.0066591,25.0462427 C17.0121188,25.0454283 17.0121188,25.0454283 17.0175789,25.0446136 C17.8369369,24.9223683 18.4020584,24.1590479 18.279813,23.3396899 C18.1575677,22.5203319 17.3942473,21.9552104 16.5748893,22.0774558 C16.5694641,22.0782652 
16.5694641,22.0782652 16.5640392,22.0790744 C15.7446783,22.2013005 15.1795389,22.9646076 15.301765,23.7839685 C15.4239911,24.6033294 16.1872982,25.1684688 17.0066591,25.0462427 L17.0066591,25.0462427 Z M24.8225082,24.4201748 C24.8297684,24.4189565 24.8297684,24.4189565 24.8370287,24.4177379 C25.922252,24.2355961 26.6543441,23.2081937 26.4722023,22.1229704 C26.2900605,21.0377471 25.2626581,20.3056549 24.1774348,20.4877968 C24.1702314,20.4890057 24.1702314,20.4890057 24.1630282,20.4902145 C23.0777996,20.6723249 22.3456777,21.6997061 22.5277881,22.7849347 C22.7098984,23.8701632 23.7372796,24.6022852 24.8225082,24.4201748 L24.8225082,24.4201748 Z M32.4725265,23.0495399 C33.5532108,22.8422598 34.2612448,21.7981584 34.0539647,20.7174741 C33.8466846,19.6367898 32.8025831,18.9287558 31.7218988,19.1360359 C31.714724,19.137412 31.714724,19.137412 31.7075489,19.138788 C30.6268567,19.3460265 29.9187826,20.3901008 30.1260211,21.470793 C30.3332597,22.5514853 31.3773339,23.2595594 32.4580262,23.0523208 C32.4652764,23.0509305 32.4652764,23.0509305 32.4725265,23.0495399 Z M39.8723866,20.8947688 C39.877909,20.8935242 39.877909,20.8935242 39.883431,20.8922795 C40.6915794,20.7101047 41.1990314,19.9072892 41.0168566,19.0991407 C40.8346818,18.2909923 40.0318663,17.7835404 39.2237179,17.9657152 C39.218279,17.9669412 39.218279,17.9669412 39.2128397,17.968167 C38.404681,18.1502959 37.8971834,18.9530826 38.0793123,19.7612414 C38.2614411,20.5694001 39.0642279,21.0768976 39.8723866,20.8947688 L39.8723866,20.8947688 Z M47.4276119,19.0082845 C47.4329844,19.0067855 47.4329844,19.0067855 47.4383562,19.0052864 C48.2362896,18.7825897 48.7026111,17.9552056 48.4799145,17.1572722 C48.2572179,16.3593388 47.4298337,15.8930173 46.6319003,16.1157139 C46.6266645,16.1171751 46.6266645,16.1171751 46.6214279,16.1186362 C45.8234736,16.3412577 45.3570742,17.168598 45.5796958,17.9665523 C45.8023173,18.7645067 46.6296576,19.230906 47.4276119,19.0082845 L47.4276119,19.0082845 Z M54.4138099,15.7087505 C54.4167849,15.7075945 54.4167849,15.7075945 54.419759,15.7064383 C54.8556933,15.5369585 55.0716973,15.0461733 54.9022174,14.610239 C54.7327376,14.1743047 54.2419524,13.9583007 53.8060181,14.1277806 C53.8032127,14.1288712 53.8032127,14.1288712 53.8004064,14.1299616 C53.3644359,14.2993484 53.148327,14.7900873 53.3177137,15.2260578 C53.4871005,15.6620283 53.9778394,15.8781372 54.4138099,15.7087505 L54.4138099,15.7087505 Z" id="Shape"></path> + </g> + <g id="top"> + <path d="M1.4471104,12.2885536 C1.45019313,12.2881864 1.45019313,12.2881864 1.45327704,12.2878189 C1.91768952,12.2324787 2.24930811,11.8111359 2.1939679,11.3467234 C2.13862768,10.8823109 1.71728488,10.5506923 1.2528724,10.6060325 C1.24980572,10.606398 1.24980572,10.606398 1.24674021,10.6067632 C0.782326598,10.6620939 0.450699376,11.0834299 0.506030077,11.5478435 C0.561360778,12.0122571 0.982696791,12.3438844 1.4471104,12.2885536 L1.4471104,12.2885536 Z M9.34858965,12.1304907 C9.35409106,12.129756 9.35409106,12.129756 9.35959333,12.129021 C10.1807278,12.019341 10.7574756,11.2647668 10.6477955,10.4436323 C10.5381155,9.62249789 9.78354128,9.04575011 8.96240683,9.15543014 C8.95693539,9.15616095 8.95693539,9.15616095 8.95146479,9.15689157 C8.13032806,9.26655457 7.55356464,10.0211168 7.66322765,10.8422536 C7.77289065,11.6633903 8.52745292,12.2401537 9.34858965,12.1304907 L9.34858965,12.1304907 Z M17.0066591,11.0493128 C17.0121188,11.0484984 17.0121188,11.0484984 17.0175789,11.0476838 C17.8369369,10.9254384 18.4020584,10.1621181 18.279813,9.34276003 C18.1575677,8.52340201 
17.3942473,7.95828052 16.5748893,8.08052587 C16.5694641,8.08133528 16.5694641,8.08133528 16.5640392,8.08214454 C15.7446783,8.20437064 15.1795389,8.96767771 15.301765,9.78703861 C15.4239911,10.6063995 16.1872982,11.1715389 17.0066591,11.0493128 L17.0066591,11.0493128 Z M24.8225082,10.4232459 C24.8297684,10.4220275 24.8297684,10.4220275 24.8370287,10.420809 C25.922252,10.2386672 26.6543441,9.21126476 26.4722023,8.12604147 C26.2900605,7.04081818 25.2626581,6.30872601 24.1774348,6.49086783 C24.1702314,6.4920768 24.1702314,6.4920768 24.1630282,6.49328559 C23.0777996,6.67539597 22.3456777,7.70277717 22.5277881,8.78800574 C22.7098984,9.87323431 23.7372796,10.6053563 24.8225082,10.4232459 L24.8225082,10.4232459 Z M32.4725265,9.05261002 C33.5532108,8.84532993 34.2612448,7.80122849 34.0539647,6.72054419 C33.8466846,5.63985989 32.8025831,4.93182589 31.7218988,5.13910599 C31.714724,5.14048211 31.714724,5.14048211 31.7075489,5.14185807 C30.6268567,5.34909665 29.9187826,6.39317088 30.1260211,7.47386314 C30.3332597,8.5545554 31.3773339,9.26262952 32.4580262,9.05539095 C32.4652764,9.05400057 32.4652764,9.05400057 32.4725265,9.05261002 Z M39.8723866,6.89476879 C39.877909,6.8935242 39.877909,6.8935242 39.883431,6.89227947 C40.6915794,6.71010471 41.1990314,5.90728916 41.0168566,5.09914075 C40.8346818,4.29099233 40.0318663,3.78354042 39.2237179,3.96571517 C39.218279,3.96694116 39.218279,3.96694116 39.2128397,3.96816703 C38.404681,4.15029588 37.8971834,4.9530826 38.0793123,5.76124136 C38.2614411,6.56940012 39.0642279,7.07689764 39.8723866,6.89476879 L39.8723866,6.89476879 Z M47.4276119,5.00828445 C47.4329844,5.00678549 47.4329844,5.00678549 47.4383562,5.00528637 C48.2362896,4.78258973 48.7026111,3.95520561 48.4799145,3.15727221 C48.2572179,2.35933881 47.4298337,1.89301728 46.6319003,2.11571391 C46.6266645,2.1171751 46.6266645,2.1171751 46.6214279,2.11863616 C45.8234736,2.34125773 45.3570742,3.16859798 45.5796958,3.96655233 C45.8023173,4.76450667 46.6296576,5.23090603 47.4276119,5.00828445 L47.4276119,5.00828445 Z M54.419759,2.30643871 C54.8556933,2.13695884 55.0716973,1.6461737 54.9022174,1.21023941 C54.7327376,0.774305114 54.2419524,0.558301127 53.8060181,0.727780997 C53.8032127,0.728871549 53.8032127,0.728871549 53.8004064,0.729962021 C53.3644359,0.89934874 53.148327,1.39008772 53.3177137,1.82605822 C53.4871005,2.26202871 53.9778394,2.47813756 54.4138099,2.30875084 C54.4167849,2.30759485 54.4167849,2.30759485 54.419759,2.30643871 Z" id="Shape"></path> + <path d="M1.67760355,20.096503 C1.68306395,20.0958524 1.68306395,20.0958524 1.68852642,20.0952015 C2.51113381,19.9971782 3.09852524,19.2508595 3.00050189,18.4282521 C2.90247854,17.6056447 2.15615986,17.0182533 1.33355246,17.1162767 C1.3281205,17.1169239 1.3281205,17.1169239 1.3226906,17.1175709 C0.500081196,17.2155774 -0.0873255124,17.961884 0.0106809923,18.7844934 C0.108687497,19.6071028 0.854994145,20.1945095 1.67760355,20.096503 L1.67760355,20.096503 Z M9.34858965,19.1274206 C9.35409106,19.1266859 9.35409106,19.1266859 9.35959333,19.1259509 C10.1807278,19.0162709 10.7574756,18.2616967 10.6477955,17.4405622 C10.5381155,16.6194278 9.78354128,16.04268 8.96240683,16.15236 C8.95693539,16.1530908 8.95693539,16.1530908 8.95146479,16.1538215 C8.13032806,16.2634845 7.55356464,17.0180467 7.66322765,17.8391835 C7.77289065,18.6603202 8.52745292,19.2370836 9.34858965,19.1274206 L9.34858965,19.1274206 Z M17.1767435,18.6256231 C17.1839958,18.6245412 17.1839958,18.6245412 17.1912486,18.6234592 C18.27963,18.4610765 19.0303002,17.4471319 18.8679175,16.3587506 
C18.7055348,15.2703693 17.6915903,14.519699 16.6032089,14.6820817 C16.5960024,14.6831569 16.5960024,14.6831569 16.5887964,14.6842319 C15.5004112,14.846589 14.7497172,15.8605159 14.9120743,16.948901 C15.0744314,18.0372862 16.0883584,18.7879802 17.1767435,18.6256231 L17.1767435,18.6256231 Z M24.8370287,17.4177379 C25.922252,17.2355961 26.6543441,16.2081937 26.4722023,15.1229704 C26.2900605,14.0377471 25.2626581,13.3056549 24.1774348,13.4877968 C24.1702314,13.4890057 24.1702314,13.4890057 24.1630282,13.4902145 C23.0777996,13.6723249 22.3456777,14.6997061 22.5277881,15.7849347 C22.7098984,16.8701632 23.7372796,17.6022852 24.8225082,17.4201748 C24.8297684,17.4189565 24.8297684,17.4189565 24.8370287,17.4177379 Z M32.4725265,16.061085 C33.5532108,15.853199 34.2612448,14.8060455 34.0539647,13.7222022 C33.8466846,12.6383589 32.8025831,11.9282552 31.7218988,12.1361412 C31.714724,12.1375214 31.714724,12.1375214 31.7075489,12.1389013 C30.6268567,12.3467457 29.9187826,13.3938719 30.1260211,14.4777232 C30.3332597,15.5615745 31.3773339,16.2717185 32.4580262,16.0638741 C32.4652764,16.0624797 32.4652764,16.0624797 32.4725265,16.061085 Z M40.0707225,14.4695476 C40.0780573,14.4678946 40.0780573,14.4678946 40.0853916,14.4662413 C41.158768,14.2242783 41.8327617,13.1579849 41.5907986,12.0846085 C41.3488355,11.011232 40.2825422,10.3372384 39.2091657,10.5792015 C39.2019419,10.5808298 39.2019419,10.5808298 39.1947175,10.582458 C38.1213273,10.8243601 37.447273,11.8906152 37.6891752,12.9640053 C37.9310773,14.0373955 38.9973324,14.7114498 40.0707225,14.4695476 L40.0707225,14.4695476 Z M47.4276119,12.0082845 C47.4329844,12.0067855 47.4329844,12.0067855 47.4383562,12.0052864 C48.2362896,11.7825897 48.7026111,10.9552056 48.4799145,10.1572722 C48.2572179,9.35933881 47.4298337,8.89301728 46.6319003,9.11571391 C46.6266645,9.1171751 46.6266645,9.1171751 46.6214279,9.11863616 C45.8234736,9.34125773 45.3570742,10.168598 45.5796958,10.9665523 C45.8023173,11.7645067 46.6296576,12.230906 47.4276119,12.0082845 L47.4276119,12.0082845 Z M54.8999721,9.57562965 C54.9052414,9.57358217 54.9052414,9.57358217 54.9105092,9.57153441 C55.6826371,9.27135123 56.0652239,8.40207131 55.7650408,7.62994336 C55.4648576,6.85781542 54.5955777,6.4752286 53.8234497,6.77541179 C53.8184808,6.77734338 53.8184808,6.77734338 53.8135101,6.77927482 C53.0413181,7.07929302 52.6585455,7.94849117 52.9585637,8.72068323 C53.2585819,9.4928753 54.12778,9.87564785 54.8999721,9.57562965 L54.8999721,9.57562965 Z" id="Shape"></path> + <path d="M1.45327704,26.6978168 C1.54647464,26.6867112 1.63432439,26.660866 1.7147722,26.6228911 C2.03520341,26.4716332 2.23820252,26.1279362 2.1939679,25.7567213 C2.13862768,25.2923089 1.71728488,24.9606903 1.2528724,25.0160305 C1.24980572,25.0163959 1.24980572,25.0163959 1.24674021,25.0167611 C0.782326598,25.0720918 0.450699376,25.4934278 0.506030077,25.9578415 C0.561360778,26.4222551 0.982696791,26.7538823 1.4471104,26.6985516 C1.45019313,26.6981843 1.45019313,26.6981843 1.45327704,26.6978168 Z M9.34858965,26.1274206 C9.35409106,26.1266859 9.35409106,26.1266859 9.35959333,26.1259509 C10.1807278,26.0162709 10.7574756,25.2616967 10.6477955,24.4405622 C10.5381155,23.6194278 9.78354128,23.04268 8.96240683,23.15236 C8.95693539,23.1530908 8.95693539,23.1530908 8.95146479,23.1538215 C8.13032806,23.2634845 7.55356464,24.0180467 7.66322765,24.8391835 C7.77289065,25.6603202 8.52745292,26.2370836 9.34858965,26.1274206 L9.34858965,26.1274206 Z M17.0066591,25.0462427 C17.0121188,25.0454283 17.0121188,25.0454283 17.0175789,25.0446136 
C17.8369369,24.9223683 18.4020584,24.1590479 18.279813,23.3396899 C18.1575677,22.5203319 17.3942473,21.9552104 16.5748893,22.0774558 C16.5694641,22.0782652 16.5694641,22.0782652 16.5640392,22.0790744 C15.7446783,22.2013005 15.1795389,22.9646076 15.301765,23.7839685 C15.4239911,24.6033294 16.1872982,25.1684688 17.0066591,25.0462427 L17.0066591,25.0462427 Z M24.8225082,24.4201748 C24.8297684,24.4189565 24.8297684,24.4189565 24.8370287,24.4177379 C25.922252,24.2355961 26.6543441,23.2081937 26.4722023,22.1229704 C26.2900605,21.0377471 25.2626581,20.3056549 24.1774348,20.4877968 C24.1702314,20.4890057 24.1702314,20.4890057 24.1630282,20.4902145 C23.0777996,20.6723249 22.3456777,21.6997061 22.5277881,22.7849347 C22.7098984,23.8701632 23.7372796,24.6022852 24.8225082,24.4201748 L24.8225082,24.4201748 Z M32.4725265,23.0495399 C33.5532108,22.8422598 34.2612448,21.7981584 34.0539647,20.7174741 C33.8466846,19.6367898 32.8025831,18.9287558 31.7218988,19.1360359 C31.714724,19.137412 31.714724,19.137412 31.7075489,19.138788 C30.6268567,19.3460265 29.9187826,20.3901008 30.1260211,21.470793 C30.3332597,22.5514853 31.3773339,23.2595594 32.4580262,23.0523208 C32.4652764,23.0509305 32.4652764,23.0509305 32.4725265,23.0495399 Z M39.8723866,20.8947688 C39.877909,20.8935242 39.877909,20.8935242 39.883431,20.8922795 C40.6915794,20.7101047 41.1990314,19.9072892 41.0168566,19.0991407 C40.8346818,18.2909923 40.0318663,17.7835404 39.2237179,17.9657152 C39.218279,17.9669412 39.218279,17.9669412 39.2128397,17.968167 C38.404681,18.1502959 37.8971834,18.9530826 38.0793123,19.7612414 C38.2614411,20.5694001 39.0642279,21.0768976 39.8723866,20.8947688 L39.8723866,20.8947688 Z M47.4276119,19.0082845 C47.4329844,19.0067855 47.4329844,19.0067855 47.4383562,19.0052864 C48.2362896,18.7825897 48.7026111,17.9552056 48.4799145,17.1572722 C48.2572179,16.3593388 47.4298337,15.8930173 46.6319003,16.1157139 C46.6266645,16.1171751 46.6266645,16.1171751 46.6214279,16.1186362 C45.8234736,16.3412577 45.3570742,17.168598 45.5796958,17.9665523 C45.8023173,18.7645067 46.6296576,19.230906 47.4276119,19.0082845 L47.4276119,19.0082845 Z M54.4138099,15.7087505 C54.4167849,15.7075945 54.4167849,15.7075945 54.419759,15.7064383 C54.8556933,15.5369585 55.0716973,15.0461733 54.9022174,14.610239 C54.7327376,14.1743047 54.2419524,13.9583007 53.8060181,14.1277806 C53.8032127,14.1288712 53.8032127,14.1288712 53.8004064,14.1299616 C53.3644359,14.2993484 53.148327,14.7900873 53.3177137,15.2260578 C53.4871005,15.6620283 53.9778394,15.8781372 54.4138099,15.7087505 L54.4138099,15.7087505 Z" id="Shape"></path> + </g> + </g> + </g> +</svg> diff --git a/scala3doc/resources/dotty_res/images/scala_logo.svg b/scala3doc/resources/dotty_res/images/scala_logo.svg new file mode 100644 index 000000000000..6196f9200f00 --- /dev/null +++ b/scala3doc/resources/dotty_res/images/scala_logo.svg @@ -0,0 +1,32 @@ +<svg width="125" height="26" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"> + <defs> + <linearGradient id="a"> + <stop stop-color="#656565" offset="0"/> + <stop stop-color="#010101" offset="1"/> + </linearGradient> + <linearGradient y2="0.49648" y1="0.49648" xlink:href="#a" x2="0.998187" x1="0.001707" id="b"/> + <linearGradient y2="0.4982" y1="0.4982" xlink:href="#a" x2="0.998187" x1="0.001707" id="c"/> + <linearGradient id="d"> + <stop stop-color="#9f1c20" offset="0"/> + <stop stop-color="#ed2224" offset="1"/> + </linearGradient> + <linearGradient y2="0.50052" y1="0.50052" xlink:href="#d" x2="0.998187" x1="0.001707" id="e"/> + 
<linearGradient y2="0.5026" y1="0.5026" xlink:href="#d" x2="0.998187" x1="0.001707" id="f"/> + <linearGradient y2="0.5044" y1="0.5044" xlink:href="#d" x2="0.998187" x1="0.001707" id="g"/> + </defs> + <g> + <title>background + + + + Layer 1 + + + + + + + + + + \ No newline at end of file diff --git a/scala3doc/resources/dotty_res/scripts/common/component.js b/scala3doc/resources/dotty_res/scripts/common/component.js new file mode 100644 index 000000000000..085aec3fe42d --- /dev/null +++ b/scala3doc/resources/dotty_res/scripts/common/component.js @@ -0,0 +1,28 @@ +class Component { + constructor(props = {}) { + this.props = props; + this.prevProps = {}; + this.state = {}; + } + + setState(nextState, cb = () => {}) { + const prevState = { ...this.state }; + if (typeof nextState === "function") { + this.state = { + ...this.state, + ...nextState(this.state), + }; + } else { + this.state = { + ...this.state, + ...nextState, + }; + } + + cb(); + + if (this.render) { + this.render(); + } + } +} diff --git a/scala3doc/resources/dotty_res/scripts/common/utils.js b/scala3doc/resources/dotty_res/scripts/common/utils.js new file mode 100644 index 000000000000..b61edc52d665 --- /dev/null +++ b/scala3doc/resources/dotty_res/scripts/common/utils.js @@ -0,0 +1,38 @@ +const findRef = (searchBy, element = document) => + element.querySelector(searchBy); + +const findRefs = (searchBy, element = document) => + element ? [...element.querySelectorAll(searchBy)] : []; + +const withEvent = (element, listener, callback) => { + element && element.addEventListener(listener, callback); + return () => element && element.removeEventListener(listener, callback); +}; + +const init = (cb) => window.addEventListener("DOMContentLoaded", cb); + +const attachDOM = (element, html) => { + if (element) { + element.innerHTML = htmlToString(html); + } +}; + +const startsWith = (str, character) => str.charAt(0) === character; + +const htmlToString = (html) => { + if (Array.isArray(html)) { + return html.join(""); + } + return html; +}; + +const attachListeners = (elementsRefs, type, callback) => + elementsRefs.map((elRef) => withEvent(elRef, type, callback)); + +const getElementTextContent = (element) => (element ? 
element.textContent : ""); + +const getElementDescription = (elementRef) => + findRef(".documentableBrief", elementRef); + +const getElementNameRef = (elementRef) => + findRef(".documentableName", elementRef); diff --git a/scala3doc/resources/dotty_res/scripts/components/DocumentableList.js b/scala3doc/resources/dotty_res/scripts/components/DocumentableList.js new file mode 100644 index 000000000000..36ca79dcce53 --- /dev/null +++ b/scala3doc/resources/dotty_res/scripts/components/DocumentableList.js @@ -0,0 +1,150 @@ +class DocumentableList extends Component { + constructor(props) { + super(props); + + this.refs = { + tabs: findRefs(".section-tab[data-togglable]", findRef(".tabbedcontent")), + sections: findRefs("div[data-togglable]", findRef(".tabbedcontent")), + }; + + this.state = { + list: new List(this.refs.tabs, this.refs.sections), + }; + + this.render(this.props); + } + + toggleElementDatasetVisibility(condition, element) { + if (condition) { + element.dataset.visibility = true; + } else { + element.dataset.visibility = false; + } + } + + toggleDisplayStyles(condition, ref, onVisibleStyle) { + if (condition) { + ref.style.display = onVisibleStyle; + } else { + ref.style.display = "none"; + } + } + + render({ filter }) { + this.state.list.sectionsRefs.map((sectionRef) => { + const tabRef = this.state.list.getTabRefFromSectionRef(sectionRef); + + const isTabVisible = this.state.list + .getSectionListRefs(sectionRef) + .filter((listRef) => { + const isListVisible = this.state.list + .getSectionListElementsRefs(listRef) + .map((elementRef) => this.state.list.mapListElementRef(elementRef)) + .filter((elementData) => { + const isElementVisible = this.state.list.isElementVisible( + elementData, + filter + ); + + this.toggleDisplayStyles( + isElementVisible, + elementData.ref, + "table" + ); + this.toggleElementDatasetVisibility( + isElementVisible, + elementData.ref + ); + return isElementVisible; + }).length; + + this.toggleDisplayStyles(isListVisible, listRef, "block"); + + return isListVisible; + }).length; + + this.toggleDisplayStyles(isTabVisible, tabRef, "inline-block"); + }); + } +} + +class List { + filterTab(name) { + return name !== "Linear supertypes" && name !== "Known subtypes" && name !== "Type hierarchy" + } + + constructor(tabsRef, sectionRefs) { + this._tabsRef = tabsRef; + this._sectionRefs = sectionRefs; + } + + get tabsRefs() { + return this._tabsRef.filter((tabRef) => this.filterTab(this._getTogglable(tabRef))); + } + + get sectionsRefs() { + return this._sectionRefs.filter( (sectionRef) => this.filterTab(this._getTogglable(sectionRef))); + } + + getTabRefFromSectionRef(sectionRef) { + return this.tabsRefs.find( + (tabRef) => this._getTogglable(tabRef) === this._getTogglable(sectionRef) + ); + } + + getSectionListRefs(sectionRef) { + return findRefs(".documentableList", sectionRef); + } + + getSectionListElementsRefs(listRef) { + return findRefs(".documentableElement", listRef); + } + + mapListElementRef(elementRef) { + return { + ref: elementRef, + name: getElementTextContent(getElementNameRef(elementRef)), + description: getElementTextContent(getElementDescription(elementRef)), + }; + } + + isElementVisible(elementData, filter) { + if (!this._areFiltersFromElementSelected(elementData, filter)) { + return false; + } + return this._includesInputValue(elementData, filter); + } + + _includesInputValue = (elementData, filter) => { + if (elementData.name.includes(filter.value)) { + return true; + } + return elementData.description.includes(filter.value); + }; + + 
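+ // An element passes this visibility check only when every comma-separated value in its "f"-prefixed dataset entries is currently selected; elements with no such entries are always shown.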
_areFiltersFromElementSelected(elementRef, filter) { + const dataset = this._getCorrectDatasetFromElement(elementRef); + return dataset.length + ? this._hasCorrespodingFilters(dataset, filter.filters) + : true; + } + + _hasCorrespodingFilters = (dataset, filters) => + dataset.every(([key, value]) => { + const filterGroup = filters[key]; + return this._splitByComma(value).every( + (val) => filterGroup && filterGroup[val].selected + ); + }); + + _getCorrectDatasetFromElement = (elementRef) => + Object.entries(elementRef.ref.dataset).filter(([key]) => + this._startsWithF(key) + ); + + _splitByComma = (str) => str.split(","); + + _startsWithF = (str) => startsWith(str, "f"); + + _getTogglable = (elementRef) => elementRef.dataset.togglable; +} diff --git a/scala3doc/resources/dotty_res/scripts/components/Filter.js b/scala3doc/resources/dotty_res/scripts/components/Filter.js new file mode 100644 index 000000000000..beb89c5fdbea --- /dev/null +++ b/scala3doc/resources/dotty_res/scripts/components/Filter.js @@ -0,0 +1,146 @@ +const defaultFilterGroup = { + FOrdering: { Alphabetical: true }, +}; + +class Filter { + constructor(value, filters, elementsRefs, init = false) { + this._init = init; + this._value = value; + this._elementsRefs = elementsRefs; + + this._filters = this._init ? this._withNewFilters() : filters; + } + + get value() { + return this._value; + } + + get filters() { + return this._filters; + } + + get elementsRefs() { + return this._elementsRefs; + } + + onFilterToggle(key, value) { + return new Filter( + this.value, + this._withToggledFilter(key, value), + this.elementsRefs + ); + } + + onGroupSelectionChange(key, isActive) { + return new Filter( + this.value, + this._withNewSelectionOfGroup(key, isActive), + this.elementsRefs + ); + } + + onInputValueChange(value) { + return new Filter( + value, + this._generateFiltersOnTyping(value), + this.elementsRefs + ); + } + + _generateFiltersOnTyping(value) { + return this.elementsRefs + .filter((elRef) => { + const name = getElementTextContent(getElementNameRef(elRef)); + const description = getElementTextContent(getElementDescription(elRef)); + + return name.includes(value) || description.includes(value); + }) + .map((elRef) => this._getDatasetWithF(elRef.dataset)) + .reduce((filtersObject, datasets) => { + datasets.map(([key, value]) => { + this._splitByComma(value).map((val) => { + filtersObject[key] = { + ...filtersObject[key], + [val]: { + ...filtersObject[key][val], + visible: true, + }, + }; + }); + }); + return filtersObject; + }, this._allFiltersAreHidden()); + } + + _allFiltersAreHidden() { + return Object.entries(this.filters).reduce( + (filters, [key, filterGroup]) => { + filters[key] = Object.keys(filterGroup).reduce( + (group, key) => ( + (group[key] = { ...filterGroup[key], visible: false }), group + ), + {} + ); + return filters; + }, + {} + ); + } + + _withNewSelectionOfGroup(key, isActive) { + return { + ...this.filters, + [key]: Object.keys(this.filters[key]).reduce( + (obj, filterKey) => ( + (obj[filterKey] = { + ...this.filters[key][filterKey], + ...(this.filters[key][filterKey].visible && { selected: isActive }), + }), + obj + ), + {} + ), + }; + } + + _withNewFilters() { + return this._elementsRefs.reduce((filtersObject, elementRef) => { + this._getDatasetWithF(elementRef.dataset).map(([key, value]) => + this._splitByComma(value).map((val) => { + if (!filtersObject[key]) { + filtersObject[key] = { [val]: { selected: true, visible: true } }; + } else { + filtersObject[key] = { + ...filtersObject[key], + [val]: 
filtersObject[key][val] ?? { + selected: true, + visible: true, + }, + }; + } + }) + ); + return filtersObject; + }, {}); + } + + _withToggledFilter(key, value) { + return { + ...this.filters, + [key]: { + ...this.filters[key], + [value]: { + ...this.filters[key][value], + selected: !this.filters[key][value].selected, + }, + }, + }; + } + + _splitByComma = (str) => str.split(","); + + _getDatasetWithF = (dataset) => + Object.entries(dataset).filter(([key]) => this._startsWithF(key)); + + _startsWithF = (str) => startsWith(str, "f"); +} diff --git a/scala3doc/resources/dotty_res/scripts/components/FilterBar.js b/scala3doc/resources/dotty_res/scripts/components/FilterBar.js new file mode 100644 index 000000000000..c030b372987d --- /dev/null +++ b/scala3doc/resources/dotty_res/scripts/components/FilterBar.js @@ -0,0 +1,65 @@ +class FilterBar extends Component { + constructor(props) { + super(props); + + this.refs = { + elements: findRefs(".documentableElement"), + filterBar: findRef(".documentableFilter"), + }; + + this.state = { + filter: new Filter("", {}, this.refs.elements, true), + isVisible: false, + }; + + this.inputComp = new Input({ onInputChange: this.onInputChange }); + this.listComp = new DocumentableList({ + filter: this.state.filter, + }); + this.filterGroupComp = new FilterGroup({ + filter: this.state.filter, + onFilterToggle: this.onFilterToggle, + onGroupSelectChange: this.onGroupSelectChange, + onFilterVisibilityChange: this.onFilterVisibilityChange, + }); + + this.render(); + } + + onInputChange = (value) => { + this.setState((prevState) => ({ + filter: prevState.filter.onInputValueChange(value), + })); + }; + + onGroupSelectChange = (key, isActive) => { + this.setState((prevState) => ({ + filter: prevState.filter.onGroupSelectionChange(key, isActive), + })); + }; + + onFilterVisibilityChange = () => { + this.setState((prevState) => ({ isVisible: !prevState.isVisible })); + }; + + onFilterToggle = (key, value) => { + this.setState((prevState) => ({ + filter: prevState.filter.onFilterToggle(key, value), + })); + }; + + render() { + if (this.refs.filterBar) { + if (this.state.isVisible) { + this.refs.filterBar.classList.add("active"); + } else { + this.refs.filterBar.classList.remove("active"); + } + } + + this.listComp.render({ filter: this.state.filter }); + this.filterGroupComp.render({ filter: this.state.filter }); + } +} + +init(() => new FilterBar()); diff --git a/scala3doc/resources/dotty_res/scripts/components/FilterGroup.js b/scala3doc/resources/dotty_res/scripts/components/FilterGroup.js new file mode 100644 index 000000000000..ad28bfee1e00 --- /dev/null +++ b/scala3doc/resources/dotty_res/scripts/components/FilterGroup.js @@ -0,0 +1,112 @@ +class FilterGroup extends Component { + constructor(props) { + super(props); + + this.filterToggleRef = findRef(".filterToggleButton"); + this.filterLowerContainerRef = findRef(".filterLowerContainer"); + + withEvent( + this.filterToggleRef, + "click", + this.props.onFilterVisibilityChange + ); + + this.render(this.props); + } + + onFilterClick = ({ + currentTarget: { + dataset: { key, value }, + }, + }) => { + this.props.onFilterToggle(key, value); + }; + + onSelectAllClick = ({ + currentTarget: { + dataset: { key }, + }, + }) => { + this.props.onGroupSelectChange(key, true); + }; + + onDeselectAllClick = ({ + currentTarget: { + dataset: { key }, + }, + }) => { + this.props.onGroupSelectChange(key, false); + }; + + attachFiltersClicks() { + const refs = findRefs( + "button.filterButtonItem", + this.filterLowerContainerRef + ); 
+ attachListeners(refs, "click", this.onFilterClick); + } + + attachSelectingButtonsClicks() { + const selectAllRefs = findRefs( + "button.selectAll", + this.filterLowerContainerRef + ); + const deselectAllRefs = findRefs( + "button.deselectAll", + this.filterLowerContainerRef + ); + + attachListeners(selectAllRefs, "click", this.onSelectAllClick); + attachListeners(deselectAllRefs, "click", this.onDeselectAllClick); + } + + isActive(isActive) { + return isActive ? "active" : ""; + } + + isVisible(visible) { + return visible ? "visible" : ""; + } + + getSortedValues(values) { + return Object.entries(values).sort((a, b) => a[0].localeCompare(b[0])); + } + + getFilterGroup(title, values) { + return ` +
      +
      + ${title.substring(1)} +
      + + +
      +
      +
      + ${this.getSortedValues(values) + .map( + ([key, data]) => + `` + ) + .join(" ")} +
      +
      + `; + } + + render({ filter }) { + attachDOM( + this.filterLowerContainerRef, + Object.entries(filter.filters) + .filter(([key, values]) => Object.values(values).some((v) => v.visible)) + .map(([key, values]) => this.getFilterGroup(key, values)) + ); + + this.attachFiltersClicks(); + this.attachSelectingButtonsClicks(); + } +} diff --git a/scala3doc/resources/dotty_res/scripts/components/Input.js b/scala3doc/resources/dotty_res/scripts/components/Input.js new file mode 100644 index 000000000000..0c7a449fa1e7 --- /dev/null +++ b/scala3doc/resources/dotty_res/scripts/components/Input.js @@ -0,0 +1,18 @@ +class Input extends Component { + constructor(props) { + super(props); + + this.inputRef = findRef(".filterableInput"); + this.onChangeFn = withEvent(this.inputRef, "input", this.onInputChange); + } + + onInputChange = ({ currentTarget: { value } }) => { + this.props.onInputChange(value); + }; + + componentWillUnmount() { + if (this.onChangeFn) { + this.onChangeFn(); + } + } +} diff --git a/scala3doc/resources/dotty_res/scripts/diagram.js b/scala3doc/resources/dotty_res/scripts/diagram.js new file mode 100644 index 000000000000..7b93482a8cd5 --- /dev/null +++ b/scala3doc/resources/dotty_res/scripts/diagram.js @@ -0,0 +1,28 @@ +$("#inheritance-diagram").ready(function() { + if ($("svg#graph").children().length == 0) { + var dotNode = document.querySelector("#dot") + if (dotNode){ + var svg = d3.select("#graph"); + var inner = svg.append("g"); + + // Set up zoom support + var zoom = d3.zoom() + .on("zoom", function({transform}) { + inner.attr("transform", transform); + }); + svg.call(zoom); + + var render = new dagreD3.render(); + var g = graphlibDot.read(dotNode.text); + g.graph().rankDir = 'BT'; + g.nodes().forEach(function (v) { + g.setNode(v, { + labelType: "html", + label: g.node(v).label, + style: g.node(v).style + }); + }); + render(inner, g); + } + } +}) diff --git a/scala3doc/resources/dotty_res/scripts/hljs-scala3.js b/scala3doc/resources/dotty_res/scripts/hljs-scala3.js new file mode 100644 index 000000000000..af8c1620f9e6 --- /dev/null +++ b/scala3doc/resources/dotty_res/scripts/hljs-scala3.js @@ -0,0 +1,408 @@ +function highlightDotty(hljs) { + + // identifiers + const camelCaseId = /[a-z][$\w]*/ + const capitalizedId = /\b[A-Z][$\w]*\b/ + const alphaId = /[a-zA-Z$_][$\w]*/ + const op = /[^\s\w\d,"'()[\]{}]+/ + const id = new RegExp(`(${alphaId.source}((?<=_)${op.source})?|${op.source}|\`.*?\`)`) + + // numbers + const hexDigit = '[a-fA-F0-9]' + const hexNumber = `0[xX]${hexDigit}((${hexDigit}|_)*${hexDigit}+)?` + const decNumber = `0|([1-9]((\\d|_)*\\d)?)` + const exponent = `[eE][+-]?\\d((\\d|_)*\\d)?` + const floatingPointA = `(${decNumber})?\\.\\d((\\d|_)*\\d)?${exponent}[fFdD]?` + const floatingPointB = `${decNumber}${exponent}[fFdD]?` + const number = new RegExp(`(${hexNumber}|${floatingPointA}|${floatingPointB}|(${decNumber}[lLfFdD]?))`) + + // Regular Keywords + // The "soft" keywords (e.g. 'using') are added later where necessary + const alwaysKeywords = { + $pattern: /(\w+|\?=>|\?{1,3}|=>>|=>|<:|>:|_|<-|\.nn)/, + keyword: + 'abstract case catch class def do else enum export extends final finally for given '+ + 'if implicit import lazy match new object package private protected override return '+ + 'sealed then throw trait true try type val var while with yield =>> => ?=> <: >: _ ? <-', + literal: 'true false null this super', + built_in: '??? 
asInstanceOf isInstanceOf assert assertFail implicitly locally summon .nn' + } + const modifiers = 'abstract|final|implicit|override|private|protected|sealed' + + // End of class, enum, etc. header + const templateDeclEnd = /(\/[/*]|{|: *\n|\n(?! *(extends|with|derives)))/ + + // name + function titleFor(name) { + return { + className: 'title', + begin: `(?<=${name} )${id.source}` + } + } + + // all the keywords + soft keywords, separated by spaces + function withSoftKeywords(kwd) { + return { + $pattern: alwaysKeywords.$pattern, + keyword: kwd + ' ' + alwaysKeywords.keyword, + literal: alwaysKeywords.literal, + built_in: alwaysKeywords.built_in + } + } + + const PROBABLY_TYPE = { + className: 'type', + begin: capitalizedId, + relevance: 0 + } + + const NUMBER = { + className: 'number', + begin: number, + relevance: 0 + } + + const TPARAMS = { + begin: /\[/, end: /\]/, + keywords: { + $pattern: /<:|>:|[+-?_:]/, + keyword: '<: >: : + - ? _' + }, + contains: [ + hljs.C_BLOCK_COMMENT_MODE, + { + className: 'type', + begin: alphaId + }, + ], + relevance: 3 + } + + // Class or method parameters declaration + const PARAMS = { + className: 'params', + begin: /\(/, end: /\)/, + excludeBegin: true, + excludeEnd: true, + keywords: withSoftKeywords('inline using'), + contains: [ + hljs.C_BLOCK_COMMENT_MODE, + hljs.QUOTE_STRING_MODE, + NUMBER, + PROBABLY_TYPE + ] + } + + // (using T1, T2, T3) + const CTX_PARAMS = { + className: 'params', + begin: /\(using (?!\w+:)/, end: /\)/, + excludeBegin: false, + excludeEnd: true, + relevance: 5, + keywords: withSoftKeywords('using'), + contains: [ + PROBABLY_TYPE + ] + } + + // String interpolation + const SUBST = { + className: 'subst', + variants: [ + {begin: /\$[a-zA-Z_]\w*/}, + { + begin: /\${/, end: /}/, + contains: [ + NUMBER, + hljs.QUOTE_STRING_MODE + ] + } + ] + } + + const STRING = { + className: 'string', + variants: [ + hljs.QUOTE_STRING_MODE, + { + begin: '"""', end: '"""', + contains: [hljs.BACKSLASH_ESCAPE], + relevance: 10 + }, + { + begin: alphaId.source + '"', end: '"', + contains: [hljs.BACKSLASH_ESCAPE, SUBST], + illegal: /\n/, + relevance: 5 + }, + { + begin: alphaId.source + '"""', end: '"""', + contains: [hljs.BACKSLASH_ESCAPE, SUBST], + relevance: 10 + } + ] + } + + // Class or method apply + const APPLY = { + begin: /\(/, end: /\)/, + excludeBegin: true, excludeEnd: true, + keywords: { + $pattern: alwaysKeywords.$pattern, + keyword: 'using ' + alwaysKeywords.keyword, + literal: alwaysKeywords.literal, + built_in: alwaysKeywords.built_in + }, + contains: [ + STRING, + NUMBER, + hljs.C_BLOCK_COMMENT_MODE, + PROBABLY_TYPE, + ] + } + + // @annot(...) or @my.package.annot(...) 
+ const ANNOTATION = { + className: 'meta', + begin: `@${id.source}(\\.${id.source})*`, + contains: [ + APPLY, + hljs.C_BLOCK_COMMENT_MODE + ] + } + + // Documentation + const SCALADOC = hljs.COMMENT('/\\*\\*', '\\*/', { + contains: [ + { + className: 'doctag', + begin: /@[a-zA-Z]+/ + }, + // markdown syntax elements: + { + className: 'code', + variants: [ + {begin: /```.*\n/, end: /```/}, + {begin: /`/, end: /`/} + ], + }, + { + className: 'bold', + variants: [ + {begin: /\*\*/, end: /\*\*/}, + {begin: /__/, end: /__/} + ], + }, + { + className: 'emphasis', + variants: [ + {begin: /\*(?![\*\s/])/, end: /\*/}, + {begin: /_/, end: /_/} + ], + }, + { + className: 'bullet', // list item + begin: /- (?=\S)/, end: /\s/, + }, + { + className: 'link', + begin: /(?<=\[.*?\])\(/, end: /\)/, + } + ] + }) + + // Methods + const METHOD = { + className: 'function', + begin: `((${modifiers}|transparent|inline) +)*def`, end: / =|\n/, + excludeEnd: true, + relevance: 5, + keywords: withSoftKeywords('inline transparent'), + contains: [ + hljs.C_LINE_COMMENT_MODE, + hljs.C_BLOCK_COMMENT_MODE, + titleFor('def'), + TPARAMS, + CTX_PARAMS, + PARAMS, + PROBABLY_TYPE + ] + } + + // Variables & Constants + const VAL = { + beginKeywords: 'val var', end: /[=:;\n]/, + excludeEnd: true, + contains: [ + hljs.C_LINE_COMMENT_MODE, + hljs.C_BLOCK_COMMENT_MODE, + titleFor('(val|var)') + ] + } + + // Type declarations + const TYPEDEF = { + className: 'typedef', + begin: `((${modifiers}|opaque) +)*type`, end: /[=;\n]/, + excludeEnd: true, + keywords: withSoftKeywords('opaque'), + contains: [ + hljs.C_LINE_COMMENT_MODE, + hljs.C_BLOCK_COMMENT_MODE, + titleFor('type'), + PROBABLY_TYPE + ] + } + + // Given instances (for the soft keyword 'as') + const GIVEN = { + begin: /given/, end: /[=;\n]/, + excludeEnd: true, + keywords: 'as given using', + contains: [ + hljs.C_LINE_COMMENT_MODE, + hljs.C_BLOCK_COMMENT_MODE, + titleFor('given'), + PARAMS, + PROBABLY_TYPE + ] + } + + // Extension methods + const EXTENSION = { + begin: /extension/, end: /(\n|def)/, + returnEnd: true, + keywords: 'extension implicit using', + contains: [ + hljs.C_LINE_COMMENT_MODE, + hljs.C_BLOCK_COMMENT_MODE, + CTX_PARAMS, + PARAMS, + PROBABLY_TYPE + ] + } + + // 'end' soft keyword + const END = { + begin: `end(?= (if|while|for|match|try|given|extension|this|val|${id.source})\\n)`, end: /\s/, + keywords: 'end' + } + + // Classes, traits, enums, etc. 
+ const EXTENDS_PARENT = { + begin: ' extends ', end: /( with | derives |\/[/*])/, + endsWithParent: true, + returnEnd: true, + keywords: 'extends', + contains: [APPLY, PROBABLY_TYPE] + } + const WITH_MIXIN = { + begin: ' with ', end: / derives |\/[/*]/, + endsWithParent: true, + returnEnd: true, + keywords: 'with', + contains: [APPLY, PROBABLY_TYPE], + relevance: 10 + } + const DERIVES_TYPECLASS = { + begin: ' derives ', end: /\n|\/[/*]/, + endsWithParent: true, + returnEnd: true, + keywords: 'derives', + contains: [PROBABLY_TYPE], + relevance: 10 + } + + const CLASS = { + className: 'class', + begin: `((${modifiers}|open|case) +)*class|trait|enum|object|package object`, end: templateDeclEnd, + keywords: withSoftKeywords('open'), + contains: [ + hljs.C_LINE_COMMENT_MODE, + hljs.C_BLOCK_COMMENT_MODE, + titleFor('(class|trait|object|enum)'), + TPARAMS, + CTX_PARAMS, + PARAMS, + EXTENDS_PARENT, + WITH_MIXIN, + DERIVES_TYPECLASS, + PROBABLY_TYPE + ] + } + + // Case in enum + const ENUM_CASE = { + begin: /case (?!.*=>)/, end: /\n/, + keywords: 'case', + excludeEnd: true, + contains: [ + hljs.C_LINE_COMMENT_MODE, + hljs.C_BLOCK_COMMENT_MODE, + { + // case A, B, C + className: 'title', + begin: `(?<=(case|,) *)${id.source}` + }, + PARAMS, + EXTENDS_PARENT, + WITH_MIXIN, + DERIVES_TYPECLASS, + PROBABLY_TYPE + ] + } + + // Case in pattern matching + const MATCH_CASE = { + begin: /case/, end: /=>/, + keywords: 'case', + excludeEnd: true, + contains: [ + hljs.C_LINE_COMMENT_MODE, + hljs.C_BLOCK_COMMENT_MODE, + { + begin: /[@_]/, + keywords: { + $pattern: /[@_]/, + keyword: '@ _' + } + }, + NUMBER, + STRING, + PROBABLY_TYPE + ] + } + + // inline someVar[andMaybeTypeParams] match + const INLINE_MATCH = { + begin: /inline [^\n:]+ match/, + keywords: 'inline match' + } + + return { + name: 'Scala3', + aliases: ['scala', 'dotty'], + keywords: alwaysKeywords, + contains: [ + NUMBER, + STRING, + SCALADOC, + hljs.C_LINE_COMMENT_MODE, + hljs.C_BLOCK_COMMENT_MODE, + METHOD, + VAL, + TYPEDEF, + CLASS, + GIVEN, + EXTENSION, + ANNOTATION, + ENUM_CASE, + MATCH_CASE, + INLINE_MATCH, + END, + APPLY, + PROBABLY_TYPE + ] + } +} diff --git a/scala3doc/resources/dotty_res/scripts/ux.js b/scala3doc/resources/dotty_res/scripts/ux.js new file mode 100644 index 000000000000..e4b19bdf8023 --- /dev/null +++ b/scala3doc/resources/dotty_res/scripts/ux.js @@ -0,0 +1,11 @@ +window.addEventListener("DOMContentLoaded", () => { + var e = document.getElementById("leftToggler"); + if (e) { + e.onclick = function () { + document.getElementById("leftColumn").classList.toggle("open"); + }; + } + hljs.registerLanguage("scala", highlightDotty); + hljs.registerAliases(["dotty", "scala3"], "scala"); + hljs.initHighlighting(); +}); diff --git a/scala3doc/resources/dotty_res/styles/diagram.css b/scala3doc/resources/dotty_res/styles/diagram.css new file mode 100644 index 000000000000..afa6e405a65a --- /dev/null +++ b/scala3doc/resources/dotty_res/styles/diagram.css @@ -0,0 +1,20 @@ +.node { + stroke: #333; + stroke-width: 2.5px; + fill: white; +} + +.edgeLabel { + fill: white; +} + +.edgePath { + stroke: #333; + stroke-width: 1.5px; + fill: none; +} + +#graph { + width: 100%; + height: 80%; +} diff --git a/scala3doc/resources/dotty_res/styles/dotty-icons.css b/scala3doc/resources/dotty_res/styles/dotty-icons.css new file mode 100644 index 000000000000..bfe6d0ed2484 --- /dev/null +++ b/scala3doc/resources/dotty_res/styles/dotty-icons.css @@ -0,0 +1,61 @@ +@font-face { + font-family: 'dotty-icons'; + src: + 
url('../fonts/dotty-icons.woff?kefi7x') format('woff'), + url('../fonts/dotty-icons.ttf?kefi7x') format('truetype'); + font-weight: normal; + font-style: normal; + font-display: block; +} + +[class^="icon-"], [class*=" icon-"] { + /* use !important to prevent issues with browser extensions that change fonts */ + font-family: 'dotty-icons' !important; + speak: never; + font-style: normal; + font-weight: normal; + font-variant: normal; + text-transform: none; + line-height: 1; + + /* Better Font Rendering */ + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; +} + +.icon-git:before { + content: "\e908"; +} +.icon-clear:before { + content: "\e900"; +} +.icon-content_copy:before { + content: "\e90b"; +} +.icon-create:before { + content: "\e907"; +} +.icon-link:before { + content: "\e901"; +} +.icon-vertical_align_top:before { + content: "\e902"; +} +.icon-keyboard_arrow_down:before { + content: "\e903"; +} +.icon-keyboard_arrow_right:before { + content: "\e904"; +} +.icon-keyboard_arrow_up:before { + content: "\e905"; +} +.icon-menu:before { + content: "\e90a"; +} +.icon-check_circle:before { + content: "\e909"; +} +.icon-search:before { + content: "\e906"; +} diff --git a/scala3doc/resources/dotty_res/styles/filter-bar.css b/scala3doc/resources/dotty_res/styles/filter-bar.css new file mode 100644 index 000000000000..f8ed5eb9d94d --- /dev/null +++ b/scala3doc/resources/dotty_res/styles/filter-bar.css @@ -0,0 +1,137 @@ +.documentableFilter { + padding: 24px 12px; + background-color: var(--leftbar-bg); +} + +.documentableFilter.active .filterToggleButton svg { + transform: rotate(90deg); +} + +.documentableFilter.active .filterLowerContainer { + display: block; +} + +.filterUpperContainer { + display: flex; + align-items: center; +} + +.filterToggleButton { + padding: 0; + outline: 0; + border: 0; + background-color: transparent; + cursor: pointer; + transition: width 0.2s ease-in-out; +} + +.filterToggleButton svg { + fill: var(--code-bg); + transition: fill 0.1s ease-in, transform 0.1s ease-in-out; +} + +.filterToggleButton:hover svg, +.filterToggleButton:focus svg { + fill: var(--active-tab-color); +} + +.filterableInput { + padding: 6px 4px; + flex: 1; + outline: 0; + border: 0; + border-radius: 3px; + background-color: var(--code-bg); +} + +.filterLowerContainer { + padding-top: 30px; + display: none; +} + +.filterGroup { + display: flex; + margin-bottom: 16px; +} + +.filterList { + margin-left: 10px; +} + +.filterButtonItem { + display: none; + padding: 6px 16px; + margin-bottom: 6px; + margin-right: 6px; + outline: 0; + border: 0; + border-radius: 3px; + color: var(--leftbar-bg); + background-color: var(--code-bg); + font-size: 12px; + font-weight: 700; + cursor: pointer; + border-bottom: 2px solid var(--inactive-fg); + transition: all 0.1s ease-in; +} + +.filterButtonItem:hover, +.filterButtonItem:focus { + opacity: 0.7; +} + +.filterButtonItem.active { + color: var(--code-bg); + border-bottom-color: var(--link-fg); + background-color: var(--active-tab-color); +} + +.filterButtonItem.visible { + display: inline-block; +} + +.groupTitle { + min-width: 98px; + margin-top: 4px; + font-weight: 700; + font-size: 14px; + color: var(--code-bg); +} + +.groupButtonsContainer { + display: flex; + align-items: center; + margin-top: 4px; +} + +.selectAll { + margin-right: 4px; +} + +.selectAll, +.deselectAll { + outline: 0; + border: 0; + background-color: transparent; + padding: 0; + color: var(--code-bg); + font-size: 8px; + cursor: pointer; + transition: all 0.1s ease-in; 
+} + +.selectAll { + padding: 4px; + border-radius: 2px; + background-color: var(--active-tab-color); +} + +.selectAll:hover, +.selectAll:focus { + opacity: 0.7; +} + +.deselectAll:hover, +.deselectAll:focus { + color: var(--active-tab-color); +} diff --git a/scala3doc/resources/dotty_res/styles/nord-light.css b/scala3doc/resources/dotty_res/styles/nord-light.css new file mode 100644 index 000000000000..71eac33c4930 --- /dev/null +++ b/scala3doc/resources/dotty_res/styles/nord-light.css @@ -0,0 +1,44 @@ +/* Theme inspired by nordtheme. The colors have been darkened to work on light backgrounds. */ +pre, .hljs { + background: #F4F5FA; + color: #4C566A; +} + +.hljs-comment { + color: #90A1C1; +} +.hljs-doctag { + color: #4B6B92; + font-weight: 500; +} +.hljs-emphasis { + font-style: italic; +} +.hljs-bold { + font-weight: bold; +} + +.hljs-meta { + color: #F9A600; + font-weight: 500; +} +.hljs-subst { + color: #F9A600; +} +.hljs-title { + color: #2B8FAC; + font-weight: 500; +} +.hljs-type { + color: #1E7C7A; +} +.hljs-keyword { + color: #2E6BB8; + font-weight: 500; +} +.hljs-string { + color: #6AA13B; +} +.hljs-built_in, .hljs-number, .hljs-literal { + color: #9D5490; +} diff --git a/scala3doc/resources/dotty_res/styles/scalastyle.css b/scala3doc/resources/dotty_res/styles/scalastyle.css new file mode 100644 index 000000000000..3674f4cae339 --- /dev/null +++ b/scala3doc/resources/dotty_res/styles/scalastyle.css @@ -0,0 +1,586 @@ +@import url('https://fonts.googleapis.com/css2?family=Fira+Code:wght@400;500&family=Lato:wght@400;700&family=Roboto+Slab:wght@700&display=swap'); + +:root { + /* Color Settings */ + --border-light: #DADFE6; + --border-medium: #abc; + + --code-bg: #F4F5FA; + --symbol-fg: #333; + --link-fg: #00607D; + --link-hover-fg: #00A0D0; + --inactive-fg: #777; + --title-fg: #00485E; + + --leftbar-bg: #003048; + --leftbar-fg: #fff; + --leftbar-current-bg: #0090BB; + --leftbar-current-fg: #fff; + --leftbar-hover-bg: #00485E; + --leftbar-hover-fg: #fff; + --logo-fg: var(--leftbar-fg); + + --icon-color: #00485E; + --active-tab-color: #00A0D0; + --selected-fg: #00303E; + --selected-bg: #BFE7F3; + + /* Font Settings */ + --mono-font: "Fira Code", monospace; + --text-font: "Lato", sans-serif; + --title-font: "Roboto Slab", serif; + + /* Layout Settings (changes on small screens) */ + --side-width: 255px; + --content-padding: 24px 42px; + --footer-height: 42px; +} + +body { + margin: 0; + padding: 0; + font-family: "Lato", sans-serif; + font-size: 16px; +} + +/* Page layout */ +#container { + min-height: 100%; +} +#leftColumn { + position: fixed; + width: var(--side-width); + height: 100%; + border-right: none; + background: var(--leftbar-bg); + display: flex; + flex-direction: column; + z-index: 5; +} +main { + min-height: calc(100vh - var(--footer-height) - 24px); +} +#content { + margin-left: var(--side-width); + padding: var(--content-padding); + padding-bottom: calc(24px + var(--footer-height)); +} + +/* Text */ +h1, h2, h3 { + font-family: var(--title-font); + color: var(--title-fg); +} +pre, code, .monospace, .hljs { + font-family: var(--mono-font); + background: var(--code-bg); + font-variant-ligatures: none; +} +code { + font-size: .8em; + padding: 0 .3em; +} +pre code, pre code.hljs { + font-size: 1em; + padding: 0; +} +pre, .symbol.monospace { + padding: 10px 8px 10px 12px; + font-weight: 500; + font-size: 12px; +} +a, a:visited, span[data-unresolved-link] { + text-decoration: none; + color: var(--link-fg); +} +a:hover, a:active { + color: var(--link-hover-fg); + 
text-decoration: underline; +} + +/* Tables */ +table { + border-collapse: collapse; + min-width: 400px; +} +td, th { + border: 1px solid var(--border-medium); + padding: .5rem; +} +th { + border-bottom: 2px solid var(--border-medium); +} + +/* Left bar toggler, only on small screens */ +#leftToggler { + display: none; + color: var(--icon-color); + cursor: pointer; +} + +/* Left bar */ +#sideMenu { + overflow-y: auto; + scrollbar-width: thin; + height: 100%; + font-size: 15px; +} +#logo { + background-size: contain; + background-repeat: no-repeat; + background-image: url(../images/dotty-logo-white.svg); + background-origin: content-box; + padding: 8px 0 8px 16px; + height: 42px; +} +#logo::after { + color: var(--leftbar-fg); + font-size: 22px; + content: "Scala3doc"; + margin-left: 42px; + line-height: 42px; +} +.sideMenuPart { + padding-left: 1em; +} +.sideMenuPart a { + align-items: center; + flex: 1; + overflow-x: hidden; + overflow-wrap: anywhere; + color: var(--leftbar-fg); + margin-right: .5rem; +} +.sideMenuPart > .overview { + display: flex; + align-items: center; + position: relative; + user-select: none; + padding: 7px 0; +} +.sideMenuPart > .overview::before { + width: var(--side-width); + box-sizing: border-box; + content: ''; + top: 0; + right: 0; + bottom: 0; + position: absolute; + z-index: 1; +} +.sideMenuPart > .overview:hover::before { + background: var(--leftbar-hover-bg); +} +.sideMenuPart > .overview:hover > a { + color: var(--leftbar-hover-fg); +} +.sideMenuPart[data-active] > .overview::before { + background: var(--leftbar-current-bg); +} +.sideMenuPart[data-active] > .overview > a { + font-weight: bold; + color: var(--leftbar-current-fg); +} +.sideMenuPart.hidden > .sideMenuPart { + height: 0; + visibility: hidden; +} +.overview a, .overview .navButton { + z-index: 3; +} +.sideMenuPart .navButton { + height: 100%; + align-items: center; + display: flex; + justify-content: flex-end; + padding-right: 1rem; + cursor: pointer; +} +.sideMenuPart .navButtonContent::before { + content: url("../images/arrow_down.svg"); + height: 100%; + display: flex; + flex-direction: row; + align-items: center; + justify-content: center; + transform: rotate(180deg); +} +.sideMenuPart.hidden .navButtonContent::before { + transform: rotate(0deg); +} + +/* Search */ +.search-content { + /* TODO override search style without !important */ + padding: 0 !important; + margin: var(--content-padding) !important; + position: absolute !important; + top: 0 !important; + right: 0 !important; + z-index: 5 !important; + background: none !important; +} +.search button { + background: none !important; + fill: var(--icon-color) !important; + cursor: pointer !important; +} +.search button:hover { + fill: var(--link-hover-fg) !important; +} +.search button svg { + width: 24px !important; + height: 24px !important; +} +.popup-wrapper { /* search popup */ + box-shadow: 0 0 10px var(--border-light) !important; + border: 2px solid var(--border-light) !important; + font-family: var(--mono-font) !important; + /* TODO dokka already uses !important so I can't override that... 
+ min-width: calc(100% - var(--side-width) - 36px) !important; + left: 24px; + */ +} +.popup-wrapper .indented { + text-indent: 1.5em !important; +} +.popup-wrapper .disabled { + color: var(--inactive-fg) !important; + font-weight: 500 !important; +} +.action_def:hover, .action_def:focus-within { + color: var(--selected-fg); + background: var(--selected-bg) !important; + font-weight: 500; +} + +/* Cover */ +.cover h1 { + font-size: 38px; + margin-top: 1rem; + margin-bottom: .25rem; +} + +/* Tabs */ +.section-tab { + border: none; + outline: none; + background: transparent; + padding: 0 6px 4px 6px; + margin: 1rem 1rem 0 0; + border-bottom: 1px solid grey; + cursor: pointer; +} +.section-tab[data-active=""] { + color: unset; + font-weight: bold; + border-bottom: 2px solid var(--active-tab-color); +} +.tabs-section-body > :not([data-active]) { + display: none; +} + +/* Tabs content */ +.table { + /*! display: flex; */ + flex-direction: column; +} +.table-row { + border-bottom: 2px solid var(--border-light); + padding: 8px 24px 8px 0; +} +.main-subrow { + margin-bottom: .5em; +} +.main-subrow > span > a, .main-subrow > span > span[data-unresolved-link] { + text-decoration: none; + font-style: normal; + font-weight: bold; + color: unset; + font-size: 18px; +} +.main-subrow .anchor-icon { /* Link Anchor */ + margin-left: .25rem; + opacity: 0; + transition: 0.2s 0.5s; + cursor: pointer; +} +.main-subrow .anchor-icon > svg { + margin-bottom: -5px; + fill: var(--link-fg); +} +.main-subrow:hover .anchor-icon { + opacity: 1; + transition: 0.2s; +} +.brief-with-platform-tags ~ .main-subrow { + padding-top: 0; +} + +.brief { + white-space: pre-wrap; + overflow: hidden; + margin-bottom: .5em; +} +/* Declarations */ +.symbol.monospace { + color: var(--symbol-fg); + display: block; + white-space: normal; + position: relative; + padding-right: 24px; /* avoid the copy button */ + margin: 1em 0; +} +.symbol .top-right-position { + position: absolute; + top: 8px; + right: 8px; +} +/* "copy to clipboard" button */ +.copy-popup-wrapper { + display: none; + position: absolute; + z-index: 1000; + background: white; + width: max-content; + cursor: default; + border: 1px solid var(--border-light); + box-sizing: border-box; + box-shadow: 0px 5px 10px var(--border-light); + border-radius: 3px; + font-weight: normal; +} +.copy-popup-wrapper.active-popup { + display: flex; + align-items: center; +} +.copy-popup-wrapper.popup-to-left { + left: -14rem; +} +.copy-popup-wrapper svg { + padding: 8px; +} +.copy-popup-wrapper:last-child { + padding-right: 14px; +} + +/* Lists of definitions, e.g. doc @tags */ +dl { + background: transparent; + -webkit-box-shadow: none; + box-shadow: none; +} +dl > div > ol { + list-style-type: none; +} + +dl.attributes > dt { + display: block; + float: left; + font-style: italic; + font-weight: bold; +} +dl.attributes > dt.implicit { + font-weight: bold; + color: darkgreen; +} +dl.attributes > dd { + display: block; + padding-left: 10em; + margin-bottom: 5px; + min-height: 15px; +} + +/* params list documentation */ +dl.paramsdesc { + display: flex; + flex-flow: row wrap; +} +dl.paramsdesc dt { + flex-basis: 20%; + padding: 2px 0; + text-align: left; + font-weight: bold; +} +dl.paramsdesc dd { + flex-basis: 80%; + flex-grow: 1; + margin: 0; + padding: 2px 0; +} + +.platform-dependent-row dl.attributes > dd { + padding-left: 3em; +} + +/* Workaround for dynamically rendered content inside hidden tab. 
+There's some limitation of css/html that causes wrong width/height property of elements that are rendered dynamically inside element with display:none; +Same solution is already used in Dokka. +*/ +.platform-hinted[data-togglable="Type hierarchy"] > .content:not([data-active]), +.tabs-section-body > *[data-togglable="Type hierarchy"]:not([data-active]) { + display: block !important; + visibility: hidden; + height: 0; + position: fixed; + top: 0; +} + + +/* Footer */ +footer { + display: flex; + align-items: center; + position: relative; + margin-top: 1rem; + margin-left: var(--side-width); + width: calc(100% - var(--side-width)); + min-height: var(--footer-height); + border-top: 1px solid var(--border-light); +} +footer span.go-to-top-icon { + background-color: white; +} +footer > span:first-child { + margin-left: 24px; + padding-left: 0; +} +footer > span:last-child { + margin-right: 24px; + padding-right: 0; +} +footer > span { + padding: 0 16px; +} +footer .padded-icon { + padding-left: 0.5em; +} +footer .pull-right { + margin-left: auto; +} + +.modifiers { + width: 12em; + display: table-cell; + text-align: right; + padding-right: 0.5em; +} + +.documentableElement { + width: 100%; + color: var(--symbol-fg); + white-space: normal; + position: relative; + margin: 0; + padding: 5px 4px 5px 4px; + font-weight: 500; + font-size: 12px; + background: var(--code-bg); +} + +.documentableElement>div { + display: table; +} + +.annotations { + margin-left: 9em; +} +.documentableAnchor { + position: absolute; +} + +.documentableBrief { + font-size: 14px; +} + +.documentableBrief>p{ + margin: .5em 0 0 0; +} + +/* Large Screens */ +@media(min-width: 1100px) { + :root { + --content-padding: 24px 64px; + } +} +/* Landscape phones, portait tablets */ +@media(max-width: 768px) { + :root { + --content-padding: 12px 12px; + } + .cover h1 { + font-size: 32px; + } + table { + width: 100%; + } + pre, .symbol.monospace { + overflow-x: auto; + } + .symbol .top-right-position { + /* The "copy content" button doesn't work well with overflow-x */ + display: none; + } + footer > span:first-child { + margin-left: 12px; + } + footer > span:last-child { + margin-right: 12px; + } +} +/* Portrait phones */ +@media(max-width: 576px) { + :root { + --side-width: 0; + --content-padding: 48px 12px; + } + + /* Togglable left column */ + #leftColumn { + --side-width: 85vw; + margin-left: -85vw; /* closed by default */ + transition: margin .25s ease-out; + } + #leftColumn.open { + margin-left: 0; + } + #leftColumn.open ~ #main #searchBar { + display: none; + } + + #leftToggler { + display: unset; + position: absolute; + top: 5px; + left: 12px; + z-index: 5; + font-size: 30px; + } + #leftColumn.open ~ #main #leftToggler { + left: unset; + right: 5vw; + } + .icon-toggler::before { + content: "\e90a"; /* menu icon */ + } + #leftColumn.open ~ #main .icon-toggler::before { + content: "\e900"; /* clear icon */ + } + /* --- */ + + .search-content { + margin: 0 !important; + top: 9px !important; + right: 12px !important; + } + .cover h1 { + margin-top: 0; + } + .table-row { + padding-right: 0; + } + .main-subrow .anchor-icon { + display: none; + } +} + diff --git a/scala3doc/src/dotty/dokka/DottyDokkaConfig.scala b/scala3doc/src/dotty/dokka/DottyDokkaConfig.scala new file mode 100644 index 000000000000..b5190ebbdfae --- /dev/null +++ b/scala3doc/src/dotty/dokka/DottyDokkaConfig.scala @@ -0,0 +1,53 @@ +package dotty.dokka + +import org.jetbrains.dokka._ +import org.jetbrains.dokka.DokkaSourceSetImpl +import java.io.File +import 
java.util.{ List => JList, Map => JMap}
+import collection.JavaConverters._
+
+case class DottyDokkaConfig(docConfiguration: DocConfiguration) extends DokkaConfiguration:
+  override def getOutputDir: File = docConfiguration.args.output
+  override def getCacheRoot: File = null
+  override def getOfflineMode: Boolean = false
+  override def getFailOnWarning: Boolean = false
+  override def getSourceSets: JList[DokkaConfiguration.DokkaSourceSet] = List(mkSourceSet).asJava
+  override def getModules: JList[DokkaConfiguration.DokkaModuleDescription] = List().asJava
+  override def getPluginsClasspath: JList[File] = Nil.asJava
+  override def getModuleName(): String = "ModuleName"
+  override def getModuleVersion(): String = ""
+
+  private object OurConfig extends DokkaConfiguration.PluginConfiguration:
+    override def getFqPluginName = "ExternalDocsTooKey"
+    override def getSerializationFormat: DokkaConfiguration$SerializationFormat =
+      DokkaConfiguration$SerializationFormat.JSON.asInstanceOf[DokkaConfiguration$SerializationFormat]
+    override def getValues: String = docConfiguration.args.docsRoot.getOrElse("")
+
+  override def getPluginsConfiguration: JList[DokkaConfiguration.PluginConfiguration] = List(OurConfig).asJava
+
+  def mkSourceSet: DokkaConfiguration.DokkaSourceSet =
+    val sourceLinks: Set[SourceLinkDefinitionImpl] = docConfiguration.args.sourceLinks.map(SourceLinkDefinitionImpl.Companion.parseSourceLinkDefinition(_)).toSet
+    new DokkaSourceSetImpl(
+      /*displayName=*/ docConfiguration.args.name,
+      /*sourceSetID=*/ new DokkaSourceSetID(docConfiguration.args.name, "main"),
+      /*classpath=*/ Nil.asJava,
+      /*sourceRoots=*/ Set().asJava,
+      /*dependentSourceSets=*/ Set().asJava,
+      /*samples=*/ Set().asJava,
+      /*includes=*/ Set().asJava,
+      /*includeNonPublic=*/ true,
+      /*reportUndocumented=*/ false, /* changed because of an exception in reportUndocumentedTransformer - there's a 'when' which doesn't match because it contains only KotlinVisibility cases */
+      /*skipEmptyPackages=*/ false, // Now all our packages are empty from dokka's perspective
+      /*skipDeprecated=*/ true,
+      /*jdkVersion=*/ 8,
+      /*sourceLinks=*/ sourceLinks.asJava,
+      /*perPackageOptions=*/ Nil.asJava,
+      /*externalDocumentationLinks=*/ Set().asJava,
+      /*languageVersion=*/ null,
+      /*apiVersion=*/ null,
+      /*noStdlibLink=*/ true,
+      /*noJdkLink=*/ true,
+      /*suppressedFiles=*/ Set().asJava,
+      /*analysisPlatform=*/ Platform.jvm
+    ).asInstanceOf[DokkaConfiguration.DokkaSourceSet] // Why do I need to cast here? Kotlin magic?
+ diff --git a/scala3doc/src/dotty/dokka/DottyDokkaPlugin.scala b/scala3doc/src/dotty/dokka/DottyDokkaPlugin.scala new file mode 100644 index 000000000000..9373988a13fa --- /dev/null +++ b/scala3doc/src/dotty/dokka/DottyDokkaPlugin.scala @@ -0,0 +1,149 @@ +package dotty.dokka + +import org.jetbrains.dokka.plugability._ +import org.jetbrains.dokka.transformers.sources._ +import org.jetbrains.dokka.transformers.documentation.PreMergeDocumentableTransformer +import org.jetbrains.dokka.transformers.pages.PageTransformer + +import org.jetbrains.dokka.DokkaConfiguration +import org.jetbrains.dokka.{ DokkaConfiguration$DokkaSourceSet => DokkaSourceSet } +import org.jetbrains.dokka.model._ +import org.jetbrains.dokka.links._ +import org.jetbrains.dokka.model.doc._ +import org.jetbrains.dokka.base.parsers._ +import org.jetbrains.dokka.plugability.DokkaContext +import com.virtuslab.dokka.site.SourceSetWrapper +import com.virtuslab.dokka.site.JavaSourceToDocumentableTranslator +import collection.JavaConverters._ +import org.jetbrains.dokka.model.properties.PropertyContainer +import dotty.dokka.tasty.{DokkaTastyInspector, SbtDokkaTastyInspector} +import org.jetbrains.dokka.base.transformers.pages.comments.CommentsToContentConverter +import org.jetbrains.dokka.utilities.DokkaLogger +import org.jetbrains.dokka.base.signatures.SignatureProvider +import org.jetbrains.dokka.pages._ +import dotty.dokka.model.api._ +import org.jetbrains.dokka.CoreExtensions +import com.virtuslab.dokka.site.StaticSitePlugin +import org.jetbrains.dokka.base.DokkaBase +import com.virtuslab.dokka.site.ExtensionBuilderEx +import java.util.{List => JList} + +/** Main Dokka plugin for the doctool. + * + * Wires together classes responsible for consuming Tasty and generating + * documentation. + * + * Most of the work of parsing Tasty is done by [[DokkaTastyInspector]]. 
+ */ +class DottyDokkaPlugin extends DokkaJavaPlugin: + + lazy val dokkaBase = plugin(classOf[DokkaBase]) + lazy val dokkaSitePlugin = plugin(classOf[StaticSitePlugin]) + + val provideMembers = extend( + _.extensionPoint(CoreExtensions.INSTANCE.getSourceToDocumentableTranslator) + .fromInstance(EmptyModuleProvider) + .overrideExtension(dokkaBase.getPsiToDocumentableTranslator) + ) + + // Just turn off another translator since multiple overrides does not work + val disableDescriptorTranslator = extend( + _.extensionPoint(CoreExtensions.INSTANCE.getSourceToDocumentableTranslator) + .fromInstance(ScalaModuleProvider) + .overrideExtension(dokkaBase.getDescriptorToDocumentableTranslator) + .name("disableDescriptorTranslator") + ) + + // Clean up empty module provided in disableDescriptorTranslator + val cleanUpEmptyModules = extend( + _.extensionPoint(CoreExtensions.INSTANCE.getPreMergeDocumentableTransformer) + .fromInstance(_.asScala.filterNot(_.getName.isEmpty).asJava) + ) + + val ourSignatureProvider = extend( + _.extensionPoint(dokkaBase.getSignatureProvider) + .fromRecipe(ctx => + new ScalaSignatureProvider(ctx.single(dokkaBase.getCommentsToContentConverter), ctx.getLogger) + ).overrideExtension(dokkaBase.getKotlinSignatureProvider) + ) + + val scalaResourceInstaller = extend( + _.extensionPoint(dokkaBase.getHtmlPreprocessors) + .fromInstance(new ScalaResourceInstaller()) + .after(dokkaBase.getCustomResourceInstaller) + ) + + val scalaEmbeddedResourceAppender = extend( + _.extensionPoint(dokkaBase.getHtmlPreprocessors) + .fromInstance(new ScalaEmbeddedResourceAppender()) + .after(dokkaBase.getCustomResourceInstaller) + .name("scalaEmbeddedResourceAppender") + ) + + val scalaDocumentableToPageTranslator = extend( + _.extensionPoint(CoreExtensions.INSTANCE.getDocumentableToPageTranslator) + .fromRecipe(ctx => ScalaDocumentableToPageTranslator( + ctx.single(dokkaBase.getCommentsToContentConverter), + ctx.single(dokkaBase.getSignatureProvider), + ctx.getLogger + )) + .overrideExtension(dokkaBase.getDocumentableToPageTranslator) + ) + + val packageHierarchyTransformer = extend( + _.extensionPoint(CoreExtensions.INSTANCE.getPageTransformer) + .fromRecipe(PackageHierarchyTransformer(_)) + .before(dokkaBase.getRootCreator) + ) + + val inheritanceTransformer = extend( + _.extensionPoint(CoreExtensions.INSTANCE.getDocumentableTransformer) + .fromRecipe(InheritanceInformationTransformer(_)) + .name("inheritanceTransformer") + ) + + val ourSourceLinksTransformer = extend( + _.extensionPoint(CoreExtensions.INSTANCE.getDocumentableTransformer) + .fromRecipe(ctx => ScalaSourceLinksTransformer( + ctx, + ctx.single(dokkaBase.getCommentsToContentConverter), + ctx.single(dokkaBase.getSignatureProvider), + ctx.getLogger + ) + ) + ) + + val ourRenderer = extend( + _.extensionPoint(CoreExtensions.INSTANCE.getRenderer) + .fromRecipe(ScalaHtmlRenderer(_)) + .overrideExtension(dokkaSitePlugin.getCustomRenderer) + ) + + val commentsToContentConverter = extend( + _.extensionPoint(dokkaBase.getCommentsToContentConverter) + .fromInstance(ScalaCommentToContentConverter) + .overrideExtension(dokkaBase.getDocTagToContentConverter) + ) + + val implicitMembersExtensionTransformer = extend( + _.extensionPoint(CoreExtensions.INSTANCE.getDocumentableTransformer ) + .fromRecipe(ImplicitMembersExtensionTransformer(_)) + .name("implicitMembersExtensionTransformer") + ) + + val muteDefaultSourceLinksTransformer = extend( + _.extensionPoint(CoreExtensions.INSTANCE.getPageTransformer) + .fromInstance(new PageTransformer { + 
override def invoke(root: RootPageNode) = root + }) + .overrideExtension(dokkaBase.getSourceLinksTransformer) + .name("muteDefaultSourceLinksTransformer") + ) + +// TODO remove once problem is fixed in Dokka +extension [T] (builder: ExtensionBuilder[T]): + def before(exts: Extension[_, _, _]*): ExtensionBuilder[T] = + (new ExtensionBuilderEx).newOrdering(builder, exts.toArray, Array.empty) + + def after(exts: Extension[_, _, _]*): ExtensionBuilder[T] = + (new ExtensionBuilderEx).newOrdering(builder, Array.empty, exts.toArray) \ No newline at end of file diff --git a/scala3doc/src/dotty/dokka/IO.java b/scala3doc/src/dotty/dokka/IO.java new file mode 100644 index 000000000000..d50ac777610e --- /dev/null +++ b/scala3doc/src/dotty/dokka/IO.java @@ -0,0 +1,28 @@ +package dotty.dokka; + +import java.io.*; +import java.nio.file.*; +import java.nio.file.attribute.BasicFileAttributes; + +/** This code is mostly using public snippets and tries to mimic sbt-io api. */ +public class IO { + public static void delete(File pathToBeDeleted) throws IOException { + Files.walkFileTree(pathToBeDeleted.toPath(), + new SimpleFileVisitor<Path>() { + @Override + public FileVisitResult postVisitDirectory( + Path dir, IOException exc) throws IOException { + Files.delete(dir); + return FileVisitResult.CONTINUE; + } + + @Override + public FileVisitResult visitFile( + Path file, BasicFileAttributes attrs) + throws IOException { + Files.delete(file); + return FileVisitResult.CONTINUE; + } + }); + } +} \ No newline at end of file diff --git a/scala3doc/src/dotty/dokka/Main.scala b/scala3doc/src/dotty/dokka/Main.scala new file mode 100644 index 000000000000..581c2c111ecf --- /dev/null +++ b/scala3doc/src/dotty/dokka/Main.scala @@ -0,0 +1,154 @@ +package dotty.dokka + +import org.jetbrains.dokka._ +import org.jetbrains.dokka.utilities._ +import org.jetbrains.dokka.plugability._ +import java.util.ServiceLoader +import java.io.File +import java.util.jar._ +import collection.JavaConverters._ +import collection.immutable.ArraySeq +import java.util.{List => JList} + +import scala.tasty.Reflection +import scala.tasty.inspector.TastyInspector +import java.nio.file.Files + +import org.kohsuke.args4j.{CmdLineParser, Option => COption} +import org.kohsuke.args4j.spi.StringArrayOptionHandler + +class RawArgs: + @COption(name="--tastyRoots", required = true, aliases = Array("-t"), usage="Roots where tools should look for tasty files") + protected var tastyRoots: String = null + + @COption(name="--dest",required = true, aliases = Array("-d"), usage="Output to generate documentation to") + protected var output: String = "output" + + @COption(name="--classpath", aliases = Array("--cp", "-c"), usage="Classpath to load dependencies from") + protected var classpath: String = System.getProperty("java.class.path") + + @COption(name="--name", required = true, aliases = Array("-n"), usage="Name of module in generated documentation") + protected var name: String = "main" + + @COption(name="--docs", aliases = Array("-p"), usage="Root of project docs") + private var docsRoot: String = null + + @COption(name="--sources", handler = classOf[StringArrayOptionHandler], aliases = Array("-s"), usage = "Links to source files provided in convention: local_directory=remote_directory#line_suffix") + private var sourceLinks: JList[String] = null + + @COption(name="--projectTitle") + protected var projectTitle: String = null + + @COption(name="--projectVersion") + protected var projectVersion: String = null + + @COption(name="--projectLogo") + protected var 
projectLogo: String = null + + @COption(name="--syntax") + protected var syntax: String = null + + def toArgs = + val parsedSyntax = syntax match + case null => None + case other => + Args.CommentSyntax.fromString(other) match + case None => + sys.error(s"unrecognized value for --syntax option: $other") + case some => some + + Args( + name, + tastyRoots.split(File.pathSeparatorChar).toList.map(new File(_)), + classpath, + new File(output), + Option(docsRoot), + projectVersion, + Option(projectTitle), + Option(projectLogo), + parsedSyntax, + Option(sourceLinks).map(_.asScala.toList).getOrElse(List.empty) + ) + + +case class Args( + name: String, + tastyRoots: Seq[File], + classpath: String, + output: File, + docsRoot: Option[String], + projectVersion: String, + projectTitle: Option[String], + projectLogo: Option[String], + defaultSyntax: Option[Args.CommentSyntax], + sourceLinks: List[String] +) + +object Args: + enum CommentSyntax: + case Wiki + case Markdown + + object CommentSyntax: + def fromString(str: String): Option[CommentSyntax] = + str match + case "wiki" => Some(Wiki) + case "markdown" => Some(Markdown) + case _ => None +end Args + +import dotty.tools.dotc.core.Contexts.{Context => DottyContext} +trait BaseDocConfiguration: + val args: Args + val tastyFiles: List[String] + +enum DocConfiguration extends BaseDocConfiguration: + case Standalone(args: Args, tastyFiles: List[String], tastyJars: List[String]) + case Sbt(args: Args, tastyFiles: List[String], rootCtx: DottyContext) + +/** Main class for the doctool. + * + * The `main` method is mostly responsible just for parsing arguments and + * configuring Dokka. After that, we hand control to Dokka. + * + * Other important classes: + * + * - [](package.DottyDokkaPlugin) is our class that Dokka calls back and which + * actually generates the documentation. + * - [](package.DottyDokkaConfig) is our config for Dokka. + */ +object Main: + def main(args: Array[String]): Unit = + try + val rawArgs = new RawArgs + new CmdLineParser(rawArgs).parseArgument(args:_*) + val parsedArgs = rawArgs.toArgs + + val (files, dirs) = parsedArgs.tastyRoots.partition(_.isFile) + val (providedTastyFiles, jars) = files.toList.map(_.getAbsolutePath).partition(_.endsWith(".tasty")) + jars.foreach(j => if(!j.endsWith(".jar")) sys.error(s"Provided file $j is not jar not tasty file") ) + + + def listTastyFiles(f: File): Seq[String] = + val (files, dirs) = f.listFiles().partition(_.isFile) + ArraySeq.unsafeWrapArray( + files.filter(_.getName.endsWith(".tasty")).map(_.toString) ++ dirs.flatMap(listTastyFiles) + ) + val tastyFiles = providedTastyFiles ++ dirs.flatMap(listTastyFiles) + + val config = DocConfiguration.Standalone(parsedArgs, tastyFiles, jars) + + if (parsedArgs.output.exists()) IO.delete(parsedArgs.output) + + // TODO #20 pass options, classpath etc. 
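+      // Hand control over to Dokka here: DottyDokkaConfig adapts the parsed Args to
+      // Dokka's DokkaConfiguration, and DottyDokkaPlugin is the plugin Dokka calls
+      // back into to generate the actual documentation from TASTy.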
+      new DokkaGenerator(new DottyDokkaConfig(config), DokkaConsoleLogger.INSTANCE).generate()
+
+      println("Done")
+
+      // Sometimes the JVM hangs, so we want to be sure that we force shut down the JVM
+      sys.exit(0)
+    catch
+      case a: Exception =>
+        a.printStackTrace()
+        // Sometimes the JVM hangs, so we want to be sure that we force shut down the JVM
+        sys.exit(1)
diff --git a/scala3doc/src/dotty/dokka/ScalaModuleCreator.scala b/scala3doc/src/dotty/dokka/ScalaModuleCreator.scala
new file mode 100644
index 000000000000..188f7c21c093
--- /dev/null
+++ b/scala3doc/src/dotty/dokka/ScalaModuleCreator.scala
@@ -0,0 +1,56 @@
+package dotty.dokka
+
+import org.jetbrains.dokka.{ DokkaConfiguration$DokkaSourceSet => DokkaSourceSet }
+import com.virtuslab.dokka.site.JavaSourceToDocumentableTranslator
+import com.virtuslab.dokka.site.SourceSetWrapper
+import org.jetbrains.dokka.plugability.DokkaContext
+import org.jetbrains.dokka.model.properties.PropertyContainer
+
+import dotty.dokka.tasty.{DokkaTastyInspector, SbtDokkaTastyInspector}
+import org.jetbrains.dokka.pages._
+import dotty.dokka.model.api._
+import org.jetbrains.dokka.model._
+import org.jetbrains.dokka.links.DRI
+import java.util.{List => JList}
+import org.jetbrains.dokka.base.parsers.MarkdownParser
+import collection.JavaConverters._
+
+object ScalaModuleProvider extends JavaSourceToDocumentableTranslator:
+  override def process(rawSourceSet: DokkaSourceSet, cxt: DokkaContext) =
+    val sourceSet = SourceSetWrapper(rawSourceSet)
+    cxt.getConfiguration match
+      case dottyConfig: DottyDokkaConfig =>
+        val result = dottyConfig.docConfiguration match {
+          case DocConfiguration.Standalone(args, tastyFiles, jars) =>
+            // TODO use it to resolve link logic
+            val inspector = DokkaTastyInspector(sourceSet, new MarkdownParser(_ => null), dottyConfig)
+            inspector.inspectAllTastyFiles(tastyFiles, jars, args.classpath.split(java.io.File.pathSeparator).toList)
+            inspector.result()
+          case DocConfiguration.Sbt(args, tastyFiles, rootCtx) =>
+            val inspector =
+              SbtDokkaTastyInspector(
+                sourceSet,
+                // new MarkdownParser(null, null, cxt.getLogger),
+                dottyConfig,
+                tastyFiles,
+                rootCtx,
+              )
+            inspector.run()
+        }
+
+        def flattenMember(m: Member): Seq[(DRI, Member)] = (m.dri -> m) +: m.allMembers.flatMap(flattenMember)
+
+        new DModule(
+          sourceSet.getSourceSet.getDisplayName,
+          result.asJava,
+          Map().asJava,
+          null,
+          sourceSet.toSet,
+          PropertyContainer.Companion.empty() plus ModuleExtension(result.flatMap(flattenMember).toMap)
+        )
+      case _ =>
+        ???
+ +object EmptyModuleProvider extends JavaSourceToDocumentableTranslator: + override def process(sourceSet: DokkaSourceSet, context: DokkaContext) = + DModule("", Nil.asJava, Map.empty.asJava, null, Set(sourceSet).asJava, PropertyContainer.Companion.empty()) \ No newline at end of file diff --git a/scala3doc/src/dotty/dokka/model/api/api.scala b/scala3doc/src/dotty/dokka/model/api/api.scala new file mode 100644 index 000000000000..4fa950ab94eb --- /dev/null +++ b/scala3doc/src/dotty/dokka/model/api/api.scala @@ -0,0 +1,141 @@ +package dotty.dokka +package model +package api + +import org.jetbrains.dokka.DokkaConfiguration$DokkaSourceSet +import org.jetbrains.dokka.links._ +import org.jetbrains.dokka.model._ +import collection.JavaConverters._ +import org.jetbrains.dokka.links._ +import org.jetbrains.dokka.model.doc._ +import org.jetbrains.dokka.model.properties._ +import org.jetbrains.dokka.pages._ +import java.util.{List => JList, Set => JSet} + + +enum Visibility(val name: String): + case Unrestricted extends Visibility("") + case Protected(scope: VisibilityScope) extends Visibility("protected") + case Private(scope: VisibilityScope) extends Visibility("private") + + def asSignature = this match + case Unrestricted => "" + case Protected(scope) => s"protected${visibilityScopeToString(scope)}" + case Private(scope) => s"private${visibilityScopeToString(scope)}" + + + private def visibilityScopeToString(scope: VisibilityScope) = scope match + case VisibilityScope.ImplicitTypeScope | VisibilityScope.ImplicitModuleScope => "" + case VisibilityScope.ExplicitTypeScope(name) => s"[$name]" + case VisibilityScope.ExplicitModuleScope(name) => s"[$name]" + case VisibilityScope.ThisScope => "[this]" + +enum VisibilityScope: + case ImplicitTypeScope // private/protected inside a class or a trait + case ImplicitModuleScope // private/protected inside a package or an object + case ExplicitTypeScope(typeName: String) // private[X]/protected[X] inside a class or a trait + case ExplicitModuleScope(moduleName: String) // private[X]/protected[X] inside a package or an object + case ThisScope // private[this]/protected[this] + +enum Modifier(val name: String, val prefix: Boolean): + case Abstract extends Modifier("abstract", true) + case Final extends Modifier("final", true) + case Empty extends Modifier("", true) + case Sealed extends Modifier("sealed", true) + case Case extends Modifier("case", false) + case Implicit extends Modifier("implicit", true) + case Inline extends Modifier("inline", true) + case Lazy extends Modifier("lazy", true) + case Override extends Modifier("override", true) + case Erased extends Modifier("erased", true) + case Opaque extends Modifier("opaque", true) + case Open extends Modifier("open", true) + +case class ExtensionTarget(name: String, signature: Signature, dri: DRI) +case class ImplicitConversion(from: DRI, to: DRI) +trait ImplicitConversionProvider { def conversion: Option[ImplicitConversion] } +trait Classlike + +enum Kind(val name: String){ + case Class extends Kind("class") with Classlike + case Object extends Kind("object") with Classlike + case Trait extends Kind("trait") with Classlike + case Enum extends Kind("enum") with Classlike + case EnumCase extends Kind("case") + case Def extends Kind("def") + case Extension(on: ExtensionTarget) extends Kind("def") + case Constructor extends Kind("def") + case Var extends Kind("var") + case Val extends Kind("val") + case Type(concreate: Boolean, opaque: Boolean) extends Kind("Type") // should we handle opaque as modifier? 
+ case Given(as: Option[Signature], conversion: Option[ImplicitConversion]) extends Kind("Given") with ImplicitConversionProvider + case Implicit(kind: Kind, conversion: Option[ImplicitConversion]) extends Kind(kind.name) with ImplicitConversionProvider + case Unknown extends Kind("Unknown") +} + +enum Origin: + case InheritedFrom(name: String, dri: DRI) + case ImplicitlyAddedBy(name: String, dri: DRI) + case ExtensionFrom(name: String, dri: DRI) + case DefinedWithin + +case class Annotation(val dri: DRI, val params: List[Annotation.AnnotationParameter]) + +object Annotation: + sealed trait AnnotationParameter + case class PrimitiveParameter(val name: Option[String] = None, val value: String) extends AnnotationParameter + case class LinkParameter(val name: Option[String] = None, val dri: DRI, val value: String) extends AnnotationParameter + case class UnresolvedParameter(val name: Option[String] = None, val unresolvedText: String) extends AnnotationParameter + +// TODO (longterm) properly represent signatures +case class Link(name: String, dri: DRI) +type Signature = Seq[String | Link]// TODO migrate tupes to Links + +object Signature: + def apply(names: (String | Link)*): Signature = names // TO batter dotty shortcommings in union types + +extension (s: Signature): + def join(a: Signature): Signature = s ++ a + +case class LinkToType(signature: Signature, dri: DRI, kind: Kind) + +case class HierarchyDiagram(edges: Seq[Edge]) +case class Vertex(val id: Int, val body: LinkToType) +case class Edge(val from: Vertex, val to: Vertex) + + +type Member = Documentable // with WithExtraProperty[_] // Kotlin does not add generics to ExtraProperty implemented by e.g. DFunction + +object Member: + def unapply(d: Documentable): Option[(String, DRI, Visibility, Kind, Origin)] = + d.memberExt.map(v => (d.getName, d.getDri, v.visibility, v.kind, v.origin)) + +extension[T] (member: Member): + + private[api] def memberExt = MemberExtension.getFrom(member) + + private[api] def compositeMemberExt = CompositeMemberExtension.getFrom(member) + + def visibility: Visibility = memberExt.fold(Visibility.Unrestricted)(_.visibility) + + def signature: Signature = memberExt.fold(Signature(name))(_.signature) + def asLink: LinkToType = LinkToType(signature, dri, kind) + + def modifiers: Seq[dotty.dokka.model.api.Modifier] = memberExt.fold(Nil)(_.modifiers) + def kind: Kind = memberExt.fold(Kind.Unknown)(_.kind) + def origin: Origin = memberExt.fold(Origin.DefinedWithin)(_.origin) + def annotations: List[Annotation] = memberExt.fold(Nil)(_.annotations) + def name = member.getName + def dri = member.getDri + + // TODO rename parent and knownChildren + def allMembers: Seq[Member] = compositeMemberExt.fold(Nil)(_.members) + def parents: Seq[LinkToType] = compositeMemberExt.fold(Nil)(_.parents) + def directParents: Seq[Signature] = compositeMemberExt.fold(Nil)(_.directParents) + def knownChildren: Seq[LinkToType] = compositeMemberExt.fold(Nil)(_.knownChildren) + + def membersBy(op: Member => Boolean): (Seq[Member], Seq[Member]) = allMembers.filter(op).partition(_.origin == Origin.DefinedWithin) + + +extension (module: DModule): + def driMap: Map[DRI, Member] = ModuleExtension.getFrom(module).fold(Map.empty)(_.driMap) diff --git a/scala3doc/src/dotty/dokka/model/api/internalExtensions.scala b/scala3doc/src/dotty/dokka/model/api/internalExtensions.scala new file mode 100644 index 000000000000..07cd5c847ed5 --- /dev/null +++ b/scala3doc/src/dotty/dokka/model/api/internalExtensions.scala @@ -0,0 +1,114 @@ +package dotty.dokka 
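A short usage sketch for the member model defined in api.scala above, assuming the scala3doc classes are on the classpath; it only exercises code introduced in this patch (`Visibility.asSignature` and `Signature`/`join`), and the values shown are illustrative:

```scala
import dotty.dokka.model.api.{Signature, Visibility, VisibilityScope}

object ApiSketch:
  // Rendering visibilities back to their source form.
  val rendered: Seq[String] = Seq(
    Visibility.Unrestricted,
    Visibility.Private(VisibilityScope.ThisScope),
    Visibility.Protected(VisibilityScope.ExplicitModuleScope("api"))
  ).map(_.asSignature) // Seq("", "private[this]", "protected[api]")

  // Signatures are just sequences of text (and links) that can be concatenated.
  val sig: Signature = Signature("def ", "size", ": ").join(Signature("Int"))
  // sig == Seq("def ", "size", ": ", "Int")
```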
+package model +package api + +import org.jetbrains.dokka.DokkaConfiguration$DokkaSourceSet +import org.jetbrains.dokka.links._ +import org.jetbrains.dokka.model.{Projection => JProjection} +import org.jetbrains.dokka.model.Documentable +import org.jetbrains.dokka.model.DFunction +import org.jetbrains.dokka.model.DClass +import org.jetbrains.dokka.model.DocumentableSource +import org.jetbrains.dokka.model.Dynamic +import org.jetbrains.dokka.model.Bound +import org.jetbrains.dokka.model.TypeConstructor +import org.jetbrains.dokka.model.TypeParameter +import org.jetbrains.dokka.model.UnresolvedBound +import org.jetbrains.dokka.model.DPackage +import org.jetbrains.dokka.model.DModule + +import collection.JavaConverters._ +import org.jetbrains.dokka.links._ +import org.jetbrains.dokka.model.doc.DocumentationNode +import org.jetbrains.dokka.model.properties._ +import java.util.{List => JList, Set => JSet} + +import com.virtuslab.dokka.site.SourceSetWrapper + +private [model] case class MemberExtension( + visibility: Visibility, + modifiers: Seq[dotty.dokka.model.api.Modifier], + kind: Kind, + val annotations: List[Annotation], + signature: Signature, + origin: Origin = Origin.DefinedWithin +) extends ExtraProperty[Documentable]: + override def getKey = MemberExtension + +object MemberExtension extends BaseKey[Documentable, MemberExtension]: + val empty = MemberExtension(Visibility.Unrestricted, Nil, Kind.Unknown, Nil, Nil) + +case class CompositeMemberExtension( + members : Seq[Member] = Nil, + directParents: Seq[Signature] = Nil, + parents: Seq[LinkToType] = Nil, + knownChildren: Seq[LinkToType] = Nil +) extends ExtraProperty[Documentable]: + override def getKey = CompositeMemberExtension + +object CompositeMemberExtension extends BaseKey[Documentable, CompositeMemberExtension]: + val empty = CompositeMemberExtension() + + override def mergeStrategyFor(left: CompositeMemberExtension, right: CompositeMemberExtension) = + new MergeStrategy$Replace(left.copy(members = left.members ++ right.members)) + .asInstanceOf[MergeStrategy[Documentable]] + +extension (member: Member): + private def putInMember(ext: MemberExtension) = + val memberWithExtra = member.asInstanceOf[WithExtraProperties[Member]] + memberWithExtra.withNewExtras(memberWithExtra.getExtra plus ext).asInstanceOf[Member] + + private def putInCompositeMember(ext: CompositeMemberExtension) = + val memberWithExtra = member.asInstanceOf[WithExtraProperties[Member]] + memberWithExtra.withNewExtras(memberWithExtra.getExtra plus ext).asInstanceOf[Member] + + def copy(modifiers: Seq[Modifier]) = + val ext = MemberExtension.getFrom(member).getOrElse(MemberExtension.empty).copy(modifiers = modifiers) + putInMember(ext) + + def withOrigin(origin: Origin) = + val ext = MemberExtension.getFrom(member).getOrElse(MemberExtension.empty).copy(origin = origin) + putInMember(ext) + + def withKind(kind: Kind) = + val ext = MemberExtension.getFrom(member).getOrElse(MemberExtension.empty).copy(kind = kind) + putInMember(ext) + + def withMembers(newMembers: Seq[Member]): Member = + val original = member.compositeMemberExt.getOrElse(CompositeMemberExtension()) + val newExt = original.copy(members = newMembers) + putInCompositeMember(newExt) + + def withNewMembers(newMembers: Seq[Member]): Member = + val original = member.compositeMemberExt.getOrElse(CompositeMemberExtension()) + val newExt = original.copy(members = original.members ++ newMembers) + putInCompositeMember(newExt) + + def withKnownChildren(knownChildren: Seq[LinkToType]): Member = + val original = 
member.compositeMemberExt.getOrElse(CompositeMemberExtension()) + val newExt = original.copy(knownChildren = knownChildren) + putInCompositeMember(newExt) + + def updateRecusivly(op: Member => Member) = op(member).withMembers(member.allMembers.map(op)) + +extension (bound: Bound): + def asSignature: Signature = bound match + case tc: TypeConstructor => + tc.getProjections.asScala.toSeq.map { + case txt: UnresolvedBound => txt.getName + case link: TypeParameter => + Link(link.getName, link.getDri) + } + +extension (m: DModule): + def updatePackages(op: Seq[DPackage] => Seq[DPackage]): DModule = + m.copy( + m.getName, + op(m.getPackages.asScala.toSeq).asJava, + m.getDocumentation, + m.getExpectPresentInSet, + m.getSourceSets, + m.getExtra + ) + + def updateMembers(op: Member => Member): DModule = updatePackages(_.map(p => p.updateRecusivly(op).asInstanceOf[DPackage])) diff --git a/scala3doc/src/dotty/dokka/model/extras.scala b/scala3doc/src/dotty/dokka/model/extras.scala new file mode 100644 index 000000000000..8cd6948f36e5 --- /dev/null +++ b/scala3doc/src/dotty/dokka/model/extras.scala @@ -0,0 +1,55 @@ +package dotty.dokka + +import org.jetbrains.dokka.DokkaConfiguration$DokkaSourceSet +import org.jetbrains.dokka.links._ +import org.jetbrains.dokka.model.{Projection => JProjection} +import org.jetbrains.dokka.model._ +import org.jetbrains.dokka.pages._ +import collection.JavaConverters._ +import org.jetbrains.dokka.links._ +import org.jetbrains.dokka.model.doc._ +import org.jetbrains.dokka.model.properties._ +import java.util.{List => JList, Set => JSet} +import dotty.dokka.model.api._ + +case class ModuleExtension(driMap: Map[DRI, Member]) extends ExtraProperty[DModule]: + override def getKey = ModuleExtension + +object ModuleExtension extends BaseKey[DModule, ModuleExtension] + +case class MethodExtension(parametersListSizes: Seq[Int]) extends ExtraProperty[DFunction]: + override def getKey = MethodExtension + +object MethodExtension extends BaseKey[DFunction, MethodExtension] + +case class ParameterExtension(isExtendedSymbol: Boolean, isGrouped: Boolean) extends ExtraProperty[DParameter]: + override def getKey = ParameterExtension + +object ParameterExtension extends BaseKey[DParameter, ParameterExtension] + +case class ClasslikeExtension( + constructor: Option[DFunction], // will be replaced by signature + companion: Option[DRI], // moved to kind? 
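The helpers in internalExtensions.scala all follow one copy-on-write pattern: read the current extra property (or an empty default), copy it with a single field changed, and attach the result to a copy of the member. A simplified model of that pattern, with hypothetical types rather than the Dokka `ExtraProperty` machinery:

```scala
// Sketch only: `MemberExt`/`Member` stand in for MemberExtension/Documentable.
object ExtraPropertySketch:
  final case class MemberExt(kind: String = "unknown", origin: String = "defined within")

  final case class Member(name: String, ext: Option[MemberExt] = None):
    private def updated(f: MemberExt => MemberExt): Member =
      copy(ext = Some(f(ext.getOrElse(MemberExt()))))
    def withKind(kind: String): Member     = updated(_.copy(kind = kind))
    def withOrigin(origin: String): Member = updated(_.copy(origin = origin))

  val m = Member("size").withKind("def").withOrigin("inherited from Seq")
  // m.ext == Some(MemberExt("def", "inherited from Seq"))
```

Because every step returns a fresh copy, the calls compose freely, which is how the parser layers kind, origin, and modifiers onto the same documentable.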
+) extends ExtraProperty[DClasslike]: + override def getKey = ClasslikeExtension + +object ClasslikeExtension extends BaseKey[DClasslike, ClasslikeExtension] + + +case class SourceLinks( + links: Map[DokkaConfiguration$DokkaSourceSet, String] +) extends ExtraProperty[Documentable]: + override def getKey = SourceLinks + +object SourceLinks extends BaseKey[Documentable, SourceLinks] + +// case class ImplicitConversions(val conversions: List[ImplicitConversion]) extends ExtraProperty[WithScope]: +// override def getKey = ImplicitConversions + +// object ImplicitConversions extends BaseKey[WithScope, ImplicitConversions] + + +case class IsInherited(flag: Boolean) extends ExtraProperty[Documentable]: + override def getKey = IsInherited + +object IsInherited extends BaseKey[Documentable, IsInherited] diff --git a/scala3doc/src/dotty/dokka/model/scalaModel.scala b/scala3doc/src/dotty/dokka/model/scalaModel.scala new file mode 100644 index 000000000000..86e139985b0c --- /dev/null +++ b/scala3doc/src/dotty/dokka/model/scalaModel.scala @@ -0,0 +1,129 @@ +package dotty.dokka + +import org.jetbrains.dokka.DokkaConfiguration$DokkaSourceSet +import org.jetbrains.dokka.links._ +import org.jetbrains.dokka.model._ +import collection.JavaConverters._ +import org.jetbrains.dokka.links._ +import org.jetbrains.dokka.model.doc._ +import org.jetbrains.dokka.model.properties._ +import org.jetbrains.dokka.pages._ +import java.util.{List => JList, Set => JSet} +import dotty.dokka.model.api.Signature +import dotty.dokka.model.api.HierarchyDiagram + +case class TastyDocumentableSource(val path: String, val lineNumber: Int) extends DocumentableSource { + override def getPath = path +} + +enum TableStyle extends org.jetbrains.dokka.pages.Style: + case Borderless + case DescriptionList + case NestedDescriptionList + +case class HtmlContentNode( + val body: String, + val dci: DCI, + val sourceSets: Set[DisplaySourceSet], + val style: Set[Style], + val extra: PropertyContainer[ContentNode] = PropertyContainer.Companion.empty +) extends ContentNode: + override def getDci = dci + override def getSourceSets = sourceSets.asJava + override def getStyle = style.asJava + override def hasAnyContent = !body.isEmpty + def withSourceSets(sourceSets: JSet[DisplaySourceSet]) = copy(sourceSets = sourceSets.asScala.toSet) + override def getChildren: JList[ContentNode] = Nil.asJava + override def getExtra = extra + override def withNewExtras(p: PropertyContainer[ContentNode]) = copy(extra = p) + +class ScalaTagWrapper(root: DocTag) extends TagWrapper(null): + override def getRoot = root + +object ScalaTagWrapper { + + case class See(root: DocTag) extends ScalaTagWrapper(root) + case class Todo(root: DocTag) extends ScalaTagWrapper(root) + case class Note(root: DocTag) extends ScalaTagWrapper(root) + case class Example(root: DocTag) extends ScalaTagWrapper(root) + case class NestedNamedTag( + name: String, + subname: String, + identTag: DocTag, + descTag: DocTag + ) extends NamedTagWrapper(null): + override def getName = name + override def getRoot = descTag +} + +case class ImplicitConversion(conversion: Documentable, from: DRI, to: DRI) + +case class HierarchyDiagramContentNode( + val diagram: HierarchyDiagram, + val dci: DCI, + val sourceSets: Set[DisplaySourceSet], + val style: Set[Style], + val extra: PropertyContainer[ContentNode] = PropertyContainer.Companion.empty +) extends ContentNode: + override def getDci = dci + override def getSourceSets = sourceSets.asJava + override def getStyle = style.asJava + override def hasAnyContent 
= !diagram.edges.isEmpty + def withSourceSets(sourceSets: JSet[DisplaySourceSet]) = copy(sourceSets = sourceSets.asScala.toSet) + override def getChildren: JList[ContentNode] = Nil.asJava + override def getExtra = extra + override def withNewExtras(p: PropertyContainer[ContentNode]) = copy(extra = p) + +case class ContentNodeParams( + val dci: DCI, + val sourceSets: java.util.Set[DisplaySourceSet], + val style: Set[Style], + val extra: PropertyContainer[ContentNode] = PropertyContainer.Companion.empty +): + def dri = dci.getDri.asScala.head + +abstract class ScalaContentNode(params: ContentNodeParams) extends ContentNode: + def newInstance(params: ContentNodeParams): ScalaContentNode + + override def getDci = params.dci + override def getSourceSets = params.sourceSets + override def getStyle = params.style.asJava + override def hasAnyContent = true + def withSourceSets(sourceSets: JSet[DisplaySourceSet]) = + newInstance(params.copy(sourceSets = sourceSets)) + override def getChildren: JList[ContentNode] = Nil.asJava + override def getExtra = params.extra + override def withNewExtras(p: PropertyContainer[ContentNode]) = newInstance(params.copy(extra = p)) + +case class DocumentableElement( + annotations: Signature, + modifiers: Signature, + name: String, + signature: Signature, + brief: Seq[ContentNode], + originInfo: Signature, + attributes: Map[String, String], + params: ContentNodeParams +) extends ScalaContentNode(params): + override def newInstance(params: ContentNodeParams) = copy(params = params) + +case class DocumentableElementGroup( + header: Signature, + elements: Seq[DocumentableElement], + params: ContentNodeParams +) extends ScalaContentNode(params): + override def newInstance(params: ContentNodeParams) = copy(params = params) + override def hasAnyContent = elements.nonEmpty + override def getChildren: JList[ContentNode] = elements.asJava + +case class DocumentableList( + groupName: Signature, + elements: Seq[DocumentableElement | DocumentableElementGroup], + params: ContentNodeParams +) extends ScalaContentNode(params): + override def newInstance(params: ContentNodeParams) = copy(params = params) + override def hasAnyContent = elements.nonEmpty + override def getChildren: JList[ContentNode] = elements.asJava + +case class DocumentableFilter(params: ContentNodeParams) extends ScalaContentNode(params): + override def newInstance(params: ContentNodeParams) = copy(params = params) diff --git a/scala3doc/src/dotty/dokka/preprocessors/ScalaEmbeddedResourceApppender.scala b/scala3doc/src/dotty/dokka/preprocessors/ScalaEmbeddedResourceApppender.scala new file mode 100644 index 000000000000..865df4c64abe --- /dev/null +++ b/scala3doc/src/dotty/dokka/preprocessors/ScalaEmbeddedResourceApppender.scala @@ -0,0 +1,45 @@ +package dotty.dokka + +import org.jetbrains.dokka.transformers.pages.{PageTransformer} +import org.jetbrains.dokka.pages.{RootPageNode, PageNode} +import scala.jdk.CollectionConverters._ + +class ScalaEmbeddedResourceAppender extends PageTransformer { + override def invoke(input: RootPageNode): RootPageNode = + input.transformContentPagesTree(page => + page.modified( + page.getName, + page.getContent, + page.getDri, + // Remove default CSS and add our own + (page.getEmbeddedResources.asScala + .filterNot(_.endsWith(".css")) ++ Seq( + "styles/nord-light.css", + "styles/scalastyle.css", + "styles/dotty-icons.css", + "styles/diagram.css", + "hljs/highlight.pack.js", + "scripts/hljs-scala3.js", + "scripts/ux.js", + "https://code.jquery.com/jquery-3.5.1.min.js", + 
"https://d3js.org/d3.v6.min.js", + "https://cdn.jsdelivr.net/npm/graphlib-dot@0.6.2/dist/graphlib-dot.min.js", + "https://cdnjs.cloudflare.com/ajax/libs/dagre-d3/0.6.1/dagre-d3.min.js", + "scripts/diagram.js", + "styles/filter-bar.css", + "hljs/highlight.pack.js", + "scripts/hljs-scala3.js", + "scripts/ux.js", + "scripts/common/component.js", + "scripts/common/utils.js", + "scripts/components/FilterBar.js", + "scripts/components/DocumentableList.js", + "scripts/components/Input.js", + "scripts/components/FilterGroup.js", + "scripts/components/Filter.js", + "scripts/data.js", + )).asJava, + page.getChildren + ) + ) +} diff --git a/scala3doc/src/dotty/dokka/preprocessors/ScalaResourceInstaller.scala b/scala3doc/src/dotty/dokka/preprocessors/ScalaResourceInstaller.scala new file mode 100644 index 000000000000..d184ba1334e1 --- /dev/null +++ b/scala3doc/src/dotty/dokka/preprocessors/ScalaResourceInstaller.scala @@ -0,0 +1,22 @@ +package dotty.dokka + +import org.jetbrains.dokka.transformers.pages.{PageTransformer} +import org.jetbrains.dokka.pages.{RootPageNode, RendererSpecificResourcePage, RenderingStrategy$Copy, PageNode, RenderingStrategy$Write} +import scala.jdk.CollectionConverters._ +import com.fasterxml.jackson.databind.ObjectMapper +import dotty.dokka.translators.FilterAttributes + +class ScalaResourceInstaller extends PageTransformer: + private def dottyRes(resourceName: String) = + new RendererSpecificResourcePage(resourceName, java.util.ArrayList(), RenderingStrategy$Copy(s"/dotty_res/$resourceName")) + + override def invoke(input: RootPageNode): RootPageNode = + val newResources = input.getChildren.asScala ++ Seq("fonts", "images", "styles", "scripts", "hljs").map(dottyRes) ++ Seq(dynamicJsData) + input.modified(input.getName, newResources.asJava) + + private def dynamicJsData = + // If data at any point will become more complex we should use a proper + val data: Map[String, Map[String, String]] = Map("filterDefaults" -> FilterAttributes.defaultValues) + val str = new ObjectMapper().writeValueAsString(data.transform((_, v) => v.asJava).asJava) + + new RendererSpecificResourcePage("scripts/data.js", java.util.ArrayList(), RenderingStrategy$Write(s"var scala3DocData = $str")) diff --git a/scala3doc/src/dotty/dokka/tasty/BasicSupport.scala b/scala3doc/src/dotty/dokka/tasty/BasicSupport.scala new file mode 100644 index 000000000000..eff22ed059f1 --- /dev/null +++ b/scala3doc/src/dotty/dokka/tasty/BasicSupport.scala @@ -0,0 +1,55 @@ +package dotty.dokka.tasty + +import org.jetbrains.dokka.links._ +import org.jetbrains.dokka.model._ +import collection.JavaConverters._ +import dotty.dokka._ +import dotty.dokka.model.api.Annotation + +trait BasicSupport: + self: TastyParser => + import qctx.reflect._ + object SymOps extends SymOps[qctx.reflect.type](qctx.reflect) + export SymOps._ + + def parseAnnotation(annotTerm: Term): Annotation = + val dri = annotTerm.tpe.typeSymbol.dri + val params = annotTerm match + case Apply(target, appliedWith) => { + appliedWith.map { + case Literal(constant) => Annotation.PrimitiveParameter(None, constant.value match { + case s: String => "\"" + s"$s" + "\"" + case other => other.toString() + }) + case Select(qual, name) => + val dri = qual.tpe.termSymbol.companionClass.dri + Annotation.LinkParameter(None, dri, s"${dri.getClassNames}.$name") // TODO this is a nasty hack! 
+ + case other => Annotation.UnresolvedParameter(None, other.show) + } + } + + Annotation(dri, params) + + + extension (sym: Symbol): + def documentation(using cxt: Context) = sym.documentation match + case Some(comment) => + Map(sourceSet.getSourceSet -> parseComment(comment, sym.tree)) + case None => + Map.empty + + def source(using ctx: Context) = + val path = Some(sym.pos.sourceFile.jpath).filter(_ != null).map(_.toAbsolutePath).map(_.toString) + path match{ + case Some(p) => Map(sourceSet.getSourceSet -> TastyDocumentableSource(p, sym.pos.startLine)) + case None => Map.empty + } + + def getAnnotations(): List[Annotation] = + sym.annots.filterNot(_.symbol.packageName.startsWith("scala.annotation.internal")).map(parseAnnotation).reverse + + private val emptyDRI = DRI.Companion.getTopLevel + + + diff --git a/scala3doc/src/dotty/dokka/tasty/ClassLikeSupport.scala b/scala3doc/src/dotty/dokka/tasty/ClassLikeSupport.scala new file mode 100644 index 000000000000..ba39ed3d2280 --- /dev/null +++ b/scala3doc/src/dotty/dokka/tasty/ClassLikeSupport.scala @@ -0,0 +1,351 @@ +package dotty.dokka.tasty + +import org.jetbrains.dokka.model.{TypeConstructor => DTypeConstructor, _} +import org.jetbrains.dokka.links._ +import org.jetbrains.dokka.model.doc._ +import org.jetbrains.dokka.DokkaConfiguration$DokkaSourceSet +import collection.JavaConverters._ +import org.jetbrains.dokka.model.properties._ +import dotty.dokka._ +import org.jetbrains.dokka.base.transformers.documentables.CallableExtensions +import dotty.dokka.model.api._ +import dotty.dokka.model.api.Modifier +import dotty.dokka.model.api.Kind +import dotty.dokka.model.api.ImplicitConversion +import dotty.dokka.model.api.{Signature => DSignature, Link => DLink} + + +trait ClassLikeSupport: + self: TastyParser => + import qctx.reflect._ + + private val placeholderVisibility = Map(sourceSet.getSourceSet -> KotlinVisibility.Public.INSTANCE).asJava + private val placeholderModifier = Map(sourceSet.getSourceSet -> KotlinModifier.Empty.INSTANCE).asJava + + private def kindForClasslike(sym: Symbol): Kind = + if sym.flags.is(Flags.Object) then Kind.Object + else if sym.flags.is(Flags.Trait) then Kind.Trait + else if sym.flags.is(Flags.Enum) then Kind.Enum + else Kind.Class + + object DClass: + def apply[T >: DClass](classDef: ClassDef)( + dri: DRI = classDef.symbol.dri, + name: String = classDef.name, + signatureOnly: Boolean = false, + modifiers: Seq[Modifier] = classDef.symbol.getExtraModifiers(), + ): DClass = + val supertypes = getSupertypes(classDef).map{ case (symbol, tpe) => + LinkToType(tpe.dokkaType.asSignature, symbol.dri, kindForClasslike(symbol)) + } + val selfSiangture: DSignature = typeForClass(classDef).dokkaType.asSignature + val baseExtra = PropertyContainer.Companion.empty() + .plus(ClasslikeExtension(classDef.getConstructorMethod, classDef.getCompanion)) + .plus(MemberExtension( + classDef.symbol.getVisibility(), + modifiers, + kindForClasslike( classDef.symbol), + classDef.symbol.getAnnotations(), + selfSiangture + )) + + val fullExtra = + if (signatureOnly) baseExtra + else baseExtra.plus(CompositeMemberExtension( + classDef.extractMembers, + classDef.getParents.map(_.dokkaType.asSignature), + supertypes, + Nil)) + + new DClass( + dri, + name, + (if(signatureOnly) Nil else classDef.getConstructors.map(parseMethod(_))).asJava, + Nil.asJava, + Nil.asJava, + Nil.asJava, + classDef.symbol.source.asJava, + placeholderVisibility, + null, + /*generics =*/classDef.getTypeParams.map(parseTypeArgument).asJava, + Map.empty.asJava, + 
classDef.symbol.documentation.asJava, + null, + placeholderModifier, + inspector.sourceSet.toSet, + /*isExpectActual =*/ false, + fullExtra.asInstanceOf[PropertyContainer[DClass]] + ) + + private val conversionSymbol = Symbol.requiredClass("scala.Conversion") + + def extractImplicitConversion(tpe: TypeRepr): Option[ImplicitConversion] = + if tpe.derivesFrom(conversionSymbol) then None + else tpe.baseType(conversionSymbol) match + case AppliedType(tpe, List(from: TypeRepr, to: TypeRepr)) => + Some(ImplicitConversion(from.typeSymbol.dri, to.typeSymbol.dri)) + case _ => + None + + private def parseMember(s: Tree): Option[Member] = processTreeOpt(s)(s match + case dd: DefDef if !dd.symbol.isHiddenByVisibility && !dd.symbol.isSyntheticFunc && dd.symbol.isExtensionMethod => + dd.symbol.extendedSymbol.map { extSym => + val target = ExtensionTarget(extSym.symbol.name, extSym.tpt.dokkaType.asSignature, extSym.tpt.symbol.dri) + parseMethod(dd.symbol, kind = Kind.Extension(target)) + } + + case dd: DefDef if !dd.symbol.isHiddenByVisibility && dd.symbol.isGiven => + Some(parseMethod(dd.symbol, kind = Kind.Given(getGivenInstance(dd).map(_.asSignature), None))) // TODO check given methods? + + case dd: DefDef if !dd.symbol.isHiddenByVisibility && !dd.symbol.isGiven && !dd.symbol.isSyntheticFunc && !dd.symbol.isExtensionMethod => + Some(parseMethod(dd.symbol)) + + case td: TypeDef if !td.symbol.flags.is(Flags.Synthetic) && (!td.symbol.flags.is(Flags.Case) || !td.symbol.flags.is(Flags.Enum)) => + Some(parseTypeDef(td)) + + case vd: ValDef if !isSyntheticField(vd.symbol) && (!vd.symbol.flags.is(Flags.Case) || !vd.symbol.flags.is(Flags.Enum)) => + Some(parseValDef(vd)) + + case c: ClassDef if c.symbol.shouldDocumentClasslike && !c.symbol.isGiven => + Some(parseClasslike(c)) + + case _ => None + ) + + extension (c: ClassDef): + def membersToDocument = c.body.filterNot(_.symbol.isHiddenByVisibility) + + def getNonTrivialInheritedMemberTrees = + c.symbol.getAllMembers.filterNot(s => s.isHiddenByVisibility || s.maybeOwner == c.symbol) + .filter(s => s.maybeOwner != defn.ObjectClass && s.maybeOwner != defn.AnyClass) + .map(_.tree) + + def extractMembers: Seq[Member] = { + // val inherited = c.getNonTrivialInheritedMemberTrees.collect { + // case dd: DefDef if !dd.symbol.isClassConstructor && !(dd.symbol.isSuperBridgeMethod || dd.symbol.isDefaultHelperMethod) => dd + // case other => other + // } + + c.membersToDocument.flatMap(parseMember) + // ++ + // inherited.flatMap(s => parseMember(s).map(_.withOrigin(Origin.InheritedFrom(s.symbol.owner.name, s.symbol.owner.dri)))) + } + + def getParents: List[Tree] = + for + parentTree <- c.parents if isValidPos(parentTree.pos) // We assume here that order is correct + parentSymbol = if parentTree.symbol.isClassConstructor then parentTree.symbol.owner else parentTree.symbol + if parentSymbol != defn.ObjectClass && parentSymbol != defn.AnyClass + yield parentTree + + + def getConstructors: List[Symbol] = membersToDocument.collect { + case d: DefDef if d.symbol.isClassConstructor && c.constructor.symbol != d.symbol => d.symbol + }.toList + + def getParameterModifier(parameter: Symbol): String = + val fieldSymbol = c.symbol.field(parameter.name) + if fieldSymbol.flags.is(Flags.Mutable) then "var " + else if fieldSymbol.flags.is(Flags.ParamAccessor) && !c.symbol.flags.is(Flags.Case) && !fieldSymbol.flags.is(Flags.Private) then "val " + else "" + + def getTypeParams: List[TypeDef] = c.body.collect { case targ: TypeDef => targ }.filter(_.symbol.isTypeParam) + + def getCompanion: 
Option[DRI] = c.symbol.getCompanionSymbol + .filter(!_.flags.is(Flags.Synthetic)) + .filterNot(_.isHiddenByVisibility) + .map(_.dri) + + def getConstructorMethod: Option[DFunction] = + Some(c.constructor.symbol).filter(_.exists).filterNot(_.isHiddenByVisibility).map( d => + parseMethod(d, constructorWithoutParamLists(c), s => c.getParameterModifier(s)) + ) + + def parseClasslike(classDef: ClassDef, signatureOnly: Boolean = false)(using ctx: Context): DClass = classDef match + case c: ClassDef if classDef.symbol.flags.is(Flags.Object) => parseObject(c, signatureOnly) + case c: ClassDef if classDef.symbol.flags.is(Flags.Enum) => parseEnum(c, signatureOnly) + case clazz => DClass(classDef)(signatureOnly = signatureOnly) + + def parseObject(classDef: ClassDef, signatureOnly: Boolean = false)(using ctx: Context): DClass = + DClass(classDef)( + name = classDef.name.stripSuffix("$"), + // All objects are final so we do not need final modifer! + modifiers = classDef.symbol.getExtraModifiers().filter(_ != Modifier.Final), + signatureOnly = signatureOnly + ) + + // TODO check withNewExtras? + def parseEnum(classDef: ClassDef, signatureOnly: Boolean = false)(using ctx: Context): DClass = + val extraModifiers = classDef.symbol.getExtraModifiers().filter(_ != Modifier.Sealed).filter(_ != Modifier.Abstract) + val companion = classDef.symbol.getCompanionSymbol.map(_.tree.asInstanceOf[ClassDef]).get + + val enumVals = companion.membersToDocument.collect { + case vd: ValDef if !isSyntheticField(vd.symbol) && vd.symbol.flags.is(Flags.Enum) && vd.symbol.flags.is(Flags.Case) => vd + }.toList.map(parseValDef(_)) + + val enumTypes = companion.membersToDocument.collect { + case td: TypeDef if !td.symbol.flags.is(Flags.Synthetic) && td.symbol.flags.is(Flags.Enum) && td.symbol.flags.is(Flags.Case) => td + }.toList.map(parseTypeDef) + + val enumNested = companion.membersToDocument.collect { + case c: ClassDef if c.symbol.flags.is(Flags.Case) && c.symbol.flags.is(Flags.Enum) => processTree(c)(parseClasslike(c)) + }.flatten + + val classlikie = DClass(classDef)(modifiers = extraModifiers, signatureOnly = signatureOnly) + classlikie.withNewMembers((enumVals ++ enumTypes ++ enumNested).map(_.withKind(Kind.EnumCase))).asInstanceOf[DClass] + + def parseMethod( + methodSymbol: Symbol, + emptyParamsList: Boolean = false, + paramPrefix: Symbol => String = _ => "", + kind: Kind = Kind.Def + ): DFunction = + val method = methodSymbol.tree.asInstanceOf[DefDef] + val paramLists = if emptyParamsList then Nil else method.paramss + val genericTypes = if (methodSymbol.isClassConstructor) Nil else method.typeParams + + val methodKind = + if methodSymbol.isClassConstructor then Kind.Constructor + else if methodSymbol.flags.is(Flags.Implicit) then extractImplicitConversion(method.returnTpt.tpe) match + case Some(conversion) if paramLists.size == 0 || (paramLists.size == 1 && paramLists.head.size == 0) => + Kind.Implicit(Kind.Def, Some(conversion)) + case _ => + Kind.Implicit(Kind.Def, None) + else kind + + val name = methodKind match + case Kind.Constructor => "this" + case Kind.Given(_, _) => methodSymbol.name.stripPrefix("given_") + case Kind.Extension(_) => methodSymbol.name.stripPrefix("extension_") + case _ => methodSymbol.name + + new DFunction( + methodSymbol.dri, + name, + /*isConstructor =*/ methodSymbol.isClassConstructor, + /*parameters =*/ paramLists.flatten.map(parseArgument(_, paramPrefix)).asJava, // TODO add support for parameters + /*documentation =*/ methodSymbol.documentation.asJava, + /*expectPresentInSet =*/ null, 
// unused + /*sources =*/ methodSymbol.source.asJava, + /*visibility =*/ placeholderVisibility, + /*type =*/ method.returnTpt.dokkaType, + /*generics =*/ genericTypes.map(parseTypeArgument).asJava, + /*receiver =*/ null, // Not used + /*modifier =*/ placeholderModifier, + sourceSet.toSet(), + /*isExpectActual =*/ false, + PropertyContainer.Companion.empty() + plus MethodExtension(paramLists.map(_.size)) + plus(MemberExtension( + methodSymbol.getVisibility(), + methodSymbol.getExtraModifiers(), + methodKind, + methodSymbol.getAnnotations(), + method.returnTpt.dokkaType.asSignature + )) + ) + + def parseArgument(argument: ValDef, prefix: Symbol => String, isExtendedSymbol: Boolean = false, isGrouped: Boolean = false): DParameter = + new DParameter( + argument.symbol.dri, + prefix(argument.symbol) + argument.symbol.name, + argument.symbol.documentation.asJava, + null, + argument.tpt.dokkaType, + sourceSet.toSet(), + PropertyContainer.Companion.empty() + .plus(ParameterExtension(isExtendedSymbol, isGrouped)) + .plus(MemberExtension.empty.copy(annotations = argument.symbol.getAnnotations())) + ) + + def parseTypeArgument(argument: TypeDef): DTypeParameter = + // Not sure if we should have such hacks... + val variancePrefix = + if argument.symbol.flags.is(Flags.Covariant) then "+" + else if argument.symbol.flags.is(Flags.Contravariant) then "-" + else "" + + new DTypeParameter( + Invariance(TypeParameter(argument.symbol.dri, variancePrefix + argument.symbol.name, null)), + argument.symbol.documentation.asJava, + null, + List(argument.rhs.dokkaType).asJava, + sourceSet.toSet(), + PropertyContainer.Companion.empty() + ) + + def parseTypeDef(typeDef: TypeDef): DProperty = + + def isTreeAbstract(typ: Tree): Boolean = typ match { + case TypeBoundsTree(_, _) => true + case LambdaTypeTree(params, body) => isTreeAbstract(body) + case _ => false + } + + + val (generics, tpeTree) = typeDef.rhs match + case LambdaTypeTree(params, body) => (params.map(parseTypeArgument), body) + case tpe => (Nil, tpe) + + new DProperty( + typeDef.symbol.dri, + typeDef.name, + /*documentation =*/ typeDef.symbol.documentation.asJava, + /*expectPresentInSet =*/ null, // unused + /*sources =*/ typeDef.symbol.source.asJava, + /*visibility =*/ placeholderVisibility, + /*type =*/ tpeTree.dokkaType, // TODO this may be hard... 
+ /*receiver =*/ null, // Not used + /*setter =*/ null, + /*getter =*/ null, + /*modifier =*/ placeholderModifier, + sourceSet.toSet(), + /*generics =*/ generics.asJava, // TODO + /*isExpectActual =*/ false, + PropertyContainer.Companion.empty() plus MemberExtension( + typeDef.symbol.getVisibility(), + typeDef.symbol.getExtraModifiers(), + Kind.Type(!isTreeAbstract(typeDef.rhs), typeDef.symbol.isOpaque), + typeDef.symbol.getAnnotations(), + tpeTree.dokkaType.asSignature + ) + ) + + def parseValDef(valDef: ValDef): DProperty = + def givenInstance = Some(valDef.symbol.moduleClass) + .filter(_.exists) + .map(_.tree.asInstanceOf[ClassDef]) + .flatMap(_.getParents.headOption) + .map(_.dokkaType.asSignature) + + def defaultKind = if valDef.symbol.flags.is(Flags.Mutable) then Kind.Var else Kind.Val + val kind = + if valDef.symbol.isGiven then Kind.Given(givenInstance, extractImplicitConversion(valDef.tpt.tpe)) + else if valDef.symbol.flags.is(Flags.Implicit) then + Kind.Implicit(Kind.Val, extractImplicitConversion(valDef.tpt.tpe)) + else defaultKind + + new DProperty( + valDef.symbol.dri, + valDef.name, + /*documentation =*/ valDef.symbol.documentation.asJava, + /*expectPresentInSet =*/ null, // unused + /*sources =*/ valDef.symbol.source.asJava, + /*visibility =*/ placeholderVisibility, + /*type =*/ valDef.tpt.dokkaType, + /*receiver =*/ null, // Not used + /*setter =*/ null, + /*getter =*/ null, + /*modifier =*/ placeholderModifier, + sourceSet.toSet(), + /*generics =*/ Nil.asJava, + /*isExpectActual =*/ false, + PropertyContainer.Companion.empty().plus(MemberExtension( + valDef.symbol.getVisibility(), + valDef.symbol.getExtraModifiers(), + kind, + valDef.symbol.getAnnotations(), + valDef.tpt.tpe.dokkaType.asSignature + )) + ) + diff --git a/scala3doc/src/dotty/dokka/tasty/PackageSupport.scala b/scala3doc/src/dotty/dokka/tasty/PackageSupport.scala new file mode 100644 index 000000000000..932fefc0b1c3 --- /dev/null +++ b/scala3doc/src/dotty/dokka/tasty/PackageSupport.scala @@ -0,0 +1,53 @@ +package dotty.dokka.tasty + +import org.jetbrains.dokka.model._ +import org.jetbrains.dokka.links._ +import org.jetbrains.dokka.model.properties._ +import org.jetbrains.dokka.model.doc.DocumentationNode +import dotty.dokka._ +import dotty.dokka.model.api.CompositeMemberExtension + +import collection.JavaConverters._ + +trait PackageSupport: + self: TastyParser => + import qctx.reflect._ + + def parsePackage(pck: PackageClause): DPackage = { + val name = extractPackageName(pck.pid.show) + val documentation = pck.symbol.documentation + DPackage( + new DRI(name, null, null, PointingToDeclaration.INSTANCE, null), + Nil.asJava, + Nil.asJava, + Nil.asJava, + Nil.asJava, + documentation.asJava, + null, + sourceSet.toSet, + PropertyContainer.Companion.empty() + ) + } + + def parsePackageObject(pckObj: ClassDef): DPackage = + parseClasslike(pckObj) match { + case clazz: DClass => + DPackage( + new DRI(pckObj.symbol.dri.getPackageName, null, null, PointingToDeclaration.INSTANCE, null), + clazz.getFunctions, + clazz.getProperties, + Nil.asJava, + Nil.asJava, + pckObj.symbol.documentation.asJava, + null, + sourceSet.toSet, + PropertyContainer.Companion.empty() + .plus(clazz.get(CompositeMemberExtension)) + ) + } + + + private def extractPackageName(pidShowNoColor: String): String = { + val pidSplit = pidShowNoColor.split("\\.") + pidSplit.mkString("",".","") + } diff --git a/scala3doc/src/dotty/dokka/tasty/ScalaDocSupport.scala b/scala3doc/src/dotty/dokka/tasty/ScalaDocSupport.scala new file mode 100644 index 
000000000000..357e3404afce --- /dev/null +++ b/scala3doc/src/dotty/dokka/tasty/ScalaDocSupport.scala @@ -0,0 +1,92 @@ +package dotty.dokka.tasty + +import scala.jdk.CollectionConverters._ +import scala.tasty.reflect._ + +import org.jetbrains.dokka.model.{doc => dkkd} + +import dotty.dokka.Args.CommentSyntax +import dotty.dokka.ScalaTagWrapper +import comments.{kt, dkk} + +trait ScaladocSupport { self: TastyParser => + import qctx.reflect._ + + def parseComment( + commentNode: Documentation, + tree: Tree + ): dkkd.DocumentationNode = { + val preparsed = + comments.Preparser.preparse(comments.Cleaner.clean(commentNode.raw)) + + val commentSyntax = + preparsed.syntax.headOption match { + case Some(commentSetting) => + CommentSyntax.fromString(commentSetting).getOrElse { + println(s"WARN: not a valid comment syntax: $commentSetting") + println(s"WARN: Defaulting to Markdown syntax.") + CommentSyntax.Markdown + } + case None => defaultCommentSyntax + } + + val parser = commentSyntax match { + case CommentSyntax.Wiki => + comments.WikiCommentParser(comments.Repr(qctx.reflect)(tree.symbol)) + case CommentSyntax.Markdown => + comments.MarkdownCommentParser(comments.Repr(qctx.reflect)(tree.symbol)) + } + val parsed = parser.parse(preparsed) + + import kotlin.collections.builders.{ListBuilder => KtListBuilder} + val bld = new KtListBuilder[dkkd.TagWrapper] + parsed.short match { + case Some(tag) => bld.add(dkkd.Description(tag)) + case None => bld.add(dkkd.Description(dkk.text(""))) + } + bld.add(dkkd.Description(parsed.body)) + + inline def addOpt(opt: Option[dkkd.DocTag])(wrap: dkkd.DocTag => dkkd.TagWrapper) = + opt.foreach { t => bld.add(wrap(t)) } + + inline def addSeq[T](seq: Iterable[T])(wrap: T => dkkd.TagWrapper) = + seq.foreach { t => bld.add(wrap(t)) } + + // this is a total kludge, this should be done in a deeper layer but we'd + // need to refactor code there first + def correctParagraphTags(tag: dkkd.DocTag): dkkd.DocTag = + tag match { + case tag: dkkd.P => + // NOTE we recurse once, since both the top-level element and its children can be P + // (there is no special root DocTag) + dkkd.Span(tag.getChildren.iterator.asScala.map(correctParagraphTags).toSeq.asJava, tag.getParams) + case tag => tag + } + + addSeq(parsed.authors)(dkkd.Author(_)) + addOpt(parsed.version)(dkkd.Version(_)) + addOpt(parsed.since)(dkkd.Since(_)) + addOpt(parsed.deprecated)(dkkd.Deprecated(_)) + addSeq(parsed.todo)(ScalaTagWrapper.Todo) + addSeq(parsed.see)(ScalaTagWrapper.See) + addSeq(parsed.note)(ScalaTagWrapper.Note) + addSeq(parsed.example)(ScalaTagWrapper.Example) + + addOpt(parsed.constructor)(dkkd.Constructor(_)) + addSeq(parsed.valueParams){ case (name, tag) => + ScalaTagWrapper.NestedNamedTag("Param", name, dkk.text(name), correctParagraphTags(tag)) + } + addSeq(parsed.typeParams){ case (name, tag) => + ScalaTagWrapper.NestedNamedTag("Type param", name, dkk.text(name), correctParagraphTags(tag)) + } + addSeq(parsed.throws){ case (key, (exc, desc)) => + ScalaTagWrapper.NestedNamedTag("Throws", key, exc, correctParagraphTags(desc)) + } + addOpt(parsed.result)(dkkd.Return(_)) + + new dkkd.DocumentationNode(bld.build()) + } + + private val defaultCommentSyntax = + self.config.docConfiguration.args.defaultSyntax getOrElse CommentSyntax.Markdown +} diff --git a/scala3doc/src/dotty/dokka/tasty/SymOps.scala b/scala3doc/src/dotty/dokka/tasty/SymOps.scala new file mode 100644 index 000000000000..c7803d776ab4 --- /dev/null +++ b/scala3doc/src/dotty/dokka/tasty/SymOps.scala @@ -0,0 +1,121 @@ +package 
dotty.dokka.tasty + +import org.jetbrains.dokka.links._ +import org.jetbrains.dokka.model._ +import collection.JavaConverters._ +import dotty.dokka._ +import dotty.dokka.model.api.Visibility +import dotty.dokka.model.api.VisibilityScope +import dotty.dokka.model.api.Modifier +import scala.tasty.Reflection + +class SymOps[R <: Reflection](val r: R): + import r._ + + given R = r + extension (sym: r.Symbol): + def packageName(using ctx: Context): String = + if (sym.isPackageDef) sym.fullName + else sym.maybeOwner.packageName + + def topLevelEntryName(using ctx: Context): Option[String] = if (sym.isPackageDef) None else + if (sym.owner.isPackageDef) Some(sym.name) else sym.owner.topLevelEntryName + + def getVisibility(): Visibility = + import VisibilityScope._ + + def explicitScope(ownerType: TypeRepr): VisibilityScope = + val moduleSym = ownerType.typeSymbol.companionModule + if moduleSym.isNoSymbol + then ExplicitTypeScope(ownerType.typeSymbol.name) + else ExplicitModuleScope(moduleSym.name) + + def implicitScope(ownerSym: Symbol): VisibilityScope = + val moduleSym = ownerSym.companionModule + if moduleSym.isNoSymbol + then ImplicitTypeScope + else ImplicitModuleScope + + val visibilityFlags = (sym.flags.is(Flags.Private), sym.flags.is(Flags.Protected), sym.flags.is(Flags.Local)) + (sym.privateWithin, sym.protectedWithin, visibilityFlags) match + case (Some(owner), None, _) => Visibility.Private(explicitScope(owner)) + case (None, Some(owner), _) => Visibility.Protected(explicitScope(owner)) + case (None, None, (true, false, _)) => Visibility.Private(implicitScope(sym.owner)) + case (None, None, (false, true, true)) => Visibility.Protected(ThisScope) + case (None, None, (false, true, false)) => Visibility.Protected(implicitScope(sym.owner)) + case (None, None, (false, false, false)) => Visibility.Unrestricted + case _ => throw new Exception(s"Visibility for symbol $sym cannot be determined") + + // TODO: #49 Remove it after TASTY-Reflect release with published flag Extension + def hackIsOpen: Boolean = { + import dotty.tools.dotc + given dotc.core.Contexts.Context = r.rootContext.asInstanceOf + val symbol = sym.asInstanceOf[dotc.core.Symbols.Symbol] + symbol.is(dotc.core.Flags.Open) + } + + // Order here determines order in documenation + def getExtraModifiers(): Seq[Modifier] = Seq( + Flags.Final -> Modifier.Final, + Flags.Sealed -> Modifier.Sealed, + Flags.Erased -> Modifier.Erased, + Flags.Abstract -> Modifier.Abstract, + Flags.Implicit -> Modifier.Implicit, + Flags.Inline -> Modifier.Inline, + Flags.Lazy -> Modifier.Lazy, + Flags.Override -> Modifier.Override, + Flags.Case -> Modifier.Case, + ).collect { case (flag, mod) if sym.flags.is(flag) => mod } + ++ (if(sym.hackIsOpen) Seq(Modifier.Open) else Nil) + + def isHiddenByVisibility: Boolean = + import VisibilityScope._ + + getVisibility() match + case Visibility.Private(_) => true + case Visibility.Protected(ThisScope | ImplicitModuleScope | _: ExplicitModuleScope) => true + case _ => false + + def shouldDocumentClasslike: Boolean = !isHiddenByVisibility + && !sym.flags.is(Flags.Synthetic) + && (!sym.flags.is(Flags.Case) || !sym.flags.is(Flags.Enum)) + && !(sym.companionModule.flags.is(Flags.Given)) + + + def getCompanionSymbol: Option[Symbol] = Some(sym.companionClass).filter(_.exists) + + def isCompanionObject: Boolean = sym.flags.is(Flags.Object) && sym.companionClass.exists + + def isGiven: Boolean = sym.flags.is(Flags.Given) + + def isExtensionMethod: Boolean = sym.flags.is(Flags.ExtensionMethod) + + def isLeftAssoc(d: Symbol): 
Boolean = !d.name.endsWith(":") + + def extendedSymbol: Option[ValDef] = + Option.when(sym.isExtensionMethod)( + if(isLeftAssoc(sym)) sym.tree.asInstanceOf[DefDef].paramss(0)(0) + else sym.tree.asInstanceOf[DefDef].paramss(1)(0) + ) + + // TODO #22 make sure that DRIs are unique plus probably reuse semantic db code? + def dri: DRI = + if sym == Symbol.noSymbol then emptyDRI else if sym.isValDef && sym.moduleClass.exists then sym.moduleClass.dri else + val pointsTo = + if (!sym.isTypeDef) PointingToDeclaration.INSTANCE + else PointingToGenericParameters(sym.owner.typeMembers.indexOf(sym)) + + val method = + if (sym.isDefDef) Some(sym) + else if (sym.maybeOwner.isDefDef) Some(sym.owner) + else None + + new DRI( + sym.packageName, + sym.topLevelEntryName.orNull, // TODO do we need any of this fields? + method.map(s => new org.jetbrains.dokka.links.Callable(s.name, null, Nil.asJava)).orNull, + pointsTo, // TODO different targets? + s"${sym.show}/${sym.signature.resultSig}/[${sym.signature.paramSigs.mkString("/")}]" + ) + + private val emptyDRI = DRI.Companion.getTopLevel \ No newline at end of file diff --git a/scala3doc/src/dotty/dokka/tasty/SyntheticSupport.scala b/scala3doc/src/dotty/dokka/tasty/SyntheticSupport.scala new file mode 100644 index 000000000000..ad624c21f4ed --- /dev/null +++ b/scala3doc/src/dotty/dokka/tasty/SyntheticSupport.scala @@ -0,0 +1,111 @@ +package dotty.dokka.tasty + +import scala.tasty.Reflection + +trait SyntheticsSupport: + self: TastyParser => + + import qctx.reflect._ + + extension (t: TypeRepr): + def isTupleType: Boolean = hackIsTupleType(qctx.reflect)(t) + + def isCompiletimeAppliedType: Boolean = hackIsCompiletimeAppliedType(qctx.reflect)(t) + + def hackIsTupleType(r: Reflection)(rtpe: r.TypeRepr): Boolean = + import dotty.tools.dotc + given ctx as dotc.core.Contexts.Context = r.rootContext.asInstanceOf + val tpe = rtpe.asInstanceOf[dotc.core.Types.Type] + ctx.definitions.isTupleType(tpe) + + def hackIsCompiletimeAppliedType(r: Reflection)(rtpe: r.TypeRepr): Boolean = + import dotty.tools.dotc + given ctx as dotc.core.Contexts.Context = r.rootContext.asInstanceOf + val tpe = rtpe.asInstanceOf[dotc.core.Types.Type] + ctx.definitions.isCompiletimeAppliedType(tpe.typeSymbol) + + extension (s: Symbol): + def isSyntheticFunc: Boolean = s.flags.is(Flags.Synthetic) || s.flags.is(Flags.FieldAccessor) || isDefaultHelperMethod + + def isSuperBridgeMethod: Boolean = s.name.contains("$super$") + + def isDefaultHelperMethod: Boolean = ".*\\$default\\$\\d+$".r.matches(s.name) + + def isOpaque: Boolean = hackIsOpaque(qctx.reflect)(s) + + def isInfix: Boolean = hackIsInfix(qctx.reflect)(s) + + def getAllMembers: List[Symbol] = hackGetAllMembers(qctx.reflect)(s) + + def isSyntheticField(c: Symbol) = + c.flags.is(Flags.CaseAccessor) || c.flags.is(Flags.Object) + + def isValidPos(pos: Position) = + pos.exists && pos.start != pos.end + + def constructorWithoutParamLists(c: ClassDef): Boolean = + !isValidPos(c.constructor.pos) || { + val end = c.constructor.pos.end + val typesEnd = c.constructor.typeParams.lastOption.fold(end - 1)(_.pos.end) + val classDefTree = c.constructor.show + c.constructor.typeParams.nonEmpty && end <= typesEnd + 1 + } + + // TODO: #49 Remove it after TASTY-Reflect release with published flag Extension + def hackIsInfix(r: Reflection)(rsym: r.Symbol): Boolean = { + import dotty.tools.dotc + given ctx as dotc.core.Contexts.Context = r.rootContext.asInstanceOf + val sym = rsym.asInstanceOf[dotc.core.Symbols.Symbol] + ctx.definitions.isInfix(sym) + } + /* We 
need there to filter out symbols with certain flagsets, because these symbols come from compiler and TASTY can't handle them well. + They are valdefs that describe case companion objects and cases from enum. + TASTY crashed when calling _.tree on them. + */ + def hackGetAllMembers(r: Reflection)(rsym: r.Symbol): List[r.Symbol] = { + import dotty.tools.dotc + given ctx as dotc.core.Contexts.Context = r.rootContext.asInstanceOf + val sym = rsym.asInstanceOf[dotc.core.Symbols.Symbol] + sym.typeRef.appliedTo(sym.typeParams.map(_.typeRef)).allMembers.iterator.map(_.symbol) + .collect { + case sym if + !sym.is(dotc.core.Flags.ModuleVal) && + !sym.flags.isAllOf(dotc.core.Flags.Enum | dotc.core.Flags.Case | dotc.core.Flags.JavaStatic) => + sym.asInstanceOf[r.Symbol] + }.toList + } + + def hackIsOpaque(r: Reflection)(rsym: r.Symbol): Boolean = { + import dotty.tools.dotc + given dotc.core.Contexts.Context = r.rootContext.asInstanceOf + val sym = rsym.asInstanceOf[dotc.core.Symbols.Symbol] + sym.is(dotc.core.Flags.Opaque) + } + + def hackGetSupertypes(r: Reflection)(rdef: r.ClassDef) = { + import dotty.tools.dotc + given dotc.core.Contexts.Context = r.rootContext.asInstanceOf + val classdef = rdef.asInstanceOf[dotc.ast.tpd.TypeDef] + val ref = classdef.symbol.info.asInstanceOf[dotc.core.Types.ClassInfo].appliedRef + val baseTypes: List[(dotc.core.Symbols.Symbol, dotc.core.Types.Type)] = + ref.baseClasses.map(b => b -> ref.baseType(b)) + baseTypes.asInstanceOf[List[(r.Symbol, r.TypeRepr)]] + } + + def getSupertypes(c: ClassDef) = hackGetSupertypes(qctx.reflect)(c).tail + + def typeForClass(c: ClassDef): r.TypeRepr = + import dotty.tools.dotc + given dotc.core.Contexts.Context = r.rootContext.asInstanceOf + val cSym = c.symbol.asInstanceOf[dotc.core.Symbols.Symbol] + cSym.typeRef.appliedTo(cSym.typeParams.map(_.typeRef)).asInstanceOf[r.TypeRepr] + + object MatchTypeCase: + def unapply(tpe: TypeRepr): Option[(TypeRepr, TypeRepr)] = + tpe match + case AppliedType(t, Seq(from, to)) /*if t == MatchCaseType*/ => + Some((from, to)) + case TypeLambda(paramNames, paramTypes, AppliedType(t, Seq(from, to))) /*if t == MatchCaseType*/ => + Some((from, to)) + case _ => + None diff --git a/scala3doc/src/dotty/dokka/tasty/TastyParser.scala b/scala3doc/src/dotty/dokka/tasty/TastyParser.scala new file mode 100644 index 000000000000..f92ec1c26b3b --- /dev/null +++ b/scala3doc/src/dotty/dokka/tasty/TastyParser.scala @@ -0,0 +1,206 @@ +package dotty.dokka +package tasty + +import org.jetbrains.dokka.plugability._ +import org.jetbrains.dokka.transformers.sources._ + +import org.jetbrains.dokka.DokkaConfiguration +import org.jetbrains.dokka.model._ +import org.jetbrains.dokka.links._ +import org.jetbrains.dokka.model.doc._ +import org.jetbrains.dokka.base.parsers._ +import org.jetbrains.dokka.plugability.DokkaContext +import collection.JavaConverters._ +import org.jetbrains.dokka.model.properties.PropertyContainer +import org.jetbrains.dokka.model.properties.PropertyContainerKt._ +import org.jetbrains.dokka.model.properties.{WithExtraProperties} +import java.util.{List => JList} + +import quoted.QuoteContext +import scala.tasty.inspector.TastyInspector +import dotty.dokka.model.api.withNewMembers +import com.virtuslab.dokka.site.SourceSetWrapper + +/** Responsible for collectively inspecting all the Tasty files we're interested in. + * + * Delegates most of the work to [[TastyParser]] [[dotty.dokka.tasty.TastyParser]]. 
+ */ +case class DokkaTastyInspector(sourceSet: SourceSetWrapper, parser: Parser, config: DottyDokkaConfig) extends DokkaBaseTastyInspector with TastyInspector + +import dotty.tools.dotc.core.Contexts.{Context => DottyContext} +case class SbtDokkaTastyInspector( + sourceSet: SourceSetWrapper, + config: DottyDokkaConfig, + filesToDocument: List[String], + rootCtx: DottyContext, +) extends DokkaBaseTastyInspector: + self => + + import dotty.tools.dotc.Compiler + import dotty.tools.dotc.Driver + import dotty.tools.dotc.Run + import dotty.tools.dotc.core.Contexts.Context + import dotty.tools.dotc.core.Mode + import dotty.tools.dotc.core.Phases.Phase + import dotty.tools.dotc.fromtasty._ + import dotty.tools.dotc.quoted.QuoteContextImpl + + + val parser: Parser = null + + def run(): List[DPackage] = { + val driver = new InspectorDriver + driver.run(filesToDocument)(rootCtx) + result() + } + + class InspectorDriver extends Driver: + override protected def newCompiler(implicit ctx: Context): Compiler = new TastyFromClass + + def run(filesToDocument: List[String])(implicit ctx: Context): Unit = + doCompile(newCompiler, filesToDocument) + + end InspectorDriver + + class TastyFromClass extends TASTYCompiler: + + override protected def frontendPhases: List[List[Phase]] = + List(new ReadTasty) :: // Load classes from tasty + Nil + + override protected def picklerPhases: List[List[Phase]] = Nil + + override protected def transformPhases: List[List[Phase]] = Nil + + override protected def backendPhases: List[List[Phase]] = + List(new TastyInspectorPhase) :: // Print all loaded classes + Nil + + override def newRun(implicit ctx: Context): Run = + reset() + new TASTYRun(this, ctx.fresh.addMode(Mode.ReadPositions).addMode(Mode.ReadComments)) + + end TastyFromClass + + class TastyInspectorPhase extends Phase: + + override def phaseName: String = "tastyInspector" + + override def run(implicit ctx: Context): Unit = + val qctx = QuoteContextImpl() + self.processCompilationUnit(using qctx)(ctx.compilationUnit.tpdTree.asInstanceOf[qctx.tasty.Tree]) + + end TastyInspectorPhase + +end SbtDokkaTastyInspector + +trait DokkaBaseTastyInspector: + val sourceSet: SourceSetWrapper + val parser: Parser + val config: DottyDokkaConfig + + private val topLevels = Seq.newBuilder[Documentable] + + def processCompilationUnit(using qctx: QuoteContext)(root: qctx.reflect.Tree): Unit = + val parser = new TastyParser(qctx, this, config) + topLevels ++= parser.parseRootTree(root.asInstanceOf[parser.qctx.reflect.Tree]) + + def result(): List[DPackage] = + val all = topLevels.result() + val packages = all + .filter(_.isInstanceOf[DPackage]) + .map(_.asInstanceOf[DPackage]) + .groupBy(_.getDri) + .map((dri, pckgs) => + pckgs.reduce(_.mergeWith(_)) + ) + + val byPackage = all.filter(_.getDri != null).groupBy(_.getDri().getPackageName()) + byPackage.map { + case (pck, entries) => { + val found = packages.find(d => d.getName == pck) + .map( f => + new DPackage( + f.getDri, + f.getFunctions, + f.getProperties, + Nil.asJava, // TODO add support for other things like type or package object entries + Nil.asJava, + f.getDocumentation, + null, + sourceSet.toSet, + f.getExtra + ).withNewMembers(entries.filterNot(_.isInstanceOf[DPackage]).toList).asInstanceOf[DPackage] + ) + found.getOrElse(throw IllegalStateException("No package for entries found")) + } + }.toList + + extension (self: DPackage) def mergeWith(other: DPackage): DPackage = + val doc1 = 
self.getDocumentation.asScala.get(sourceSet.getSourceSet).map(_.getChildren).getOrElse(Nil.asJava) + val doc2 = other.getDocumentation.asScala.get(sourceSet.getSourceSet).map(_.getChildren).getOrElse(Nil.asJava) + mergeExtras( + DPackage( + self.getDri, + (self.getFunctions.asScala ++ other.getFunctions.asScala).asJava, + (self.getProperties.asScala ++ other.getProperties.asScala).asJava, + Nil.asJava, // WARNING Merging is done before collecting classlikes, if it changes it needs to be refactored + Nil.asJava, + sourceSet.asMap( + DocumentationNode( + ( + doc1.asScala ++ doc2.asScala + ).asJava + ) + ), + null, + sourceSet.toSet, + PropertyContainer.Companion.empty() + ), + self, + other + ) + +/** Parses a single Tasty compilation unit. */ +case class TastyParser(qctx: QuoteContext, inspector: DokkaBaseTastyInspector, config: DottyDokkaConfig) + extends ScaladocSupport with BasicSupport with TypesSupport with ClassLikeSupport with SyntheticsSupport with PackageSupport: + import qctx.reflect._ + + def sourceSet = inspector.sourceSet + + def processTree[T](tree: Tree)(op: => T): Option[T] = try Option(op) catch case e: Throwable => errorMsg(tree, tree.symbol.show, e) + def processTreeOpt[T](tree: Tree)(op: => Option[T]): Option[T] = try op catch case e: Throwable => errorMsg(tree, tree.symbol.show, e) + def processSymbol[T](sym: Symbol)(op: => T): Option[T] = try Option(op) catch case e: Throwable => errorMsg(sym, sym.show, e) + + private def errorMsg[T](a: Any, m: => String, e: Throwable): Option[T] = + val msg = try m catch case e: Throwable => a.toString + println(s"ERROR: tree is faling: msg") + e.printStackTrace() + throw e + + def parseRootTree(root: Tree): Seq[Documentable] = + val docs = Seq.newBuilder[Documentable] + object Traverser extends TreeTraverser: + var seen: List[Tree] = Nil + + override def traverseTree(tree: Tree)(using ctx: Context): Unit = + seen = tree :: seen + tree match { + case pck: PackageClause => + docs += parsePackage(pck) + super.traverseTree(tree) + case packageObject: ClassDef if(packageObject.symbol.name.contains("package$")) => + docs += parsePackageObject(packageObject) + case clazz: ClassDef if clazz.symbol.shouldDocumentClasslike => + docs += parseClasslike(clazz) + case _ => + } + seen = seen.tail + + try Traverser.traverseTree(root)(using qctx.reflect.rootContext) + catch case e: Throwable => + println(s"Problem parsing ${root.pos}, documentation may not be generated.") + e.printStackTrace() + + docs.result() + diff --git a/scala3doc/src/dotty/dokka/tasty/TypesSupport.scala b/scala3doc/src/dotty/dokka/tasty/TypesSupport.scala new file mode 100644 index 000000000000..24273e5962c9 --- /dev/null +++ b/scala3doc/src/dotty/dokka/tasty/TypesSupport.scala @@ -0,0 +1,260 @@ +package dotty.dokka.tasty + +import org.jetbrains.dokka.model._ +import org.jetbrains.dokka.model.{Projection => JProjection} +import collection.JavaConverters._ + +trait TypesSupport: + self: TastyParser => + import qctx.reflect._ + + def getGivenInstance(method: DefDef): Option[Bound] = { + def extractTypeSymbol(t: Tree): Option[Symbol] = t match + case tpeTree: TypeTree => + inner(tpeTree.tpe) + case other => None + + def inner(tpe: TypeRepr): Option[Symbol] = tpe match + case ThisType(tpe) => inner(tpe) + case AnnotatedType(tpe, _) => inner(tpe) + case AppliedType(tpe, _) => inner(tpe) + case tp @ TermRef(qual, typeName) => + qual match + case _: TypeRepr | _: NoPrefix => Some(tp.termSymbol) + case other => None + case tp @ TypeRef(qual, typeName) => + qual match + case _: TypeRepr 
| _: NoPrefix => Some(tp.typeSymbol) + case other => None + + val typeSymbol = extractTypeSymbol(method.returnTpt) + + typeSymbol.map(_.tree).collect { + case c: ClassDef => c.getParents.headOption + case _ => Some(method.returnTpt) + }.flatten.map(_.dokkaType) + } + + given TreeSyntax as AnyRef: + extension (tpeTree: Tree): + def dokkaType(using cxt: Context): Bound = + val data = tpeTree match + case TypeBoundsTree(low, high) => typeBound(low.tpe, low = true) ++ typeBound(high.tpe, low = false) + case tpeTree: TypeTree => inner(tpeTree.tpe) + case term: Term => inner(term.tpe) + + new GenericTypeConstructor(tpeTree.symbol.dri, data.asJava, null) + + given TypeSyntax as AnyRef: + extension (tpe: TypeRepr): + def dokkaType(using ctx: Context): Bound = + val data = inner(tpe) + val dri = data.collect{ + case o: TypeParameter => o + }.headOption.map(_.getDri).getOrElse(defn.AnyClass.dri) + new GenericTypeConstructor(dri, data.asJava, null) + + private def text(str: String): JProjection = new UnresolvedBound(str) + + private def texts(str: String): List[JProjection] = List(text(str)) + + + private def link(symbol: Symbol)(using cxt: Context): List[JProjection] = { + val suffix = if symbol.isValDef then texts(".type") else Nil + (new TypeParameter(symbol.dri, symbol.name, null)) :: suffix + } + + private def commas(lists: List[List[JProjection]]) = lists match + case List(single) => single + case other => other.reduce((r, e) => r ++ texts(", ") ++ e) + + private def isRepeated(tpeAnnotation: Term) = + // For some reason annotation.tpe.typeSymbol != defn.RepeatedParamClass + // annotation.tpe.typeSymbol prints 'class Repeated' and defn.RepeatedParamClass prints 'class <repeated>' + tpeAnnotation.tpe.typeSymbol.toString == "class Repeated" + + // TODO #23 add support for all types signatures that makes sense + private def inner(tp: TypeRepr)(using cxt: Context): List[JProjection] = + def noSupported(name: String): List[JProjection] = + println(s"WARN: Unsupported type: $name: ${tp.show}") + List(text(s"Unsupported[$name]")) + + tp match + case OrType(left, right) => inner(left) ++ texts(" | ") ++ inner(right) + case AndType(left, right) => inner(left) ++ texts(" & ") ++ inner(right) + case ByNameType(tpe) => text("=> ") :: inner(tpe) + case ConstantType(constant) => + texts(constant.value match + case c: Char => s"'$c'" + case other => other.toString + ) + case ThisType(tpe) => inner(tpe) + case AnnotatedType(AppliedType(_, Seq(tpe)), annotation) if isRepeated(annotation) => + inner(tpe) :+ text("*") + case AnnotatedType(tpe, _) => + inner(tpe) + case tl @ TypeLambda(params, paramBounds, resType) => + // println(params) + // println(paramBounds) + texts("[") ++ commas(params.zip(paramBounds).map( (name, typ) => texts(s"${name}") ++ inner(typ) )) ++ texts("]") + ++ texts(" =>> ") + ++ inner(resType) + + + case r: Refinement => { //(parent, name, info) + def getRefinementInformation(t: TypeRepr): List[TypeRepr] = t match { + case r: Refinement => getRefinementInformation(r.parent) :+ r + case tr: TypeRef => List(tr) + } + + def getParamBounds(t: PolyType): List[JProjection] = commas( + t.paramNames.zip(t.paramBounds.map(inner(_))) + .map(b => texts(b(0)) ++ b(1)) + ) + + def getParamList(m: MethodType): List[JProjection] = + texts("(") + ++ m.paramNames.zip(m.paramTypes).map{ case (name, tp) => texts(s"$name: ") ++ inner(tp)} + .reduceLeftOption((acc: List[JProjection], elem: List[JProjection]) => acc ++ texts(", ") ++ elem).getOrElse(List()) + ++ texts(")") + + def parseRefinedElem(name: String, 
info: TypeRepr, polyTyped: List[JProjection] = Nil): List[JProjection] = ( info match { + case m: MethodType => { + val paramList = getParamList(m) + texts(s"def $name") ++ polyTyped ++ paramList ++ texts(": ") ++ inner(m.resType) + } + case t: PolyType => { + val paramBounds = getParamBounds(t) + val parsedMethod = parseRefinedElem(name, t.resType) + if (!paramBounds.isEmpty){ + parseRefinedElem(name, t.resType, texts("[") ++ paramBounds ++ texts("]")) + } else parseRefinedElem(name, t.resType) + } + case ByNameType(tp) => texts(s"def $name: ") ++ inner(tp) + case t: TypeBounds => texts(s"type $name") ++ inner(t) + case t: TypeRef => texts(s"val $name: ") ++ inner(t) + case t: TermRef => texts(s"val $name: ") ++ inner(t) + case other => noSupported(s"Not supported type in refinement $info") + } ) ++ texts("; ") + + def parsePolyFunction(info: TypeRepr): List[JProjection] = info match { + case t: PolyType => + val paramBounds = getParamBounds(t) + val method = t.resType.asInstanceOf[MethodType] + val paramList = getParamList(method) + val resType = inner(method.resType) + texts("[") ++ paramBounds ++ texts("] => ") ++ paramList ++ texts(" => ") ++ resType + case other => noSupported(s"Not supported type in refinement $info") + } + val refinementInfo = getRefinementInformation(r) + val refinedType = refinementInfo.head + val refinedElems = refinementInfo.tail.collect{ case r: Refinement => r }.toList + val prefix = if refinedType.typeSymbol != defn.ObjectClass then inner(refinedType) ++ texts(" ") else List.empty[JProjection] + if (refinedType.typeSymbol.fullName == "scala.PolyFunction" && refinedElems.size == 1) { + parsePolyFunction(refinedElems.head.info) + } else { + prefix ++ texts("{ ") ++ refinedElems.flatMap(e => parseRefinedElem(e.name, e.info)) ++ texts(" }") + } + } + case t @ AppliedType(tpe, typeList) => + import scala.internal.Chars._ + if !t.typeSymbol.name.forall(isIdentifierPart) && typeList.size == 2 then + inner(typeList.head) + ++ texts(" ") + ++ inner(tpe) + ++ texts(" ") + ++ inner(typeList.last) + else if t.isFunctionType then + typeList match + case Nil => + Nil + case Seq(rtpe) => + text("() => ") :: inner(rtpe) + case Seq(arg, rtpe) => + inner(arg) ++ texts(" => ") ++ inner(rtpe) + case args => + texts("(") ++ commas(args.init.map(inner)) ++ texts(") => ") ++ inner(args.last) + else if t.isTupleType then + typeList match + case Nil => + Nil + case args => + texts("(") ++ commas(args.map(inner)) ++ texts(")") + else inner(tpe) ++ texts("[") ++ commas(typeList.map(inner)) ++ texts("]") + + case tp @ TypeRef(qual, typeName) => + qual match { + case r: RecursiveThis => texts(s"this.$typeName") + case _: TypeRepr | _: NoPrefix => link(tp.typeSymbol) + case other => noSupported(s"TypeRepr: $tp") + } + // convertTypeOrBoundsToReference(reflect)(qual) match { + // case TypeReference(label, link, xs, _) => TypeReference(typeName, link + "/" + label, xs, true) + // case EmptyReference => TypeReference(typeName, "", Nil, true) + // case _ if tp.typeSymbol.exists => + // tp.typeSymbol match { + // // NOTE: Only TypeRefs can reference ClassDefSymbols + // case sym if sym.isClassDef => //Need to be split because these types have their own file + // convertTypeOrBoundsToReference(reflect)(qual) match { + // case TypeReference(label, link, xs, _) => TypeReference(sym.name, link + "/" + label, xs, true) + // case EmptyReference if sym.name == "<root>" | sym.name == "_root_" => EmptyReference + // case EmptyReference => TypeReference(sym.name, "", Nil, true) + // case _ => throw 
Exception("Match error in SymRef/TypeOrBounds/ClassDef. This should not happen, please open an issue. " + convertTypeOrBoundsToReference(reflect)(qual)) + // } + + // // NOTE: This branch handles packages, which are now TypeRefs + // case sym if sym.isTerm || sym.isTypeDef => + // convertTypeOrBoundsToReference(reflect)(qual) match { + // case TypeReference(label, link, xs, _) => TypeReference(sym.name, link + "/" + label, xs) + // case EmptyReference if sym.name == "<root>" | sym.name == "_root_" => EmptyReference + // case EmptyReference => TypeReference(sym.name, "", Nil) + // case _ => throw Exception("Match error in SymRef/TypeOrBounds/Other. This should not happen, please open an issue. " + convertTypeOrBoundsToReference(reflect)(qual)) + // } + // case sym => throw Exception("Match error in SymRef. This should not happen, please open an issue. " + sym) + // } + // case _ => + // throw Exception("Match error in TypeRef. This should not happen, please open an issue. " + convertTypeOrBoundsToReference(reflect)(qual)) + // } + case tr @ TermRef(qual, typeName) => qual match { + case _ => link(tr.termSymbol) + } + // convertTypeOrBoundsToReference(reflect)(qual) match { + // case TypeReference(label, link, xs, _) => TypeReference(typeName + "$", link + "/" + label, xs) + // case EmptyReference => TypeReference(typeName, "", Nil) + // case _ => throw Exception("Match error in TermRef. This should not happen, please open an issue. " + convertTypeOrBoundsToReference(reflect)(qual)) + // } + + // NOTE: old SymRefs are now either TypeRefs or TermRefs - the logic here needs to be moved into above branches + // NOTE: _.symbol on *Ref returns its symbol + // case SymRef(symbol, typeOrBounds) => symbol match { + // } + // case _ => throw Exception("No match for type in conversion to Reference. This should not happen, please open an issue. 
" + tp) + case TypeBounds(low, hi) => + if(low == hi) texts(" = ") ++ inner(low) + else typeBound(low, low = true) ++ typeBound(hi, low = false) + + case NoPrefix() => Nil + + case MatchType(bond, sc, cases) => + val casesTexts = cases.flatMap { + case MatchTypeCase(from, to) => + texts(" case ") ++ inner(from) ++ texts(" => ") ++ inner(to) ++ texts("\n") + } + inner(sc) ++ texts(" match {\n") ++ casesTexts ++ texts("}") + + case TypeIdent(t) => texts(t) + + case ParamRef(TypeLambda(names, _, _), i) => texts(names.apply(i)) + + case RecursiveType(tp) => inner(tp) + + private def typeBound(t: TypeRepr, low: Boolean) = + val ignore = if(low) t.typeSymbol == defn.NothingClass else t.typeSymbol == defn.AnyClass + val prefix = text(if low then " >: " else " <: ") + t match { + case l: TypeLambda => prefix :: texts("(") ++ inner(l) ++ texts(")") + case p: ParamRef => prefix :: inner(p) + case other if !ignore => prefix :: inner(other) + case _ => Nil + } + diff --git a/scala3doc/src/dotty/dokka/tasty/comments/BaseConverter.scala b/scala3doc/src/dotty/dokka/tasty/comments/BaseConverter.scala new file mode 100644 index 000000000000..e028b4f08b6e --- /dev/null +++ b/scala3doc/src/dotty/dokka/tasty/comments/BaseConverter.scala @@ -0,0 +1,22 @@ +package dotty.dokka.tasty.comments + +import scala.jdk.CollectionConverters._ + +import org.jetbrains.dokka.model.{doc => dkkd} + +/** Quick'n'dirty class to remove some code duplication */ +trait BaseConverter { + + protected def withParsedQuery(queryStr: String)(thunk: Query => dkkd.DocTag): dkkd.DocTag = { + QueryParser(queryStr).tryReadQuery() match { + case Left(err) => + // TODO: for better experience we should show source location here + println("WARN: " + err.getMessage) + dkkd.A(List(dkk.text(err.getMessage)).asJava, Map("href" -> "#").asJava) + case Right(query) => + thunk(query) + } + } + + protected val SchemeUri = """[a-z]+:.*""".r +} diff --git a/scala3doc/src/dotty/dokka/tasty/comments/Cleaner.scala b/scala3doc/src/dotty/dokka/tasty/comments/Cleaner.scala new file mode 100644 index 000000000000..05e96a4bc939 --- /dev/null +++ b/scala3doc/src/dotty/dokka/tasty/comments/Cleaner.scala @@ -0,0 +1,28 @@ +package dotty.dokka.tasty.comments + +object Cleaner { + import Regexes._ + import java.util.regex.Matcher + + /** Prepares the comment for pre-parsing: removes documentation markers and + * extra whitespace, removes dangerous HTML and Javadoc tags, and splits it + * into lines. 
+ */ + def clean(comment: String): List[String] = { + def cleanLine(line: String): String = { + // Remove trailing whitespaces + TrailingWhitespace.replaceAllIn(line, "") match { + case CleanCommentLine(ctl) => ctl + case tl => tl + } + } + val strippedComment = comment.trim.stripPrefix("/*").stripSuffix("*/") + val safeComment = DangerousTags.replaceAllIn(strippedComment, { htmlReplacement(_) }) + val javadoclessComment = JavadocTags.replaceAllIn(safeComment, { javadocReplacement(_) }) + val markedTagComment = + SafeTags.replaceAllIn(javadoclessComment, { mtch => + Matcher.quoteReplacement(s"$safeTagMarker${mtch.matched}$safeTagMarker") + }) + markedTagComment.linesIterator.toList map (cleanLine) + } +} diff --git a/scala3doc/src/dotty/dokka/tasty/comments/CommentRegex.scala b/scala3doc/src/dotty/dokka/tasty/comments/CommentRegex.scala new file mode 100644 index 000000000000..58cb1b391aed --- /dev/null +++ b/scala3doc/src/dotty/dokka/tasty/comments/CommentRegex.scala @@ -0,0 +1,82 @@ +package dotty.dokka.tasty.comments + +import scala.util.matching.Regex + +object Regexes { + val TrailingWhitespace = """\s+$""".r + + /** The body of a line, dropping the (optional) start star-marker, + * one leading whitespace and all trailing whitespace + */ + val CleanCommentLine = + new Regex("""(?:\s*\*\s?\s?)?(.*)""") + + /** Dangerous HTML tags that should be replaced by something safer, + * such as wiki syntax, or that should be dropped + */ + val DangerousTags = + new Regex("""<(/?(div|ol|ul|li|h[1-6]|p))( [^>]*)?/?>|<!--.*-->""") + + /** Javadoc tags that should be replaced by something useful, such as wiki + * syntax, or that should be dropped. */ + val JavadocTags = + new Regex("""\{\@(code|docRoot|linkplain|link|literal|value)\p{Zs}*([^}]*)\}""") + + /** Maps a javadoc tag to a useful wiki replacement, or an empty string if it cannot be salvaged. */ + def javadocReplacement(mtch: Regex.Match): String = { + mtch.group(1) match { + case "code" => "<code>" + mtch.group(2) + "</code>" + case "docRoot" => "" + case "link" => "`[[" + mtch.group(2) + "]]`" + case "linkplain" => "[[" + mtch.group(2) + "]]" + case "literal" => "`" + mtch.group(2) + "`" + case "value" => "`" + mtch.group(2) + "`" + case _ => "" + } + } + + /** Maps a dangerous HTML tag to a safe wiki replacement, or an empty string + * if it cannot be salvaged. */ + def htmlReplacement(mtch: Regex.Match): String = mtch.group(1) match { + case "p" | "div" => "\n\n" + case "h1" => "\n= " + case "/h1" => " =\n" + case "h2" => "\n== " + case "/h2" => " ==\n" + case "h3" => "\n=== " + case "/h3" => " ===\n" + case "h4" | "h5" | "h6" => "\n==== " + case "/h4" | "/h5" | "/h6" => " ====\n" + case "li" => "\n * - " + case _ => "" + } + + /** Safe HTML tags that can be kept. */ + val SafeTags = + new Regex("""((&\w+;)|(&#\d+;)|(</?(abbr|acronym|address|area|a|bdo|big|blockquote|br|button|b|caption|cite|code|col|colgroup|dd|del|dfn|em|fieldset|form|hr|img|input|ins|i|kbd|label|legend|link|map|object|optgroup|option|param|pre|q|samp|select|small|span|strong|sub|sup|table|tbody|td|textarea|tfoot|th|thead|tr|tt|var)( [^>]*)?/?>))""") + + val safeTagMarker = '\u000E' + val endOfLine = '\u000A' + val endOfText = '\u0003' + + /** A Scaladoc tag not linked to a symbol and not followed by text */ + val SingleTagRegex = + new Regex("""\s*@(\S+)\s*""") + + /** A Scaladoc tag not linked to a symbol. Returns the name of the tag, and the rest of the line. */ + val SimpleTagRegex = + new Regex("""\s*@(\S+)\s+(.*)""") + + /** A Scaladoc tag linked to a symbol. 
Returns the name of the tag, the name + * of the symbol, and the rest of the line. */ + val SymbolTagRegex = + new Regex("""\s*@(param|tparam|throws|groupdesc|groupname|groupprio)\s+(\S*)\s*(.*)""") + + /** The start of a Scaladoc code block */ + val CodeBlockStartRegex = + new Regex("""(.*?)((?:\{\{\{)|(?:\u000E<pre(?: [^>]*)?>\u000E))(.*)""") + + /** The end of a Scaladoc code block */ + val CodeBlockEndRegex = + new Regex("""(.*?)((?:\}\}\})|(?:\u000E</pre>\u000E))(.*)""") +} diff --git a/scala3doc/src/dotty/dokka/tasty/comments/Comments.scala b/scala3doc/src/dotty/dokka/tasty/comments/Comments.scala new file mode 100644 index 000000000000..cfccb9e8a982 --- /dev/null +++ b/scala3doc/src/dotty/dokka/tasty/comments/Comments.scala @@ -0,0 +1,172 @@ +package dotty.dokka.tasty.comments + +import scala.collection.immutable.SortedMap + +import org.jetbrains.dokka.model.{doc => dkkd} + +import com.vladsch.flexmark.util.{ast => mdu} +import com.vladsch.flexmark.formatter.Formatter +import com.vladsch.flexmark.util.options.MutableDataSet + +import scala.tasty.Reflection +class Repr(val r: Reflection)(val sym: r.Symbol) + +case class Comment ( + body: dkkd.DocTag, + short: Option[dkkd.DocTag], + authors: List[dkkd.DocTag], + see: List[dkkd.DocTag], + result: Option[dkkd.DocTag], + throws: SortedMap[String, (dkkd.DocTag, dkkd.DocTag)], + valueParams: SortedMap[String, dkkd.DocTag], + typeParams: SortedMap[String, dkkd.DocTag], + version: Option[dkkd.DocTag], + since: Option[dkkd.DocTag], + todo: List[dkkd.DocTag], + deprecated: Option[dkkd.DocTag], + note: List[dkkd.DocTag], + example: List[dkkd.DocTag], + constructor: Option[dkkd.DocTag], + group: Option[dkkd.DocTag], + // see comment in PreparsedComment below regarding these + groupDesc: SortedMap[String, dkkd.DocTag], + groupNames: SortedMap[String, dkkd.DocTag], + groupPrio: SortedMap[String, dkkd.DocTag], + /** List of conversions to hide - containing e.g: `scala.Predef.FloatArrayOps` */ + hideImplicitConversions: List[dkkd.DocTag] +) + +case class PreparsedComment ( + body: String, + authors: List[String], + see: List[String], + result: List[String], + throws: SortedMap[String, String], + valueParams: SortedMap[String, String], + typeParams: SortedMap[String, String], + version: List[String], + since: List[String], + todo: List[String], + deprecated: List[String], + note: List[String], + example: List[String], + constructor: List[String], + group: List[String], + // NOTE these don't need to be sorted in principle, but code is nicer if they are + groupDesc: SortedMap[String, String], + groupNames: SortedMap[String, String], + groupPrio: SortedMap[String, String], + hideImplicitConversions: List[String], + shortDescription: List[String], + syntax: List[String], +) + +case class DokkaCommentBody(summary: Option[dkkd.DocTag], body: dkkd.DocTag) + +trait MarkupConversion[T] { + protected def linkedExceptions(m: SortedMap[String, String]): SortedMap[String, (dkkd.DocTag, dkkd.DocTag)] + protected def stringToMarkup(str: String): T + protected def markupToDokka(t: T): dkkd.DocTag + protected def markupToDokkaCommentBody(t: T): DokkaCommentBody + protected def filterEmpty(xs: List[String]): List[T] + protected def filterEmpty(xs: SortedMap[String, String]): SortedMap[String, T] + + private def single(annot: String, xs: List[String], filter: Boolean = true): Option[T] = + (if (filter) filterEmpty(xs) else xs.map(stringToMarkup)) match { + case x :: xs => + Some(x) + case _ => None + } + + final def parse(preparsed: PreparsedComment): Comment = + val 
body = markupToDokkaCommentBody(stringToMarkup(preparsed.body)) + Comment( + body = body.body, + short = body.summary, + authors = filterEmpty(preparsed.authors).map(markupToDokka), + see = filterEmpty(preparsed.see).map(markupToDokka), + result = single("@result", preparsed.result).map(markupToDokka), + throws = linkedExceptions(preparsed.throws), + valueParams = filterEmpty(preparsed.valueParams).view.mapValues(markupToDokka).to(SortedMap), + typeParams = filterEmpty(preparsed.typeParams).view.mapValues(markupToDokka).to(SortedMap), + version = single("@version", preparsed.version).map(markupToDokka), + since = single("@since", preparsed.since).map(markupToDokka), + todo = filterEmpty(preparsed.todo).map(markupToDokka), + deprecated = single("@deprecated", preparsed.deprecated, filter = false).map(markupToDokka), + note = filterEmpty(preparsed.note).map(markupToDokka), + example = filterEmpty(preparsed.example).map(markupToDokka), + constructor = single("@constructor", preparsed.constructor).map(markupToDokka), + group = single("@group", preparsed.group).map(markupToDokka), + groupDesc = filterEmpty(preparsed.groupDesc).view.mapValues(markupToDokka).to(SortedMap), + groupNames = filterEmpty(preparsed.groupNames).view.mapValues(markupToDokka).to(SortedMap), + groupPrio = filterEmpty(preparsed.groupPrio).view.mapValues(markupToDokka).to(SortedMap), + hideImplicitConversions = filterEmpty(preparsed.hideImplicitConversions).map(markupToDokka) + ) +} + +class MarkdownCommentParser(repr: Repr) + extends MarkupConversion[mdu.Document] { + + def stringToMarkup(str: String) = + MarkdownParser.parseToMarkdown(str) + + def markupToDokka(md: mdu.Document) = + MarkdownConverter(repr).convertDocument(md) + + def markupToDokkaCommentBody(md: mdu.Document) = + val converter = MarkdownConverter(repr) + DokkaCommentBody( + summary = converter.extractAndConvertSummary(md), + body = converter.convertDocument(md), + ) + + def linkedExceptions(m: SortedMap[String, String]) = { + val c = MarkdownConverter(repr) + m.map { case (targetStr, body) => + targetStr -> (c.resolveLinkQuery(targetStr, ""), dkk.text(body)) + } + } + + def filterEmpty(xs: List[String]) = { + xs.map(_.trim) + .filterNot(_.isEmpty) + .map(stringToMarkup) + } + + def filterEmpty(xs: SortedMap[String,String]) = + xs.view.mapValues(_.trim) + .filterNot { case (_, v) => v.isEmpty } + .mapValues(stringToMarkup).to(SortedMap) +} + +case class WikiCommentParser(repr: Repr) + extends MarkupConversion[wiki.Body] { + + def stringToMarkup(str: String) = + wiki.Parser(str).document() + + def markupToDokka(body: wiki.Body) = + wiki.Converter(repr).convertBody(body) + + def markupToDokkaCommentBody(body: wiki.Body) = + val converter = wiki.Converter(repr) + DokkaCommentBody( + summary = body.summary.map(converter.convertBody), + body = converter.convertBody(body), + ) + + def linkedExceptions(m: SortedMap[String, String]) = { + m.map { case (targetStr, body) => + val c = wiki.Converter(repr) + targetStr -> (c.resolveLinkQuery(targetStr, None), c.convertBody(stringToMarkup(body))) + } + } + + def filterEmpty(xs: List[String]) = + xs.map(stringToMarkup) + + def filterEmpty(xs: SortedMap[String,String]) = + xs.view.mapValues(stringToMarkup).to(SortedMap) + .filterNot { case (_, v) => v.blocks.isEmpty } + +} diff --git a/scala3doc/src/dotty/dokka/tasty/comments/Emitter.scala b/scala3doc/src/dotty/dokka/tasty/comments/Emitter.scala new file mode 100644 index 000000000000..2b7ea003f0b2 --- /dev/null +++ b/scala3doc/src/dotty/dokka/tasty/comments/Emitter.scala 
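A minimal sketch, not part of this patch, of how the comment pipeline above fits together: a raw doc comment is cleaned, pre-parsed into tag sections, and then handed to one of the two MarkupConversion implementations. The useWiki flag is an invented stand-in for illustration; the actual wiki-versus-markdown selection lives elsewhere in scala3doc and assumes the scala3doc classes are on the classpath.

// Editorial sketch, not part of this diff. `useWiki` is a made-up flag;
// the real syntax-selection logic is not shown in this patch.
import dotty.dokka.tasty.comments.*

def parseDocComment(repr: Repr, raw: String, useWiki: Boolean): Comment =
  val preparsed = Preparser.preparse(Cleaner.clean(raw))
  val parser: MarkupConversion[?] =
    if useWiki then WikiCommentParser(repr) else MarkdownCommentParser(repr)
  parser.parse(preparsed)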
@@ -0,0 +1,18 @@ +package dotty.dokka.tasty.comments + +import scala.collection.mutable.ArrayBuffer + +object Emitter { + opaque type Emitter[T] = ArrayBuffer[T] + + def collect[T](thunk: Emitter[T] ?=> Unit): Seq[T] = { + val bld = new ArrayBuffer[T] + thunk(using bld) + bld.toSeq + } + + def emit[T](using e: Emitter[T])(t: T) = e.addOne(t) + + def lastEmittedItem[T](using e: Emitter[T]) = + if e.isEmpty then None else Some(e.last) +} diff --git a/scala3doc/src/dotty/dokka/tasty/comments/MarkdownConverter.scala b/scala3doc/src/dotty/dokka/tasty/comments/MarkdownConverter.scala new file mode 100644 index 000000000000..35b5450a5808 --- /dev/null +++ b/scala3doc/src/dotty/dokka/tasty/comments/MarkdownConverter.scala @@ -0,0 +1,205 @@ +package dotty.dokka.tasty.comments + +import scala.jdk.CollectionConverters._ +import scala.tasty.Reflection + +import org.jetbrains.dokka.model.{doc => dkkd} +import com.vladsch.flexmark.{ast => mda} +import com.vladsch.flexmark.util.{ast => mdu} +import com.vladsch.flexmark.ext.gfm.{tables => mdt} +import com.vladsch.flexmark.ext.{wikilink => mdw} + +import dotty.dokka.tasty.SymOps + +class MarkdownConverter(val repr: Repr) extends BaseConverter { + import Emitter._ + + // makeshift support for not passing an owner + // see same in wiki.Converter + val r: repr.r.type = if repr == null then null else repr.r + val owner: r.Symbol = if repr == null then null.asInstanceOf[r.Symbol] else repr.sym + + object SymOps extends SymOps[r.type](r) + import SymOps._ + + def convertDocument(doc: mdu.Document): dkkd.DocTag = { + val res = collect { + doc.getChildIterator.asScala.foreach(emitConvertedNode) + } + + dkkd.P(res.asJava, kt.emptyMap) + } + + def convertChildren(n: mdu.Node): Seq[dkkd.DocTag] = + collect { + n.getChildIterator.asScala.foreach(emitConvertedNode) + } + + def emitConvertedNode(n: mdu.Node)(using Emitter[dkkd.DocTag]): Unit = n match { + case n: mda.Paragraph => + emit(dkkd.P(convertChildren(n).asJava, kt.emptyMap)) + + case n: mda.Heading => emit(n.getLevel match { + case 1 => dkkd.H1(List(dkk.text(n.getText().toString)).asJava, kt.emptyMap) + case 2 => dkkd.H2(List(dkk.text(n.getText().toString)).asJava, kt.emptyMap) + case 3 => dkkd.H3(List(dkk.text(n.getText().toString)).asJava, kt.emptyMap) + case 4 => dkkd.H4(List(dkk.text(n.getText().toString)).asJava, kt.emptyMap) + case 5 => dkkd.H5(List(dkk.text(n.getText().toString)).asJava, kt.emptyMap) + case 6 => dkkd.H6(List(dkk.text(n.getText().toString)).asJava, kt.emptyMap) + }) + + case n: mda.Text => emit(dkk.text(n.getChars.toString)) + case n: mda.TextBase => + // TextBase is a wrapper for other nodes that for unclear reasons + // sometimes gets emitted (`AutoLink`s seem to be involved) + n.getChildren.asScala.foreach(n => emitConvertedNode(n)) + + // case n: mda.HtmlInline => dkkd.Br.INSTANCE + case n: mda.Emphasis => + // TODO doesn't actually show up in output, why? + emit(n.getOpeningMarker.toString match { + case "*" => dkkd.B(convertChildren(n).asJava, kt.emptyMap) + case "_" => dkkd.I(convertChildren(n).asJava, kt.emptyMap) + }) + + case n: mda.StrongEmphasis => + // TODO doesn't actually show up in output, why? + // TODO distinguish between strong and regular emphasis? 
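The Emitter helpers above drive both the markdown and the wiki converter; a tiny sketch, not part of this patch, of the collect/emit round trip:

// Editorial sketch, not part of this diff.
import dotty.dokka.tasty.comments.Emitter.{collect, emit}

val collected: Seq[String] = collect {
  emit("first")
  emit("second")
}
// collected == Seq("first", "second")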
+ emit(n.getOpeningMarker.toString match { + case "**" => dkkd.B(convertChildren(n).asJava, kt.emptyMap) + case "__" => dkkd.I(convertChildren(n).asJava, kt.emptyMap) + }) + + case n: mda.AutoLink => + val url = n.getUrl.toString + emit(dkkd.A(List(dkk.text(url)).asJava, Map("href" -> url).asJava)) + + case n: mda.Link => + val body: String = n.getText.toString + val target: String = n.getUrl.toString + def resolveBody(default: String) = + val resolved = if !body.isEmpty then body else default + List(dkk.text(resolved)).asJava + + emit(dkkd.A(resolveBody(default = target), Map("href" -> target).asJava)) + + case n: mdw.WikiLink => + val (target, body) = + val chars = n.getChars.toString.substring(2, n.getChars.length - 2) + MarkdownConverter.splitWikiLink(chars) + + def resolveBody(default: String) = + val resolved = if !body.isEmpty then body else default + List(dkk.text(resolved)).asJava + + emit(target match { + case SchemeUri() => + dkkd.A(resolveBody(default = target), Map("href" -> target).asJava) + case _ => + resolveLinkQuery(target, body) + }) + + case n: mda.Code => + emit(dkkd.CodeInline(convertChildren(n).asJava, kt.emptyMap)) + case n: mda.IndentedCodeBlock => + val bld = new StringBuilder + n.getContentLines.asScala.foreach(bld append _) + emit(dkkd.CodeBlock(List(dkk.text(bld.toString)).asJava, kt.emptyMap)) + case n: mda.FencedCodeBlock => + // n.getInfo - where to stick this? + emit(dkkd.CodeBlock(convertChildren(n).asJava, kt.emptyMap)) + + case n: mda.ListBlock => + val c = convertChildren(n).asJava + emit(n match { + case _: mda.OrderedList => dkkd.Ol(c, kt.emptyMap) + case _ => dkkd.Ul(c, kt.emptyMap) + }) + case n: mda.ListItem => + emit(dkkd.Li(convertChildren(n).asJava, kt.emptyMap)) + + case n: mda.BlockQuote => + emit(dkkd.BlockQuote(convertChildren(n).asJava, kt.emptyMap)) + + case n: mdt.TableBlock => + // the structure is: + // TableBlock { + // TableHeader { + // TableRow { + // TableCell { ... } + // TableCell { ... } + // } + // } + // TableSeparator { TableRow { ... } } + // TableBody { TableRow { ... } ... } + // } + val header = + n.getFirstChild.getChildIterator.asScala.map { nn => + dkkd.Tr( + nn.getChildIterator.asScala.map { nnn => + dkkd.Th(convertChildren(nnn).asJava, kt.emptyMap) + }.toSeq.asJava, + kt.emptyMap + ) + } + + val body = + n.getChildIterator.asScala.drop(2).next.getChildIterator.asScala.map { nn => + dkkd.Tr( + nn.getChildIterator.asScala.map { nnn => + dkkd.Td(convertChildren(nnn).asJava, kt.emptyMap) + }.toSeq.asJava, + kt.emptyMap + ) + } + + emit(dkkd.Table( + (header ++ body).toSeq.asJava, + kt.emptyMap + )) + + case _: mda.SoftLineBreak => emit(dkkd.Br.INSTANCE) + + case _ => + println(s"!!! DEFAULTING @ ${n.getNodeName}") + emit(dkkd.P( + List[dkkd.DocTag]( + dkkd.Span( + List(dkk.text(s"!!! 
DEFAULTING @ ${n.getNodeName}")).asJava, + kt.emptyMap, + ), + dkk.text(MarkdownParser.renderToText(n)) + ).asJava, + kt.emptyMap[String, String] + )) + } + + def extractAndConvertSummary(doc: mdu.Document): Option[dkkd.DocTag] = + doc.getChildIterator.asScala.collectFirst { case p: mda.Paragraph => + dkkd.P(convertChildren(p).asJava, kt.emptyMap) + } + + def resolveLinkQuery(queryStr: String, body: String): dkkd.DocTag = { + def resolveBody(default: String) = + val resolved = if !body.isEmpty then body else default + List(dkk.text(resolved)).asJava + + withParsedQuery(queryStr) { query => + MemberLookup.lookup(using r)(query, owner) match { + case Some((sym, targetText)) => + dkkd.DocumentationLink(sym.dri, resolveBody(default = targetText), kt.emptyMap) + case None => + dkkd.A(resolveBody(default = query.join), Map("href" -> "#").asJava) + } + } + } +} + +object MarkdownConverter { + def splitWikiLink(chars: String): (String, String) = + // split on a space which is not backslash escaped (regex uses "zero-width negative lookbehind") + chars.split("(?<!\\\\) ", /*max*/ 2) match { + case Array(target) => (target, "") + case Array(target, userText) => (target, userText) + } +} diff --git a/scala3doc/src/dotty/dokka/tasty/comments/MarkdownParser.scala b/scala3doc/src/dotty/dokka/tasty/comments/MarkdownParser.scala new file mode 100644 index 000000000000..92915a86910d --- /dev/null +++ b/scala3doc/src/dotty/dokka/tasty/comments/MarkdownParser.scala @@ -0,0 +1,48 @@ +package dotty.dokka.tasty.comments + +import java.util.{ Arrays } + +import com.vladsch.flexmark.util.{ast => mdu} +import com.vladsch.flexmark.formatter.Formatter +import com.vladsch.flexmark.parser.Parser +import com.vladsch.flexmark.util.sequence.CharSubSequence +import com.vladsch.flexmark.parser.ParserEmulationProfile +import com.vladsch.flexmark.ext.gfm.tables.TablesExtension +import com.vladsch.flexmark.ext.gfm.strikethrough.StrikethroughExtension +import com.vladsch.flexmark.ext.gfm.tasklist.TaskListExtension +import com.vladsch.flexmark.ext.emoji.EmojiExtension +import com.vladsch.flexmark.ext.autolink.AutolinkExtension +import com.vladsch.flexmark.ext.anchorlink.AnchorLinkExtension +import com.vladsch.flexmark.ext.yaml.front.matter.YamlFrontMatterExtension +import com.vladsch.flexmark.ext.wikilink.WikiLinkExtension +import com.vladsch.flexmark.util.options.{ DataHolder, MutableDataSet } + +object MarkdownParser { + + val markdownOptions: DataHolder = + new MutableDataSet() + .setFrom(ParserEmulationProfile.KRAMDOWN.getOptions) + .set(Parser.EXTENSIONS, Arrays.asList( + TablesExtension.create(), + TaskListExtension.create(), + AutolinkExtension.create(), + AnchorLinkExtension.create(), + EmojiExtension.create(), + YamlFrontMatterExtension.create(), + StrikethroughExtension.create(), + WikiLinkExtension.create(), + )) + .set(EmojiExtension.ROOT_IMAGE_PATH, + "https://github.global.ssl.fastly.net/images/icons/emoji/") + .set(WikiLinkExtension.LINK_ESCAPE_CHARS, "") + + val RENDERER = Formatter.builder(markdownOptions).build() + + def parseToMarkdown(text: String): mdu.Document = + Parser.builder(markdownOptions) + .build.parse(text).asInstanceOf[mdu.Document] + + + def renderToText(node: mdu.Node): String = + RENDERER.render(node) +} diff --git a/scala3doc/src/dotty/dokka/tasty/comments/MemberLookup.scala b/scala3doc/src/dotty/dokka/tasty/comments/MemberLookup.scala new file mode 100644 index 000000000000..85b77eb378a7 --- /dev/null +++ b/scala3doc/src/dotty/dokka/tasty/comments/MemberLookup.scala @@ -0,0 +1,126 @@ +package 
dotty.dokka.tasty.comments + +import scala.tasty.Reflection + +trait MemberLookup { + + def lookup(using r: Reflection)( + query: Query, + owner: r.Symbol, + ): Option[(r.Symbol, String)] = lookupOpt(query, Some(owner)) + + def lookupOpt(using r: Reflection)( + query: Query, + ownerOpt: Option[r.Symbol], + ): Option[(r.Symbol, String)] = { + + def nearestClass(sym: r.Symbol): r.Symbol = + if sym.isClassDef then sym else nearestClass(sym.owner) + + def nearestPackage(sym: r.Symbol): r.Symbol = + if sym.flags.is(r.Flags.Package) then sym else nearestPackage(sym.owner) + + def nearestMembered(sym: r.Symbol): r.Symbol = + if sym.isClassDef || sym.flags.is(r.Flags.Package) then sym else nearestMembered(sym.owner) + + val res = + ownerOpt match { + case Some(owner) => + val nearest = nearestMembered(owner) + val nearestCls = nearestClass(owner) + val nearestPkg = nearestPackage(owner) + query match { + case Query.StrictMemberId(id) => localLookup(id, nearest).map(_ -> id) + case Query.Id(id) => + (localLookup(id, nearest) orElse localLookup(id, nearestPkg)).map(_ -> id) + case Query.QualifiedId(Query.Qual.This, _, rest) => + downwardLookup(rest.asList, nearestCls).map(_ -> rest.join) + case Query.QualifiedId(Query.Qual.Package, _, rest) => + downwardLookup(rest.asList, nearestPkg).map(_ -> rest.join) + case Query.QualifiedId(Query.Qual.Id(id), _, rest) if id == nearestCls.name => + downwardLookup(rest.asList, nearestCls).map(_ -> rest.join) + case Query.QualifiedId(Query.Qual.Id(id), _, rest) if id == nearestPkg.name => + downwardLookup(rest.asList, nearestPkg).map(_ -> rest.join) + case query: Query.QualifiedId => downwardLookup(query.asList, r.defn.RootPackage).map(_ -> query.join) + } + + case None => + downwardLookup(query.asList, r.defn.RootPackage).map(_ -> query.join) + } + + // println(s"looked up `$query` in ${owner.show}[${owner.flags.show}] as ${res.map(_.show)}") + + res + } + + private def hackMembersOf(using r: Reflection)(rsym: r.Symbol) = { + import dotty.tools.dotc + given dotc.core.Contexts.Context = r.rootContext.asInstanceOf + val sym = rsym.asInstanceOf[dotc.core.Symbols.Symbol] + val members = sym.info.decls.iterator + // println(s"members of ${sym.show} : ${members.map(_.show).mkString(", ")}") + members.asInstanceOf[Iterator[r.Symbol]] + } + + private def localLookup(using r: Reflection)(query: String, owner: r.Symbol): Option[r.Symbol] = { + import r._ + + inline def whenExists(s: Symbol)(otherwise: => Option[r.Symbol]): Option[r.Symbol] = + if s.exists then Some(s) else otherwise + + def findMatch(syms: Iterator[r.Symbol]): Option[r.Symbol] = { + // Scaladoc overloading support allows terminal * (and they're meaningless) + val cleanQuery = query.stripSuffix("*") + val (q, forceTerm, forceType) = + if cleanQuery endsWith "$" then + (cleanQuery.init, true, false) + else if cleanQuery endsWith "!" 
then + (cleanQuery.init, false, true) + else + (cleanQuery, false, false) + + def matches(s: r.Symbol): Boolean = + s.name == q && ( + if forceTerm then s.isTerm + else if forceType then s.isType + else true + ) + + def hackResolveModule(s: r.Symbol): r.Symbol = + if s.flags.is(Flags.Object) then s.moduleClass else s + + val matched = syms.find(matches) + + // def showMatched() = matched.foreach { s => + // println(s">>> ${s.show}") + // println(s">>> ${s.pos}") + // println(s">>> [${s.flags.show}]") + // println(s">>> {${if s.isTerm then "isterm" else ""};${if s.isType then "istype" else ""}}") + // println(s">>> moduleClass = ${if hackResolveModule(s) == s then hackResolveModule(s).show else "none"}") + // } + // println(s"localLookup for class ${owner.show} of `$q`{forceTerm=$forceTerm}") + // showMatched() + + matched.map(hackResolveModule) + } + + if owner.isPackageDef then + findMatch(hackMembersOf(owner)) + else + owner.tree match { + case tree: r.ClassDef => + findMatch(tree.body.iterator.collect { case t: r.Definition => t.symbol }) + case _ => + findMatch(hackMembersOf(owner)) + } + } + + private def downwardLookup(using r: Reflection)(query: List[String], owner: r.Symbol): Option[r.Symbol] = + query match { + case Nil => None + case q :: Nil => localLookup(q, owner) + case q :: qs => localLookup(q, owner).flatMap(downwardLookup(qs, _)) + } +} + +object MemberLookup extends MemberLookup diff --git a/scala3doc/src/dotty/dokka/tasty/comments/Preparser.scala b/scala3doc/src/dotty/dokka/tasty/comments/Preparser.scala new file mode 100644 index 000000000000..1cad1828c4e7 --- /dev/null +++ b/scala3doc/src/dotty/dokka/tasty/comments/Preparser.scala @@ -0,0 +1,197 @@ +package dotty.dokka.tasty.comments + +import scala.collection.mutable +import scala.collection.immutable.SortedMap +import scala.util.matching.Regex + +object Preparser { + import Regexes._ + + /** Parses a raw comment string into a `Comment` object. */ + def preparse( + comment: List[String], + ): PreparsedComment = { + + /** Parses a comment (in the form of a list of lines) to a `Comment` + * instance, recursively on lines. To do so, it splits the whole comment + * into main body and tag bodies, then runs the `WikiParser` on each body + * before creating the comment instance. + * + * @param docBody The body of the comment parsed until now. + * @param tags All tags parsed until now. + * @param lastTagKey The last parsed tag, or `None` if the tag section + * hasn't started. Lines that are not tagged are part + * of the previous tag or, if none exists, of the body. + * @param remaining The lines that must still recursively be parsed. + * @param inCodeBlock Whether the next line is part of a code block (in + * which no tags must be read). 
+ */ + def go( + docBody: StringBuilder, + tags: Map[TagKey, List[String]], + lastTagKey: Option[TagKey], + remaining: List[String], + inCodeBlock: Boolean + ): PreparsedComment = remaining match { + case CodeBlockStartRegex(before, marker, after) :: ls if !inCodeBlock => + if (!before.trim.isEmpty && !after.trim.isEmpty) + go(docBody, tags, lastTagKey, before :: marker :: after :: ls, inCodeBlock = false) + else if (!before.trim.isEmpty) + go(docBody, tags, lastTagKey, before :: marker :: ls, inCodeBlock = false) + else if (!after.trim.isEmpty) + go(docBody, tags, lastTagKey, marker :: after :: ls, inCodeBlock = true) + else lastTagKey match { + case Some(key) => + val value = + ((tags get key): @unchecked) match { + case Some(b :: bs) => (b + endOfLine + marker) :: bs + case None => oops("lastTagKey set when no tag exists for key") + } + go(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock = true) + case None => + go(docBody append endOfLine append marker, tags, lastTagKey, ls, inCodeBlock = true) + } + + case CodeBlockEndRegex(before, marker, after) :: ls => + if (!before.trim.isEmpty && !after.trim.isEmpty) + go(docBody, tags, lastTagKey, before :: marker :: after :: ls, inCodeBlock = true) + if (!before.trim.isEmpty) + go(docBody, tags, lastTagKey, before :: marker :: ls, inCodeBlock = true) + else if (!after.trim.isEmpty) + go(docBody, tags, lastTagKey, marker :: after :: ls, inCodeBlock = false) + else lastTagKey match { + case Some(key) => + val value = + ((tags get key): @unchecked) match { + case Some(b :: bs) => (b + endOfLine + marker) :: bs + case None => oops("lastTagKey set when no tag exists for key") + } + go(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock = false) + case None => + go(docBody append endOfLine append marker, tags, lastTagKey, ls, inCodeBlock = false) + } + + + case SymbolTagRegex(name, sym, body) :: ls if !inCodeBlock => + val key = SymbolTagKey(name, sym) + val value = body :: tags.getOrElse(key, Nil) + go(docBody, tags + (key -> value), Some(key), ls, inCodeBlock) + + case SimpleTagRegex(name, body) :: ls if !inCodeBlock => + val key = SimpleTagKey(name) + val value = body :: tags.getOrElse(key, Nil) + go(docBody, tags + (key -> value), Some(key), ls, inCodeBlock) + + + case SingleTagRegex(name) :: ls if !inCodeBlock => + val key = SimpleTagKey(name) + val value = "" :: tags.getOrElse(key, Nil) + go(docBody, tags + (key -> value), Some(key), ls, inCodeBlock) + + + case line :: ls if lastTagKey.isDefined => + val newtags = if !line.isEmpty then { + val key = lastTagKey.get + val value = + ((tags get key): @unchecked) match { + case Some(b :: bs) => (b + endOfLine + line) :: bs + case None => oops("lastTagKey set when no tag exists for key") + } + tags + (key -> value) + } else tags + go(docBody, newtags, lastTagKey, ls, inCodeBlock) + + + case line :: ls => + if docBody.length > 0 then docBody.append(endOfLine) + docBody.append(line) + go(docBody, tags, lastTagKey, ls, inCodeBlock) + + + case Nil => + // Take the {inheritance, content} diagram keys aside, as it doesn't need any parsing + val inheritDiagramTag = SimpleTagKey("inheritanceDiagram") + val contentDiagramTag = SimpleTagKey("contentDiagram") + + val inheritDiagramText: List[String] = tags.get(inheritDiagramTag) match { + case Some(list) => list + case None => List.empty + } + + val contentDiagramText: List[String] = tags.get(contentDiagramTag) match { + case Some(list) => list + case None => List.empty + } + + val stripTags = List(inheritDiagramTag, contentDiagramTag, 
SimpleTagKey("template"), SimpleTagKey("documentable")) + val tagsWithoutDiagram = tags.filterNot(pair => stripTags.contains(pair._1)) + + val bodyTags: mutable.Map[TagKey, List[String]] = + mutable.Map((tagsWithoutDiagram).toSeq: _*) + + def allTags(key: SimpleTagKey): List[String] = + (bodyTags remove key).getOrElse(Nil).reverse + + def allSymsOneTag(key: TagKey, filterEmpty: Boolean = true): SortedMap[String, String] = { + val keys: Seq[SymbolTagKey] = + bodyTags.keys.toSeq flatMap { + case stk: SymbolTagKey if (stk.name == key.name) => Some(stk) + case stk: SimpleTagKey if (stk.name == key.name) => + // dottydoc.println(s"$span: tag '@${stk.name}' must be followed by a symbol name") + None + case _ => None + } + val pairs: Seq[(String, String)] = + for (key <- keys) yield { + val bs = (bodyTags remove key).get + // if (bs.length > 1) + // dottydoc.println(s"$span: only one '@${key.name}' tag for symbol ${key.symbol} is allowed") + (key.symbol, bs.head) + } + SortedMap.empty[String, String] ++ pairs + } + + val cmt = PreparsedComment( + body = docBody.toString, + authors = allTags(SimpleTagKey("author")), + see = allTags(SimpleTagKey("see")), + result = allTags(SimpleTagKey("return")), + throws = allSymsOneTag(SimpleTagKey("throws")), + valueParams = allSymsOneTag(SimpleTagKey("param")), + typeParams = allSymsOneTag(SimpleTagKey("tparam")), + version = allTags(SimpleTagKey("version")), + since = allTags(SimpleTagKey("since")), + todo = allTags(SimpleTagKey("todo")), + deprecated = allTags(SimpleTagKey("deprecated")), + note = allTags(SimpleTagKey("note")), + example = allTags(SimpleTagKey("example")), + constructor = allTags(SimpleTagKey("constructor")), + group = allTags(SimpleTagKey("group")), + groupDesc = allSymsOneTag(SimpleTagKey("groupdesc")), + groupNames = allSymsOneTag(SimpleTagKey("groupname")), + groupPrio = allSymsOneTag(SimpleTagKey("groupprio")), + hideImplicitConversions = allTags(SimpleTagKey("hideImplicitConversion")), + shortDescription = allTags(SimpleTagKey("shortDescription")), + syntax = allTags(SimpleTagKey("syntax")), + ) + + cmt + } + + go(new StringBuilder(comment.size), Map.empty, None, comment, inCodeBlock = false) + } + + /** A key used for a tag map. The key is built from the name of the tag and + * from the linked symbol if the tag has one. + * Equality on tag keys is structural. */ + private sealed abstract class TagKey { + def name: String + } + + private case class SimpleTagKey(name: String) extends TagKey + private case class SymbolTagKey(name: String, symbol: String) extends TagKey + + /** Something that should not have happened, happened, and Scaladoc should exit. 
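A small sketch, not part of this patch, of what preparse extracts from an already-cleaned comment; the sample input is invented for illustration.

// Editorial sketch, not part of this diff.
import dotty.dokka.tasty.comments.Preparser

val pre = Preparser.preparse(List(
  "Adds two numbers.",
  "@param x the first operand",
  "@param y the second operand",
  "@return the sum of `x` and `y`"
))
// pre.body        == "Adds two numbers."
// pre.valueParams == SortedMap("x" -> "the first operand", "y" -> "the second operand")
// pre.result      == List("the sum of `x` and `y`")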
*/ + private def oops(msg: String): Nothing = + throw new IllegalArgumentException("program logic: " + msg) +} diff --git a/scala3doc/src/dotty/dokka/tasty/comments/Queries.scala b/scala3doc/src/dotty/dokka/tasty/comments/Queries.scala new file mode 100644 index 000000000000..cb7a3700a417 --- /dev/null +++ b/scala3doc/src/dotty/dokka/tasty/comments/Queries.scala @@ -0,0 +1,159 @@ +package dotty.dokka.tasty.comments + +sealed trait Query { + def asList: List[String] = this match { + case Query.StrictMemberId(id) => id :: Nil + case Query.Id(id) => id :: Nil + case Query.QualifiedId(qual, _, rest) => qual.asString :: rest.asList + } + + def join: String = + def go(sb: StringBuilder, segment: Query): String = segment match { + case Query.StrictMemberId(id) => + sb ++= id + sb.toString + case Query.Id(id) => + sb ++= id + sb.toString + case Query.QualifiedId(qual, sep, rest) => + sb ++= qual.asString + sb += sep + go(sb, rest) + } + go(new StringBuilder, this) +} + +sealed trait QuerySegment extends Query +object Query { + case class StrictMemberId(id: String) extends Query + case class Id(id: String) extends QuerySegment + case class QualifiedId(id: Qual, sep: Char, rest: QuerySegment) extends QuerySegment + + enum Qual { + case Id(id: String) + case This + case Package + + def asString: String = this match { + case Qual.This => "this" + case Qual.Package => "package" + case Qual.Id(id) => id + } + } +} + +class QueryParser(val query: CharSequence) { + private var idx = 0 + private var bld: StringBuilder = StringBuilder() + + def tryReadQuery(): Either[QueryParseException, Query] = + try Right(readQuery()) catch { + case ex : QueryParseException => Left(ex) + } + + def readQuery(): Query = { + assertBounds("expected start of query") + if lookingAt('#') then { + popCh() + val res = readIdentifier().asString + Query.StrictMemberId(res) + } else readSegmentedQuery() + } + + def readSegmentedQuery(): QuerySegment = { + val id = readIdentifier() + if atEnd() || lookingAt('(') || lookingAt('[') then { + Query.Id(id.asString) + } else { + val ch = popCh() + if ch == '.' || ch == '#' + then Query.QualifiedId(id, ch, readSegmentedQuery()) + else err(s"expected . or #, instead saw: '$ch'") + } + } + + def readIdentifier(): Query.Qual = { + assertBounds("expected start of identifier") + if lookingAt('`') then { + popCh() + readQuotedIdentifier() + } else readSimpleIdentifier() + } + + def readSimpleIdentifier(): Query.Qual = { + def atEndOfId(): Boolean = { + var escaped = false + atEnd() || { + lookingAt('\\') && { + popCh() + escaped = true + // NOTE: in principle we should never be at the end here, since + // backslashes are always followed by another char. Ideally we'd just + // throw an exception here, but that seems bad for someone who just + // wants some documentation generated. 
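A sketch, not part of this patch, of the link-query grammar the parser above accepts; the sample queries are invented for illustration.

// Editorial sketch, not part of this diff.
import dotty.dokka.tasty.comments.{Query, QueryParser}

QueryParser("scala.collection.Seq#apply").tryReadQuery() match
  case Right(q)  => assert(q.join == "scala.collection.Seq#apply")
  case Left(err) => println(err.getMessage)

// A leading '#' requests a strict member lookup in the enclosing scope:
assert(QueryParser("#toString").readQuery() == Query.StrictMemberId("toString"))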
+ atEnd() + } + } || { + // NOTE: backquotes intentionally cannot be backslash-escaped, as they + // cannot be used in Scala identifiers + if lookingAt('`') then err("backquotes are not allowed in identifiers") + if escaped then false else + lookingAt('#') || lookingAt('.') + || lookingAt('(') || lookingAt('[') + } + } + + while !atEndOfId() do pull() + val res = popRes() + if res.isEmpty then err("empty identifier") + res match { + case "this" => Query.Qual.This + case "package" => Query.Qual.Package + case res => Query.Qual.Id(res) + } + } + + def readQuotedIdentifier(): Query.Qual = { + while { + assertBounds("unexpected end of quoted identifier (expected '`')") + !lookingAt('`') + } do pull() + popCh() + val res = popRes() + if res.isEmpty then err("empty quoted identifier") + Query.Qual.Id(res) + } + + private def popCh(): Char = { + val res = query.charAt(idx) + idx += 1 + res + } + + private def popRes(): String = { + val res = bld.toString + bld = StringBuilder() + res + } + + private def pull(): Unit = { + bld += query.charAt(idx) + idx += 1 + } + + private def lookingAt(char: Char) = query.charAt(idx) == char + + private def atEnd() = idx >= query.length + + private def assertBounds(context: String) = + if idx >= query.length then err(context) + + private def err(problem: String) = + throw new QueryParseException(query, idx, problem) + + class QueryParseException( + val query: CharSequence, + val at: Int, + val problem: String + ) extends Exception(s"$problem at char $at in query: $query") +} diff --git a/scala3doc/src/dotty/dokka/tasty/comments/package.scala b/scala3doc/src/dotty/dokka/tasty/comments/package.scala new file mode 100644 index 000000000000..55cc90954528 --- /dev/null +++ b/scala3doc/src/dotty/dokka/tasty/comments/package.scala @@ -0,0 +1,75 @@ +package dotty.dokka.tasty.comments + +import scala.jdk.CollectionConverters._ + +import org.jetbrains.dokka.model.{doc => dkkd} +import com.vladsch.flexmark.util.{ast => mdu} + +object kt: + import kotlin.collections.builders.{ListBuilder => KtListBuilder, MapBuilder => KtMapBuilder} + + def emptyList[T] = new KtListBuilder[T]().build() + def emptyMap[A, B] = new KtMapBuilder[A, B]().build() + +object dkk: + def p(children: dkkd.DocTag*) = + dkkd.P(children.asJava, Map.empty.asJava) + def p(params: (String, String)*)(children: dkkd.DocTag*) = + dkkd.P(children.asJava, params.toMap.asJava) + + def text(str: String) = dkkd.Text(str, Nil.asJava, Map.empty.asJava) + + def a(children: dkkd.DocTag*) = + dkkd.A(children.asJava, Map.empty.asJava) + def a(params: (String, String)*)(children: dkkd.DocTag*) = + dkkd.A(children.asJava, params.toMap.asJava) + + def pre(params: (String, String)*)(children: dkkd.DocTag*) = + dkkd.Pre(children.asJava, params.toMap.asJava) + + def codeInline(children: dkkd.DocTag*) = + dkkd.CodeInline(children.asJava, Map.empty.asJava) + def codeInline(params: (String, String)*)(children: dkkd.DocTag*) = + dkkd.CodeInline(children.asJava, params.toMap.asJava) + def codeBlock(children: dkkd.DocTag*) = + dkkd.CodeBlock(children.asJava, Map.empty.asJava) + def codeBlock(params: (String, String)*)(children: dkkd.DocTag*) = + dkkd.CodeBlock(children.asJava, params.toMap.asJava) + + def ul(children: dkkd.DocTag*) = + dkkd.Ul(children.asJava, Map.empty.asJava) + def ul(params: (String, String)*)(children: dkkd.DocTag*) = + dkkd.Ul(children.asJava, params.toMap.asJava) + def ol(children: dkkd.DocTag*) = + dkkd.Ol(children.asJava, Map.empty.asJava) + def ol(params: (String, String)*)(children: dkkd.DocTag*) = + 
dkkd.Ol(children.asJava, params.toMap.asJava) + def li(children: dkkd.DocTag*) = + dkkd.Li(children.asJava, Map.empty.asJava) + def li(params: (String, String)*)(children: dkkd.DocTag*) = + dkkd.Li(children.asJava, params.toMap.asJava) + +object dbg: + case class See(n: mdu.Node, c: Seq[See]) { + def show(sb: StringBuilder, indent: Int): Unit = { + sb ++= " " * indent + sb ++= n.toString + sb ++= "\n" + c.foreach { s => s.show(sb, indent + 2) } + } + + override def toString = { + val sb = new StringBuilder + show(sb, 0) + sb.toString + } + } + + def see(n: mdu.Node): See = + See(n, n.getChildIterator.asScala.map(see).toList) + + def parseRaw(str: String) = + MarkdownCommentParser(null).stringToMarkup(str) + + def parse(str: String) = + parseRaw( Preparser.preparse( Cleaner.clean(str) ).body ) diff --git a/scala3doc/src/dotty/dokka/tasty/comments/wiki/Converter.scala b/scala3doc/src/dotty/dokka/tasty/comments/wiki/Converter.scala new file mode 100644 index 000000000000..6575f9fbbbac --- /dev/null +++ b/scala3doc/src/dotty/dokka/tasty/comments/wiki/Converter.scala @@ -0,0 +1,142 @@ +package dotty.dokka.tasty.comments +package wiki + +import scala.jdk.CollectionConverters._ +import scala.tasty.Reflection + +import org.jetbrains.dokka.model.{doc => dkkd} + +import dotty.dokka.tasty.SymOps + +class Converter(val repr: Repr) extends BaseConverter { + import Emitter._ + + // makeshift support for not passing an owner + // see same in MarkdownConverter + val r: repr.r.type = if repr == null then null else repr.r + val owner: r.Symbol = if repr == null then null.asInstanceOf[r.Symbol] else repr.sym + + object SymOps extends SymOps[r.type](r) + import SymOps._ + + def convertBody(body: Body): dkkd.DocTag = { + dkkd.P( + collect { + body.blocks.foreach(emitBlock) + }.asJava, + kt.emptyMap, + ) + } + + def emitBlock(block: Block)(using Emitter[dkkd.DocTag]): Unit = + block match { + case Title(text, level) => + val content = convertInline(text) + emit(level match { + case 1 => dkkd.H1(content.asJava, kt.emptyMap) + case 2 => dkkd.H2(content.asJava, kt.emptyMap) + case 3 => dkkd.H3(content.asJava, kt.emptyMap) + case 4 => dkkd.H4(content.asJava, kt.emptyMap) + case 5 => dkkd.H5(content.asJava, kt.emptyMap) + case 6 => dkkd.H6(content.asJava, kt.emptyMap) + }) + + case Paragraph(text) => + emit(dkkd.P( + convertInline(text).asJava, + kt.emptyMap, + )) + case Code(data: String) => emit(dkkd.CodeBlock(List(dkk.text(data)).asJava, kt.emptyMap)) + case HorizontalRule() => emit(dkkd.HorizontalRule.INSTANCE) + case DefinitionList(items) => + sys.error("not supported yet: definition list") + + case UnorderedList(items) => + emit(dkkd.Ul( + convertListItems(items).asJava, + kt.emptyMap, + )) + + case OrderedList(items, style) => + // TODO use style + emit(dkkd.Ol( + convertListItems(items).asJava, + kt.emptyMap, + )) + } + + def convertListItems(items: Seq[Block]): Seq[dkkd.DocTag] = { + import scala.collection.mutable.ListBuffer + val listBld = ListBuffer.empty[dkkd.DocTag] + var elemBld = ListBuffer.empty[dkkd.DocTag] + + items.foreach { i => + val c = convertBlock(i) + c match { + case Seq(list: (dkkd.Ul | dkkd.Ol)) => + elemBld.append(list) + case c => + if !elemBld.isEmpty then { + listBld.append(dkkd.Li(elemBld.result.asJava, kt.emptyMap)) + elemBld = ListBuffer.empty + } + elemBld.appendAll(c) + } + } + + if elemBld.nonEmpty then + listBld.append(dkkd.Li(elemBld.result.asJava, kt.emptyMap)) + + listBld.result + } + + def convertBlock(block: Block): Seq[dkkd.DocTag] = + collect { emitBlock(block) } + + 
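The dkk builders defined earlier keep hand-assembled Dokka doc trees compact; a short sketch, not part of this patch, with invented sample content:

// Editorial sketch, not part of this diff.
import dotty.dokka.tasty.comments.dkk

val note = dkk.p(
  dkk.text("See "),
  dkk.a("href" -> "https://dotty.epfl.ch")(dkk.text("the Dotty site")),
  dkk.text(" for more examples.")
)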
def emitInline(inl: Inline)(using Emitter[dkkd.DocTag]): Unit = inl match { + case Chain(items: Seq[Inline]) => items.foreach(emitInline) + case Summary(text) => emitInline(text) + case Text(text) => emit(dkk.text(text)) + case Italic(text) => emit(dkkd.I(convertInline(text).asJava, kt.emptyMap)) + case Bold(text) => emit(dkkd.B(convertInline(text).asJava, kt.emptyMap)) + case Underline(text) => emit(dkkd.U(convertInline(text).asJava, kt.emptyMap)) + case Monospace(text) => emit(dkkd.CodeInline(convertInline(text).asJava, kt.emptyMap)) + case Link(target, body) => + def resolveBody(default: String) = + if !body.isEmpty + then convertInline(body).asJava + else Seq(dkk.text(default)).asJava + + emit(target match { + case SchemeUri() => + dkkd.A(resolveBody(default = target), Map("href" -> target).asJava) + case _ => + resolveLinkQuery(target, Some(body).filter(!_.isEmpty)) + }) + + case _: (Superscript | Subscript | RepresentationLink | HtmlTag) => + sys.error("not yet supported: Superscript | Subscript | RepresentationLink | HtmlTag") + } + + def convertInline(inl: Inline): Seq[dkkd.DocTag] = + collect { emitInline(inl) } + + def resolveLinkQuery(queryStr: String, bodyOpt: Option[Inline]): dkkd.DocTag = { + def resolveBody(default: String) = + bodyOpt match { + case Some(body) => + convertInline(body).asJava + case None => + Seq(dkk.text(default)).asJava + } + + withParsedQuery(queryStr) { query => + MemberLookup.lookup(using r)(query, owner) match { + case Some((sym, targetText)) => + dkkd.DocumentationLink(sym.dri, resolveBody(default = targetText), kt.emptyMap) + case None => + dkkd.A(resolveBody(default = query.join), Map("href" -> "#").asJava) + } + } + } +} diff --git a/scala3doc/src/dotty/dokka/tasty/comments/wiki/Entities.scala b/scala3doc/src/dotty/dokka/tasty/comments/wiki/Entities.scala new file mode 100644 index 000000000000..4f573cb0390c --- /dev/null +++ b/scala3doc/src/dotty/dokka/tasty/comments/wiki/Entities.scala @@ -0,0 +1,101 @@ +package dotty.dokka.tasty.comments.wiki + +import scala.collection.{Seq => _, _} +// import representations._ + +/** A body of text. A comment has a single body, which is composed of + * at least one block. Inside every body is exactly one summary (see + * [[scala.tools.nsc.doc.model.comment.Summary]]). */ +final case class Body(blocks: Seq[Block]) { + + /** The summary text of the comment body. */ + lazy val summary: Option[Body] = { + def summaryInBlock(block: Block): Seq[Inline] = block match { + case Title(text, _) => summaryInInline(text) + case Paragraph(text) => summaryInInline(text) + case UnorderedList(items) => items flatMap summaryInBlock + case OrderedList(items, _) => items flatMap summaryInBlock + case DefinitionList(items) => items.values.toSeq flatMap summaryInBlock + case _ => Nil + } + def summaryInInline(text: Inline): Seq[Inline] = text match { + case Summary(text) => List(text) + case Chain(items) => items flatMap summaryInInline + case Italic(text) => summaryInInline(text) + case Bold(text) => summaryInInline(text) + case Underline(text) => summaryInInline(text) + case Superscript(text) => summaryInInline(text) + case Subscript(text) => summaryInInline(text) + case Link(_, title) => summaryInInline(title) + case _ => Nil + } + (blocks flatMap summaryInBlock).toList match { + case Nil => None + case inl :: Nil => Some(Body(Seq(Paragraph(inl)))) + case inls => Some(Body(Seq(Paragraph(Chain(inls))))) + } + } +} + +/** A block-level element of text, such as a paragraph or code block. 
 */
+sealed abstract class Block
+
+final case class Title(text: Inline, level: Int) extends Block
+final case class Paragraph(text: Inline) extends Block
+final case class Code(data: String) extends Block
+final case class UnorderedList(items: Seq[Block]) extends Block
+final case class OrderedList(items: Seq[Block], style: String) extends Block
+final case class DefinitionList(items: SortedMap[Inline, Block]) extends Block
+final case class HorizontalRule() extends Block
+
+/** A section of text inside a block, possibly with formatting. */
+sealed abstract class Inline {
+  def isEmpty = this match {
+    case Chain(items) if items.isEmpty => true
+    case _ => false
+  }
+}
+
+final case class Chain(items: Seq[Inline]) extends Inline
+object Chain {
+  val Empty = Chain(Nil)
+}
+final case class Italic(text: Inline) extends Inline
+final case class Bold(text: Inline) extends Inline
+final case class Underline(text: Inline) extends Inline
+final case class Superscript(text: Inline) extends Inline
+final case class Subscript(text: Inline) extends Inline
+final case class Link(target: String, title: Inline) extends Inline
+final case class Monospace(text: Inline) extends Inline
+final case class Text(text: String) extends Inline
+abstract class RepresentationLink(val title: Inline) extends Inline { def link: LinkTo }
+object RepresentationLink {
+  def apply(title: Inline, linkTo: LinkTo) = new RepresentationLink(title) { def link: LinkTo = linkTo }
+  def unapply(el: RepresentationLink): Some[(Inline, LinkTo)] = Some((el.title, el.link))
+}
+final case class HtmlTag(data: String) extends Inline {
+  private val Pattern = """(?ms)\A<(/?)(.*?)[\s>].*\z""".r
+  private val (isEnd, tagName) = data match {
+    case Pattern(s1, s2) =>
+      (! s1.isEmpty, Some(s2.toLowerCase))
+    case _ =>
+      (false, None)
+  }
+
+  def canClose(open: HtmlTag) = {
+    isEnd && tagName == open.tagName
+  }
+
+  private val TagsNotToClose = Set("br", "img")
+  def close = tagName collect { case name if !TagsNotToClose(name) => HtmlTag(s"</$name>") }
+}
+
+/** The summary of a comment, usually its first sentence. There must be exactly one summary per body. */
+final case class Summary(text: Inline) extends Inline
+
+sealed trait LinkTo
+// final case class LinkToExternal(name: String, url: String) extends LinkTo
+// final case class Tooltip(name: String) extends LinkTo
+
+// /** Linking directly to entities is not picklable because of cyclic references */
+// final case class LinkToRepresentation(representation: Representation) extends LinkTo
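For illustration, a minimal sketch of how these entities compose and how `Body.summary` recovers the leading sentence. It is not part of the change; it assumes the definitions above are in scope and that the first sentence has already been wrapped in a `Summary` node, which is what the wiki parser's `summary()` rule (further down) produces:

  // A two-sentence comment modelled with the ADT above; `Body.summary`
  // finds the Summary node and rewraps it in a one-paragraph Body.
  val body = Body(Seq(
    Paragraph(Chain(Seq(
      Summary(Text("Parses wiki markup.")),
      Text(" Supports headings, lists and code blocks."))))))

  assert(body.summary == Some(Body(Seq(Paragraph(Text("Parses wiki markup."))))))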
diff --git a/scala3doc/src/dotty/dokka/tasty/comments/wiki/Parser.scala b/scala3doc/src/dotty/dokka/tasty/comments/wiki/Parser.scala
new file mode 100644
index 000000000000..caf51e1e69d9
--- /dev/null
+++ b/scala3doc/src/dotty/dokka/tasty/comments/wiki/Parser.scala
@@ -0,0 +1,532 @@
+package dotty.dokka.tasty.comments.wiki
+
+import scala.collection.mutable
+
+import dotty.dokka.tasty.comments.Regexes._
+
+/** Original wikiparser from NSC
+  * @author Ingo Maier
+  * @author Manohar Jonnalagedda
+  * @author Gilles Dubochet
+  */
+final class Parser(
+  val buffer: String,
+) extends CharReader(buffer) { wiki =>
+  var summaryParsed = false
+
+  def document(): Body = {
+    val blocks = new mutable.ListBuffer[Block]
+    while (char != endOfText)
+      blocks += block()
+    Body(blocks.toList)
+  }
+
+  /* BLOCKS */
+
+  /** {{{ block ::= code | title | hrule | listBlock | para }}} */
+  def block(): Block = {
+    if (checkSkipInitWhitespace("{{{"))
+      code()
+    else if (checkSkipInitWhitespace('='))
+      title()
+    else if (checkSkipInitWhitespace("----"))
+      hrule()
+    else if (checkList)
+      listBlock()
+    else {
+      para()
+    }
+  }
+
+  /** listStyle ::= '-' spc | '1.' spc | 'I.' spc | 'i.' spc | 'A.' spc | 'a.' spc
+    * Characters used to build lists and their constructors */
+  protected val listStyles = Map[String, (Seq[Block] => Block)](
+    "- "  -> ( UnorderedList(_) ),
+    "1. " -> ( OrderedList(_,"decimal") ),
+    "I. " -> ( OrderedList(_,"upperRoman") ),
+    "i. " -> ( OrderedList(_,"lowerRoman") ),
+    "A. " -> ( OrderedList(_,"upperAlpha") ),
+    "a. " -> ( OrderedList(_,"lowerAlpha") )
+  )
+
+  /** Checks if the current line is formed with more than one space and one of the listStyles */
+  def checkList =
+    (countWhitespace > 0) && (listStyles.keys exists { checkSkipInitWhitespace(_) })
+
+  /** {{{
+    * nListBlock ::= nLine { mListBlock }
+    * nLine ::= nSpc listStyle para '\n'
+    * }}}
+    * Where n and m stand for the number of spaces. When `m > n`, a new list is nested. */
+  def listBlock(): Block = {
+
+    /** Consumes one list item block and returns it, or None if the block is
+      * not a list or a different list. */
+    def listLine(indent: Int, style: String): Option[Block] =
+      if (countWhitespace > indent && checkList)
+        Some(listBlock())
+      else if (countWhitespace != indent || !checkSkipInitWhitespace(style))
+        None
+      else {
+        jumpWhitespace()
+        jump(style)
+        val p = Paragraph(getInline(isInlineEnd = false))
+        blockEnded("end of list line ")
+        Some(p)
+      }
+
+    /** Consumes all list item blocks (possibly with nested lists) of the
+      * same list and returns the list block.
*/ + def listLevel(indent: Int, style: String): Block = { + val lines = mutable.ListBuffer.empty[Block] + var line: Option[Block] = listLine(indent, style) + while (line.isDefined) { + lines += line.get + line = listLine(indent, style) + } + val constructor = listStyles(style) + constructor(lines.toList) + } + + val indent = countWhitespace + val style = (listStyles.keys find { checkSkipInitWhitespace(_) }).getOrElse(listStyles.keys.head) + listLevel(indent, style) + } + + def code(): Block = { + jumpWhitespace() + jump("{{{") + val str = readUntil("}}}") + if (char == endOfText) + reportError("unclosed code block") + else + jump("}}}") + blockEnded("code block") + Code(normalizeIndentation(str)) + } + + /** {{{ title ::= ('=' inline '=' | "==" inline "==" | ...) '\n' }}} */ + def title(): Block = { + jumpWhitespace() + val inLevel = repeatJump('=') + val text = getInline(check("=" * inLevel)) + val outLevel = repeatJump('=', inLevel) + if (inLevel != outLevel) + reportError("unbalanced or unclosed heading") + blockEnded("heading") + Title(text, inLevel) + } + + /** {{{ hrule ::= "----" { '-' } '\n' }}} */ + def hrule(): Block = { + jumpWhitespace() + repeatJump('-') + blockEnded("horizontal rule") + HorizontalRule() + } + + /** {{{ para ::= inline '\n' }}} */ + def para(): Block = { + val p = + if (summaryParsed) + Paragraph(getInline(isInlineEnd = false)) + else { + val s = summary() + val r = + if (checkParaEnded()) List(s) else List(s, getInline(isInlineEnd = false)) + summaryParsed = true + Paragraph(Chain(r)) + } + while (char == endOfLine && char != endOfText) + nextChar() + p + } + + /* INLINES */ + + val OPEN_TAG = "^<([A-Za-z]+)( [^>]*)?(/?)>$".r + val CLOSE_TAG = "^</([A-Za-z]+)>$".r + private def readHTMLFrom(begin: HtmlTag): String = { + val list = mutable.ListBuffer.empty[String] + val stack = mutable.ListBuffer.empty[String] + + begin.close match { + case Some(HtmlTag(CLOSE_TAG(s))) => + stack += s + case _ => + return "" + } + + while ({ + val str = readUntil { char == safeTagMarker || char == endOfText } + nextChar() + + list += str + + str match { + case OPEN_TAG(s, _, standalone) => { + if (standalone != "/") { + stack += s + } + } + case CLOSE_TAG(s) => { + if (s == stack.last) { + stack.remove(stack.length-1) + } + } + case _ => ; + } + + stack.length > 0 && char != endOfText + }) do {} + + list mkString "" + } + + def getInline(isInlineEnd: => Boolean): Inline = { + + def inline0(): Inline = { + if (char == safeTagMarker) { + val tag = htmlTag() + HtmlTag(tag.data + readHTMLFrom(tag)) + } + else if (check("'''")) bold() + else if (check("''")) italic() + else if (check("`")) monospace() + else if (check("__")) underline() + else if (check("^")) superscript() + else if (check(",,")) subscript() + else if (check("[[")) link() + else { + val str = readUntil { + char == safeTagMarker || + check("''") || + char == '`' || + check("__") || + char == '^' || + check(",,") || + check("[[") || + isInlineEnd || + checkParaEnded() || + char == endOfLine + } + Text(str) + } + } + + val inlines: List[Inline] = { + val iss = mutable.ListBuffer.empty[Inline] + iss += inline0() + while (!isInlineEnd && !checkParaEnded()) { + val skipEndOfLine = if (char == endOfLine) { + nextChar() + true + } else { + false + } + + val current = inline0() + (iss.last, current) match { + case (Text(t1), Text(t2)) if skipEndOfLine => + iss.update(iss.length - 1, Text(t1 + endOfLine + t2)) + case (i1, i2) if skipEndOfLine => + iss ++= List(Text(endOfLine.toString), i2) + case _ => iss += current + } + } + 
iss.toList + } + + inlines match { + case Nil => Text("") + case i :: Nil => i + case is => Chain(is) + } + + } + + def htmlTag(): HtmlTag = { + jump(safeTagMarker) + val read = readUntil(safeTagMarker) + if (char != endOfText) jump(safeTagMarker) + HtmlTag(read) + } + + def bold(): Inline = { + jump("'''") + val i = getInline(check("'''")) + jump("'''") + Bold(i) + } + + def italic(): Inline = { + jump("''") + val i = getInline(check("''")) + jump("''") + Italic(i) + } + + def monospace(): Inline = { + jump("`") + val i = getInline(check("`")) + jump("`") + Monospace(i) + } + + def underline(): Inline = { + jump("__") + val i = getInline(check("__")) + jump("__") + Underline(i) + } + + def superscript(): Inline = { + jump("^") + val i = getInline(check("^")) + if (jump("^")) { + Superscript(i) + } else { + Chain(Seq(Text("^"), i)) + } + } + + def subscript(): Inline = { + jump(",,") + val i = getInline(check(",,")) + jump(",,") + Subscript(i) + } + + def summary(): Inline = { + val i = getInline(checkSentenceEnded()) + Summary( + if (jump(".")) + Chain(List(i, Text("."))) + else + i + ) + } + + def link(): Inline = { + jump("[[") + val parens = 2 + repeatJump('[') + val stop = "]" * parens + val target = readUntil { check(stop) || isWhitespaceOrNewLine(char) } + val title = + if (!check(stop)) Some({ + jumpWhitespaceOrNewLine() + getInline(check(stop)) + }) + else None + jump(stop) + + Link(target, title getOrElse Chain.Empty) + } + + /* UTILITY */ + + /** {{{ eol ::= { whitespace } '\n' }}} */ + def blockEnded(blockType: String): Unit = { + if (char != endOfLine && char != endOfText) { + reportError("no additional content on same line after " + blockType) + jumpUntil(endOfLine) + } + while (char == endOfLine) + nextChar() + } + + /** + * Eliminates the (common) leading spaces in all lines, based on the first line + * For indented pieces of code, it reduces the indent to the least whitespace prefix: + * {{{ + * indented example + * another indented line + * if (condition) + * then do something; + * ^ this is the least whitespace prefix + * }}} + */ + def normalizeIndentation(_code: String): String = { + + val code = _code.replaceAll("\\s+$", "").dropWhile(_ == '\n') // right-trim + remove all leading '\n' + val lines = code.split("\n") + + // maxSkip - size of the longest common whitespace prefix of non-empty lines + val nonEmptyLines = lines.filter(_.trim.nonEmpty) + val maxSkip = if (nonEmptyLines.isEmpty) 0 else nonEmptyLines.map(line => line.iterator.takeWhile(_ == ' ').size).min + + // remove common whitespace prefix + lines.map(line => if (line.trim.nonEmpty) line.substring(maxSkip) else line).mkString("\n") + } + + def checkParaEnded(): Boolean = { + (char == endOfText) || + ((char == endOfLine) && { + val poff = offset + nextChar() // read EOL + val ok = { + checkSkipInitWhitespace(endOfLine) || + checkSkipInitWhitespace('=') || + checkSkipInitWhitespace("{{{") || + checkList || + checkSkipInitWhitespace('\u003D') + } + offset = poff + ok + }) + } + + def checkSentenceEnded(): Boolean = { + (char == '.') && { + val poff = offset + nextChar() // read '.' 
+ val ok = char == endOfText || char == endOfLine || isWhitespace(char) + offset = poff + ok + } + } + + def reportError(message: String) = println(s"$message") +} + +sealed class CharReader(buffer: String) { reader => + + var offset: Int = 0 + def char: Char = + if (offset >= buffer.length) endOfText else buffer charAt offset + + final def nextChar() = + offset += 1 + + final def check(chars: String): Boolean = { + val poff = offset + val ok = jump(chars) + offset = poff + ok + } + + def checkSkipInitWhitespace(c: Char): Boolean = { + val poff = offset + jumpWhitespace() + val ok = jump(c) + offset = poff + ok + } + + def checkSkipInitWhitespace(chars: String): Boolean = { + val poff = offset + jumpWhitespace() + val (ok0, chars0) = + if (chars.charAt(0) == ' ') + (offset > poff, chars substring 1) + else + (true, chars) + val ok = ok0 && jump(chars0) + offset = poff + ok + } + + def countWhitespace: Int = { + var count = 0 + val poff = offset + while (isWhitespace(char) && char != endOfText) { + nextChar() + count += 1 + } + offset = poff + count + } + + /* Jumpers */ + + /** Jumps a character and consumes it + * @return true only if the correct character has been jumped */ + final def jump(ch: Char): Boolean = { + if (char == ch) { + nextChar() + true + } + else false + } + + /** Jumps all the characters in chars, consuming them in the process. + * @return true only if the correct characters have been jumped + */ + final def jump(chars: String): Boolean = { + var index = 0 + while (index < chars.length && char == chars.charAt(index) && char != endOfText) { + nextChar() + index += 1 + } + index == chars.length + } + + final def repeatJump(c: Char, max: Int = Int.MaxValue): Int = { + var count = 0 + while (jump(c) && count < max) + count += 1 + count + } + + final def jumpUntil(ch: Char): Int = { + var count = 0 + while (char != ch && char != endOfText) { + nextChar() + count += 1 + } + count + } + + final def jumpUntil(pred: => Boolean): Int = { + var count = 0 + while (!pred && char != endOfText) { + nextChar() + count += 1 + } + count + } + + def jumpWhitespace() = jumpUntil(!isWhitespace(char)) + + def jumpWhitespaceOrNewLine() = jumpUntil(!isWhitespaceOrNewLine(char)) + + /* Readers */ + final def readUntil(c: Char): String = { + withRead { + while (char != c && char != endOfText) { + nextChar() + } + } + } + + final def readUntil(chars: String): String = { + assert(chars.length > 0) + withRead { + val c = chars.charAt(0) + while (!check(chars) && char != endOfText) { + nextChar() + while (char != c && char != endOfText) + nextChar() + } + } + } + + final def readUntil(pred: => Boolean): String = { + withRead { + while (char != endOfText && !pred) { + nextChar() + } + } + } + + private def withRead(read: => Unit): String = { + val start = offset + read + buffer.substring(start, offset) + } + + /* Chars classes */ + def isWhitespace(c: Char) = c == ' ' || c == '\t' + + def isWhitespaceOrNewLine(c: Char) = isWhitespace(c) || c == '\n' +} diff --git a/scala3doc/src/dotty/dokka/transformers/ImplicitMembersExtensionTransformer.scala b/scala3doc/src/dotty/dokka/transformers/ImplicitMembersExtensionTransformer.scala new file mode 100644 index 000000000000..978589cd12d2 --- /dev/null +++ b/scala3doc/src/dotty/dokka/transformers/ImplicitMembersExtensionTransformer.scala @@ -0,0 +1,50 @@ +package dotty.dokka + +import org.jetbrains.dokka.transformers.documentation.DocumentableTransformer +import org.jetbrains.dokka.model._ +import collection.JavaConverters +import collection.JavaConverters._ 
+import org.jetbrains.dokka.plugability.DokkaContext
+import org.jetbrains.dokka.links.DRI
+import org.jetbrains.dokka.model.properties._
+
+import dotty.dokka.model._
+import dotty.dokka.model.api._
+
+class ImplicitMembersExtensionTransformer(ctx: DokkaContext) extends DocumentableTransformer:
+  override def invoke(original: DModule, context: DokkaContext): DModule =
+    val classlikeMap = original.driMap
+
+    def expandMember(outerMembers: Seq[Member])(c: Member): Member =
+      val companion = c match
+        case classlike: DClass => ClasslikeExtension.getFrom(classlike).flatMap(_.companion).map(classlikeMap)
+        case _ => None
+
+      val implicitSources = outerMembers ++ companion.toSeq // This could also be expanded with companion objects from parents, generic arguments, etc.
+
+      val MyDri = c.getDri
+      def collectApplicableMembers(source: Member): Seq[Member] = source.allMembers.flatMap {
+        case m @ Member(_, _, _, Kind.Extension(ExtensionTarget(_, _, MyDri)), Origin.DefinedWithin) =>
+          Seq(m.withOrigin(Origin.ExtensionFrom(source.name, source.dri)).withKind(Kind.Def))
+        case m @ Member(_, _, _, conversionProvider: ImplicitConversionProvider, Origin.DefinedWithin) =>
+          conversionProvider.conversion match
+            case Some(ImplicitConversion(MyDri, to)) =>
+              classlikeMap.get(to).toSeq.flatMap { owner =>
+                val newMembers = owner.allMembers.filter(_.origin match
+                  case Origin.DefinedWithin => true
+                  case Origin.InheritedFrom(_, _) => true
+                  case _ => false
+                )
+                newMembers.map(_.withOrigin(Origin.ImplicitlyAddedBy(owner.name, owner.dri)))
+              }
+            case _ =>
+              Nil
+        case _ =>
+          Nil
+      }
+
+      val newImplicitMembers = implicitSources.flatMap(collectApplicableMembers).distinct
+      val expandedMembers = c.allMembers.map(expandMember(newImplicitMembers ++ Seq(c)))
+      c.withMembers(newImplicitMembers ++ expandedMembers)
+
+    original.updatePackages(_.map(expandMember(Nil)(_).asInstanceOf[DPackage]))
diff --git a/scala3doc/src/dotty/dokka/transformers/InheritanceInformationTransformer.scala b/scala3doc/src/dotty/dokka/transformers/InheritanceInformationTransformer.scala
new file mode 100644
index 000000000000..2b24c55ac967
--- /dev/null
+++ b/scala3doc/src/dotty/dokka/transformers/InheritanceInformationTransformer.scala
@@ -0,0 +1,28 @@
+package dotty.dokka
+
+import org.jetbrains.dokka.transformers.documentation.DocumentableTransformer
+import org.jetbrains.dokka.model._
+import collection.JavaConverters._
+import org.jetbrains.dokka.plugability.DokkaContext
+import org.jetbrains.dokka.links.DRI
+import org.jetbrains.dokka.model.properties._
+
+import dotty.dokka.model._
+import dotty.dokka.model.api._
+
+
+class InheritanceInformationTransformer(val ctx: DokkaContext) extends DocumentableTransformer:
+  override def invoke(original: DModule, context: DokkaContext): DModule =
+    val subtypes = getSupertypes(original).groupBy(_._1).transform((k, v) => v.map(_._2))
+    original.updateMembers(m => m.withKnownChildren(subtypes.getOrElse(m.dri, Nil)))
+
+  private def getSupertypes(d: Documentable): Seq[(DRI, LinkToType)] = d match {
+    case m: DModule => m.getPackages.asScala.toList.flatMap(p => getSupertypes(p))
+    case c: Member =>
+      val selfMapping = if !c.kind.isInstanceOf[Classlike] then Nil else
+        val selfLink = c.asLink
+        c.parents.map(_._2 -> selfLink)
+
+      c.allMembers.flatMap(getSupertypes) ++ selfMapping
+    case _ => List.empty
+  }
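For illustration, a minimal, self-contained sketch of the inversion performed above by `getSupertypes` followed by `groupBy(_._1)`: every `parent DRI -> link to child` edge is grouped by the parent, yielding the known-subtypes index that is fed to `withKnownChildren`. The `DRI` and `LinkToType` case classes here are simplified stand-ins for the Dokka/scala3doc types, not part of the change:

  object SubtypeIndexSketch:
    // Simplified stand-ins, only to keep the sketch self-contained.
    final case class DRI(value: String)
    final case class LinkToType(name: String, dri: DRI)

    // Each pair is (DRI of a parent -> link to one of its direct subtypes);
    // grouping by the parent inverts the parent edges into a subtype index.
    def knownSubtypes(parentEdges: Seq[(DRI, LinkToType)]): Map[DRI, Seq[LinkToType]] =
      parentEdges.groupBy(_._1).view.mapValues(_.map(_._2)).toMap

    // usage
    val animal = DRI("example.Animal")
    val edges = Seq(
      animal -> LinkToType("Dog", DRI("example.Dog")),
      animal -> LinkToType("Cat", DRI("example.Cat")))
    assert(knownSubtypes(edges)(animal).map(_.name) == Seq("Dog", "Cat"))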
diff --git a/scala3doc/src/dotty/dokka/transformers/PackageHierarchyTransformer.scala b/scala3doc/src/dotty/dokka/transformers/PackageHierarchyTransformer.scala
new file mode 100644
index 000000000000..820f5b5b9f3f
--- /dev/null
+++ b/scala3doc/src/dotty/dokka/transformers/PackageHierarchyTransformer.scala
@@ -0,0 +1,74 @@
+package dotty.dokka
+
+import org.jetbrains.dokka.transformers.pages.{PageTransformer}
+import org.jetbrains.dokka.pages._
+import collection.JavaConverters
+import collection.JavaConverters._
+import org.jetbrains.dokka.plugability.DokkaContext
+
+class PackageHierarchyTransformer(context: DokkaContext) extends PageTransformer:
+  override def invoke(input: RootPageNode): RootPageNode = input match {
+    case m: ModulePageNode => rearrangePackagePages(m)
+    case other => {
+      context.getLogger.warn("PackageHierarchyTransformer: Couldn't transform root because root is not ModulePageNode")
+      other
+    }
+  }
+
+  def rearrangePackagePages(page: ModulePageNode): ModulePageNode = {
+    val (h1, h2) = page.getChildren.asScala.partition{
+      case p: PackagePageNode => true
+      case other => false
+    }
+
+    val (packagePages, otherPages) = (h1.collect{ case p: PackagePageNode => p}.toSeq, h2.collect{ case q: PageNode => q }.toSeq )
+
+    def isParent(possibleParent: Seq[String], comparedChildren: Seq[String]): Boolean = comparedChildren.startsWith(possibleParent)
+
+    def getRelativeName(parent: Seq[String], child: Seq[String]): Seq[String] = child.slice(parent.size, child.size)
+
+    def relativePageName(parentName: Seq[String], childName: Seq[String], childPage: PageNode): PageNode =
+      childPage.modified(
+        childName.slice(parentName.size, childName.size).mkString("",".",""),
+        childPage.getChildren
+      )
+
+    def buildPackageTree(
+      depth: Int,
+      remaining: Seq[(Seq[String], PackagePageNode)],
+      processed: Seq[(Seq[String], PackagePageNode)]
+    ): Seq[PackagePageNode] = {
+      val (currentDepth, rest) = remaining.partition((tokens, page) => tokens.size == depth)
+      val newProcessed = currentDepth.map( (tokens, page) => {
+        val newPage = page.modified(
+          page.getName,
+          (processed
+            .filter((childTokens, child) => isParent(tokens, childTokens))
+            .map((childTokens, child) => relativePageName(tokens, childTokens, child))
+            ++ page.getChildren.asScala).asJava,
+        )
+        (tokens, newPage)
+      }
+      )
+      val oldFilteredProcessed = processed
+        .filter( (tokens, page) =>
+          currentDepth.forall( (parentTokens, parentPage) =>
+            !isParent(parentTokens, tokens)
+          )
+        )
+
+      if(depth == 1) (newProcessed ++ oldFilteredProcessed).map(_(1))
+      else buildPackageTree(depth - 1, rest, newProcessed ++ oldFilteredProcessed)
+    }
+
+    val packagePagesWithTokens = packagePages.map(page => (("""\.""".r.split(page.getName)).toSeq, page))
+
+    val maxDepthElem = packagePagesWithTokens.maxBy( (tokens, page) => tokens.size )
+
+    page.modified(
+      page.getName,
+      (otherPages ++ buildPackageTree(maxDepthElem(0).size, packagePagesWithTokens, Seq.empty)).asJava
+    )
+
+
+  }
\ No newline at end of file
diff --git a/scala3doc/src/dotty/dokka/transformers/ScalaCommentToContentConverter.scala b/scala3doc/src/dotty/dokka/transformers/ScalaCommentToContentConverter.scala
new file mode 100644
index 000000000000..81567297d903
--- /dev/null
+++ b/scala3doc/src/dotty/dokka/transformers/ScalaCommentToContentConverter.scala
@@ -0,0 +1,30 @@
+package dotty.dokka
+
+import org.jetbrains.dokka._
+import org.jetbrains.dokka.model.doc._
+import org.jetbrains.dokka.model.properties.PropertyContainer
+import org.jetbrains.dokka.pages._
+import collection.JavaConverters._
+import org.jetbrains.dokka.base.transformers.pages.comments.{DocTagToContentConverter, CommentsToContentConverter}
+import java.util.{Set => JSet, List => JList}
+
+
+object ScalaCommentToContentConverter extends
CommentsToContentConverter: + val defaultConverter = DocTagToContentConverter() + override def buildContent( + docTag: DocTag, + dci: DCI, + sourceSets: JSet[? <: DokkaConfiguration$DokkaSourceSet], + styles: JSet[? <: Style], + extra: PropertyContainer[ContentNode] + ): JList[ContentNode] = docTag match { + case h: Html => List( + HtmlContentNode( + h.getChildren.asScala.collect{case c: Text => c}.head.getBody, + dci, + sourceSets.asScala.toSet.toDisplay.asScala.toSet, + styles.asScala.toSet + ) + ).asJava + case other => defaultConverter.buildContent(other, dci, sourceSets, styles, extra) + } \ No newline at end of file diff --git a/scala3doc/src/dotty/dokka/transformers/ScalaSourceLinksTransformer.scala b/scala3doc/src/dotty/dokka/transformers/ScalaSourceLinksTransformer.scala new file mode 100644 index 000000000000..2b4dfb521abd --- /dev/null +++ b/scala3doc/src/dotty/dokka/transformers/ScalaSourceLinksTransformer.scala @@ -0,0 +1,61 @@ +package dotty.dokka + +import org.jetbrains.dokka.transformers.documentation.DocumentableTransformer +import org.jetbrains.dokka.pages._ +import collection.JavaConverters +import collection.JavaConverters._ +import org.jetbrains.dokka.plugability.DokkaContext +import org.jetbrains.dokka.model._ +import org.jetbrains.dokka.{DokkaConfiguration$DokkaSourceSet, DokkaConfiguration$SourceLinkDefinition} +import org.jetbrains.dokka.DokkaConfiguration +import org.jetbrains.dokka.utilities.DokkaLogger +import org.jetbrains.dokka.base.signatures.SignatureProvider +import org.jetbrains.dokka.base.transformers.pages.comments.CommentsToContentConverter +import org.jetbrains.dokka.model.properties._ +import dotty.dokka.model.api._ + +class ScalaSourceLinksTransformer( + val ctx: DokkaContext, + val commentsToContentConverter: CommentsToContentConverter, + val signatureProvider: SignatureProvider, + val logger: DokkaLogger +) extends DocumentableTransformer: + + val sourceLinks = ctx.getConfiguration.getSourceSets.asScala.flatMap(s => s.getSourceLinks.asScala.map(l => SourceLink(l, s))) + val pageBuilder = ScalaPageContentBuilder(commentsToContentConverter, signatureProvider, logger) + + case class SourceLink(val path: String, val url: String, val lineSuffix: Option[String], val sourceSetData: DokkaConfiguration.DokkaSourceSet) + + object SourceLink { + def apply(sourceLinkDef: DokkaConfiguration$SourceLinkDefinition, sourceSetData: DokkaConfiguration.DokkaSourceSet): SourceLink = + SourceLink(sourceLinkDef.getLocalDirectory, sourceLinkDef.getRemoteUrl.toString, Option(sourceLinkDef.getRemoteLineSuffix), sourceSetData) + } + + + override def invoke(input: DModule, context: DokkaContext): DModule = + input.updateMembers { + case c0: (Member & WithSources & WithExtraProperties[_]) => + val c = c0.asInstanceOf[Member & WithSources & WithExtraProperties[Member]] + c.withNewExtras(c.getExtra plus getSourceLinks(c)) + case c => c + } + + + private def getSourceLinks(doc: WithSources): ExtraProperty[Member] = { + val urls = doc.getSources.asScala.toMap.flatMap{ + case (key,value) => sourceLinks.find(s => value.getPath.contains(s.path) && key == s.sourceSetData).map( + link => (key, createLink(value, link)) + ) + }.collect{ + case (key, Some(value)) => (key,value) + }.toMap + + SourceLinks(urls) + } + + private def createLink(source: DocumentableSource, link: SourceLink): Option[String] = source match { + case s: TastyDocumentableSource => Some(s.lineNumber).map( line => + link.url + s.path.split(link.path)(1) + link.lineSuffix.map(_ + (line + 1)).getOrElse("") //TASTY 
enumerates lines from 0
+    )
+    case other => None
+  }
diff --git a/scala3doc/src/dotty/dokka/translators/FilterAttributes.scala b/scala3doc/src/dotty/dokka/translators/FilterAttributes.scala
new file mode 100644
index 000000000000..0e6987a2e69d
--- /dev/null
+++ b/scala3doc/src/dotty/dokka/translators/FilterAttributes.scala
@@ -0,0 +1,57 @@
+package dotty.dokka.translators
+
+import org.jetbrains.dokka.base.translators.documentables.{DefaultPageCreator, PageContentBuilder, PageContentBuilder$DocumentableContentBuilder}
+import org.jetbrains.dokka.base.signatures.SignatureProvider
+import org.jetbrains.dokka.base.transformers.pages.comments.CommentsToContentConverter
+import org.jetbrains.dokka.transformers.documentation.DocumentableToPageTranslator
+import org.jetbrains.dokka.utilities.DokkaLogger
+import org.jetbrains.dokka.model._
+import org.jetbrains.dokka.pages._
+import collection.JavaConverters._
+import org.jetbrains.dokka.model.properties._
+import org.jetbrains.dokka.base.transformers.documentables.CallableExtensions
+import org.jetbrains.dokka.DokkaConfiguration$DokkaSourceSet
+import org.jetbrains.dokka.base.resolvers.anchors._
+import org.jetbrains.dokka.links._
+import org.jetbrains.dokka.model.properties.PropertyContainer
+import org.jetbrains.dokka.model.doc._
+import dotty.dokka.model.api._
+
+import dotty.dokka._
+
+object FilterAttributes:
+  def attributesFor(documentable: Documentable): Map[String, String] =
+    val base = visibility(documentable) ++ origin(documentable) ++ keywords(documentable)
+    base.filter(_._2.nonEmpty)
+
+  private def keywords(documentable: Documentable): Map[String, String] = documentable match
+    case v: Member =>
+      Map("keywords" -> v.modifiers.map(_.name).mkString(","))
+    case _ =>
+      Map.empty
+
+  private def visibility(documentable: Documentable): Map[String, String] = documentable match
+    case v: Member =>
+      Map("visibility" -> v.visibility.name)
+    case _ =>
+      Map.empty
+
+  private def origin(documentable: Documentable): Map[String, String] = documentable match
+    case v: Member =>
+      v.origin match
+        case Origin.InheritedFrom(name, _) => Map("inherited" -> name)
+        case Origin.ImplicitlyAddedBy(name, _) => Map("implicitly" -> s"by $name")
+        case Origin.ExtensionFrom(name, _) => Map("extension" -> s"from $name")
+        case _ => Map.empty
+    case _ =>
+      Map.empty
+
+  def defaultValues = Map(
+    "inherited" -> "Not inherited",
+    "implicitly" -> "Explicit method",
+    "extension" -> "Standard member",
+    "keywords" -> "no keywords",
+    "visibility" -> "public",
+  )
diff --git a/scala3doc/src/dotty/dokka/translators/HierarchyDiagramBuilder.scala b/scala3doc/src/dotty/dokka/translators/HierarchyDiagramBuilder.scala
new file mode 100644
index 000000000000..7f4757214e9d
--- /dev/null
+++ b/scala3doc/src/dotty/dokka/translators/HierarchyDiagramBuilder.scala
@@ -0,0 +1,23 @@
+package dotty.dokka
+
+import org.jetbrains.dokka.model.Bound
+import org.jetbrains.dokka.links.DRI
+import dotty.dokka.model._
+import dotty.dokka.model.api._
+
+
+object HierarchyDiagramBuilder {
+  def build(m: Member): HierarchyDiagram = {
+    val mainVertex = Vertex(0, m.asLink)
+
+    val supertypesEdges = m.parents.zipWithIndex.map { case (member, index) =>
+      Edge(mainVertex, Vertex(index + 1, member))
+    }
+
+    val subtypesEdges = m.knownChildren.zipWithIndex.map { case (member, index) =>
+      Edge(Vertex(index + m.knownChildren.size + 1, member), mainVertex)
+    }
+
+    HierarchyDiagram(supertypesEdges ++ subtypesEdges)
+  }
+}
\ No newline at end of file
diff --git
a/scala3doc/src/dotty/dokka/translators/ScalaContentBuilder.scala b/scala3doc/src/dotty/dokka/translators/ScalaContentBuilder.scala new file mode 100644 index 000000000000..5004554a7008 --- /dev/null +++ b/scala3doc/src/dotty/dokka/translators/ScalaContentBuilder.scala @@ -0,0 +1,595 @@ +package dotty.dokka + +import dotty.dokka.translators.FilterAttributes +import org.jetbrains.dokka.base.translators.documentables.{DefaultPageCreator, PageContentBuilder, PageContentBuilder$DocumentableContentBuilder} +import org.jetbrains.dokka.base.signatures.SignatureProvider +import org.jetbrains.dokka.base.transformers.pages.comments.CommentsToContentConverter +import org.jetbrains.dokka.transformers.documentation.DocumentableToPageTranslator +import org.jetbrains.dokka.utilities.DokkaLogger +import org.jetbrains.dokka.model._ +import org.jetbrains.dokka.pages._ +import collection.JavaConverters._ +import org.jetbrains.dokka.model.properties._ +import org.jetbrains.dokka.base.transformers.documentables.CallableExtensions +import org.jetbrains.dokka.DokkaConfiguration$DokkaSourceSet +import org.jetbrains.dokka.base.resolvers.anchors._ +import org.jetbrains.dokka.links._ +import org.jetbrains.dokka.model.properties.PropertyContainer +import org.jetbrains.dokka.model.doc._ +import dotty.dokka.model.api.{Kind => _, Link => SLink, _} + + +case class DocumentableSubGroup(val title: Signature, val extensions: Seq[Documentable]) + +case class DocumentableGroup(name: Option[String | Documentable], documenables: Seq[Documentable | DocumentableSubGroup]) + +class ScalaPageContentBuilder( + val commentsConverter: CommentsToContentConverter, + val signatureProvider: SignatureProvider, + val logger: DokkaLogger +) { + + def contentForDRI( + dri: DRI, + sourceSets: Set[DokkaConfiguration$DokkaSourceSet], + kind: Kind = ContentKind.Main, + styles: Set[Style] = Set(), + extra: PropertyContainer[ContentNode] = PropertyContainer.Companion.empty(), + buildBlock: ScalaPageContentBuilder#ScalaDocumentableContentBuilder => ScalaPageContentBuilder#ScalaDocumentableContentBuilder + ): ContentGroup = buildBlock( + ScalaDocumentableContentBuilder(Set(dri), sourceSets, kind, styles, extra) + ).buildContent() + + def contentForDRIs( + dris: Set[DRI], + sourceSets: Set[DokkaConfiguration$DokkaSourceSet], + kind: Kind = ContentKind.Main, + styles: Set[Style] = Set(), + extra: PropertyContainer[ContentNode] = PropertyContainer.Companion.empty(), + buildBlock: ScalaPageContentBuilder#ScalaDocumentableContentBuilder => ScalaPageContentBuilder#ScalaDocumentableContentBuilder + ): ContentGroup = buildBlock( + ScalaDocumentableContentBuilder(dris, sourceSets, kind, styles, extra) + ).buildContent() + + def contentForDocumentable( + d: Documentable, + kind: Kind = ContentKind.Main, + styles: Set[Style] = Set(), + extra: PropertyContainer[ContentNode] = PropertyContainer.Companion.empty(), + buildBlock: ScalaPageContentBuilder#ScalaDocumentableContentBuilder => ScalaPageContentBuilder#ScalaDocumentableContentBuilder + ): ContentGroup = { + buildBlock( + ScalaDocumentableContentBuilder(Set(d.getDri), d.getSourceSets.asScala.toSet, kind, styles, extra) + ).buildContent() + } + + case class ScalaTableBuilder( + val mainDRI: Set[DRI], + val mainSourcesetData: Set[DokkaConfiguration$DokkaSourceSet], + val mainKind: Kind, + val mainStyles: Set[Style], + val mainExtra: PropertyContainer[ContentNode], + val cells: List[ContentGroup] = List() + ) { + private def addChild(c: ContentGroup) = copy(cells = cells :+ c) + + def cell( + dri: Set[DRI] = 
mainDRI, + sourceSets: Set[DokkaConfiguration$DokkaSourceSet] = mainSourcesetData, + kind: Kind = mainKind, + styles: Set[Style] = mainStyles, + extra: PropertyContainer[ContentNode] = mainExtra)( + buildBlock: ScalaPageContentBuilder#ScalaDocumentableContentBuilder => ScalaPageContentBuilder#ScalaDocumentableContentBuilder + ): ScalaTableBuilder = addChild(contentForDRIs(dri, sourceSets, kind, styles, extra, buildBlock)) + + def build() = cells + } + + case class ScalaDivergentBuilder( + val groupID: ContentDivergentGroup.GroupID, + val mainDRI: Set[DRI], + val mainKind: Kind, + val mainStyles: Set[Style], + val mainExtra: PropertyContainer[ContentNode], + val implicitlySourceSetHinted: Boolean, + val instances: List[ContentDivergentInstance] = List() + ) { + private def addChild(c: ContentDivergentInstance) = copy(instances = instances :+ c) + + def buildContent() = ContentDivergentGroup( + instances.asJava, + DCI(mainDRI.asJava, mainKind), + mainStyles.asJava, + mainExtra, + groupID, + implicitlySourceSetHinted + ) + + def instance( + dri: Set[DRI], + sourceSets: Set[DokkaConfiguration$DokkaSourceSet], + kind: Kind = mainKind, + styles: Set[Style] = mainStyles, + extra: PropertyContainer[ContentNode] = mainExtra + )( + buildBlock: ScalaPageContentBuilder#ScalaDivergentInstanceBuilder => ScalaPageContentBuilder#ScalaDivergentInstanceBuilder + ): ScalaDivergentBuilder = addChild( + buildBlock(ScalaDivergentInstanceBuilder(dri, kind, sourceSets, styles, extra)).buildContent() + ) + } + + case class ScalaDivergentInstanceBuilder( + val mainDRI: Set[DRI], + val mainKind: Kind, + val mainSourcesetData: Set[DokkaConfiguration$DokkaSourceSet], + val mainStyles: Set[Style], + val mainExtra: PropertyContainer[ContentNode], + val before: Option[ContentNode] = None, + val divergent: ContentNode = null, + val after: Option[ContentNode] = None + ) { + def buildContent() = ContentDivergentInstance( + before.getOrElse(null), + if divergent != null then divergent else throw IllegalStateException("Divergent part is mandatory"), + after.getOrElse(null), + DCI(mainDRI.asJava, mainKind), + mainSourcesetData.toDisplay, + mainStyles.asJava, + mainExtra + ) + + def before( + dri: Set[DRI] = mainDRI, + sourceSets: Set[DokkaConfiguration$DokkaSourceSet] = mainSourcesetData, + kind: Kind = mainKind, + styles: Set[Style] = mainStyles, + extra: PropertyContainer[ContentNode] = mainExtra + )( + buildBlock: ScalaPageContentBuilder#ScalaDocumentableContentBuilder => ScalaPageContentBuilder#ScalaDocumentableContentBuilder + ): ScalaDivergentInstanceBuilder = copy( + before = Some(contentForDRIs(dri, sourceSets, kind, styles, extra, buildBlock)) + ) + + def divergent( + dri: Set[DRI] = mainDRI, + sourceSets: Set[DokkaConfiguration$DokkaSourceSet] = mainSourcesetData, + kind: Kind = ContentKind.Main, + styles: Set[Style] = mainStyles, + extra: PropertyContainer[ContentNode] = mainExtra + )( + buildBlock: ScalaPageContentBuilder#ScalaDocumentableContentBuilder => ScalaPageContentBuilder#ScalaDocumentableContentBuilder + ): ScalaDivergentInstanceBuilder = copy( + divergent = contentForDRIs(dri, sourceSets, kind, styles, extra, buildBlock) + ) + + def after( + dri: Set[DRI] = mainDRI, + sourceSets: Set[DokkaConfiguration$DokkaSourceSet] = mainSourcesetData, + kind: Kind = ContentKind.Main, + styles: Set[Style] = mainStyles, + extra: PropertyContainer[ContentNode] = mainExtra + )( + buildBlock: ScalaPageContentBuilder#ScalaDocumentableContentBuilder => ScalaPageContentBuilder#ScalaDocumentableContentBuilder + ): 
ScalaDivergentInstanceBuilder = copy( + after = Some( + contentForDRIs(dri, sourceSets, kind, styles, extra, buildBlock) + ) + ) + } + + + case class ScalaDocumentableContentBuilder( + val mainDRI: Set[DRI], + val mainSourcesetData: Set[DokkaConfiguration$DokkaSourceSet], + val mainKind: Kind, + val mainStyles: Set[Style], + val mainExtra: PropertyContainer[ContentNode], + val children: List[ContentNode] = List() + ) { + + def addChild(c: ContentNode) = copy(children = children :+ c) + + def addChildren(c: Seq[ContentNode]) = copy(children = children ++ c) + + def reset() = copy(children = Nil) + + def buildContent() = ContentGroup( + children.asJava, + DCI(mainDRI.asJava, mainKind), + mainSourcesetData.toDisplay, + mainStyles.asJava, + mainExtra + ) + + def group( + dri: Set[DRI] = mainDRI, + sourceSets: Set[DokkaConfiguration$DokkaSourceSet] = mainSourcesetData, + kind: Kind = mainKind, + styles: Set[Style] = mainStyles, + extra: PropertyContainer[ContentNode] = mainExtra + )( + buildBlock: ScalaPageContentBuilder#ScalaDocumentableContentBuilder => ScalaPageContentBuilder#ScalaDocumentableContentBuilder + ): ScalaDocumentableContentBuilder = addChild( + contentForDRIs(dri, sourceSets, kind, styles, extra, buildBlock) + ) + + def header( + level: Int, + text: String, + kind: Kind = ContentKind.Main, + sourceSets: Set[DokkaConfiguration$DokkaSourceSet] = mainSourcesetData, + styles: Set[Style] = mainStyles, + extra: PropertyContainer[ContentNode] = mainExtra + )( + buildBlock: ScalaPageContentBuilder#ScalaDocumentableContentBuilder => ScalaPageContentBuilder#ScalaDocumentableContentBuilder = p => p + ): ScalaDocumentableContentBuilder = addChild( + ContentHeader( + level, + contentForDRIs( + mainDRI, + sourceSets, + kind, + styles, + extra plus SimpleAttr("anchor", "\\s".r.replaceAllIn(text, "").toLowerCase()), + bdr => {buildBlock(bdr.text(text, kind = kind))} + ) + ) + ) + + def cover( + text: String, + kind: Kind = ContentKind.Main, + sourceSets: Set[DokkaConfiguration$DokkaSourceSet] = mainSourcesetData, + styles: Set[Style] = mainStyles, + extra: PropertyContainer[ContentNode] = mainExtra + )( + buildBlock: ScalaPageContentBuilder#ScalaDocumentableContentBuilder => ScalaPageContentBuilder#ScalaDocumentableContentBuilder = p => p + ): ScalaDocumentableContentBuilder = header(1, text, kind, sourceSets, styles, extra){buildBlock} + + def signature(d: Documentable) = addChildren(signatureProvider.signature(d).asScala.toList) + + def defaultHeaders = List( + contentForDRIs( + dris = mainDRI, + sourceSets = mainSourcesetData, + buildBlock = bdr => {bdr.text("Name")} + ), + contentForDRIs( + dris = mainDRI, + sourceSets = mainSourcesetData, + buildBlock = bdr => {bdr.text("Summary")} + ) + ) + + def table( + kind: Kind = ContentKind.Main, + sourceSets: Set[DokkaConfiguration$DokkaSourceSet] = mainSourcesetData, + styles: Set[Style] = mainStyles, + extra: PropertyContainer[ContentNode] = mainExtra, + headers: List[ContentGroup] = List.empty + )( + buildBlock: ScalaPageContentBuilder#ScalaTableBuilder => ScalaPageContentBuilder#ScalaTableBuilder + ) = addChild( + ContentTable( + headers.asJava, + buildBlock(ScalaTableBuilder(mainDRI, sourceSets, kind, styles, extra)).build().asJava, + DCI(mainDRI.asJava, kind), + sourceSets.toDisplay, + styles.asJava, + extra + ) + ) + + def text( + text: String, + kind: Kind = ContentKind.Main, + sourceSets: Set[DokkaConfiguration$DokkaSourceSet] = mainSourcesetData, + styles: Set[Style] = mainStyles, + extra: PropertyContainer[ContentNode] = mainExtra + ) = 
addChild( + buildText(text, kind, sourceSets, styles, extra) + ) + + private def buildText( + text: String, + kind: Kind = ContentKind.Main, + sourceSets: Set[DokkaConfiguration$DokkaSourceSet] = mainSourcesetData, + styles: Set[Style] = mainStyles, + extra: PropertyContainer[ContentNode] = mainExtra + ) = ContentText(text, DCI(mainDRI.asJava, kind), sourceSets.toDisplay, styles.asJava, extra) + + + def dotDiagram( + diagram: HierarchyDiagram, + kind: Kind = ContentKind.Main, + sourceSets: Set[DokkaConfiguration$DokkaSourceSet] = mainSourcesetData, + styles: Set[Style] = mainStyles, + extra: PropertyContainer[ContentNode] = mainExtra + ) = addChild(HierarchyDiagramContentNode(diagram, DCI(mainDRI.asJava, kind), sourceSets.toDisplay.asScala.toSet, styles, extra)) + + def groupingBlock[A, T <: Documentable, G <: List[(A, List[T])]]( + name: String, + elements: G, + kind: Kind = ContentKind.Main, + sourceSets: Set[DokkaConfiguration$DokkaSourceSet] = mainSourcesetData, + styles: Set[Style] = Set(), + extra: PropertyContainer[ContentNode] = mainExtra, + renderWhenEmpty: Boolean = false, + needsSorting: Boolean = true, + headers: List[ContentGroup] = List(), + needsAnchors: Boolean = true, + omitSplitterOnSingletons: Boolean = true + )( + groupSplitterFunc: (ScalaPageContentBuilder#ScalaDocumentableContentBuilder, A) => ScalaPageContentBuilder#ScalaDocumentableContentBuilder + )( + elementFunc: (ScalaPageContentBuilder#ScalaDocumentableContentBuilder, T) => ScalaPageContentBuilder#ScalaDocumentableContentBuilder + ) = if (renderWhenEmpty || !elements.flatMap(_._2).isEmpty) { + header(3, name, kind, styles = styles, extra = extra plus SimpleAttr.Companion.header(name))() + .group(styles = Set(ContentStyle.WithExtraAttributes), extra = extra plus SimpleAttr.Companion.header(name)){ bdr => + elements.foldLeft(bdr){ (b, groupped) => + val (key, values) = groupped + (if(values.size > 1 || (values.size == 1 && !omitSplitterOnSingletons)) b.group()(bd => groupSplitterFunc(bd, key)) else b) + .table(kind = kind, headers = headers, styles = styles, extra = extra plus SimpleAttr.Companion.header(name)){ tablebdr => + values.foldLeft(tablebdr){ (tablebdr, elem) => + tablebdr.cell(Set(elem.getDri), elem.getSourceSets.asScala.toSet, kind, styles, extra){ cellbdr => + elementFunc(cellbdr, elem) + } + } + } + } + } + } else this + + def list[T]( + elements: List[T], + prefix: String = "", + suffix: String = "", + separator: String = ", ", + sourceSets: Set[DokkaConfiguration$DokkaSourceSet] = mainSourcesetData + )( + elemOp: (ScalaPageContentBuilder#ScalaDocumentableContentBuilder, T) => ScalaPageContentBuilder#ScalaDocumentableContentBuilder + ) = if(!elements.isEmpty){ + val withPrefix = (if(!prefix.isEmpty) text(prefix, sourceSets = sourceSets) else this) + val insertedElems = elements.dropRight(1).foldLeft[ScalaPageContentBuilder#ScalaDocumentableContentBuilder](withPrefix){ (bdr, elem) => + elemOp(bdr, elem).text(separator, sourceSets = sourceSets) + } + val withLast = elemOp(insertedElems, elements.last) + if(!suffix.isEmpty) withLast.text(suffix, sourceSets = sourceSets) else withLast + } else this + + def driLink( + text: String, + address: DRI, + kind: Kind = ContentKind.Main, + sourceSets: Set[DokkaConfiguration$DokkaSourceSet] = mainSourcesetData, + styles: Set[Style] = mainStyles, + extra: PropertyContainer[ContentNode] = mainExtra + ) = addChild( + ContentDRILink( + List(buildText(text, kind, sourceSets, styles, extra)).asJava, + address, + DCI(mainDRI.asJava, kind), + sourceSets.toDisplay, + 
Set().asJava, + PropertyContainer.Companion.empty() + ) + ) + + def resolvedLink( + text: String, + address: String, + kind: Kind = ContentKind.Main, + sourceSets: Set[DokkaConfiguration$DokkaSourceSet] = mainSourcesetData, + styles: Set[Style] = mainStyles, + extra: PropertyContainer[ContentNode] = mainExtra + ) = addChild( + ContentResolvedLink( + List(buildText(text, kind, sourceSets, styles, extra)).asJava, + address, + DCI(mainDRI.asJava, kind), + sourceSets.toDisplay, + Set().asJava, + PropertyContainer.Companion.empty() + ) + ) + + def linkWithContent( + address: DRI, + kind: Kind = ContentKind.Main, + sourceSets: Set[DokkaConfiguration$DokkaSourceSet] = mainSourcesetData, + styles: Set[Style] = mainStyles, + extra: PropertyContainer[ContentNode] = mainExtra + )( + buildBlock: ScalaPageContentBuilder#ScalaDocumentableContentBuilder => ScalaPageContentBuilder#ScalaDocumentableContentBuilder + ) = addChild( + ContentDRILink( + contentForDRIs(mainDRI, sourceSets, kind, styles, extra, buildBlock).getChildren, + address, + DCI(mainDRI.asJava, kind), + sourceSets.toDisplay, + Set().asJava, + PropertyContainer.Companion.empty() + ) + ) + + def comment( + docTag: DocTag, + kind: Kind = ContentKind.Comment, + sourceSets: Set[DokkaConfiguration$DokkaSourceSet] = mainSourcesetData, + styles: Set[Style] = mainStyles, + extra: PropertyContainer[ContentNode] = mainExtra + ) = addChild( + contentForDRIs(mainDRI, sourceSets, kind, styles, extra, bdr => bdr.addChildren( + rawComment(docTag, kind, sourceSets, styles, extra) + )) + ) + + def rawComment( + docTag: DocTag, + kind: Kind = ContentKind.Comment, + sourceSets: Set[DokkaConfiguration$DokkaSourceSet] = mainSourcesetData, + styles: Set[Style] = mainStyles, + extra: PropertyContainer[ContentNode] = mainExtra + ) = commentsConverter.buildContent( + docTag, + DCI(mainDRI.asJava, kind), + sourceSets.asJava, + Set().asJava, + PropertyContainer.Companion.empty() + ).asScala.toSeq + + def divergentGroup( + groupId: ContentDivergentGroup.GroupID, + dri: Set[DRI] = mainDRI, + kind: Kind = ContentKind.Main, + sourceSets: Set[DokkaConfiguration$DokkaSourceSet] = mainSourcesetData, + styles: Set[Style] = mainStyles, + extra: PropertyContainer[ContentNode] = mainExtra, + implicitlySourceSetHinted: Boolean = true + )( + buildBlock: ScalaPageContentBuilder#ScalaDivergentBuilder => ScalaPageContentBuilder#ScalaDivergentBuilder + ) = addChild( + buildBlock(ScalaDivergentBuilder(groupId, dri, kind, styles, extra, implicitlySourceSetHinted)).buildContent() + ) + + def sourceSetDependentHint( + dri: Set[DRI] = mainDRI, + sourceSets: Set[DokkaConfiguration$DokkaSourceSet] = mainSourcesetData, + kind: Kind = ContentKind.Main, + styles: Set[Style] = mainStyles, + extra: PropertyContainer[ContentNode] = mainExtra, + )( + buildBlock: ScalaPageContentBuilder#ScalaDocumentableContentBuilder => ScalaPageContentBuilder#ScalaDocumentableContentBuilder + ) = addChild( + PlatformHintedContent( + contentForDRIs(dri, sourceSets, kind, styles, extra, buildBlock), + sourceSets.toDisplay + ) + ) + + type Self = ScalaPageContentBuilder#ScalaDocumentableContentBuilder + + def documentableTab(name: String)(children: DocumentableGroup*): Self = + def buildSignature(d: Documentable) = + ScalaSignatureProvider.rawSignature(d, InlineSignatureBuilder()).asInstanceOf[InlineSignatureBuilder] + + def buildAnnotations(d: Member) = + InlineSignatureBuilder().annotationsBlock(d).asInstanceOf[InlineSignatureBuilder].names.reverse + + def documentableElement(documentable: Member): 
DocumentableElement = + val docs = documentable.getDocumentation.asScala.values.headOption.flatMap(_.getChildren.asScala.headOption) + val signatureBuilder = buildSignature(documentable) + val originInfo = documentable.origin match { + case Origin.ImplicitlyAddedBy(name, dri) => Signature("Implicitly added by ", SLink(name, dri)) + case Origin.ExtensionFrom(name, dri) => Signature("Extension method from ", SLink(name, dri)) + case _ => Nil + } + + DocumentableElement( + buildAnnotations(documentable), + signatureBuilder.preName.reverse, + documentable.getName, + signatureBuilder.names.reverse, + docs.fold(Nil)(d => reset().rawComment(d.getRoot)), + originInfo, + FilterAttributes.attributesFor(documentable), + asParams(documentable.getDri) + ) + + def element(e: Documentable | DocumentableSubGroup): DocumentableElement | DocumentableElementGroup = e match + case e: Documentable => documentableElement(e) + case e: DocumentableSubGroup => + DocumentableElementGroup( + e.title, + e.extensions.map(documentableElement), + asParams(mainDRI) + ) + + if (children.forall(_.documenables.isEmpty)) this else + header(3, name, mainKind,mainSourcesetData, mainStyles, mainExtra plus SimpleAttr.Companion.header(name))() + .group(styles = Set(ContentStyle.WithExtraAttributes), extra = mainExtra plus SimpleAttr.Companion.header(name)){ bdr => + children.foldLeft(bdr){ (bdr, list) => + if list.documenables.isEmpty then bdr + else + val header = list.name match + case Some(o: Documentable) => + buildSignature(o).names.reverse + case option => + option.toSeq.map(_.toString) + + + bdr.addChild(DocumentableList(header, list.documenables.map(element), asParams(mainDRI))) + } + } + + def documentableFilter() = addChild(DocumentableFilter(asParams(mainDRI))) + + def asParams(dri: DRI): ContentNodeParams = asParams(Set(dri)) + + def asParams(dri: Set[DRI]): ContentNodeParams = ContentNodeParams( + new DCI(dri.asJava, mainKind), + mainSourcesetData.toDisplay, + mainStyles, + mainExtra + ) + + def divergentBlock[A, T <: Documentable, G <: List[(A, List[T])]]( + name: String, + elements: G, + kind: Kind = ContentKind.Main, + sourceSets: Set[DokkaConfiguration$DokkaSourceSet] = mainSourcesetData, + styles: Set[Style] = Set(), + extra: PropertyContainer[ContentNode] = mainExtra, + renderWhenEmpty: Boolean = false, + needsSorting: Boolean = true, + headers: List[ContentGroup] = List(), + needsAnchors: Boolean = true, + omitSplitterOnSingletons: Boolean = true + )( + groupSplitterFunc: (ScalaPageContentBuilder#ScalaDocumentableContentBuilder, A) => ScalaPageContentBuilder#ScalaDocumentableContentBuilder + ) = if (renderWhenEmpty || !elements.flatMap(_._2).isEmpty) { + header(3, name, kind, styles = styles, extra = extra plus SimpleAttr.Companion.header(name))() + .group(styles = Set(ContentStyle.WithExtraAttributes), extra = extra plus SimpleAttr.Companion.header(name)){ bdr => + elements.foldLeft(bdr){ (b, groupped) => + val (key, values) = groupped + (if(values.size > 1 || (values.size == 1 && !omitSplitterOnSingletons)) b.group()(bd => groupSplitterFunc(bd, key)) else b) + .table(kind = kind, headers = headers, styles = styles, extra = extra plus SimpleAttr.Companion.header(name)){ tablebdr => + values.groupBy(_.getName).foldLeft(tablebdr){ case (tablebdr,(elemName, divergentElems)) => tablebdr + .cell( + dri = divergentElems.map(_.getDri).toSet, + sourceSets = divergentElems.flatMap(_.getSourceSets.asScala).toSet, + kind = kind + ){ cellbdr => cellbdr + .driLink(elemName, divergentElems.head.getDri, kind = 
ContentKind.Main) + .divergentGroup(ContentDivergentGroup.GroupID(name)){ divBdr => + divergentElems.foldLeft(divBdr){ (bdr, elem) => + bdr.instance(Set(elem.getDri), elem.getSourceSets.asScala.toSet){ insBdr => insBdr + .before(){ befbdr => befbdr + .contentForBrief(elem) + } + .divergent(){ divDivBdr => divDivBdr + .group(){ gbdr => gbdr + .signature(elem) + } + } + } + } + } + } + + } + } + } + } + } else this + + def contentForBrief(d: Documentable): ScalaDocumentableContentBuilder = + d.getDocumentation.asScala.foldLeft(this){ case (builder, (ss, docs)) => + docs.getChildren.asScala.headOption.map(_.getRoot) match { + case Some(dt) => builder.group(sourceSets = Set(ss), kind = ContentKind.BriefComment){ bldr => bldr.comment(dt) } + case None => builder + } + } + + } +} diff --git a/scala3doc/src/dotty/dokka/translators/ScalaDocumentableToPageTranslator.scala b/scala3doc/src/dotty/dokka/translators/ScalaDocumentableToPageTranslator.scala new file mode 100644 index 000000000000..ac5ebd810832 --- /dev/null +++ b/scala3doc/src/dotty/dokka/translators/ScalaDocumentableToPageTranslator.scala @@ -0,0 +1,16 @@ +package dotty.dokka + +import org.jetbrains.dokka.base.signatures.SignatureProvider +import org.jetbrains.dokka.base.transformers.pages.comments.CommentsToContentConverter +import org.jetbrains.dokka.model.DModule +import org.jetbrains.dokka.pages.ModulePageNode +import org.jetbrains.dokka.transformers.documentation.DocumentableToPageTranslator +import org.jetbrains.dokka.utilities.DokkaLogger + +class ScalaDocumentableToPageTranslator( + val commentsToContentConverter: CommentsToContentConverter, + val signatureProvider: SignatureProvider, + val logger: DokkaLogger +) extends DocumentableToPageTranslator { + override def invoke(module: DModule): ModulePageNode = ScalaPageCreator(commentsToContentConverter, signatureProvider, logger).pageForModule(module) +} diff --git a/scala3doc/src/dotty/dokka/translators/ScalaPageCreator.scala b/scala3doc/src/dotty/dokka/translators/ScalaPageCreator.scala new file mode 100644 index 000000000000..8f2b27ddc254 --- /dev/null +++ b/scala3doc/src/dotty/dokka/translators/ScalaPageCreator.scala @@ -0,0 +1,440 @@ +package dotty.dokka + +import scala.collection.mutable +import scala.collection.mutable.ListBuffer +import scala.util.chaining._ +import org.jetbrains.dokka.base.translators.documentables.{DefaultPageCreator, PageContentBuilder} +import org.jetbrains.dokka.base.translators.documentables.PageContentBuilder$DocumentableContentBuilder +import org.jetbrains.dokka.base.signatures.SignatureProvider +import org.jetbrains.dokka.base.transformers.pages.comments.CommentsToContentConverter +import org.jetbrains.dokka.transformers.documentation.DocumentableToPageTranslator +import org.jetbrains.dokka.utilities.DokkaLogger +import org.jetbrains.dokka.model._ +import org.jetbrains.dokka.pages._ +import collection.JavaConverters._ +import org.jetbrains.dokka.model.properties._ +import org.jetbrains.dokka.base.transformers.documentables.CallableExtensions +import org.jetbrains.dokka.DokkaConfiguration$DokkaSourceSet +import org.jetbrains.dokka.base.resolvers.anchors._ +import org.jetbrains.dokka.links._ +import org.jetbrains.dokka.model.doc._ +import org.jetbrains.dokka.links.DRIKt.getParent +import dotty.dokka.model.api._ +import dotty.dokka.model.api.Kind +import dotty.dokka.model.api.Link + +type DocBuilder = ScalaPageContentBuilder#ScalaDocumentableContentBuilder + +class ScalaPageCreator( + commentsToContentConverter: CommentsToContentConverter, + 
signatureProvider: SignatureProvider, + val logger: DokkaLogger +) extends DefaultPageCreator(commentsToContentConverter, signatureProvider, logger): + + private val contentBuilder = ScalaPageContentBuilder(commentsToContentConverter, signatureProvider, logger) + + override def pageForModule(m: DModule): ModulePageNode = super.pageForModule(m) + + private def updatePageNameForMember(page: PageNode, p: Member) = + val name = p.kind match + case Kind.Extension(_) => s"extension_${page.getName}" + case _ => page.getName + + page.modified(name, page.getChildren) + + private def pagesForMembers(p: Member): Seq[PageNode] = + p.allMembers.filter(_.origin == Origin.DefinedWithin).collect { + case f: DFunction => updatePageNameForMember(pageForFunction(f), f) + case c: DClass => updatePageNameForMember(pageForDClass(c), c) + } + + override def pageForPackage(p: DPackage): PackagePageNode = + val originalPage = super.pageForPackage(p) + val originalPages: Seq[PageNode] = originalPage.getChildren.asScala.toList + val allPage: Seq[PageNode] = originalPages ++ pagesForMembers(p) + originalPage.modified(p.getName, allPage.asJava) + + override def pageForClasslike(c: DClasslike): ClasslikePageNode = c match { + case clazz: DClass => pageForDClass(clazz) + case other => throw UnsupportedOperationException("Only DClass classlike is supported.") + } + + def pageForDClass(c: DClass): ClasslikePageNode = { + val constructors = c.getConstructors + + val ext = c.get(ClasslikeExtension) + + val name = if c.kind == Kind.Object && ext.companion.isDefined then c.getName + "$" else c.getName + + ClasslikePageNode( + name, + contentForClasslike(c), + JSet(c.getDri), + c, + (constructors.asScala.map(pageForFunction) ++ + c.getClasslikes.asScala.map(pageForClasslike) ++ + c.getFunctions.asScala.map(pageForFunction) ++ + pagesForMembers(c)).asJava, + List.empty.asJava + ) + + } + + override def pageForFunction(f: DFunction) = super.pageForFunction(f) + + override def contentForModule(m: DModule) = { + def buildBlock = (builder: DocBuilder) => builder + .group(kind = ContentKind.Cover) { gbuilder => gbuilder + .cover(m.getName)() + .descriptionIfNotEmpty(m) + } + .addChildren(contentForComments(m).asScala.toSeq) + .groupingBlock( + "Packages", + List("" -> m.getPackages.asScala.toList), + kind = ContentKind.Packages, + sourceSets = m.getSourceSets.asScala.toSet + )( + (bdr, elem) => bdr + ) { (bdr, elem) => bdr + .driLink(elem.getName, elem.getDri) + } + + contentBuilder.contentForDocumentable(m, buildBlock = buildBlock) + } + + override def contentForPackage(p: DPackage) = { + def buildBlock = (builder: DocBuilder) => builder + .group(kind = ContentKind.Cover) { gbuilder => gbuilder + .cover(p.getName)() + .descriptionIfNotEmpty(p) + } + .documentableFilter() + .group(styles = Set(ContentStyle.TabbedContent)) { b => b + .contentForScope(p) + } + + contentBuilder.contentForDocumentable(p, buildBlock = buildBlock) + } + + override def contentForClasslike(c: DClasslike) = c match { + case d: DClass => contentForClass(d) + case other => throw UnsupportedOperationException("Only DClass classlike is supported.") + } + + def contentForClass(c: DClass) = { + def buildBlock = (builder: DocBuilder) => builder + .group(kind = ContentKind.Cover, sourceSets = c.getSourceSets.asScala.toSet) { gbdr => gbdr + .cover(c.getName)() + .sourceSetDependentHint(Set(c.getDri), c.getSourceSets.asScala.toSet) { sbdr => sbdr + .signature(c) + .contentForDescription(c) + } + } + .documentableFilter() + .group(styles = 
Set(ContentStyle.TabbedContent)) { b => b + .contentForScope(c) + .contentForEnum(c) + .contentForConstructors(c) + .contentForTypesInfo(c) + } + contentBuilder.contentForDocumentable(c, buildBlock = buildBlock) + } + + override def contentForMember(d: Documentable) = { + def buildBlock = (builder: DocBuilder) => builder + .group(kind = ContentKind.Cover){ bd => bd.cover(d.getName)() } + .divergentGroup( + ContentDivergentGroup.GroupID("member") + ) { divbdr => divbdr + .instance(Set(d.getDri), sourceSets = d.getSourceSets.asScala.toSet) { insbdr => insbdr + .before(){ bbdr => bbdr + .contentForDescription(d) + .contentForComments(d) + } + .divergent(kind = ContentKind.Symbol) { dbdr => dbdr + .signature(d) + } + } + } + contentBuilder.contentForDocumentable(d, buildBlock = buildBlock) + } + + override def contentForFunction(f: DFunction) = contentForMember(f) + + extension (b: DocBuilder): + def descriptionIfNotEmpty(d: Documentable): DocBuilder = { + val desc = contentForDescription(d).asScala.toSeq + val res = if desc.isEmpty then b else b + .sourceSetDependentHint( + Set(d.getDri), + d.getSourceSets.asScala.toSet, + kind = ContentKind.SourceSetDependentHint, + styles = Set(TextStyle.UnderCoverText) + ) { sourceSetBuilder => sourceSetBuilder + .addChildren(desc) + } + res + } + + def contentForComments(d: Documentable) = b + + def contentForDescription(d: Documentable) = { + val specialTags = Set[Class[_]](classOf[Description]) + + type SourceSet = DokkaConfiguration$DokkaSourceSet + + val tags: List[(SourceSet, TagWrapper)] = + d.getDocumentation.asScala.toList.flatMap( (pd, doc) => doc.getChildren.asScala.map(pd -> _).toList ) + + val platforms = d.getSourceSets.asScala.toSet + + val description = tags.collect{ case (pd, d: Description) => (pd, d) }.drop(1).groupBy(_(0)).map( (key, value) => key -> value.map(_(1))) + + /** Collect the key-value pairs from `iter` into a `Map` with a `cleanup` step, + * keeping the original order of the pairs. 
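+      * For example, `collectInMap(Iterator("a" -> 1, "b" -> 2, "a" -> 3))(_.sum)` yields a map
+      * equivalent to `LinkedHashMap("a" -> 4, "b" -> 2)`, with keys kept in first-seen order.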
+ */ + def collectInMap[K, E, V]( + iter: Iterator[(K, E)] + )( + cleanup: List[E] => V + ): collection.Map[K, V] = { + val lhm = mutable.LinkedHashMap.empty[K, ListBuffer[E]] + iter.foreach { case (k, e) => + lhm.updateWith(k) { + case None => Some(ListBuffer.empty.append(e)) + case Some(buf) => + buf.append(e) + Some(buf) + } + } + lhm.iterator.map { case (key, buf) => key -> cleanup(buf.result)}.to(mutable.LinkedHashMap) + } + + val unnamedTags: collection.Map[(SourceSet, Class[_]), List[TagWrapper]] = + collectInMap { + tags.iterator + .filterNot { t => + t(1).isInstanceOf[NamedTagWrapper] || specialTags.contains(t(1).getClass) + }.map { t => + (t(0), t(1).getClass) -> t(1) + } + }(cleanup = identity) + + val namedTags: collection.Map[ + String, + Either[ + collection.Map[SourceSet, NamedTagWrapper], + collection.Map[(SourceSet, String), ScalaTagWrapper.NestedNamedTag], + ], + ] = { + val grouped = collectInMap { + tags.iterator.collect { + case (sourcesets, n: NamedTagWrapper) => + (n.getName, n.isInstanceOf[ScalaTagWrapper.NestedNamedTag]) -> (sourcesets, n) + } + }(cleanup = identity) + + grouped.iterator.map { + case ((name, true), values) => + val groupedValues = + values.iterator.map { + case (sourcesets, t) => + val tag = t.asInstanceOf[ScalaTagWrapper.NestedNamedTag] + (sourcesets, tag.subname) -> tag + }.to(mutable.LinkedHashMap) + name -> Right(groupedValues) + case ((name, false), values) => + name -> Left(values.to(mutable.LinkedHashMap)) + }.to(mutable.LinkedHashMap) + } + + b.group(Set(d.getDri), styles = Set(TextStyle.Block, TableStyle.Borderless)) { bdr => + val b1 = description.foldLeft(bdr){ + case (bdr, (key, value)) => bdr + .group(sourceSets = Set(key)){ gbdr => + value.foldLeft(gbdr) { (gbdr, tag) => gbdr + .comment(tag.getRoot) + } + } + } + + b1.table(kind = ContentKind.Comment, styles = Set(TableStyle.DescriptionList)){ tbdr => + val withUnnamedTags = unnamedTags.foldLeft(tbdr){ case (bdr, (key, value) ) => bdr + .cell(sourceSets = Set(key(0))){ b => b + .text(key(1).getSimpleName, styles = Set(TextStyle.Bold)) + } + .cell(sourceSets = Set(key(0))) { b => b + .list(value, separator = ""){ (bdr, elem) => bdr + .comment(elem.getRoot) + } + } + } + + val withNamedTags = namedTags.foldLeft(withUnnamedTags){ + case (bdr, (key, Left(value))) => + value.foldLeft(bdr){ case (bdr, (sourceSets, v)) => bdr + .cell(sourceSets = Set(sourceSets)){ b => b + .text(key) + } + .cell(sourceSets = Set(sourceSets)){ b => b + .comment(v.getRoot) + } + } + case (bdr, (key, Right(groupedValues))) => bdr + .cell(sourceSets = d.getSourceSets.asScala.toSet){ b => b + .text(key) + } + .cell(sourceSets = d.getSourceSets.asScala.toSet)(_.table(kind = ContentKind.Comment, styles = Set(TableStyle.NestedDescriptionList)){ tbdr => + groupedValues.foldLeft(tbdr){ case (bdr, ((sourceSets, _), v)) => bdr + .cell(sourceSets = Set(sourceSets)){ b => b + .comment(v.identTag) + } + .cell(sourceSets = Set(sourceSets)){ b => b + .comment(v.descTag) + } + } + }) + } + + val withCompanion = d match { + case d: DClass => + val ext = d.get(ClasslikeExtension) + val co = ext.companion + co.fold(withNamedTags) { co => withNamedTags + .cell(sourceSets = d.getSourceSets.asScala.toSet){ b => b + .text("Companion") + } + .cell(sourceSets = d.getSourceSets.asScala.toSet){ b => b + .driLink( + d.kind match { + case Kind.Object => "class" + case _ => "object" + }, + co + ) + } + } + case _ => withNamedTags + } + + d match{ + case d: (WithSources & WithExtraProperties[_]) if d.get(SourceLinks) != null && 
!d.get(SourceLinks).links.isEmpty => d.get(SourceLinks).links.foldLeft(withCompanion){ + case (bdr, (sourceSet, link)) => bdr + .cell(sourceSets = Set(sourceSet)){ b => b + .text("Source") + } + .cell(sourceSets = Set(sourceSet)){ b => b + .resolvedLink("(source)", link) + } + } + case other => withCompanion + } + } + } + } + + def contentForScope(s: Documentable & WithScope & WithExtraProperties[_]) = + def groupExtensions(extensions: Seq[Member]): Seq[DocumentableSubGroup] = + extensions.groupBy(_.kind).map { + case (Kind.Extension(on), members) => + val signature = Signature(s"extension (${on.name}: ") join on.signature join Signature(")") + DocumentableSubGroup(signature, members.toSeq) + case other => sys.error(s"unexpected value: $other") + }.toSeq + + + val (definedMethods, inheritedMethods) = s.membersBy(_.kind == Kind.Def) + val (definedFields, inheritedFiles) = s.membersBy(m => m.kind == Kind.Val || m.kind == Kind.Var) + val (definedClasslikes, inheritedClasslikes) = s.membersBy(m => m.kind.isInstanceOf[Classlike]) + val (definedTypes, inheritedTypes) = s.membersBy(_.kind.isInstanceOf[Kind.Type]) + val (definedGivens, inheritedGives) = s.membersBy(_.kind.isInstanceOf[Kind.Given]) + val (definedExtensions, inheritedExtensions) = s.membersBy(_.kind.isInstanceOf[Kind.Extension]) + val (definedImplicits, inheritedImplicits) = s.membersBy(_.kind.isInstanceOf[Kind.Implicit]) + + b + .contentForComments(s) + .documentableTab("Type members")( + DocumentableGroup(Some("Types"), definedTypes), + DocumentableGroup(Some("Classlikes"), definedClasslikes), + DocumentableGroup(Some("Inherited types"), inheritedTypes), + DocumentableGroup(Some("Inherited classlikes"), inheritedClasslikes) + ) + .documentableTab("Methods")( + DocumentableGroup(Some("Defined methods"), definedMethods), + DocumentableGroup(Some("Inherited methods"), inheritedMethods), + ) + .documentableTab("Value members")( + DocumentableGroup(Some("Defined value members"), definedFields), + DocumentableGroup(Some("Inherited value members"), inheritedFiles) + ) + .documentableTab("Givens")( + DocumentableGroup(Some("Defined givens"), definedGivens), + DocumentableGroup(Some("Inherited givens"), inheritedGives) + ) + .documentableTab("Extensions")( + DocumentableGroup(Some("Defined extensions"), groupExtensions(definedExtensions)), + DocumentableGroup(Some("Inherited extensions"), groupExtensions(inheritedExtensions)) + ) + .documentableTab("Implicits")( + DocumentableGroup(Some("Defined implicits"), definedImplicits), + DocumentableGroup(Some("Inherited implicits"), inheritedImplicits) + ) + + + def contentForEnum(c: DClass) = + b.documentableTab("Enum entries")( + DocumentableGroup(None, c.membersBy(_.kind == Kind.EnumCase)._1) // Enum entries cannot be inherited + ) + + + def contentForConstructors(c: DClass) = + b.documentableTab("Constructors")( + DocumentableGroup(None, c.getConstructors.asScala.toList) + ) + + + def contentForTypesInfo(c: DClass) = + val supertypes = c.parents + val subtypes = c.knownChildren + + def contentForTypeLink(builder: DocBuilder, link: LinkToType): DocBuilder = + builder.group(styles = Set(TextStyle.Paragraph)) { builder => + link.signature.foldLeft(builder.text(link.kind.name).text(" ")){ (builder, sigElement) => sigElement match + case Link(name, dri) => builder.driLink(name, dri) + case str: String => builder.text(str) + } + } + + val withSupertypes = if supertypes.isEmpty then b else + b.header(2, "Linear supertypes")() + .group( + kind = ContentKind.Comment, + styles = 
Set(ContentStyle.WithExtraAttributes), + extra = PropertyContainer.Companion.empty plus SimpleAttr.Companion.header("Linear supertypes") + ){ gbdr => gbdr + .group(kind = ContentKind.Symbol, styles = Set(TextStyle.Monospace)){ grbdr => grbdr + .list(supertypes.toList, separator = "")(contentForTypeLink) + } + } + + val withSubtypes = if (subtypes.isEmpty) withSupertypes else + withSupertypes.header(2, "Known subtypes")() + .group( + kind = ContentKind.Comment, + styles = Set(ContentStyle.WithExtraAttributes), + extra = PropertyContainer.Companion.empty plus SimpleAttr.Companion.header("Known subtypes") + ) { _.group(kind = ContentKind.Symbol, styles = Set(TextStyle.Monospace)) { + _.list(subtypes.toList, separator="")(contentForTypeLink) + } + } + + if subtypes.isEmpty && supertypes.isEmpty then withSubtypes else + withSubtypes.header(2, "Type hierarchy")().group( + kind = ContentKind.Comment, + styles = Set(ContentStyle.WithExtraAttributes), + extra = PropertyContainer.Companion.empty plus SimpleAttr.Companion.header("Type hierarchy") + ) { _.group(kind = ContentKind.Symbol, styles = Set(TextStyle.Monospace)) { + _.dotDiagram(HierarchyDiagramBuilder.build(c)) + } + } diff --git a/scala3doc/src/dotty/dokka/translators/ScalaSignatureProvider.scala b/scala3doc/src/dotty/dokka/translators/ScalaSignatureProvider.scala new file mode 100644 index 000000000000..4346574d32e3 --- /dev/null +++ b/scala3doc/src/dotty/dokka/translators/ScalaSignatureProvider.scala @@ -0,0 +1,195 @@ +package dotty.dokka + +import org.jetbrains.dokka.base.signatures._ +import org.jetbrains.dokka.model._ +import org.jetbrains.dokka.model.properties.{WithExtraProperties} +import org.jetbrains.dokka.pages._ +import org.jetbrains.dokka.base.signatures.KotlinSignatureProvider +import org.jetbrains.dokka.base.transformers.pages.comments.CommentsToContentConverter +import org.jetbrains.dokka.utilities.DokkaLogger +import collection.JavaConverters._ +import org.jetbrains.dokka.base.translators.documentables._ +import org.jetbrains.dokka.model.properties.PropertyContainer +import java.util.function.Consumer +import kotlin.jvm.functions.Function2 +import org.jetbrains.dokka.links.DRI +import dotty.dokka.model.api.{Kind, _} + + +class ScalaSignatureProvider(contentConverter: CommentsToContentConverter, logger: DokkaLogger) extends SignatureProvider with ScalaSignatureUtils: + private val default = new KotlinSignatureProvider(contentConverter, logger) + private val styles = Set(TextStyle.Monospace).asInstanceOf[Set[Style]] + private val contentBuilder = new ScalaPageContentBuilder(contentConverter, this, logger) + + private def signatureContent(d: Documentable)( + func: ScalaPageContentBuilder#ScalaDocumentableContentBuilder => ScalaPageContentBuilder#ScalaDocumentableContentBuilder + ) = contentBuilder.contentForDocumentable(d, kind = ContentKind.Symbol, styles = styles, buildBlock = func) + + + case class ContentNodeBuilder(builder: ScalaPageContentBuilder#ScalaDocumentableContentBuilder) extends SignatureBuilder{ + def text(str: String): SignatureBuilder = ContentNodeBuilder(builder.text(str)) + def driLink(text: String, dri: DRI): SignatureBuilder = ContentNodeBuilder(builder.driLink(text, dri)) + } + + override def signature(documentable: Documentable) = + JList(signatureContent(documentable){ builder => + val withAnnotations = ContentNodeBuilder(builder).annotationsBlock(documentable) + val res = ScalaSignatureProvider.rawSignature(documentable, withAnnotations) + res.asInstanceOf[ContentNodeBuilder].builder + }) + +object 
ScalaSignatureProvider: + def rawSignature(documentable: Documentable, builder: SignatureBuilder): SignatureBuilder = + documentable match + case extension: DFunction if extension.kind.isInstanceOf[Kind.Extension] => + extensionSignature(extension, builder) + case method: DFunction if method.kind.isInstanceOf[Kind.Given] => + givenMethodSignature(method, builder) + case method: DFunction => + methodSignature(method, builder) + case enumEntry: DClass if enumEntry.kind == Kind.EnumCase => + enumEntrySignature(enumEntry, builder) + case clazz: DClass => + classSignature(clazz, builder) + case enumProperty: DProperty if enumProperty.kind == Kind.EnumCase => + enumPropertySignature(enumProperty, builder) + case property: DProperty => + propertySignature(property, builder) + case parameter: DParameter => + parameterSignature(parameter, builder) + case _ => + ??? + + + private def enumEntrySignature(entry: DClass, bdr: SignatureBuilder): SignatureBuilder = + val ext = entry.get(ClasslikeExtension) + val withPrefixes: SignatureBuilder = bdr + .text("case ") + .name(entry.getName, entry.getDri) + .generics(entry) + + val withParameters = ext.constructor.toSeq.foldLeft(withPrefixes){ (bdr, elem) => + bdr.functionParameters(elem) + } + parentsSignature(entry, withParameters) + + private def enumPropertySignature(entry: DProperty, builder: SignatureBuilder): SignatureBuilder = + val modifiedType = entry.getType match + case t: TypeConstructor => GenericTypeConstructor( + t.getDri, + t.getProjections.asScala.map{ + case t: UnresolvedBound if t.getName == " & " => UnresolvedBound(" with "); + case other => other + }.asJava, + null + ) + case other => other + + builder + .text("case ") + .name(entry.getName, entry.getDri) + .text(" extends ") + .typeSignature(modifiedType) + + private def parentsSignature(d: DClass, builder: SignatureBuilder): SignatureBuilder = + d.directParents match + case Nil => builder + case extendType :: withTypes => + val extendPart = builder.text(" extends ").signature(extendType) + withTypes.foldLeft(extendPart)((bdr, tpe) => bdr.text(" with ").signature(tpe)) + + private def classSignature(clazz: DClass, builder: SignatureBuilder): SignatureBuilder = + val ext = clazz.get(ClasslikeExtension) + val prefixes = builder + .modifiersAndVisibility(clazz, clazz.kind.name) + .name(clazz.getName, clazz.getDri) + .generics(clazz) + + val withGenerics = ext.constructor.toSeq.foldLeft(prefixes){ (bdr, elem) => + bdr.functionParameters(elem) + } + parentsSignature(clazz, withGenerics) + + private def extensionSignature(extension: DFunction, builder: SignatureBuilder): SignatureBuilder = + val extendedSymbol = if (extension.isRightAssociative()) { + extension.getParameters.asScala(extension.get(MethodExtension).parametersListSizes(0)) + } else { + extension.getParameters.asScala(0) + } + val withSinature = builder + .modifiersAndVisibility(extension, "def") + .name(extension.getName, extension.getDri) + .generics(extension) + .functionParameters(extension) + + if extension.isConstructor then withSinature + else withSinature.text(":").text(" ").typeSignature(extension.getType) + + private def givenMethodSignature(method: DFunction, builder: SignatureBuilder): SignatureBuilder = method.kind match + case Kind.Given(Some(instance), _) => + builder.text("given ") + .name(method.getName, method.getDri) + .text(" as ") + .signature(instance) + case _ => + builder.text("given ").name(method.getName, method.getDri) + + + private def methodSignature(method: DFunction, builder: SignatureBuilder): 
SignatureBuilder = + val bdr = builder + .modifiersAndVisibility(method, "def") + .name(method.getName, method.getDri) + .generics(method) + .functionParameters(method) + if !method.isConstructor then + bdr + .text(":") + .text(" ") + .typeSignature(method.getType) + else bdr + + + private def propertySignature(property: DProperty, builder: SignatureBuilder): SignatureBuilder = + property.kind match + case _: Kind.Given => givenPropertySignature(property, builder) + case tpe: Kind.Type => typeSignature(tpe, property, builder) + case other => fieldSignature(property, other.name, builder) + + + private def typeSignature(tpe: Kind.Type, typeDef: DProperty, builder: SignatureBuilder): SignatureBuilder = + val bdr = builder + .modifiersAndVisibility(typeDef, if tpe.opaque then "opaque type" else "type") + .name(typeDef.getName, typeDef.getDri) + .generics(typeDef) + if(!tpe.opaque){ + (if tpe.concreate then bdr.text(" = ") else bdr) + .typeSignature(typeDef.getType) + } else bdr + + + private def givenPropertySignature(property: DProperty, builder: SignatureBuilder): SignatureBuilder = + val bdr = builder + .text("given ") + .name(property.getName, property.getDri) + + property.kind match + case Kind.Given(Some(instance), _) => + bdr.text(" as ").signature(instance) + case _ => bdr + + private def fieldSignature(property: DProperty, kind: String, builder: SignatureBuilder): SignatureBuilder = + builder + .modifiersAndVisibility(property, kind) + .name(property.getName, property.getDri) + .text(":") + .text(" ") + .typeSignature(property.getType) + + private def parameterSignature(parameter: DParameter, builder: SignatureBuilder): SignatureBuilder = + val ext = parameter.get(ParameterExtension) + builder + .text(if ext.isGrouped then "extension (" else "(") + .text(parameter.getName) + .text(": ") + .typeSignature(parameter.getType) + .text(")") diff --git a/scala3doc/src/dotty/dokka/translators/ScalaSignatureUtils.scala b/scala3doc/src/dotty/dokka/translators/ScalaSignatureUtils.scala new file mode 100644 index 000000000000..9c49ff1eba90 --- /dev/null +++ b/scala3doc/src/dotty/dokka/translators/ScalaSignatureUtils.scala @@ -0,0 +1,121 @@ +package dotty.dokka + +import org.jetbrains.dokka.base.signatures._ +import org.jetbrains.dokka.base.translators.documentables.PageContentBuilder +import org.jetbrains.dokka.links.DRI +import org.jetbrains.dokka.model._ +import org.jetbrains.dokka.model.properties.WithExtraProperties +import org.jetbrains.dokka.pages._ +import collection.JavaConverters._ +import dotty.dokka.model.api.{Kind, _} + +case class InlineSignatureBuilder(names: Signature = Nil, preName: Signature = Nil) extends SignatureBuilder: + override def text(str: String): SignatureBuilder = copy(names = str +: names) + override def name(str: String, dri: DRI): SignatureBuilder = copy(names = Nil, preName = names) + override def driLink(text: String, dri: DRI): SignatureBuilder = copy(names = Link(text, dri) +: names) + +object InlineSignatureBuilder: + def typeSignatureFor(d: Documentable): Signature = + ScalaSignatureProvider.rawSignature(d, InlineSignatureBuilder()).asInstanceOf[InlineSignatureBuilder].names.reverse + +trait SignatureBuilder extends ScalaSignatureUtils { + def text(str: String): SignatureBuilder + def name(str: String, dri: DRI) = driLink(str, dri) + def driLink(text: String, dri: DRI): SignatureBuilder + + def signature(s: Signature) = s.foldLeft(this){ (b, e) => e match + case Link(name, dri) => b.driLink(name, dri) + case txt: String => b.text(txt) + } + + def list[E]( + 
elements: List[E], + prefix: String = "", + suffix: String = "", + separator: String = ", ", + )( + elemOp: (SignatureBuilder, E) => SignatureBuilder + ): SignatureBuilder = elements match { + case Nil => this + case head :: tail => + tail.foldLeft(elemOp(text(prefix), head))((b, e) => elemOp(b.text(separator), e)).text(suffix) + } + + def annotationsBlock(d: Member): SignatureBuilder = + d.annotations.foldLeft(this){ (bdr, annotation) => bdr.buildAnnotation(annotation)} + + def annotationsInline(d: Documentable with WithExtraProperties[_]): SignatureBuilder = + d.annotations.foldLeft(this){ (bdr, annotation) => bdr.buildAnnotation(annotation) } + + private def buildAnnotation(a: Annotation): SignatureBuilder = + text("@").driLink(a.dri.getClassNames, a.dri).buildAnnotationParams(a).text(" ") + + private def buildAnnotationParams(a: Annotation): SignatureBuilder = + if !a.params.isEmpty then + list(a.params, "(", ")", ", "){ (bdr, param) => bdr.buildAnnotationParameter(param)} + else this + + private def addParameterName(txt: Option[String]): SignatureBuilder = txt match { + case Some(name) => this.text(s"$txt = ") + case _ => this + } + + private def buildAnnotationParameter(a: Annotation.AnnotationParameter): SignatureBuilder = a match { + case Annotation.PrimitiveParameter(name, value) => + addParameterName(name).text(value) + case Annotation.LinkParameter(name, dri, text) => + addParameterName(name).driLink(text, dri) + case Annotation.UnresolvedParameter(name, value) => + addParameterName(name).text(value) + } + + def modifiersAndVisibility(t: Documentable with WithAbstraction with WithVisibility with WithExtraProperties[_], kind: String) = + import org.jetbrains.dokka.model.properties._ + val extras = t.getExtra.getMap() + val (prefixMods, suffixMods) = t.modifiers.partition(_.prefix) + val all = prefixMods.map(_.name) ++ Seq(t.visibility.asSignature) ++ suffixMods.map(_.name) + + text(all.toSignatureString()).text(kind + " ") + + def typeSignature(b: Projection): SignatureBuilder = b match { + case tc: TypeConstructor => + tc.getProjections.asScala.foldLeft(this) { (bdr, elem) => elem match { + case text: UnresolvedBound => bdr.text(text.getName) + case link: TypeParameter => + bdr.driLink(link.getName, link.getDri) + case other => + bdr.text(s"TODO($other)") + } + } + case other => + text(s"TODO: $other") + } + + def generics(on: WithGenerics) = list(on.getGenerics.asScala.toList, "[", "]"){ (bdr, e) => + val bldr = bdr.text(e.getName) + e.getBounds.asScala.foldLeft(bldr)( (b, bound) => b.typeSignature(bound)) + } + + def functionParameters(method: DFunction) = + val methodExtension = method.get(MethodExtension) + val receiverPos = if method.isRightAssociative() then methodExtension.parametersListSizes(0) else 0 + val (bldr, index) = methodExtension.parametersListSizes.foldLeft(this, 0){ + case ((builder, from), size) => + val toIndex = from + size + if from == toIndex then (builder.text("()"), toIndex) + else if !method.kind.isInstanceOf[Kind.Extension] || from != receiverPos then + val b = builder.list(method.getParameters.subList(from, toIndex).asScala.toList, "(", ")"){ (bdr, param) => bdr + .annotationsInline(param) + .text(param.getName) + .text(": ") + .typeSignature(param.getType) + } + (b, toIndex) + else (builder, toIndex) + } + bldr +} + +trait ScalaSignatureUtils: + extension (tokens: Seq[String]) def toSignatureString(): String = + tokens.filter(_.trim.nonEmpty).mkString(""," "," ") diff --git a/scala3doc/src/dotty/dokka/utils.scala 
b/scala3doc/src/dotty/dokka/utils.scala new file mode 100644 index 000000000000..17b1447f70ec --- /dev/null +++ b/scala3doc/src/dotty/dokka/utils.scala @@ -0,0 +1,101 @@ +package dotty.dokka + +import org.jetbrains.dokka.model.properties._ +import org.jetbrains.dokka.base.signatures._ +import org.jetbrains.dokka.model._ +import org.jetbrains.dokka.pages._ +import org.jetbrains.dokka.links._ +import org.jetbrains.dokka.base.signatures.KotlinSignatureProvider +import org.jetbrains.dokka.base.transformers.pages.comments.CommentsToContentConverter +import org.jetbrains.dokka.utilities.DokkaLogger +import collection.JavaConverters._ +import org.jetbrains.dokka.base.translators.documentables._ +import org.jetbrains.dokka.model.properties.PropertyContainer +import java.util.function.Consumer +import kotlin.jvm.functions.Function2 +import java.util.{List => JList, Set => JSet, Map => JMap} +import org.jetbrains.dokka.DokkaConfiguration$DokkaSourceSet +import org.jetbrains.dokka.plugability._ +import kotlin.jvm.JvmClassMappingKt.getKotlinClass + + +extension [V] (a: WithExtraProperties[_]): + def get(key: ExtraProperty.Key[_, V]): V = a.getExtra().getMap().get(key).asInstanceOf[V] + +extension [E <: WithExtraProperties[E]] (a: E): + def put(value: ExtraProperty[_ >: E]): E = // TODO remove some of the InstanceOf + a.withNewExtras(a.getExtra plus value).asInstanceOf[E] + +extension [V] (map: JMap[DokkaConfiguration$DokkaSourceSet, V]): + def defaultValue: V = map.values.asScala.toSeq(0) + +extension (sourceSets: Set[DokkaConfiguration$DokkaSourceSet]): + def toDisplay = sourceSets.map(DisplaySourceSet(_)).asJava + +class BaseKey[T, V] extends ExtraProperty.Key[T, V]: + override def mergeStrategyFor(left: V, right: V): MergeStrategy[T] = + MergeStrategy.Remove.INSTANCE.asInstanceOf[MergeStrategy[T]] + + def definedIn(e: T): Boolean = e match + case e: WithExtraProperties[_] => e.getExtra.getMap.containsKey(this) + case _ => false + + + def getFrom(e: T): Option[V] = e match + case e: WithExtraProperties[_] => getFromExtra(e, this) + case _ => None + +def getFromExtra[V](e: WithExtraProperties[_], k: ExtraProperty.Key[_, V]): Option[V] = + Option(e.getExtra.getMap.get(k)).asInstanceOf[Option[V]] + + +extension (f: DFunction): + def isRightAssociative(): Boolean = f.getName.endsWith(":") + +object JList: + def apply[T](elem: T): JList[T] = List(elem).asJava + def apply[T]() = List[T]().asJava + +object JSet: + def apply[T](elem: T): JSet[T] = Set(elem).asJava + def apply[T]() = Set[T]().asJava + +def modifyContentGroup(originalContentNodeWithParents: Seq[ContentGroup], modifiedContentNode: ContentGroup): ContentGroup = + originalContentNodeWithParents match { + case head :: tail => tail match { + case tailHead :: tailTail => + val newChildren = tailHead.getChildren.asScala.map(c => if c != head then c else modifiedContentNode) + modifyContentGroup( + tailTail, + tailHead.copy( + newChildren.asJava, + tailHead.getDci, + tailHead.getSourceSets, + tailHead.getStyle, + tailHead.getExtra + ) + ) + case _ => head + } + case _ => modifiedContentNode + } + +def getContentGroupWithParents(root: ContentGroup, condition: ContentGroup => Boolean): Seq[ContentGroup] = { + def getFirstMatch(list: List[ContentNode]): Seq[ContentGroup] = list match { + case head :: tail => head match { + case g: ContentGroup => + val res = getContentGroupWithParents(g, condition) + if(!res.isEmpty) res + else getFirstMatch(tail) + case _ => getFirstMatch(tail) + } + + case _ => Seq() + } + if(condition(root)) Seq(root) + else { + val 
res = getFirstMatch(root.getChildren.asScala.toList) + if(!res.isEmpty) res ++ Seq(root) + else Seq() + } +} \ No newline at end of file diff --git a/scala3doc/src/dotty/renderers/DotDiagramBuilder.scala b/scala3doc/src/dotty/renderers/DotDiagramBuilder.scala new file mode 100644 index 000000000000..649f7dc58ac5 --- /dev/null +++ b/scala3doc/src/dotty/renderers/DotDiagramBuilder.scala @@ -0,0 +1,46 @@ +package dotty.dokka + +import dotty.dokka.model._ +import org.jetbrains.dokka.model._ +import org.jetbrains.dokka.links.DRI +import org.jetbrains.dokka.base.resolvers.local.LocationProvider +import org.jetbrains.dokka.pages._ +import dotty.dokka.model.api.Kind +import HTML._ +import dotty.dokka.model.api._ + +object DotDiagramBuilder: + def build(diagram: HierarchyDiagram, renderer: SignatureRenderer): String = + val vertecies = diagram.edges.flatMap(edge => Seq(edge.from, edge.to)).distinct.map { vertex => + s"""node${vertex.id} [label="${getHtmlLabel(vertex, renderer)}", style="${getStyle(vertex)}"];\n""" + }.mkString + + val edges = diagram.edges.map { edge => + s"""node${edge.from.id} -> node${edge.to.id};\n""" + }.mkString + + s""" digraph g { + | $vertecies + | + | $edges + |} + |""".stripMargin + + + private def getStyle(vertex: Vertex) = vertex.body.kind match + case Kind.Class => "fill: #45AD7D;" + case Kind.Object => "fill: #285577;" + case Kind.Trait => "fill: #1CAACF;" + case Kind.Enum => "fill: #B66722;" + case Kind.EnumCase => "fill: #B66722;" + case other => sys.error(s"unexpected value: $other") + + + private def getHtmlLabel(vertex: Vertex, renderer: SignatureRenderer): String = + span(style := "color: #FFFFFF;")( + vertex.body.kind.name, + " ", + span(style := "text-decoration: underline;")( + vertex.body.signature.map(renderer.renderElementWith(_, style := "color: #FFFFFF;")) + ) + ).toString.replace("\"", "\\\"") diff --git a/scala3doc/src/dotty/renderers/ScalaHtmlRenderer.scala b/scala3doc/src/dotty/renderers/ScalaHtmlRenderer.scala new file mode 100644 index 000000000000..68b1b7da7e57 --- /dev/null +++ b/scala3doc/src/dotty/renderers/ScalaHtmlRenderer.scala @@ -0,0 +1,281 @@ +package dotty.dokka + +import org.jetbrains.dokka.plugability.DokkaContext +import org.jetbrains.dokka.pages._ +import org.jetbrains.dokka.model._ +import org.jetbrains.dokka._ +import HTML._ +import collection.JavaConverters._ +import com.virtuslab.dokka.site.SiteRenderer +import com.virtuslab.dokka.site.BaseStaticSiteProcessor +import java.net.URI +import java.util.{List => JList, Set => JSet} +import kotlinx.html.FlowContent +import kotlinx.html.stream.StreamKt +import kotlinx.html.Gen_consumer_tagsKt +import org.jetbrains.dokka.links.DRI +import dotty.dokka.model.api.Link +import dotty.dokka.model.api.HierarchyDiagram +import org.jetbrains.dokka.base.resolvers.local.LocationProvider + + +class SignatureRenderer(pageContext: ContentPage, sourceSetRestriciton: JSet[DisplaySourceSet], locationProvider: LocationProvider): + def link(dri: DRI): Option[String] = Option(locationProvider.resolve(dri, sourceSetRestriciton, pageContext)) + + def renderLink(name: String, dri: DRI, modifiers: AppliedAttr*) = + link(dri) match + case Some(link) => a(href := link, modifiers)(name) + case None if modifiers.isEmpty => raw(name) + case _ => span(modifiers)(name) + + + def renderElementWith(e: String | (String, DRI) | Link, modifiers: AppliedAttr*) = e match + case (name, dri) => renderLink(name, dri, modifiers:_*) + case name: String => raw(name) + case Link(name, dri) => renderLink(name, dri, modifiers:_*) + + 
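+  // Convenience variant of `renderElementWith` for the common case where no extra attributes are needed.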
+ def renderElement(e: String | (String, DRI) | Link) = renderElementWith(e) + +class ScalaHtmlRenderer(ctx: DokkaContext) extends SiteRenderer(ctx) { + + lazy val sourceSets = ctx.getConfiguration.getSourceSets.asScala + .map(s => DisplaySourceSetKt.toDisplaySourceSet(s.asInstanceOf[DokkaConfiguration$DokkaSourceSet])).toSet.asJava + + type FlowContentConsumer = kotlin.jvm.functions.Function1[? >: kotlinx.html.FlowContent, kotlin.Unit] + + override def buildTable(f: FlowContent, node: ContentTable, pageContext: ContentPage, sourceSetRestriciton: JSet[DisplaySourceSet]) = { + val nodeStyles = node.getStyle.asScala.toSet + if nodeStyles.contains(TableStyle.DescriptionList) || nodeStyles.contains(TableStyle.NestedDescriptionList) then + withHtml(f, buildDescriptionList(node, pageContext, sourceSetRestriciton)) + else super.buildTable(f, node, pageContext, sourceSetRestriciton) + } + + override def wrapGroup(f: FlowContent, node: ContentGroup, pageContext: ContentPage, childrenCallback: FlowContentConsumer) = { + val additionalClasses = node.getStyle.asScala.map(_.toString.toLowerCase).mkString("", ",", "") + def buildSymbol: String = div(cls := s"symbol $additionalClasses")( + raw( + buildWithKotlinx(childrenCallback).toString + ) + ).toString + if node.getDci.getKind == ContentKind.Symbol && node.getStyle.asScala.toSet.contains(TextStyle.Monospace) then withHtml(f, buildSymbol) else super.wrapGroup(f, node, pageContext, childrenCallback) + } + + override def buildContentNode(f: FlowContent, node: ContentNode, pageContext: ContentPage, sourceSetRestriciton: JSet[DisplaySourceSet]) = { + node match { + case n: HtmlContentNode => withHtml(f, raw(n.body).toString) + case n: HierarchyDiagramContentNode => buildDiagram(f, n.diagram, pageContext) + case n: DocumentableList => + val ss = if sourceSetRestriciton == null then Set.empty.asJava else sourceSetRestriciton + withHtml(f, buildDocumentableList(n, pageContext, ss).toString()) + case n: DocumentableFilter => withHtml(f, buildDocumentableFilter.toString) + case other => super.buildContentNode(f, node, pageContext, sourceSetRestriciton) + } + } + + private val anchor = raw(""" + <svg width="24" height="24" viewBox="0 0 24 24" fill="darkgray" xmlns="http://www.w3.org/2000/svg"> + <path d="M21.2496 5.3C20.3496 4.5 19.2496 4 18.0496 4C16.8496 4 15.6496 4.5 14.8496 5.3L10.3496 9.8L11.7496 11.2L16.2496 6.7C17.2496 5.7 18.8496 5.7 19.8496 6.7C20.8496 7.7 20.8496 9.3 19.8496 10.3L15.3496 14.8L16.7496 16.2L21.2496 11.7C22.1496 10.8 22.5496 9.7 22.5496 8.5C22.5496 7.3 22.1496 6.2 21.2496 5.3Z"></path> + <path d="M8.35 16.7998C7.35 17.7998 5.75 17.7998 4.75 16.7998C3.75 15.7998 3.75 14.1998 4.75 13.1998L9.25 8.6998L7.85 7.2998L3.35 11.7998C1.55 13.5998 1.55 16.3998 3.35 18.1998C4.25 19.0998 5.35 19.4998 6.55 19.4998C7.75 19.4998 8.85 19.0998 9.75 18.1998L14.25 13.6998L12.85 12.2998L8.35 16.7998Z"></path> + </svg> + """) + + + + private def buildDocumentableList(n: DocumentableList, pageContext: ContentPage, sourceSetRestriciton: JSet[DisplaySourceSet]) = + def render(n: ContentNode) = raw(buildWithKotlinx(n, pageContext, null)) + + val renderer = SignatureRenderer(pageContext, sourceSets, getLocationProvider) + import renderer._ + + def buildDocumentable(element: DocumentableElement) = + def topLevelAttr = Seq(cls := "documentableElement") ++ element.attributes.map{ case (n, v) => Attr(s"data-f-$n") := v } + val kind = element.modifiers.takeRight(1) + val otherModifiers = element.modifiers.dropRight(1) + + div(topLevelAttr:_*)( + div(cls := "annotations 
monospace")(element.annotations.map(renderElement)), + div( + a(href:=link(element.params.dri).getOrElse("#"), cls := "documentableAnchor")(anchor), + span(cls := "modifiers monospace")( + span(cls := "other-modifiers")(otherModifiers.map(renderElement)), + span(cls := "kind")(kind.map(renderElement)), + ), + renderLink(element.name, element.params.dri, cls := "documentableName monospace"), + span(cls := "signature monospace")(element.signature.map(renderElement)), + div( + div(cls := "originInfo")(element.originInfo.map(renderElement)), + div(cls := "documentableBrief")(element.brief.map(render)), + ) + ), + + ) + + div(cls := "documentableList")( + if(n.groupName.isEmpty) raw("") else h3(cls := "documentableHeader")(n.groupName.map(renderElement)), + n.elements.flatMap { + case element: DocumentableElement => + Seq(buildDocumentable(element)) + case group: DocumentableElementGroup => + h4(cls := "documentable-extension-target")( + group.header.map(renderElement) + ) +: group.elements.map(buildDocumentable) + } + ) + + private def buildDocumentableFilter = div(cls := "documentableFilter")( + div(cls := "filterUpperContainer")( + button(cls := "filterToggleButton")( + raw(""" + <svg xmlns="http://www.w3.org/2000/svg" height="24" viewBox="0 0 24 24" width="24"> + <path d="M0 0h24v24H0z" fill="none"/> + <path d="M10 6L8.59 7.41 13.17 12l-4.58 4.59L10 18l6-6z"/> + </svg> + """) + ), + input(cls := "filterableInput", placeholder := "Filter all members") + ), + div(cls := "filterLowerContainer")() + ) + + def buildDescriptionList(node: ContentTable, pageContext: ContentPage, sourceSetRestriciton: JSet[DisplaySourceSet]) = { + val children = node.getChildren.asScala.toList.zipWithIndex + val nodeStyles = node.getStyle.asScala.toSet + val classes = if nodeStyles.contains(TableStyle.NestedDescriptionList) then "paramsdesc" else "attributes" + dl(cls := classes)( + children.map((e, i) => + if(i % 2 == 0) + dt( + raw( + buildWithKotlinx(e, pageContext, sourceSetRestriciton) + ) + ) + else + dd( + raw( + buildWithKotlinx(e, pageContext, sourceSetRestriciton) + ) + ) + ) + ).toString + } + + override def buildCodeBlock( + f: FlowContent, + code: ContentCodeBlock, + pageContext: ContentPage, + ): Unit = { + // we cannot use Scalatags, because we need to call buildContentNode + import kotlinx.html.{Gen_consumer_tagsKt => dsl} + val c = f.getConsumer + val U = kotlin.Unit.INSTANCE + + dsl.div(c, "sample-container", { e => + dsl.pre(c, null, { e => + val codeClass = code.getStyle.asScala.iterator.map(_.toString.toLowerCase).mkString("", " ", " language-scala") + dsl.code(c, codeClass, { e => + e.getAttributes.put("theme", "idea") + code.getChildren.asScala.foreach(buildContentNode(f, _, pageContext, /*sourceSetRestriction*/ null)) + U + }) + U + }) + U + }) + } + + def buildDiagram(f: FlowContent, diagram: HierarchyDiagram, pageContext: ContentPage) = + val renderer = SignatureRenderer(pageContext, sourceSets, getLocationProvider) + withHtml(f, div( id := "inheritance-diagram")( + svg(id := "graph"), + script(`type` := "text/dot", id := "dot")(raw(DotDiagramBuilder.build(diagram, renderer))), + ).toString() + ) + + override def buildHtml(page: PageNode, resources: JList[String], kotlinxContent: FlowContentConsumer): String = + val (pageTitle, pageResources, fromTemplate) = page match + case static: BaseStaticSiteProcessor.StaticPageNode => + val res = if static.hasFrame then resources else static.resources + val title = static.getLoadedTemplate.getTemplateFile.title + (title, res, !static.hasFrame) + case 
_ =>
+        (page.getName, resources, false)
+    html(
+      head(
+        meta(charset := "utf-8"),
+        meta(name := "viewport", content := "width=device-width, initial-scale=1"),
+        title(pageTitle),
+        linkResources(page, pageResources.asScala).toSeq,
+        script(raw(s"""var pathToRoot = "${getLocationProvider.pathToRoot(page)}";"""))
+      ),
+      body(
+        if fromTemplate then
+          raw(buildWithKotlinx(kotlinxContent))
+        else
+          div(id := "container")(
+            div(id := "leftColumn")(
+              div(id := "logo"),
+              div(id := "paneSearch"),
+              nav(id := "sideMenu"),
+            ),
+            div(id := "main")(
+              div (id := "leftToggler")(
+                span(cls := "icon-toggler")
+              ),
+              div(id := "searchBar"),
+              main(
+                raw(buildWithKotlinx(kotlinxContent))
+              ),
+              footer(
+                span(cls := "go-to-top-icon")(
+                  a(href := "#container")(
+                    span(cls:="icon-vertical_align_top"),
+                    raw(" Back to top")
+                  )
+                ),
+                span(cls := "pull-right")(
+                  raw("Generated by "),
+                  a(href := "https://github.com/lampepfl/scala3doc")("Scala3doc")
+                )
+              )
+            )
+          ),
+        script(`type` := "text/javascript", src := resolveRoot(page, "scripts/pages.js")),
+        script(`type` := "text/javascript", src := resolveRoot(page, "scripts/main.js"))
+      )
+    ).toString
+
+  private def resolveRoot(page: PageNode, path: String) =
+    getLocationProvider.pathToRoot(page) + path
+
+  private def linkResources(page: PageNode, resources: Iterable[String]): Iterable[AppliedTag] =
+    def fileExtension(url: String): String =
+      val param = url.indexOf('?')
+      val end = if param < 0 then url.length else param
+      val point = url.lastIndexOf('.', end)
+      url.substring(point+1, end)
+
+    def resolveLink(url: String): String =
+      if URI(url).isAbsolute then url else resolveRoot(page, url)
+
+    for res <- resources yield
+      fileExtension(res) match
+        case "css" => link(rel := "stylesheet", href := resolveLink(res))
+        case "js" => script(`type` := "text/javascript", src := resolveLink(res), defer := "true")
+        case _ => raw(res)
+
+  private def buildWithKotlinx(node: ContentNode, pageContext: ContentPage, sourceSetRestriction: JSet[DisplaySourceSet]): String =
+    Gen_consumer_tagsKt.div(
+      StreamKt.createHTML(true, false),
+      null,
+      (div) => {build(node, div, pageContext, sourceSetRestriction); kotlin.Unit.INSTANCE}
+    ).toString.stripPrefix("<div>").stripSuffix("</div>\n")
+
+  private def buildWithKotlinx(func: FlowContentConsumer): String =
+    Gen_consumer_tagsKt.div(
+      StreamKt.createHTML(true, false),
+      null,
+      func
+    ).toString.stripPrefix("<div>").stripSuffix("</div>\n")
+
+}
diff --git a/scala3doc/src/dotty/renderers/html.scala b/scala3doc/src/dotty/renderers/html.scala
new file mode 100644
index 000000000000..a97f325fa348
--- /dev/null
+++ b/scala3doc/src/dotty/renderers/html.scala
@@ -0,0 +1,86 @@
+package dotty.dokka
+
+/**
+ * This is a trivial HTML renderer using an API inspired by ScalaTags.
+ * It could probably be more efficient, but for now it should be good enough.
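+ *
+ * For example, a snippet like `div(cls := "note")(span("hello")).toString`
+ * is expected to produce `<div class="note"><span>hello</span></div>`.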
+ */ +object HTML: + type AttrArg = AppliedAttr | Seq[AppliedAttr] + type TagArg = AppliedTag | Seq[AppliedTag] | String | Seq[String] + + case class Tag(name: String): + def apply(tags: TagArg*): AppliedTag = apply()(tags:_*) + def apply(first: AttrArg, rest: AttrArg*): AppliedTag = apply((first +: rest):_*)() + def apply(attrs: AttrArg*)(tags: TagArg*): AppliedTag = { + val sb = StringBuilder() + sb.append(s"<$name") + attrs.foreach{ + case s: Seq[AppliedAttr] => + s.foreach(sb.append(" ").append) + case e: AppliedAttr => + sb.append(" ").append(e) + } + sb.append(">") + tags.foreach{ + case t: AppliedTag => + sb.append(t) + case s: String => + sb.append(s) + case s: Seq[AppliedTag | String] => + s.foreach{ + case a: AppliedTag => + sb.append(a) + case s: String => + sb.append(s) + } + } + sb.append(s"</$name>") + sb + } + + case class Attr(name: String): + def :=(value: String): AppliedAttr = AppliedAttr(s"""$name="$value"""") + + opaque type AppliedTag = StringBuilder + + opaque type AppliedAttr = String + + val div = Tag("div") + val span = Tag("span") + val a = Tag("a") + val h1 = Tag("h1") + val h2 = Tag("h2") + val h3 = Tag("h3") + val h4 = Tag("h4") + val dl = Tag("dl") + val dd = Tag("dd") + val dt = Tag("dt") + val svg = Tag("svg") + val button = Tag("button") + val input = Tag("input") + val script = Tag("script") + val link = Tag("link") + val footer = Tag("footer") + val html = Tag("html") + val head = Tag("head") + val meta = Tag("meta") + val main = Tag("main") + val title = Tag("title") + val body = Tag("body") + val nav = Tag("nav") + + val cls = Attr("class") + val href = Attr("href") + val style = Attr("style") + val id = Attr("id") + val `type` = Attr("type") + val placeholder = Attr("placeholder") + val defer = Attr("defer") + val src = Attr("src") + val rel = Attr("rel") + val charset = Attr("charset") + val name = Attr("name") + val content = Attr("content") + + def raw(content: String): AppliedTag = AppliedTag(content) + \ No newline at end of file diff --git a/scala3doc/src/dotty/tools/dottydoc/Main.scala b/scala3doc/src/dotty/tools/dottydoc/Main.scala new file mode 100644 index 000000000000..0b5fc6e82a3a --- /dev/null +++ b/scala3doc/src/dotty/tools/dottydoc/Main.scala @@ -0,0 +1,84 @@ +package dotty.tools +package dottydoc + +import dotty.dokka.{Args, RawArgs, DocConfiguration, DottyDokkaConfig} + +import org.jetbrains.dokka._ +import org.jetbrains.dokka.utilities._ +import org.jetbrains.dokka.plugability._ + +import dotc.core.Contexts._ +import dotc.reporting.Reporter +import dotc.{ Compiler, Driver } +import dotc.config._ + +import dotty.tools.dotc.config.Settings.Setting.value + +import java.io.File + +/** Main object for SBT. + * + * See [[this.process]]. + */ +object Main extends Driver { + + /** Actual entrypoint from SBT. + * + * Internal SBT code for `sbt doc` locates this precise method with + * reflection, and passes to us both `args` and `rootCtx`. "Internal" here + * means that it's painful to modify this code with a plugin. + * + * `args` contains arguments both for us and for the compiler (see code on + * how they're split). 
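+   *
+   * For example, an element such as `--+DOC+--projectTitle` is treated as a Scala3doc
+   * option (with the `--+DOC+` prefix stripped), while anything else, e.g. `-classpath`,
+   * is passed on to the compiler.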
+ */ + override def process(args: Array[String], rootCtx: Context): Reporter = { + // split args into ours and Dotty's + val (dokkaStrArgs, compilerArgs) = { + args.partitionMap { arg => + // our options start with this magic prefix, inserted by the SBT plugin + val magicPrefix = "--+DOC+" + if arg startsWith magicPrefix then + Left(arg stripPrefix magicPrefix) + else + Right(arg) + } + } + + val (filesToCompile, ctx) = setup(compilerArgs, rootCtx) + given Context = ctx + + // parse Dokka args + // note: all required args should be set with SBT settings, + // to make it easier to set and override them + val dokkaArgs = { + val dokkaRawArgs = new RawArgs + val requiredArgs = Seq( + "--tastyRoots", "", // hack, value is not used in SBT but required in CLI + // we extract some settings from Dotty options since that's how SBT passes them + "--name", ctx.settings.projectName.value, + "--projectTitle", ctx.settings.projectName.value, + "--dest", ctx.settings.outputDir.value.toString, + ) + + val allArgs = requiredArgs ++ dokkaStrArgs + println(s"Running scala3doc with arguments: $allArgs") + val parser = org.kohsuke.args4j.CmdLineParser(dokkaRawArgs) + try { + parser.parseArgument(allArgs : _*) + } catch { + case ex: org.kohsuke.args4j.CmdLineException => + // compiler errors are reported in SBT + dotc.report.error(s"Error when parsing Scala3doc options: ${ex.getMessage}") + throw ex + } + dokkaRawArgs.toArgs + } + + val config = DocConfiguration.Sbt(dokkaArgs, filesToCompile, ctx) + val dokkaCfg = new DottyDokkaConfig(config) + new DokkaGenerator(dokkaCfg, DokkaConsoleLogger.INSTANCE).generate() + + rootCtx.reporter + } + +} diff --git a/scala3doc/test/dotty/dokka/DottyTestRunner.scala b/scala3doc/test/dotty/dokka/DottyTestRunner.scala new file mode 100644 index 000000000000..7377a462c008 --- /dev/null +++ b/scala3doc/test/dotty/dokka/DottyTestRunner.scala @@ -0,0 +1,127 @@ +package dotty.dokka +import org.jetbrains.dokka.utilities.{DokkaConsoleLogger, DokkaLogger} +import org.jetbrains.dokka.testApi.logger.TestLogger +import org.jetbrains.dokka.testApi.testRunner._ +import org.jetbrains.dokka.plugability.DokkaPlugin +import java.io.File +import dotty.dokka.{DocConfiguration, DottyDokkaConfig} +import collection.JavaConverters._ +import org.junit.rules.TemporaryFolder +import org.junit.{Test, Rule} +import org.junit.Assert._ +import org.junit.rules.ErrorCollector +import org.jetbrains.dokka.testApi.testRunner.AbstractCoreTest$TestBuilder +import scala.io.Source +import org.jetbrains.dokka.pages._ +import org.jetbrains.dokka.pages.ContentNodesKt +import org.jetbrains.dokka._ +import collection.JavaConverters._ +import scala.math.max +import org.jetbrains.dokka.pages.ContentNodesKt +import dotty.dokka.model.api.Link + +abstract class DottyAbstractCoreTest extends AbstractCoreTest: + private def getTempDir() : TemporaryFolder = + val folder = new TemporaryFolder() + folder.create() + folder + + private def args = Args( + name = "test", + tastyRoots = Nil , + classpath = System.getProperty("java.class.path"), + None, + output = getTempDir().getRoot, + projectVersion = "1.0", + projectTitle = None, + projectLogo = None, + defaultSyntax = None, + sourceLinks = List.empty + ) + + def listPages(tastyDir: String): Seq[ContentPage] = + var signatures: Seq[ContentPage] = Nil + val tests = new AbstractCoreTest$TestBuilder() + + + def getAllContentPages(root: PageNode) : Seq[ContentPage] = root match + case c: ContentPage => Seq(c) ++ c.getChildren.asScala.flatMap(getAllContentPages) + case default => 
default.getChildren.asScala.toSeq.flatMap(getAllContentPages)
+
+    tests.setPagesTransformationStage { root =>
+      val res = root.getChildren.asScala.flatMap(getAllContentPages)
+      signatures = res.toSeq
+      kotlin.Unit.INSTANCE
+    }
+
+    def listTastyFiles(f: File): Seq[File] =
+      assertTrue(s"Tasty root dir does not exist: $f", f.isDirectory())
+      val (files, dirs) = f.listFiles().partition(_.isFile)
+      files.toIndexedSeq.filter(_.getName.endsWith(".tasty")) ++ dirs.flatMap(listTastyFiles)
+
+    val tastyFiles = tastyDir.split(File.pathSeparatorChar).toList.flatMap(p => listTastyFiles(new File(p))).map(_.toString)
+
+    val config = new DottyDokkaConfig(DocConfiguration.Standalone(args, tastyFiles, Nil))
+    DokkaTestGenerator(
+      config,
+      new TestLogger(DokkaConsoleLogger.INSTANCE),
+      tests.build(),
+      Nil.asJava
+    ).generate()
+
+    signatures
+
+  def signaturesFromDocumentation(tastyDir: String): Seq[String] =
+    def flattenToText(node: ContentNode) : Seq[String] = node match
+      case t: ContentText => Seq(t.getText)
+      case c: ContentComposite =>
+        c.getChildren.asScala.flatMap(flattenToText).toSeq
+      case l: DocumentableElement =>
+        (l.annotations ++ Seq(" ") ++ l.modifiers ++ Seq(l.name) ++ l.signature).map {
+          case s: String => s
+          case (s: String, _) => s
+          case Link(s: String, _) => s
+        }
+      case _ => Seq()
+
+    def all(p: ContentNode => Boolean)(n: ContentNode): Seq[ContentNode] =
+      if p(n) then Seq(n) else n.getChildren.asScala.toSeq.flatMap(all(p))
+
+
+    val pages = listPages(tastyDir)
+    val nodes = pages.flatMap(p => all(_.isInstanceOf[DocumentableElement])(p.getContent))
+    nodes.map(flattenToText(_).mkString.trim)
+
+  def signaturesFromSource(s: Source): SignaturesFromSource =
+    val ExpectedRegex = ".+//expected: (.+)".r
+    val UnexpectedRegex = "(.+)//unexpected".r
+
+    // e.g.
to remove '(0)' from object IAmACaseObject extends CaseImplementThis/*<-*/(0)/*->*/ + val CommentRegexp = """\/\*<-\*\/[^\/]+\/\*->\*\/""" + + extension (s: String) def doesntStartWithAnyOfThese(c: Char*) = c.forall(char => !s.startsWith(char.toString)) + val lines = s.getLines().map(_.trim).toList + .filter(_.doesntStartWithAnyOfThese('=',':','{','}')) + .filterNot(_.trim.isEmpty) + .filterNot(_.startsWith("//")) + + val expectedSignatures = lines.flatMap { + case UnexpectedRegex(_) => None + case ExpectedRegex(signature) => Some(signature) + case other => + Some(other.replaceAll(CommentRegexp, "").replaceAll(" +", " ")) + } + + val unexpectedSignatures = lines.collect { + case UnexpectedRegex(signature) => signature.trim + } + + SignaturesFromSource(expectedSignatures, unexpectedSignatures) + + val _collector = new ErrorCollector(); + @Rule + def collector = _collector + def reportError(msg: String) = collector.addError(new AssertionError(msg)) + + +case class SignaturesFromSource(expected: Seq[String], unexpected: Seq[String]) diff --git a/scala3doc/test/dotty/dokka/MultipleFileTest.scala b/scala3doc/test/dotty/dokka/MultipleFileTest.scala new file mode 100644 index 000000000000..d4f93ea83cf7 --- /dev/null +++ b/scala3doc/test/dotty/dokka/MultipleFileTest.scala @@ -0,0 +1,92 @@ +package dotty.dokka + +import org.junit.{Test, Rule} +import org.junit.Assert._ +import org.junit.rules.ErrorCollector +import org.jetbrains.dokka.testApi.testRunner.AbstractCoreTest$TestBuilder +import scala.io.Source +import org.jetbrains.dokka.pages._ +import org.jetbrains.dokka.pages.ContentNodesKt +import org.jetbrains.dokka._ +import scala.jdk.CollectionConverters._ +import scala.math.max + +object MultipleFileTest{ + val classlikeKinds = Seq("class", "object", "trait") // TODO add docs for packages + val members = Seq("type", "def", "val", "var") + val all = classlikeKinds ++ members +} + +abstract class MultipleFileTest(val sourceFiles: List[String], val tastyFolders: List[String], signatureKinds: Seq[String], ignoreUndocumentedSignatures: Boolean = false +) extends DottyAbstractCoreTest: + private val _collector = new ErrorCollector(); + + // This should work correctly except for names in backticks and operator names containing a colon + def extractSymbolName(signature: String) = + val Pattern = s"""(?s).*(?:${signatureKinds.mkString("|")}) ([^\\[(: \\n\\t]+).*""".r + signature match { + case Pattern(name) => name + case x => "NULL" + } + + def matchSignature(s: String, signatureList: List[String]): Seq[String] = + val symbolName = extractSymbolName(s) + val candidates = signatureList.filter(extractSymbolName(_) == symbolName) + + candidates.filter(_ == s) match { + case Nil => + val candidateMsg = + if candidates.isEmpty then s"No candidate found for symbol name $symbolName" + else s"Candidates:\n${candidates.mkString("\n")}\n" + + //reportError(s"No match for:\n$s\n$candidateMsg") All test would fail because of documented inherited methods + //println(s"No match for:\n$s\n$candidateMsg") + Nil + case matching => + matching + } + + @Test + def testSignatures(): Unit = + def cleanup(s: String) = s.replace("\n", " ").replaceAll(" +", " ") + + val allFromSource = sourceFiles.map{ file => + val all = signaturesFromSource(Source.fromFile(s"${BuildInfo.test_testcasesSourceRoot}/tests/$file.scala")) + (all.expected, all.unexpected) + } + + val expectedFromSource = allFromSource.map(_._1).flatten.filter(extractSymbolName(_) != "NULL").map(cleanup) + val unexpectedFromSource = 
allFromSource.map(_._2).flatten.filter(extractSymbolName(_) != "NULL").map(cleanup) + val unexpectedSignatureSymbolNames = unexpectedFromSource.map(extractSymbolName) + + val allFromDocumentation = tastyFolders.flatMap(folder => signaturesFromDocumentation(s"${BuildInfo.test_testcasesOutputDir}/tests/$folder")) + val fromDocumentation = allFromDocumentation.filter(extractSymbolName(_) != "NULL").map(cleanup) + + val documentedSignatures = fromDocumentation.flatMap(matchSignature(_, expectedFromSource)).toSet + val missingSignatures = expectedFromSource.filterNot(documentedSignatures.contains) + + val unexpectedSignatures = + fromDocumentation.filter(s => unexpectedSignatureSymbolNames.contains(extractSymbolName(s))).toSet + + val reportMissingSignatures = !ignoreUndocumentedSignatures && missingSignatures.nonEmpty + val reportUnexpectedSignatures = unexpectedSignatures.nonEmpty + + if reportMissingSignatures || reportUnexpectedSignatures then + val missingSignaturesMessage = Option.when(reportMissingSignatures) + (s"Not documented signatures:\n${missingSignatures.mkString("\n")}") + + val unexpectedSignaturesMessage = Option.when(reportUnexpectedSignatures) + (s"Unexpectedly documented signatures:\n${unexpectedSignatures.mkString("\n")}") + + val allSignaturesMessage = + s""" + |All documented signatures: + |${documentedSignatures.mkString("\n")} + | + |All expected signatures from source: + |${expectedFromSource.mkString("\n")} + """.stripMargin + + val errorMessages = missingSignaturesMessage ++ unexpectedSignaturesMessage ++ Some(allSignaturesMessage) + + reportError(errorMessages.mkString("\n", "\n\n", "\n")) diff --git a/scala3doc/test/dotty/dokka/SignatureTests.scala b/scala3doc/test/dotty/dokka/SignatureTests.scala new file mode 100644 index 000000000000..48a556b64387 --- /dev/null +++ b/scala3doc/test/dotty/dokka/SignatureTests.scala @@ -0,0 +1,52 @@ +package dotty.dokka + +class GenericSignatures extends SingleFileTest("genericSignatures", Seq("class")) + +class ObjectSignatures extends SingleFileTest("objectSignatures", Seq("object")) + +class TraitSignatures extends SingleFileTest("traitSignatures", Seq("trait")) + + +// We do not support companion objects properly in tests +class ClassSignatureTestSourceTest extends SingleFileTest("classSignatureTestSource", + SingleFileTest.all.filterNot(Seq("val", "var", "object").contains)) + +// TODO we still cannot filter out all constructor-based fields +class SignatureTestSourceTest extends SingleFileTest("signatureTestSource", SingleFileTest.all) + +class ModifiersSignatureTest extends SingleFileTest("modifiersSignatureTestSource", SingleFileTest.all) + +class Visibility extends SingleFileTest("visibility", SingleFileTest.all) + + +class GenericMethodsTest extends SingleFileTest("genericMethods", Seq("def")) + +class MethodsAndConstructors extends SingleFileTest("methodsAndConstructors", Seq("def")) + +class TypesSignatures extends SingleFileTest("typesSignatures", SingleFileTest.all) + +class FieldsSignatures extends SingleFileTest("fieldsSignatures", SingleFileTest.all.filter(_ != "object")) + +class NestedSignatures extends SingleFileTest("nested", SingleFileTest.all) + +class CompanionObjectSignatures extends SingleFileTest("companionObjectSignatures", SingleFileTest.all) + +class PackageSymbolSignatures extends SingleFileTest("packageSymbolSignatures", SingleFileTest.all) + +class PackageObjectSymbolSignatures extends SingleFileTest("packageObjectSymbolSignatures", SingleFileTest.all.filter(_ != "object")) + +class 
MergedPackageSignatures extends MultipleFileTest(List("mergedPackage1", "mergedPackage2", "mergedPackage3"), List("mergedPackage"), SingleFileTest.all.filter(_ != "object")) + +class ExtensionMethodSignature extends SingleFileTest("extensionMethodSignatures", SingleFileTest.all) + +class ClassModifiers extends SingleFileTest("classModifiers", SingleFileTest.classlikeKinds) + +// class EnumSignatures extends SingleFileTest("enumSignatures", SingleFileTest.all) + +class StructuralTypes extends SingleFileTest("structuralTypes", SingleFileTest.members) + +class OpaqueTypes extends SingleFileTest("opaqueTypes", SingleFileTest.all) + +// class GivenSignatures extends SingleFileTest("givenSignatures", SingleFileTest.all) + +class Annotations extends SingleFileTest("annotations", SingleFileTest.all) diff --git a/scala3doc/test/dotty/dokka/SingleFileTest.scala b/scala3doc/test/dotty/dokka/SingleFileTest.scala new file mode 100644 index 000000000000..83864eb06c72 --- /dev/null +++ b/scala3doc/test/dotty/dokka/SingleFileTest.scala @@ -0,0 +1,30 @@ +package dotty.dokka + +import org.junit.{Test, Rule} +import org.junit.Assert._ +import org.junit.rules.ErrorCollector +import org.jetbrains.dokka.testApi.testRunner.AbstractCoreTest$TestBuilder +import scala.io.Source +import org.jetbrains.dokka.pages._ +import org.jetbrains.dokka.pages.ContentNodesKt +import org.jetbrains.dokka._ +import scala.jdk.CollectionConverters._ +import scala.math.max + +object SingleFileTest { + val classlikeKinds = Seq("class", "object", "trait", "enum") // TODO add docs for packages + val members = Seq("type", "def", "val", "var") + val all = classlikeKinds ++ members +} + +abstract class SingleFileTest( + val fileName: String, + signatureKinds: Seq[String], + ignoreUndocumentedSignatures: Boolean = false +) extends MultipleFileTest( + List(fileName), + List(fileName), + signatureKinds, + ignoreUndocumentedSignatures +) + diff --git a/scala3doc/test/dotty/dokka/tasty/comments/CommentParserTest.scala b/scala3doc/test/dotty/dokka/tasty/comments/CommentParserTest.scala new file mode 100644 index 000000000000..1612c2e0ca5f --- /dev/null +++ b/scala3doc/test/dotty/dokka/tasty/comments/CommentParserTest.scala @@ -0,0 +1,380 @@ +package dotty.dokka.tasty.comments + +import scala.jdk.CollectionConverters._ +import scala.collection.mutable +import scala.collection.mutable.ListBuffer + +import org.jetbrains.dokka.model.{doc => dkkd} + +import org.junit.Test + +class CommentParserTest { + import CommentParserTest._ + + @Test def testMdBlockCode(): Unit = { + val mdp = MarkdownCommentParser(null) + val str = """```scala + |is.an("actual code block") + |with.multiple("lines") + |```""".stripMargin + + val res = mdp.markupToDokka(mdp.stringToMarkup(str)) + assertSame(res, + dkk.p()(dkk.codeBlock()( + dkk.text("""is.an("actual code block") + |with.multiple("lines") + |""".stripMargin) + )), + ) + } + + @Test def testMdIndentedCode(): Unit = { + val mdp = MarkdownCommentParser(null) + val str = """ is.an("actual code block") + | with.multiple("lines")""".stripMargin + + val res = mdp.markupToDokka(mdp.stringToMarkup(str)) + assertSame(res, + dkk.p()(dkk.codeBlock()( + dkk.text("""is.an("actual code block") + |with.multiple("lines")""".stripMargin) + ))) + } + + @Test def testMdAutolinks(): Unit = { + val mdp = MarkdownCommentParser(null) + val link = "http://www.google.com" + val str = s"This is an autolink: $link" + val res = mdp.markupToDokka(mdp.stringToMarkup(str)) + + assertSame(res, + { import dkk._ + p(p( + text("This is an autolink: 
"), + a("href" -> link)(text(link)), + )) + }) + } + + @Test def testMdWrappedAutolinks(): Unit = { + val mdp = MarkdownCommentParser(null) + val link = "http://www.google.com" + val str = s"This is an autolink: <$link>" + val res = mdp.markupToDokka(mdp.stringToMarkup(str)) + + assertSame(res, + { import dkk._ + p(p( + text("This is an autolink: "), + a("href" -> link)(text(link)), + )) + }) + } + + @Test def testMdList(): Unit = { + val mdp = MarkdownCommentParser(null) + val str = + """* a + | - a.a + | - a.b + | - a.c + |* b + | 1. b.1 + | 1. b.2 + | 1. b.3 + | * b.3.a + | * b.3.b + | * b.3.c""".stripMargin + + val res = mdp.markupToDokka(mdp.stringToMarkup(str)) + assertSame(res, + { import dkk._ + p( + ul( + li( + p(text("a")), + ul( + li(p(text("a.a"))), + li(p(text("a.b"))), + li(p(text("a.c"))), + )), + li( + p(text("b")), + ol( + li(p(text("b.1"))), + li(p(text("b.2"))), + li( + p(text("b.3")), + ul( + li(p(text("b.3.a"))), + li(p(text("b.3.b"))), + li(p(text("b.3.c"))), + )))))) + }, + ) + } + + @Test def testWikiList(): Unit = { + val mdp = WikiCommentParser(null) + val str = + """ - a + | - a.a + | - a.b + | - a.c + | - b + | 1. b.1 + | 1. b.2 + | 1. b.3 + | a. b.3.a + | a. b.3.b + | a. b.3.c""".stripMargin + + val res = mdp.markupToDokka(mdp.stringToMarkup(str)) + assertSame(res, + { import dkk._ + p( + ul( + li( + p(text("a")), + ul( + li(p(text("a.a"))), + li(p(text("a.b"))), + li(p(text("a.c"))), + )), + li( + p(text("b")), + ol( + li(p(text("b.1"))), + li(p(text("b.2"))), + li( + p(text("b.3")), + ol( + li(p(text("b.3.a"))), + li(p(text("b.3.b"))), + li(p(text("b.3.c"))), + )))))) + }, + ) + } +} + +object CommentParserTest { + + enum TagComparison { + case OK(self: Class[_], children: List[TagComparison], params: List[ParamComparison]) + case Mismatch(expCls: Class[_], gotCls: Class[_], gotTag: dkkd.DocTag) + case TextOK(text: String, children: List[TagComparison], params: List[ParamComparison]) + case TextMismatch(expStr: String, gotStr: String) + case Missing(tag: dkkd.DocTag) + case Extra(tag: dkkd.DocTag) + } + + enum ParamComparison { + case OK(name: String, value: String) + case Mismatch(name: String, exp: String, got: String) + case Missing(name: String, value: String) + case Extra(name: String, value: String) + } + + class TagComparisonBuilder( + val parent: Option[TagComparisonBuilder] + ) { + private var failed: Boolean = false + private var abortedWith: Option[TagComparison] = None + private val childrenBld: ListBuffer[TagComparison] = ListBuffer.empty + private val paramsBld: ListBuffer[ParamComparison] = ListBuffer.empty + + def emit(cmp: ParamComparison): Unit = { + cmp match { + case _: ParamComparison.OK => ; + case _ => + fail() + } + paramsBld.append(cmp) + } + + def emit(cmp: TagComparison): Unit = { + cmp match { + case _: (TagComparison.OK | TagComparison.TextOK) => ; + case _ => fail() + } + childrenBld.append(cmp) + } + + def child: TagComparisonBuilder = + TagComparisonBuilder(parent = Some(this)) + + def abort(res: TagComparison): Unit = { + failed = true + abortedWith = Some(res) + parent.foreach(_.fail()) + } + + def fail(): Unit = { + failed = true + parent.foreach(_.fail()) + } + + def hasFailed = failed + + def result(exp: dkkd.DocTag) = abortedWith.getOrElse(exp match { + case exp: dkkd.Text => + TagComparison.TextOK(exp.getBody, childrenBld.result, paramsBld.result) + case exp => + TagComparison.OK(exp.getClass, childrenBld.result, paramsBld.result) + }) + } + + def compareTags(exp: dkkd.DocTag, got: dkkd.DocTag): (TagComparison, Boolean) 
= { + val bld = TagComparisonBuilder(parent = None) + doCompareTags(bld)(exp, got) + (bld.result(exp), bld.hasFailed) + } + + def doCompareTags(bld: TagComparisonBuilder)(exp: dkkd.DocTag, got: dkkd.DocTag): Unit = + (exp, got) match { + case (_, _) if exp.getClass != got.getClass => + bld.abort(TagComparison.Mismatch(expCls = exp.getClass, gotCls = got.getClass, gotTag = got)) + case (exp: dkkd.Text, got: dkkd.Text) if exp.getBody != got.getBody => + bld.abort(TagComparison.TextMismatch(expStr = exp.getBody, gotStr = got.getBody)) + case _ => + val propmap = mutable.Map.empty[String, ParamComparison] + got.getParams.asScala.foreach { (k, v) => + propmap(k) = ParamComparison.Extra(k, v) + } + exp.getParams.asScala.foreach { (k, v) => + propmap.get(k) match { + case None => + propmap(k) = ParamComparison.Missing(k, v) + case Some(ParamComparison.Extra(_, gotV)) => + if gotV == v then + propmap(k) = ParamComparison.OK(k, v) + else + propmap(k) = ParamComparison.Mismatch(k, exp = v, got = gotV) + case other => + sys.error(s"unexpected param comparison: $other") + } + } + propmap.values.foreach(bld.emit) + val expIter = exp.getChildren.asScala.iterator + val gotIter = got.getChildren.asScala.iterator + while expIter.hasNext || gotIter.hasNext do + if !expIter.hasNext then + bld.emit(TagComparison.Extra(gotIter.next)) + else if !gotIter.hasNext then + bld.emit(TagComparison.Missing(expIter.next)) + else { + val exp = expIter.next + val got = gotIter.next + val childBld = bld.child + doCompareTags(childBld)(exp, got) + bld.emit(childBld.result(exp)) + } + } + + + def doRender(bld: StringBuilder, indent: Int)(cmp: TagComparison): Unit = { + def doIndent(ind: Int = indent): Unit = + bld ++= " " * ind + + def doLn(ln: String, ind: Int = indent): Unit = + doIndent(ind) + bld ++= ln + bld += '\n' + + def doText(text: String, ind: Int = indent): Unit = + var firstLine = true + text.linesIterator.foreach { ln => + if !firstLine then bld ++= "\n" else firstLine = false + doIndent(ind) + bld ++= ln + bld + } + + def renderTag(indent: Int)(tag: dkkd.DocTag): Unit = { + tag match { + case tag: dkkd.Text => + doLn("Text:", indent) + doText(tag.getBody, indent + 2) + case tag => + doIndent(indent) + bld ++= tag.getClass.getSimpleName + bld ++= "(\n" + var firstChild = true + tag.getChildren.asScala.foreach { c => + if !firstChild then bld += '\n' else firstChild = false + renderTag(indent + 2)(c) + } + bld ++= "\n" + doIndent(indent) + bld ++= ")" + } + } + + cmp match { + case TagComparison.TextOK(text, children, props) => + doLn("Text:") + doText(text, indent + 2) + case TagComparison.TextMismatch(expStr, gotStr) => + doLn("!!! MISMATCH: Text:") + doLn("Expected:", indent + 2) + doText(expStr, indent + 4) + bld += '\n' + doLn("Got:", indent + 2) + doText(gotStr, indent + 4) + case TagComparison.OK(cls, children, props) => + doIndent() + bld ++= cls.getSimpleName + bld ++= "(\n" + var firstChild = true + children.foreach { c => + if !firstChild then bld += '\n' else firstChild = false + doRender(bld, indent + 2)(c) + } + bld ++= "\n" + doIndent() + bld ++= ")" + case TagComparison.Mismatch(expCls, gotCls, gotTag) => + doLn(s"!!! MISMATCH: expected=${expCls.getSimpleName}, got=${gotCls.getSimpleName}, tag:") + renderTag(indent + 2)(gotTag) + case TagComparison.Extra(tag) => + doLn(s"!!! EXTRA:") + renderTag(indent + 2)(tag) + case TagComparison.Missing(tag) => + doLn(s"!!! 
MISSING:") + renderTag(indent + 2)(tag) + } + } + + def assertSame(got: dkkd.DocTag, exp: dkkd.DocTag, explode: Boolean = false) = { + val (comparison, failure) = compareTags(exp, got) + if explode || failure then { + val bld = new StringBuilder + bld += '\n' + doRender(bld, indent = 0)(comparison) + bld += '\n' + throw new java.lang.AssertionError(bld.toString) + } + } + + + object DkkDbg { + case class See(n: dkkd.DocTag, c: Seq[See]) { + def show(sb: StringBuilder, indent: Int): Unit = { + sb ++= " " * indent + sb ++= n.toString + sb ++= "\n" + c.foreach { s => s.show(sb, indent + 2) } + } + + override def toString = { + val sb = new StringBuilder + show(sb, 0) + sb.toString + } + } + + def see(n: dkkd.DocTag): See = + See(n, n.getChildren.asScala.map(see).toList) + } +} diff --git a/scala3doc/test/dotty/dokka/tasty/comments/MarkdownConverterTests.scala b/scala3doc/test/dotty/dokka/tasty/comments/MarkdownConverterTests.scala new file mode 100644 index 000000000000..64beb9c12ace --- /dev/null +++ b/scala3doc/test/dotty/dokka/tasty/comments/MarkdownConverterTests.scala @@ -0,0 +1,12 @@ +package dotty.dokka.tasty.comments + +import org.junit.{Test, Rule} +import org.junit.Assert.{assertSame, assertTrue, assertEquals} + +class MarkdownConverterTests { + @Test def test(): Unit = { + assertEquals(("a", "b c d"), MarkdownConverter.splitWikiLink("a b c d")) + assertEquals(("a", "b\\ c d"), MarkdownConverter.splitWikiLink("a b\\ c d")) + assertEquals(("a\\ b", "c d"), MarkdownConverter.splitWikiLink("a\\ b c d")) + } +} diff --git a/scala3doc/test/dotty/dokka/tasty/comments/MemberLookupTests.scala b/scala3doc/test/dotty/dokka/tasty/comments/MemberLookupTests.scala new file mode 100644 index 000000000000..ba77280f9616 --- /dev/null +++ b/scala3doc/test/dotty/dokka/tasty/comments/MemberLookupTests.scala @@ -0,0 +1,142 @@ +package dotty.dokka.tasty.comments + +import scala.tasty.Reflection + +import org.junit.{Test, Rule} +import org.junit.Assert.{assertSame, assertTrue} +import dotty.dokka.BuildInfo + +class LookupTestCases[R <: Reflection](val r: R) { + + def testAll(): Unit = { + testOwnerlessLookup() + testOwnedLookup() + testStrictMemberLookup() + } + + def testOwnerlessLookup(): Unit = { + val cases = List[(String, Sym)]( + "tests.A" -> cls("tests.A"), + "tests.A$" -> cls("tests.A$"), + "tests.Methods.simple" -> cls("tests.Methods").fun("simple"), + ) + + cases.foreach { case (query, Sym(sym)) => + val Some((lookedUp, _)) = MemberLookup.lookupOpt(parseQuery(query), None) + assertSame(query, sym, lookedUp) + } + } + + def testOwnedLookup(): Unit = { + val cases = List[((Sym, String), Sym)]( + cls("tests.A") -> "tests.Methods.simple" -> cls("tests.Methods").fun("simple"), + cls("tests.A") -> "tests#Methods#simple" -> cls("tests.Methods").fun("simple"), + + cls("tests.A") -> "method" -> cls("tests.A").fun("method"), + cls("tests.A") -> "#method" -> cls("tests.A").fun("method"), + cls("tests.A") -> "method*" -> cls("tests.A").fun("method"), + cls("tests.A") -> "method[T]*" -> cls("tests.A").fun("method"), + cls("tests.A") -> "method(str:String*" -> cls("tests.A").fun("method"), + + cls("tests.A") -> "tests.B" -> cls("tests.B"), + cls("tests.A") -> "tests.B$" -> cls("tests.B$"), + + cls("tests.A") -> "AA" -> cls("tests.A").tpe("AA"), + cls("tests.A") -> "#AA" -> cls("tests.A").tpe("AA"), + cls("tests.A") -> "AA!" 
-> cls("tests.A").tpe("AA"), + cls("tests.A") -> "AA$" -> cls("tests.A").fld("AA"), + + cls("tests.C") -> "CC" -> cls("tests.C").fld("CC"), + cls("tests.C") -> "CC$" -> cls("tests.C").fld("CC"), + cls("tests.C") -> "CC!" -> cls("tests.C").tpe("CC"), + + cls("tests.A").fun("method") -> "AA" -> cls("tests.A").tpe("AA"), + cls("tests.A").fun("method") -> "AA!" -> cls("tests.A").tpe("AA"), + cls("tests.A").fun("method") -> "AA$" -> cls("tests.A").fld("AA"), + + cls("tests.Methods").fun("simple") -> "generic" -> cls("tests.Methods").fun("generic"), + cls("tests.Methods").fun("simple") -> "#generic" -> cls("tests.Methods").fun("generic"), + + cls("tests.A").fun("method") -> "B" -> cls("tests.B"), + cls("tests.A").fun("method") -> "B$" -> cls("tests.B$"), + ) + + cases.foreach { case ((Sym(owner), query), Sym(target)) => + val Some((lookedUp, _)) = MemberLookup.lookup(parseQuery(query), owner) + assertSame(s"$owner / $query", target, lookedUp) + } + } + + def testStrictMemberLookup(): Unit = { + val owner = cls("tests.A").symbol + val query = "#A" + + assertTrue("strict member lookup should not look outside", MemberLookup.lookup(parseQuery(query), owner).isEmpty) + } + + given r.type = r + + def parseQuery(query: String): Query = { + val Right(parsed) = QueryParser(query).tryReadQuery() + parsed + } + + case class Sym(symbol: r.Symbol) { + def fld(name: String) = + def hackResolveModule(s: r.Symbol): r.Symbol = + if s.flags.is(r.Flags.Object) then s.moduleClass else s + Sym(hackResolveModule(symbol.field(name))) + def fun(name: String) = + val List(sym) = symbol.method(name) + Sym(sym) + def tpe(name: String) = Sym(symbol.typeMember(name)) + } + + def cls(fqn: String) = Sym(r.Symbol.classSymbol(fqn)) +} + +class MemberLookupTests { + + @Test + def test(): Unit = { + import scala.tasty.inspector.TastyInspector + class Inspector extends TastyInspector: + var alreadyRan: Boolean = false + + override def processCompilationUnit(using ctx: quoted.QuoteContext)(root: ctx.reflect.Tree): Unit = + if !alreadyRan then + this.test()(using ctx.reflect) + alreadyRan = true + + def test()(using r: Reflection): Unit = { + import dotty.dokka.tasty.comments.MemberLookup + + val cases = LookupTestCases[r.type](r) + + cases.testAll() + } + + Inspector().inspectTastyFiles(listOurClasses()) + } + + def listOurClasses(): List[String] = { + import java.io.File + import scala.collection.mutable.ListBuffer + + val classRoot = new File(BuildInfo.test_testcasesOutputDir) + + def go(bld: ListBuffer[String])(file: File): Unit = + file.listFiles.foreach { f => + if f.isFile() then + if f.toString.endsWith(".tasty") then bld.append(f.toString) + else go(bld)(f) + } + + if classRoot.isDirectory then + val bld = new ListBuffer[String] + go(bld)(classRoot) + bld.result + else + sys.error(s"Class root could not be found: $classRoot") + } +} diff --git a/scala3doc/test/dotty/dokka/tasty/comments/QueryParserTests.scala b/scala3doc/test/dotty/dokka/tasty/comments/QueryParserTests.scala new file mode 100644 index 000000000000..8793f50ecfd8 --- /dev/null +++ b/scala3doc/test/dotty/dokka/tasty/comments/QueryParserTests.scala @@ -0,0 +1,74 @@ +package dotty.dokka.tasty.comments + +import org.junit.{Test, Rule} +import org.junit.Assert.{assertSame, assertTrue, assertEquals} + +class QueryParserTests { + @Test def test() = { + import Query._ + + def l2q(shorthand: ((String | Qual), Char)*)(last: String): QuerySegment = { + if shorthand.isEmpty then Query.Id(last) else { + val head = shorthand.head + val tail = shorthand.tail + head match { 
+ case ((id: String), ch) => Query.QualifiedId(Query.Qual.Id(id), ch, l2q(tail : _*)(last)) + case ((qual: Qual), ch) => Query.QualifiedId(qual, ch, l2q(tail : _*)(last)) + } + } + } + + extension [A <: String | Qual](self: A) def dot = (self, '.') + extension [A <: String | Qual](self: A) def hash = (self, '#') + + testSuccess("#abc", StrictMemberId("abc")) + testSuccess("a.b.c#d", l2q("a".dot, "b".dot, "c".hash)("d")) + + testSuccess("`a.b c#d`", Id("a.b c#d")) + testSuccess("#`a.b c#d`", StrictMemberId("a.b c#d")) + + testSuccess("a.`b.c#d e`.g", l2q("a".dot, "b.c#d e".dot)("g")) + testSuccess("a.`b.c#d e`#g", l2q("a".dot, "b.c#d e".hash)("g")) + + testSuccess("this.foo", l2q(Qual.This.dot)("foo")) + testSuccess("package.foo", l2q(Qual.Package.dot)("foo")) + + testSuccess("`this`.foo", l2q("this".dot)("foo")) + testSuccess("`package`.foo", l2q("package".dot)("foo")) + + testSuccess("#foo(ignoredOverloadDefinition*", StrictMemberId("foo")) + testSuccess("#bar[ignoredOverloadDefinition*", StrictMemberId("bar")) + + testSuccess("\\#abc", Id("#abc")) + testSuccess("a\\.b", Id("a.b")) + testSuccess("a\\#b", Id("a#b")) + testSuccess("ab\\ ", Id("ab ")) + + testSuccess("#foo\\(ignoredOverloadDefinition*", StrictMemberId("foo(ignoredOverloadDefinition*")) + testSuccess("#bar\\[ignoredOverloadDefinition*", StrictMemberId("bar[ignoredOverloadDefinition*")) + + testFailAt("#", 1) + testFailAt("#`", 2) + testFailAt("``", 2) + testFailAt("`abc", 4) + + testFailAt("ab..cd", 3) + testFailAt("ab.#cd", 3) + testFailAt("ab#.cd", 3) + + testFailAt("\\`", 1) + testFailAt("ab\\`", 3) + } + + private def parse(input: String) = QueryParser(input).tryReadQuery() + + private def testSuccess(input: String, expected: Query) = { + val Right(got) = parse(input) + assertEquals(expected, got) + } + + private def testFailAt(input: String, char: Int) = { + val Left(err) = parse(input) + assertEquals(s"expected to fail at $char : $input", char, err.at) + } +}
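
Note on extending the signature test suite introduced above: the pattern is one testcase source file per SingleFileTest subclass, with the second argument selecting which member kinds are compared against the generated documentation. Below is a minimal sketch of registering coverage for a new testcase; the testcase name "localDefinitions" and the scala3doc-testcases source location are assumptions for illustration, not part of this patch.

    package dotty.dokka

    // Sketch only. Assumes a matching source file exists under the
    // scala3doc-testcases project (path assumed from the build definition),
    // e.g. a file named localDefinitions.scala declaring members in the
    // `tests` package, as the existing testcases do.
    // Here only `def` and `val` signatures are compared; pass
    // SingleFileTest.all to check every supported member kind, or set
    // ignoreUndocumentedSignatures = true to report only unexpectedly
    // documented signatures and skip missing ones.
    class LocalDefinitionsSignatures
      extends SingleFileTest("localDefinitions", Seq("def", "val"))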