diff --git a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala index 73520dffb925..557385744ac2 100644 --- a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala +++ b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala @@ -54,19 +54,8 @@ trait MacroRuntimes extends JavaReflectionRuntimes { /** Macro classloader that is used to resolve and run macro implementations. * Loads classes from from -cp (aka the library classpath). * Is also capable of detecting REPL and reusing its classloader. - * - * When -Xmacro-jit is enabled, we sometimes fallback to on-the-fly compilation of macro implementations, - * which compiles implementations into a virtual directory (very much like REPL does) and then conjures - * a classloader mapped to that virtual directory. */ - private lazy val defaultMacroClassloaderCache = { - def attemptClose(loader: ClassLoader): Unit = loader match { - case u: URLClassLoader => debuglog("Closing macro runtime classloader"); u.close() - case afcl: AbstractFileClassLoader => attemptClose(afcl.getParent) - case _ => ??? - } - perRunCaches.newGeneric(findMacroClassLoader, attemptClose _) - } + private lazy val defaultMacroClassloaderCache: () => ClassLoader = perRunCaches.newGeneric(findMacroClassLoader()) def defaultMacroClassloader: ClassLoader = defaultMacroClassloaderCache() /** Abstracts away resolution of macro runtimes. diff --git a/src/compiler/scala/tools/nsc/CloseableRegistry.scala b/src/compiler/scala/tools/nsc/CloseableRegistry.scala new file mode 100644 index 000000000000..9812a2136263 --- /dev/null +++ b/src/compiler/scala/tools/nsc/CloseableRegistry.scala @@ -0,0 +1,34 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc + +import scala.util.control.NonFatal + +/** Registry for resources to close when `Global` is closed */ +final class CloseableRegistry { + private[this] var closeables: List[java.io.Closeable] = Nil + final def registerClosable(c: java.io.Closeable): Unit = { + closeables ::= c + } + + def close(): Unit = { + for (c <- closeables) { + try { + c.close() + } catch { + case NonFatal(_) => + } + } + closeables = Nil + } +} diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala index 159021bdacaf..46386beb58e7 100644 --- a/src/compiler/scala/tools/nsc/CompilationUnits.scala +++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala @@ -21,7 +21,7 @@ trait CompilationUnits { global: Global => /** An object representing a missing compilation unit. */ object NoCompilationUnit extends CompilationUnit(NoSourceFile) { - override lazy val isJava = false + override val isJava = false override def exists = false override def toString() = "NoCompilationUnit" } @@ -153,7 +153,7 @@ trait CompilationUnits { global: Global => final def comment(pos: Position, msg: String): Unit = {} /** Is this about a .java source file? 
*/ - lazy val isJava = source.file.name.endsWith(".java") + val isJava = source.file.name.endsWith(".java") override def toString() = source.toString() } diff --git a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala index cb26b4d9d666..fcc829b2e64d 100644 --- a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala +++ b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala @@ -16,7 +16,14 @@ import java.net.URL import scala.tools.util.PathResolver class GenericRunnerSettings(error: String => Unit) extends Settings(error) { - lazy val classpathURLs: Seq[URL] = new PathResolver(this).resultAsURLs + lazy val classpathURLs: Seq[URL] = { + val registry = new CloseableRegistry + try { + new PathResolver(this, registry).resultAsURLs + } finally { + registry.close() + } + } val howtorun = ChoiceSetting( diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 93fd46d01887..47bd41e37b09 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -40,9 +40,11 @@ import scala.language.postfixOps import scala.tools.nsc.ast.{TreeGen => AstTreeGen} import scala.tools.nsc.classpath._ import scala.tools.nsc.profile.Profiler +import java.io.Closeable class Global(var currentSettings: Settings, reporter0: Reporter) extends SymbolTable + with Closeable with CompilationUnits with Plugins with PhaseAssembly @@ -400,12 +402,16 @@ class Global(var currentSettings: Settings, reporter0: Reporter) def apply(unit: CompilationUnit): Unit + // run only the phases needed + protected def shouldSkipThisPhaseForJava: Boolean = { + this.id > (if (createJavadoc) currentRun.typerPhase.id + else currentRun.namerPhase.id) + } + /** Is current phase cancelled on this unit? */ def cancelled(unit: CompilationUnit) = { - // run the typer only if in `createJavadoc` mode - val maxJavaPhase = if (createJavadoc) currentRun.typerPhase.id else currentRun.namerPhase.id if (Thread.interrupted()) reporter.cancelled = true - reporter.cancelled || unit.isJava && this.id > maxJavaPhase + reporter.cancelled || unit.isJava && shouldSkipThisPhaseForJava } private def beforeUnit(unit: CompilationUnit): Unit = { @@ -817,7 +823,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) /** Extend classpath of `platform` and rescan updated packages. */ def extendCompilerClassPath(urls: URL*): Unit = { - val urlClasspaths = urls.map(u => ClassPathFactory.newClassPath(AbstractFile.getURL(u), settings)) + val urlClasspaths = urls.map(u => ClassPathFactory.newClassPath(AbstractFile.getURL(u), settings, closeableRegistry)) val newClassPath = AggregateClassPath.createAggregate(platform.classPath +: urlClasspaths : _*) platform.currentClassPath = Some(newClassPath) invalidateClassPathEntries(urls.map(_.getPath): _*) @@ -879,7 +885,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) } entries(classPath) find matchesCanonical match { case Some(oldEntry) => - Some(oldEntry -> ClassPathFactory.newClassPath(dir, settings)) + Some(oldEntry -> ClassPathFactory.newClassPath(dir, settings, closeableRegistry)) case None => error(s"Error adding entry to classpath. 
During invalidation, no entry named $path in classpath $classPath") None @@ -1706,6 +1712,13 @@ class Global(var currentSettings: Settings, reporter0: Reporter) } def createJavadoc = false + + final val closeableRegistry: CloseableRegistry = new CloseableRegistry + + def close(): Unit = { + perRunCaches.clearAll() + closeableRegistry.close() + } } object Global { diff --git a/src/compiler/scala/tools/nsc/PickleExtractor.scala b/src/compiler/scala/tools/nsc/PickleExtractor.scala new file mode 100644 index 000000000000..23ae8f4338fb --- /dev/null +++ b/src/compiler/scala/tools/nsc/PickleExtractor.scala @@ -0,0 +1,133 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc + +import java.io.Closeable +import java.nio.file.attribute.BasicFileAttributes +import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor, _} + +import scala.collection.JavaConverters.{asScalaBufferConverter, bufferAsJavaListConverter, collectionAsScalaIterableConverter} +import scala.reflect.internal.pickling.ByteCodecs +import scala.reflect.io.RootPath +import scala.tools.asm.tree.ClassNode +import scala.tools.asm.{ClassReader, ClassWriter, Opcodes} + +object PickleExtractor { + + def main(args: Array[String]): Unit = { + args.toList match { + case input :: output :: Nil => + process(Paths.get(input), Paths.get(output)) + case _ => + } + } + def process(input: Path, output: Path): Unit = { + val inputPath = RootPath(input, writable = false) + val outputPath = RootPath(output, writable = true) + try { + val root = inputPath.root + Files.createDirectories(outputPath.root) + val visitor = new SimpleFileVisitor[Path] { + override def preVisitDirectory(dir: Path, attrs: BasicFileAttributes): FileVisitResult = { + if (dir != root) { + val outputDir = outputPath.root.resolve(root.relativize(dir).toString) + Files.createDirectories(outputDir) + } + FileVisitResult.CONTINUE + } + override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = { + if (file.getFileName.toString.endsWith(".class")) { + stripClassFile(Files.readAllBytes(file)) match { + case Class(out) => + Files.write(outputPath.root.resolve(root.relativize(file).toString), out) + case Pickle(out) => + Files.write(outputPath.root.resolve(root.relativize(file).toString.replaceAll(".class$", ".sig")), out) + case Skip => + } + } + FileVisitResult.CONTINUE + } + } + Files.walkFileTree(root, visitor) + } finally { + inputPath.close() + outputPath.close() + } + } + + def stripClassFile(classfile: Array[Byte]): OutputFile = { + val input = new ClassNode() + new ClassReader(classfile).accept(input, ClassReader.SKIP_DEBUG | ClassReader.SKIP_FRAMES | ClassReader.SKIP_CODE) + var output = new ClassNode() + output.name = input.name + output.access = input.access + output.version = input.version + + var foundScalaSig = false + + def isScalaAnnotation(desc: String) = (desc == "Lscala/reflect/ScalaSignature;" || desc == "Lscala/reflect/ScalaLongSignature;") && { + foundScalaSig = true + + true + } + + var pickleData: Array[Byte] = null + if (input.visibleAnnotations != null) { + input.visibleAnnotations.asScala.foreach { node => + if (node.desc == "Lscala/reflect/ScalaSignature;") { + val Array("bytes", data: String) = node.values.toArray() + val bytes = 
data.getBytes(java.nio.charset.StandardCharsets.UTF_8) + val len = ByteCodecs.decode(bytes) + pickleData = bytes.take(len) + } else if (node.desc == "Lscala/reflect/ScalaLongSignature;") { + val Array("bytes", data: java.util.Collection[String @unchecked]) = node.values.toArray() + val encoded = data.asScala.toArray flatMap (_.getBytes(java.nio.charset.StandardCharsets.UTF_8)) + val len = ByteCodecs.decode(encoded) + pickleData = encoded.take(len) + } + } + output.visibleAnnotations = input.visibleAnnotations.asScala.filter(node => isScalaAnnotation(node.desc) && { + true + }).asJava + } + var foundScalaAttr = false + if (input.attrs != null) { + output.attrs = input.attrs.asScala.filter(attr => (attr.`type` == "Scala" || attr.`type` == "ScalaSig") && { + foundScalaAttr = true; + true + }).asJava + } + val writer = new ClassWriter(Opcodes.ASM7_EXPERIMENTAL) + val isScalaRaw = foundScalaAttr && !foundScalaSig + if (isScalaRaw) Skip + else { + if (pickleData == null) { + output = input + output.accept(writer) + Class(writer.toByteArray) + } else { + output.accept(writer) + Pickle(pickleData) + } + } + } + + sealed abstract class OutputFile + + case object Skip extends OutputFile + + case class Class(content: Array[Byte]) extends OutputFile + + case class Pickle(content: Array[Byte]) extends OutputFile + +} diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala new file mode 100644 index 000000000000..a36f64cda7f4 --- /dev/null +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -0,0 +1,694 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc + +import java.io.File +import java.lang.Thread.UncaughtExceptionHandler +import java.nio.file.attribute.FileTime +import java.nio.file.{Files, Path, Paths} +import java.time.Instant +import java.util.Collections +import java.util.concurrent.atomic.AtomicInteger + +import javax.tools.ToolProvider + +import scala.collection.JavaConverters.asScalaIteratorConverter +import scala.collection.{immutable, mutable, parallel} +import scala.concurrent._ +import scala.concurrent.duration.Duration +import scala.reflect.internal.pickling.PickleBuffer +import scala.reflect.internal.util.FakePos +import scala.reflect.io.RootPath +import scala.tools.nsc.io.AbstractFile +import scala.tools.nsc.reporters.{ConsoleReporter, Reporter} +import scala.tools.nsc.util.ClassPath +import scala.util.{Failure, Success, Try} + +class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy, argFiles: Seq[Path], useJars: Boolean) { + private val pickleCacheConfigured = System.getProperty("scala.pipeline.picklecache") + private val pickleCache: Path = { + if (pickleCacheConfigured == null) Files.createTempDirectory("scala.picklecache") + else { + Paths.get(pickleCacheConfigured) + } + } + private def cachePath(file: Path): Path = { + val newExtension = if (useJars) ".jar" else "" + changeExtension(pickleCache.resolve("./" + file).normalize(), newExtension) + } + + private val strippedAndExportedClassPath = mutable.HashMap[Path, Path]() + + /** Forward errors to the (current) reporter. 
*/ + protected def scalacError(msg: String): Unit = { + reporter.error(FakePos("scalac"), msg + "\n scalac -help gives more information") + } + + private var reporter: Reporter = _ + + private object handler extends UncaughtExceptionHandler { + override def uncaughtException(t: Thread, e: Throwable): Unit = { + e.printStackTrace() + System.exit(-1) + } + } + + implicit val executor = ExecutionContext.fromExecutor(new java.util.concurrent.ForkJoinPool(parallelism), t => handler.uncaughtException(Thread.currentThread(), t)) + val fileManager = ToolProvider.getSystemJavaCompiler.getStandardFileManager(null, null, null) + def changeExtension(p: Path, newExtension: String): Path = { + val fileName = p.getFileName.toString + val changedFileName = fileName.lastIndexOf('.') match { + case -1 => fileName + newExtension + case n => fileName.substring(0, n) + newExtension + } + p.getParent.resolve(changedFileName) + } + + def registerPickleClassPath[G <: Global](output: Path, data: mutable.AnyRefMap[G#Symbol, PickleBuffer]): Unit = { + val jarPath = cachePath(output) + val root = RootPath(jarPath, writable = true) + Files.createDirectories(root.root) + + val dirs = mutable.Map[G#Symbol, Path]() + def packageDir(packSymbol: G#Symbol): Path = { + if (packSymbol.isEmptyPackageClass) root.root + else if (dirs.contains(packSymbol)) dirs(packSymbol) + else if (packSymbol.owner.isRoot) { + val subDir = root.root.resolve(packSymbol.encodedName) + Files.createDirectories(subDir) + dirs.put(packSymbol, subDir) + subDir + } else { + val base = packageDir(packSymbol.owner) + val subDir = base.resolve(packSymbol.encodedName) + Files.createDirectories(subDir) + dirs.put(packSymbol, subDir) + subDir + } + } + val written = new java.util.IdentityHashMap[AnyRef, Unit]() + try { + for ((symbol, pickle) <- data) { + if (!written.containsKey(pickle)) { + val base = packageDir(symbol.owner) + val primary = base.resolve(symbol.encodedName + ".sig") + Files.write(primary, pickle.bytes) + written.put(pickle, ()) + } + } + } finally { + root.close() + } + Files.setLastModifiedTime(jarPath, FileTime.from(Instant.now())) + strippedAndExportedClassPath.put(output.toRealPath().normalize(), jarPath) + } + + + def writeDotFile(dependsOn: mutable.LinkedHashMap[Task, List[Dependency]]): Unit = { + val builder = new java.lang.StringBuilder() + builder.append("digraph projects {\n") + for ((p, deps) <- dependsOn) { + //builder.append(" node \"[]").append(p.label).append("\";\n") + for (dep <- deps) { + builder.append(" \"").append(p.label).append("\" -> \"").append(dep.t.label).append("\" [") + if (dep.isMacro) builder.append("label=M") + else if (dep.isPlugin) builder.append("label=P") + builder.append("];\n") + } + } + builder.append("}\n") + val path = Paths.get("projects.dot") + Files.write(path, builder.toString.getBytes(java.nio.charset.StandardCharsets.UTF_8)) + println("Wrote project dependency graph to: " + path.toAbsolutePath) + } + + private case class Dependency(t: Task, isMacro: Boolean, isPlugin: Boolean) + + def process(): Boolean = { + println(s"parallelism = $parallelism, strategy = $strategy") + + reporter = new ConsoleReporter(new Settings(scalacError)) + + def commandFor(argFileArg: Path): Task = { + val ss = new Settings(scalacError) + val command = new CompilerCommand(("@" + argFileArg) :: Nil, ss) + Task(argFileArg, command, command.files) + } + + val projects: List[Task] = argFiles.toList.map(commandFor) + val numProjects = projects.size + val produces = mutable.LinkedHashMap[Path, Task]() + for (p <- projects) 
{ + produces(p.outputDir) = p + } + val dependsOn = mutable.LinkedHashMap[Task, List[Dependency]]() + for (p <- projects) { + val macroDeps = p.macroClassPath.flatMap(p => produces.get(p)).toList.filterNot(_ == p).map(t => Dependency(t, isMacro = true, isPlugin = false)) + val pluginDeps = p.pluginClassPath.flatMap(p => produces.get(p)).toList.filterNot(_ == p).map(t => Dependency(t, isMacro = false, isPlugin = true)) + val classPathDeps = p.classPath.flatMap(p => produces.get(p)).toList.filterNot(_ == p).filterNot(p => macroDeps.exists(_.t == p)).map(t => Dependency(t, isMacro = false, isPlugin = false)) + dependsOn(p) = classPathDeps ++ macroDeps ++ pluginDeps + } + val dependedOn: Set[Task] = dependsOn.valuesIterator.flatten.map(_.t).toSet + val externalClassPath = projects.iterator.flatMap(_.classPath).filter(p => !produces.contains(p) && Files.exists(p)).toSet + + if (strategy != Traditional) { + val exportTimer = new Timer + exportTimer.start() + for (entry <- externalClassPath) { + val extracted = cachePath(entry) + val sourceTimeStamp = Files.getLastModifiedTime(entry) + if (Files.exists(extracted) && Files.getLastModifiedTime(extracted) == sourceTimeStamp) { + // println(s"Skipped export of pickles from $entry to $extracted (up to date)") + } else { + PickleExtractor.process(entry, extracted) + Files.setLastModifiedTime(extracted, sourceTimeStamp) + println(s"Exported pickles from $entry to $extracted") + Files.setLastModifiedTime(extracted, sourceTimeStamp) + } + strippedAndExportedClassPath(entry) = extracted + } + exportTimer.stop() + println(f"Exported external classpath in ${exportTimer.durationMs}%.0f ms") + } + + writeDotFile(dependsOn) + + val timer = new Timer + timer.start() + + def awaitAll(fs: Seq[Future[_]]): Future[_] = { + val done = Promise[Any]() + val allFutures = projects.flatMap(_.futures) + val count = allFutures.size + val counter = new AtomicInteger(count) + val handler = (a: Try[_]) => a match { + case f @ Failure(_) => + done.complete(f) + case Success(_) => + val remaining = counter.decrementAndGet() + if (remaining == 0) done.success(()) + } + + allFutures.foreach(_.onComplete(handler)) + done.future + } + + def awaitDone(): Unit = { + val allFutures: immutable.Seq[Future[_]] = projects.flatMap(_.futures) + val numAllFutures = allFutures.size + val awaitAllFutures: Future[_] = awaitAll(allFutures) + val numTasks = awaitAllFutures + var lastNumCompleted = allFutures.count(_.isCompleted) + while (true) try { + Await.result(awaitAllFutures, Duration(60, "s")) + timer.stop() + val numCompleted = allFutures.count(_.isCompleted) + println(s"PROGRESS: $numCompleted / $numAllFutures") + return + } catch { + case _: TimeoutException => + val numCompleted = allFutures.count(_.isCompleted) + if (numCompleted == lastNumCompleted) { + println(s"STALLED: $numCompleted / $numAllFutures") + println("Outline/Scala/Javac") + projects.map { + p => + def toX(b: Future[_]): String = b.value match { case None => "-"; case Some(Success(_)) => "x"; case Some(Failure(_)) => "!" 
} + val s = List(p.outlineDoneFuture, p.groupsDoneFuture, p.javaDoneFuture).map(toX).mkString(" ") + println(s + " " + p.label) + } + } else { + println(s"PROGRESS: $numCompleted / $numAllFutures") + } + } + } + strategy match { + case OutlineTypePipeline => + projects.foreach { p => + val isLeaf = !dependedOn.contains(p) + val depsReady = Future.sequence(dependsOn.getOrElse(p, Nil).map { task => p.dependencyReadyFuture(task) }) + val f = if (isLeaf) { + for { + _ <- depsReady + _ <- { + p.outlineDone.complete(Success(())) + p.fullCompile() + Future.sequence(p.groups.map(_.done.future)) + } + } yield { + p.javaCompile() + } + } else { + for { + _ <- depsReady + _ <- { + p.outlineCompile() + p.outlineDone.future + } + _ <- { + p.fullCompile() + Future.sequence(p.groups.map(_.done.future)) + } + } yield { + p.javaCompile() + } + } + f.onComplete { _ => p.compiler.close() } + } + + awaitDone() + + for (p <- projects) { + val dependencies = dependsOn(p).map(_.t) + + def maxByOrZero[A](as: List[A])(f: A => Double): Double = if (as.isEmpty) 0d else as.map(f).max + + val maxOutlineCriticalPathMs = maxByOrZero(dependencies)(_.outlineCriticalPathMs) + p.outlineCriticalPathMs = maxOutlineCriticalPathMs + p.outlineTimer.durationMs + p.regularCriticalPathMs = maxOutlineCriticalPathMs + maxByOrZero(p.groups)(_.timer.durationMs) + p.fullCriticalPathMs = maxByOrZero(dependencies)(_.fullCriticalPathMs) + p.groups.map(_.timer.durationMs).sum + } + + if (parallelism == 1) { + val criticalPath = projects.maxBy(_.regularCriticalPathMs) + println(f"Critical path: ${criticalPath.regularCriticalPathMs}%.0f ms. Wall Clock: ${timer.durationMs}%.0f ms") + } else + println(f" Wall Clock: ${timer.durationMs}%.0f ms") + case Pipeline => + projects.foreach { p => + val depsReady = Future.sequence(dependsOn.getOrElse(p, Nil).map(task => p.dependencyReadyFuture(task))) + val f = for { + _ <- depsReady + _ <- { + val isLeaf = !dependedOn.contains(p) + if (isLeaf) { + p.outlineDone.complete(Success(())) + p.fullCompile() + } else + p.fullCompileExportPickles() + // Start javac after scalac has completely finished + Future.sequence(p.groups.map(_.done.future)) + } + } yield { + p.javaCompile() + } + f.onComplete { _ => p.compiler.close() } + } + awaitDone() + + for (p <- projects) { + val dependencies = dependsOn(p).map(_.t) + + def maxByOrZero[A](as: List[A])(f: A => Double): Double = if (as.isEmpty) 0d else as.map(f).max + + val maxOutlineCriticalPathMs = maxByOrZero(dependencies)(_.outlineCriticalPathMs) + p.outlineCriticalPathMs = maxOutlineCriticalPathMs + p.outlineTimer.durationMs + p.regularCriticalPathMs = maxOutlineCriticalPathMs + maxByOrZero(p.groups)(_.timer.durationMs) + p.fullCriticalPathMs = maxByOrZero(dependencies)(_.fullCriticalPathMs) + p.groups.map(_.timer.durationMs).sum + } + + if (parallelism == 1) { + val criticalPath = projects.maxBy(_.regularCriticalPathMs) + println(f"Critical path: ${criticalPath.regularCriticalPathMs}%.0f ms. 
Wall Clock: ${timer.durationMs}%.0f ms") + } else + println(f" Wall Clock: ${timer.durationMs}%.0f ms") + case Traditional => + projects.foreach { p => + val f1 = Future.sequence(dependsOn.getOrElse(p, Nil).map(_.t.javaDone.future)) + val f2 = f1.flatMap { _ => + p.outlineDone.complete(Success(())) + p.fullCompile() + Future.sequence(p.groups.map(_.done.future)).map(_ => p.javaCompile()) + } + f2.onComplete { _ => p.compiler.close() } + } + awaitDone() + + for (p <- projects) { + val dependencies = dependsOn(p).map(_.t) + + def maxByOrZero[A](as: List[A])(f: A => Double): Double = if (as.isEmpty) 0d else as.map(f).max + + p.fullCriticalPathMs = maxByOrZero(dependencies)(_.fullCriticalPathMs) + p.groups.map(_.timer.durationMs).sum + } + if (parallelism == 1) { + val maxFullCriticalPath: Double = projects.map(_.fullCriticalPathMs).max + println(f"Critical path: $maxFullCriticalPath%.0f ms. Wall Clock: ${timer.durationMs}%.0f ms") + } else { + println(f"Wall Clock: ${timer.durationMs}%.0f ms") + } + } + + writeChromeTrace(projects) + deleteTempPickleCache() + true + } + + private def deleteTempPickleCache(): Unit = { + if (pickleCacheConfigured == null) { + AbstractFile.getDirectory(pickleCache.toFile).delete() + } + } + + private def writeChromeTrace(projects: List[Task]) = { + val trace = new java.lang.StringBuilder() + trace.append("""{"traceEvents": [""") + val sb = new mutable.StringBuilder(trace) + + def durationEvent(name: String, cat: String, t: Timer): String = { + s"""{"name": "$name", "cat": "$cat", "ph": "X", "ts": ${(t.startMicros).toLong}, "dur": ${(t.durationMicros).toLong}, "pid": 0, "tid": ${t.thread.getId}}""" + } + + def projectEvents(p: Task): List[String] = { + val events = List.newBuilder[String] + if (p.outlineTimer.durationMicros > 0d) { + val desc = if (strategy == OutlineTypePipeline) "outline-type" else "parser-to-pickler" + events += durationEvent(p.label, desc, p.outlineTimer) + events += durationEvent(p.label, "pickle-export", p.pickleExportTimer) + } + for ((g, ix) <- p.groups.zipWithIndex) { + if (g.timer.durationMicros > 0d) + events += durationEvent(p.label, "compile-" + ix, g.timer) + } + if (p.javaTimer.durationMicros > 0d) { + val desc = "javac" + events += durationEvent(p.label, desc, p.javaTimer) + } + events.result() + } + + projects.iterator.flatMap(projectEvents).addString(sb, ",\n") + trace.append("]}") + val traceFile = Paths.get(s"build-${label}.trace") + Files.write(traceFile, trace.toString.getBytes()) + println("Chrome trace written to " + traceFile.toAbsolutePath) + } + + case class Group(files: List[String]) { + val timer = new Timer + val done = Promise[Unit]() + } + + private case class Task(argsFile: Path, command: CompilerCommand, files: List[String]) { + val label = argsFile.toString.replaceAll("target/", "").replaceAll("""(.*)/(.*).args""", "$1:$2") + override def toString: String = argsFile.toString + def outputDir: Path = command.settings.outputDirs.getSingleOutput.get.file.toPath.toAbsolutePath.normalize() + private def expand(s: command.settings.PathSetting): List[Path] = { + ClassPath.expandPath(s.value, expandStar = true).map(s => Paths.get(s).toAbsolutePath.normalize()) + } + lazy val classPath: Seq[Path] = expand(command.settings.classpath) + lazy val macroClassPath: Seq[Path] = expand(command.settings.YmacroClasspath) + lazy val macroClassPathSet: Set[Path] = macroClassPath.toSet + lazy val pluginClassPath: Set[Path] = { + def asPath(p: String) = ClassPath split p + + val paths = command.settings.plugin.value filter (_ != "") 
flatMap (s => asPath(s) map (s => Paths.get(s))) + paths.toSet + } + def dependencyReadyFuture(dependency: Dependency) = if (dependency.isMacro) { + log(s"dependency is on macro classpath, will wait for .class files: ${dependency.t.label}") + dependency.t.javaDone.future + } else if (dependency.isPlugin) { + log(s"dependency is on plugin classpath, will wait for .class files: ${dependency.t.label}") + dependency.t.javaDone.future + } else + dependency.t.outlineDone.future + + + val cacheMacro = java.lang.Boolean.getBoolean("scala.pipeline.cache.macro.classloader") + val cachePlugin = java.lang.Boolean.getBoolean("scala.pipeline.cache.plugin.classloader") + if (cacheMacro) + command.settings.YcacheMacroClassLoader.value = "always" + if (cachePlugin) + command.settings.YcachePluginClassLoader.value = "always" + + if (strategy != Traditional) { + command.settings.YpickleJava.value = true + } + + val groups: List[Group] = { + val isScalaLibrary = files.exists(_.endsWith("Predef.scala")) + if (strategy != OutlineTypePipeline || isScalaLibrary) { + Group(files) :: Nil + } else { + command.settings.classpath.value = command.settings.outputDirs.getSingleOutput.get.toString + File.pathSeparator + command.settings.classpath.value + val length = files.length + val groups = (length.toDouble / 128).toInt.max(1) + files.grouped((length.toDouble / groups).ceil.toInt.max(1)).toList.map(Group(_)) + } + } + command.settings.outputDirs.getSingleOutput.get.file.mkdirs() + + val isGrouped = groups.size > 1 + + val outlineTimer = new Timer() + val pickleExportTimer = new Timer + val javaTimer = new Timer() + + var outlineCriticalPathMs = 0d + var regularCriticalPathMs = 0d + var fullCriticalPathMs = 0d + val outlineDone: Promise[Unit] = Promise[Unit]() + val outlineDoneFuture = outlineDone.future + val javaDone: Promise[Unit] = Promise[Unit]() + val javaDoneFuture: Future[_] = javaDone.future + val groupsDoneFuture: Future[List[Unit]] = Future.sequence(groups.map(_.done.future)) + val futures: List[Future[_]] = { + outlineDone.future :: javaDone.future :: groups.map(_.done.future) + } + + val originalClassPath: String = command.settings.classpath.value + + lazy val compiler: Global = try { + val result = newCompiler(command.settings) + val reporter = result.reporter + if (reporter.hasErrors) + reporter.flush() + else if (command.shouldStopWithInfo) + reporter.echo(command.getInfoMessage(result)) + result + } catch { + case t: Throwable => + t.printStackTrace() + throw t + } + + def outlineCompile(): Unit = { + outlineTimer.start() + try { + log("scalac outline: start") + command.settings.Youtline.value = true + command.settings.stopAfter.value = List("pickler") + command.settings.Ymacroexpand.value = command.settings.MacroExpand.None + val run1 = new compiler.Run() + run1 compile files + registerPickleClassPath(command.settings.outputDirs.getSingleOutput.get.file.toPath, run1.symData) + outlineTimer.stop() + reporter.finish() + if (reporter.hasErrors) { + log("scalac outline: failed") + outlineDone.complete(Failure(new RuntimeException(label + ": compile failed: "))) + } else { + log(f"scala outline: done ${outlineTimer.durationMs}%.0f ms") + outlineDone.complete(Success(())) + } + } catch { + case t: Throwable => + t.printStackTrace() + outlineDone.complete(Failure(new RuntimeException(label + ": compile failed: "))) + } + } + + def fullCompile(): Unit = { + command.settings.Youtline.value = false + command.settings.stopAfter.value = Nil + command.settings.Ymacroexpand.value = 
command.settings.MacroExpand.Normal + + val groupCount = groups.size + for ((group, ix) <- groups.zipWithIndex) { + group.done.completeWith { + Future { + log(s"scalac (${ix + 1}/$groupCount): start") + group.timer.start() + val compiler2 = newCompiler(command.settings) + try { + val run2 = new compiler2.Run() + run2 compile group.files + compiler2.reporter.finish() + if (compiler2.reporter.hasErrors) { + group.done.complete(Failure(new RuntimeException(label + ": compile failed: "))) + } else { + group.done.complete(Success(())) + } + } finally { + compiler2.close() + group.timer.stop() + } + log(f"scalac (${ix + 1}/$groupCount): done ${group.timer.durationMs}%.0f ms") + } + } + } + } + + def fullCompileExportPickles(): Unit = { + assert(groups.size == 1) + val group = groups.head + log("scalac: start") + outlineTimer.start() + try { + val run2 = new compiler.Run() { + + override def advancePhase(): Unit = { + if (compiler.phase == this.picklerPhase) { + outlineTimer.stop() + log(f"scalac outline: done ${outlineTimer.durationMs}%.0f ms") + pickleExportTimer.start() + registerPickleClassPath(command.settings.outputDirs.getSingleOutput.get.file.toPath, symData) + pickleExportTimer.stop() + log(f"scalac: exported pickles ${pickleExportTimer.durationMs}%.0f ms") + outlineDone.complete(Success(())) + group.timer.start() + } + super.advancePhase() + } + } + + run2 compile group.files + compiler.reporter.finish() + group.timer.stop() + if (compiler.reporter.hasErrors) { + log("scalac: failed") + if (!outlineDone.isCompleted) + outlineDone.complete(Failure(new RuntimeException(label + ": compile failed: "))) + group.done.complete(Failure(new RuntimeException(label + ": compile failed: "))) + } else { + log(f"scalac: done ${group.timer.durationMs}%.0f ms") + // outlineDone.complete(Success(())) + group.done.complete(Success(())) + } + } catch { + case t: Throwable => + t.printStackTrace() + if (!outlineDone.isCompleted) + outlineDone.complete(Failure(new RuntimeException(label + ": compile failed: "))) + if (!group.done.isCompleted) + group.done.complete(Failure(new RuntimeException(label + ": compile failed: "))) + } + } + + def javaCompile(): Unit = { + val javaSources = files.filter(_.endsWith(".java")) + if (javaSources.nonEmpty) { + log("javac: start") + javaTimer.start() + javaDone.completeWith(Future { + val opts = java.util.Arrays.asList("-d", command.settings.outdir.value, "-cp", command.settings.outdir.value + File.pathSeparator + originalClassPath) + val compileTask = ToolProvider.getSystemJavaCompiler.getTask(null, null, null, opts, null, fileManager.getJavaFileObjects(javaSources.toArray: _*)) + compileTask.setProcessors(Collections.emptyList()) + compileTask.call() + javaTimer.stop() + log(f"javac: done ${javaTimer.durationMs}%.0f ms") + () + }) + } else { + javaDone.complete(Success(())) + } + } + def log(msg: String): Unit = println(this.label + ": " + msg) + } + + final class Timer() { + private var startNanos: Long = 0 + private var endNanos: Long = 0 + def start(): Unit = { + assert(startNanos == 0L) + startNanos = System.nanoTime + } + var thread: Thread = Thread.currentThread() + def stop(): Unit = { + thread = Thread.currentThread() + endNanos = System.nanoTime() + } + def startMs: Double = startNanos.toDouble / 1000 / 1000 + def durationMs: Double = { + val result = (endNanos - startNanos).toDouble / 1000 / 1000 + if (result < 0) + getClass + result + } + def startMicros: Double = startNanos.toDouble / 1000d + def durationMicros: Double = (endNanos - startNanos).toDouble / 
1000d + } + + protected def newCompiler(settings: Settings): Global = { + if (strategy != Traditional) { + val classPath = ClassPath.expandPath(settings.classpath.value, expandStar = true) + val modifiedClassPath = classPath.map { entry => + val entryPath = Paths.get(entry) + if (Files.exists(entryPath)) + strippedAndExportedClassPath.getOrElse(entryPath.toRealPath().normalize(), entryPath).toString + else + entryPath + } + settings.classpath.value = modifiedClassPath.mkString(java.io.File.pathSeparator) + } + Global(settings) + } +} + +sealed abstract class BuildStrategy + +/** Outline type check to compute type signatures as pickles as an input to downstream compilation. */ +case object OutlineTypePipeline extends BuildStrategy + +case object Pipeline extends BuildStrategy + +/** Emit class files before triggering downstream compilation */ +case object Traditional extends BuildStrategy + +object PipelineMain { + def main(args: Array[String]): Unit = { + val strategies = List(OutlineTypePipeline, Pipeline, Traditional) + val strategy = strategies.find(_.productPrefix.equalsIgnoreCase(System.getProperty("scala.pipeline.strategy", "pipeline"))).get + val parallelism = java.lang.Integer.getInteger("scala.pipeline.parallelism", parallel.availableProcessors) + val useJars = java.lang.Boolean.getBoolean("scala.pipeline.use.jar") + val argFiles: Seq[Path] = args match { + case Array(path) if Files.isDirectory(Paths.get(path)) => + Files.walk(Paths.get(path)).iterator().asScala.filter(_.getFileName.toString.endsWith(".args")).toList + case _ => + args.map(Paths.get(_)) + } + val main = new PipelineMainClass("1", parallelism, strategy, argFiles, useJars) + val result = main.process() + if (!result) + System.exit(1) + else + System.exit(0) + } +} + +//object PipelineMainTest { +// def main(args: Array[String]): Unit = { +// var i = 0 +// val argsFiles = Files.walk(Paths.get("/code/guardian-frontend")).iterator().asScala.filter(_.getFileName.toString.endsWith(".args")).toList +// for (_ <- 1 to 2; n <- List(parallel.availableProcessors); strat <- List(Pipeline)) { +// i += 1 +// val main = new PipelineMainClass(strat + "-" + i, n, strat, argsFiles, useJars = false) +// println(s"====== ITERATION $i=======") +// val result = main.process() +// if (!result) +// System.exit(1) +// } +// System.exit(0) +// } +//} diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala index 2d609dcb17a6..9cbdf1dcadab 100644 --- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala +++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala @@ -27,7 +27,7 @@ trait JavaPlatform extends Platform { private[nsc] var currentClassPath: Option[ClassPath] = None protected[nsc] def classPath: ClassPath = { - if (currentClassPath.isEmpty) currentClassPath = Some(new PathResolver(settings).result) + if (currentClassPath.isEmpty) currentClassPath = Some(new PathResolver(settings, global.closeableRegistry).result) currentClassPath.get } diff --git a/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala b/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala index fa9166483594..f2fb2b0224d7 100644 --- a/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala +++ b/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala @@ -14,7 +14,7 @@ package scala.tools.nsc.classpath import scala.reflect.io.{AbstractFile, VirtualDirectory} import scala.reflect.io.Path.string2path -import scala.tools.nsc.Settings +import 
scala.tools.nsc.{CloseableRegistry, Settings} import FileUtils.AbstractFileOps import scala.tools.nsc.util.ClassPath @@ -22,11 +22,11 @@ import scala.tools.nsc.util.ClassPath * Provides factory methods for classpath. When creating classpath instances for a given path, * it uses proper type of classpath depending on a types of particular files containing sources or classes. */ -class ClassPathFactory(settings: Settings) { +class ClassPathFactory(settings: Settings, closeableRegistry: CloseableRegistry) { /** * Create a new classpath based on the abstract file. */ - def newClassPath(file: AbstractFile): ClassPath = ClassPathFactory.newClassPath(file, settings) + def newClassPath(file: AbstractFile): ClassPath = ClassPathFactory.newClassPath(file, settings, closeableRegistry) /** * Creators for sub classpaths which preserve this context. @@ -70,7 +70,7 @@ class ClassPathFactory(settings: Settings) { private def createSourcePath(file: AbstractFile): ClassPath = if (file.isJarOrZip) - ZipAndJarSourcePathFactory.create(file, settings) + ZipAndJarSourcePathFactory.create(file, settings, closeableRegistry) else if (file.isDirectory) DirectorySourcePath(file.file) else @@ -78,11 +78,11 @@ class ClassPathFactory(settings: Settings) { } object ClassPathFactory { - def newClassPath(file: AbstractFile, settings: Settings): ClassPath = file match { + def newClassPath(file: AbstractFile, settings: Settings, closeableRegistry: CloseableRegistry): ClassPath = file match { case vd: VirtualDirectory => VirtualDirectoryClassPath(vd) case _ => if (file.isJarOrZip) - ZipAndJarClassPathFactory.create(file, settings) + ZipAndJarClassPathFactory.create(file, settings, closeableRegistry) else if (file.isDirectory) DirectoryClassPath(file.file) else diff --git a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala index 9f51672e79a6..c6ab18a1e484 100644 --- a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala @@ -12,7 +12,7 @@ package scala.tools.nsc.classpath -import java.io.File +import java.io.{Closeable, File} import java.net.{URI, URL} import java.nio.file.{FileSystems, Files, SimpleFileVisitor} import java.util.function.IntFunction @@ -25,6 +25,7 @@ import FileUtils._ import scala.collection.JavaConverters._ import scala.collection.immutable import scala.reflect.internal.JDK9Reflectors +import scala.tools.nsc.CloseableRegistry import scala.tools.nsc.classpath.PackageNameUtils.{packageContains, separatePkgAndClassNames} /** @@ -61,6 +62,7 @@ trait DirectoryLookup[FileEntryType <: ClassRepresentation] extends ClassPath { private[nsc] def packages(inPackage: String): Seq[PackageEntry] = { val dirForPackage = getDirectory(inPackage) + val nestedDirs: Array[F] = dirForPackage match { case None => emptyFiles case Some(directory) => listChildren(directory, Some(isPackage)) @@ -137,7 +139,7 @@ trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends Directo object JrtClassPath { import java.nio.file._, java.net.URI - def apply(release: Option[String]): Option[ClassPath] = { + def apply(release: Option[String], closeableRegistry: CloseableRegistry): Option[ClassPath] = { import scala.util.Properties._ if (!isJavaAtLeast("9")) None else { @@ -154,7 +156,11 @@ object JrtClassPath { try { val ctSym = Paths.get(javaHome).resolve("lib").resolve("ct.sym") if (Files.notExists(ctSym)) None - else Some(new CtSymClassPath(ctSym, v.toInt)) + else { + val 
classPath = new CtSymClassPath(ctSym, v.toInt) + closeableRegistry.registerClosable(classPath) + Some(classPath) + } } catch { case _: Throwable => None } @@ -230,7 +236,7 @@ final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with No /** * Implementation `ClassPath` based on the $JAVA_HOME/lib/ct.sym backing http://openjdk.java.net/jeps/247 */ -final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends ClassPath with NoSourcePaths { +final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends ClassPath with NoSourcePaths with Closeable { import java.nio.file.Path, java.nio.file._ private val fileSystem: FileSystem = FileSystems.newFileSystem(ctSym, null) @@ -276,7 +282,7 @@ final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends Clas def asURLs: Seq[URL] = Nil def asClassPathStrings: Seq[String] = Nil - + override def close(): Unit = fileSystem.close() def findClassFile(className: String): Option[AbstractFile] = { if (!className.contains(".")) None else { diff --git a/src/compiler/scala/tools/nsc/classpath/FileUtils.scala b/src/compiler/scala/tools/nsc/classpath/FileUtils.scala index 059a83da796c..aa4d81736195 100644 --- a/src/compiler/scala/tools/nsc/classpath/FileUtils.scala +++ b/src/compiler/scala/tools/nsc/classpath/FileUtils.scala @@ -24,7 +24,7 @@ object FileUtils { implicit class AbstractFileOps(val file: AbstractFile) extends AnyVal { def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.name) - def isClass: Boolean = !file.isDirectory && file.hasExtension("class") + def isClass: Boolean = !file.isDirectory && (file.hasExtension("class") || file.hasExtension("sig")) def isScalaOrJavaSource: Boolean = !file.isDirectory && (file.hasExtension("scala") || file.hasExtension("java")) diff --git a/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala b/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala index 5b157e9b386e..04ddc61b2107 100644 --- a/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala @@ -35,7 +35,7 @@ case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath wi def isPackage(f: AbstractFile): Boolean = f.isPackage // mimic the behavior of the old nsc.util.DirectoryClassPath - def asURLs: Seq[URL] = Seq(new URL(dir.name)) + def asURLs: Seq[URL] = Seq(new URL("file://_VIRTUAL_/" + dir.name)) def asClassPathStrings: Seq[String] = Seq(dir.path) override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl diff --git a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala index 6f8b9a55c0cd..acb41185353e 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala @@ -12,16 +12,19 @@ package scala.tools.nsc.classpath -import java.io.File +import java.io.{Closeable, File} import java.net.URL import java.nio.file.Files import java.nio.file.attribute.{BasicFileAttributes, FileTime} +import java.util.{Timer, TimerTask} +import java.util.concurrent.atomic.AtomicInteger import scala.annotation.tailrec import scala.reflect.io.{AbstractFile, FileZipArchive, ManifestResources} import scala.tools.nsc.util.{ClassPath, ClassRepresentation} -import scala.tools.nsc.Settings +import 
scala.tools.nsc.{CloseableRegistry, Settings} import FileUtils._ +import scala.tools.nsc.io.Jar /** * A trait providing an optional cache for classpath entries obtained from zip and jar files. @@ -29,18 +32,20 @@ import FileUtils._ * when there are a lot of projects having a lot of common dependencies. */ sealed trait ZipAndJarFileLookupFactory { - private val cache = new FileBasedCache[ClassPath] - - def create(zipFile: AbstractFile, settings: Settings): ClassPath = { - if (settings.YdisableFlatCpCaching || zipFile.file == null) createForZipFile(zipFile, settings.releaseValue) - else createUsingCache(zipFile, settings) + private val cache = new FileBasedCache[ClassPath with Closeable] + + def create(zipFile: AbstractFile, settings: Settings, closeableRegistry: CloseableRegistry): ClassPath = { + cache.checkCacheability(zipFile.toURL :: Nil, checkStamps = true, disableCache = settings.YdisableFlatCpCaching.value || zipFile.file == null) match { + case Left(_) => + val result: ClassPath with Closeable = createForZipFile(zipFile, settings.releaseValue) + closeableRegistry.registerClosable(result) + result + case Right(Seq(path)) => + cache.getOrCreate(List(path), () => createForZipFile(zipFile, settings.releaseValue), closeableRegistry, checkStamps = true) + } } - protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath - - private def createUsingCache(zipFile: AbstractFile, settings: Settings): ClassPath = { - cache.getOrCreate(List(zipFile.file.toPath), () => createForZipFile(zipFile, settings.releaseValue)) - } + protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath with Closeable } /** @@ -75,7 +80,7 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { * with a particularly prepared scala-library.jar. It should have all classes listed in the manifest like e.g. 
this entry: * Name: scala/Function2$mcFJD$sp.class */ - private case class ManifestResourcesClassPath(file: ManifestResources) extends ClassPath with NoSourcePaths { + private case class ManifestResourcesClassPath(file: ManifestResources) extends ClassPath with NoSourcePaths with Closeable { override def findClassFile(className: String): Option[AbstractFile] = { val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) classes(pkg).find(_.name == simpleClassName).map(_.file) @@ -84,6 +89,7 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { override def asClassPathStrings: Seq[String] = Seq(file.path) override def asURLs: Seq[URL] = file.toURLs() + override def close(): Unit = file.close() import ManifestResourcesClassPath.PackageFileInfo import ManifestResourcesClassPath.PackageInfo @@ -152,7 +158,7 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { case class PackageInfo(packageName: String, subpackages: List[AbstractFile]) } - override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath = + override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath with Closeable = if (zipFile.file == null) createWithoutUnderlyingFile(zipFile) else ZipArchiveClassPath(zipFile.file, release) @@ -183,28 +189,107 @@ object ZipAndJarSourcePathFactory extends ZipAndJarFileLookupFactory { override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isScalaOrJavaSource } - override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath = ZipArchiveSourcePath(zipFile.file) + override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath with Closeable = ZipArchiveSourcePath(zipFile.file) } final class FileBasedCache[T] { import java.nio.file.Path private case class Stamp(lastModified: FileTime, fileKey: Object) - private val cache = collection.mutable.Map.empty[Seq[Path], (Seq[Stamp], T)] + private case class Entry(stamps: Seq[Stamp], t: T) { + val referenceCount: AtomicInteger = new AtomicInteger(1) + } + private val cache = collection.mutable.Map.empty[Seq[Path], Entry] + + private def referenceCountDecrementer(e: Entry, paths: Seq[Path]): Closeable = new Closeable { + var closed = false + override def close(): Unit = { + if (!closed) { + closed = true + val count = e.referenceCount.decrementAndGet() + if (count == 0) { + e.t match { + case cl: Closeable => + FileBasedCache.timer match { + case Some(timer) => + val task = new TimerTask { + override def run(): Unit = { + cache.synchronized { + if (e.referenceCount.compareAndSet(0, -1)) { + cache.remove(paths) + cl.close() + } + } + } + } + timer.schedule(task, FileBasedCache.deferCloseMs.toLong) + case None => + cl.close() + } + case _ => + } + } + } + } + } - def getOrCreate(paths: Seq[Path], create: () => T): T = cache.synchronized { - val stamps = paths.map { path => + def checkCacheability(urls: Seq[URL], checkStamps: Boolean, disableCache: Boolean): Either[String, Seq[java.nio.file.Path]] = { + import scala.reflect.io.{AbstractFile, Path} + lazy val urlsAndFiles = urls.filterNot(_.getProtocol == "jrt").map(u => u -> AbstractFile.getURL(u)) + lazy val paths = urlsAndFiles.map(t => Path(t._2.file).jfile.toPath) + if (!checkStamps) Right(paths) + else if (disableCache) Left("caching is disabled due to a policy setting") + else { + val nonJarZips = urlsAndFiles.filter { case (url, file) => file == null || 
!Jar.isJarOrZip(file.file) } + if (nonJarZips.nonEmpty) Left(s"caching is disabled because of the following classpath elements: ${nonJarZips.map(_._1).mkString(", ")}.") + else Right(paths) + } + } + + def getOrCreate(paths: Seq[Path], create: () => T, closeableRegistry: CloseableRegistry, checkStamps: Boolean): T = cache.synchronized { + val stamps = if (!checkStamps) Nil else paths.map { path => + try { val attrs = Files.readAttributes(path, classOf[BasicFileAttributes]) val lastModified = attrs.lastModifiedTime() // only null on some platforms, but that's okay, we just use the last modified timestamp as our stamp val fileKey = attrs.fileKey() Stamp(lastModified, fileKey) + } catch { + case ex: java.nio.file.NoSuchFileException => + // Dummy stamp for (currently) non-existent file. + Stamp(FileTime.fromMillis(0), new Object) + } } cache.get(paths) match { - case Some((cachedStamps, cached)) if cachedStamps == stamps => cached + case Some(e@Entry(cachedStamps, cached)) => + if (!checkStamps || cachedStamps == stamps) { + // Cache hit + val count = e.referenceCount.incrementAndGet() + assert(count > 0, (stamps, count)) + closeableRegistry.registerClosable(referenceCountDecrementer(e, paths)) + cached + } else { + // Cache miss: we found an entry but the underlying files have been modified + cached match { + case c: Closeable => + if (e.referenceCount.get() == 0) { + c.close() + } else { + // TODO: What do do here? Maybe add to a list of closeables polled by a cleanup thread? + } + } + val value = create() + val entry = Entry(stamps, value) + cache.put(paths, entry) + closeableRegistry.registerClosable(referenceCountDecrementer(entry, paths)) + value + } case _ => + // Cache miss val value = create() - cache.put(paths, (stamps, value)) + val entry = Entry(stamps, value) + cache.put(paths, entry) + closeableRegistry.registerClosable(referenceCountDecrementer(entry, paths)) value } } @@ -215,3 +300,17 @@ final class FileBasedCache[T] { cache.clear() } } + +object FileBasedCache { + // The tension here is that too long a delay could lead to an error (on Windows) with an inability + // to overwrite the JAR. To short a delay and the entry could be evicted before a subsequent + // sub-project compilation is able to get a cache hit. A more comprehensive solution would be to + // involve build tools in the policy: they could close entries with refcount of zero when that + // entry's JAR is about to be overwritten. + private val deferCloseMs = Integer.getInteger("scalac.filebasedcache.defer.close.ms", 1000) + private val timer: Option[Timer] = { + if (deferCloseMs > 0) + Some(new java.util.Timer(true)) + else None + } +} diff --git a/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala index 32ec4cde4485..c658d4c01664 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala @@ -12,7 +12,7 @@ package scala.tools.nsc.classpath -import java.io.File +import java.io.{Closeable, File} import java.net.URL import scala.collection.Seq import scala.reflect.io.AbstractFile @@ -25,7 +25,7 @@ import scala.tools.nsc.util.{ClassPath, ClassRepresentation} * It provides common logic for classes handling class and source files. * It's aware of things like e.g. META-INF directory which is correctly skipped. 
*/ -trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends ClassPath { +trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends ClassPath with Closeable { val zipFile: File def release: Option[String] @@ -33,8 +33,8 @@ trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends ClassPa override def asURLs: Seq[URL] = Seq(zipFile.toURI.toURL) override def asClassPathStrings: Seq[String] = Seq(zipFile.getPath) - private val archive = new FileZipArchive(zipFile, release) + override def close(): Unit = archive.close() override private[nsc] def packages(inPackage: String): Seq[PackageEntry] = { val prefix = PackageNameUtils.packagePrefix(inPackage) diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala index b76f67ccf6aa..9c0f2db89446 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala @@ -93,40 +93,7 @@ object Plugin { private val PluginXML = "scalac-plugin.xml" - private val pluginClassLoadersCache = new FileBasedCache[ScalaClassLoader]() - - /** Create a class loader with the specified locations plus - * the loader that loaded the Scala compiler. - * - * If the class loader has already been created before and the - * file stamps are the same, the previous loader is returned to - * mitigate the cost of dynamic classloading as it has been - * measured in https://github.com/scala/scala-dev/issues/458. - */ - private def loaderFor(locations: Seq[Path], disableCache: Boolean): ScalaClassLoader = { - def newLoader = () => { - val compilerLoader = classOf[Plugin].getClassLoader - val urls = locations map (_.toURL) - ScalaClassLoader fromURLs (urls, compilerLoader) - } - - if (disableCache || locations.exists(!Jar.isJarOrZip(_))) newLoader() - else pluginClassLoadersCache.getOrCreate(locations.map(_.jfile.toPath()), newLoader) - } - - /** Try to load a plugin description from the specified location. 
- */ - private def loadDescriptionFromJar(jarp: Path): Try[PluginDescription] = { - // XXX Return to this once we have more ARM support - def read(is: Option[InputStream]) = is match { - case None => throw new PluginLoadException(jarp.path, s"Missing $PluginXML in $jarp") - case Some(is) => PluginDescription.fromXML(is) - } - Try(new Jar(jarp.jfile).withEntryStream(PluginXML)(read)) - } - - private def loadDescriptionFromFile(f: Path): Try[PluginDescription] = - Try(PluginDescription.fromXML(new java.io.FileInputStream(f.jfile))) + private[nsc] val pluginClassLoadersCache = new FileBasedCache[ScalaClassLoader.URLClassLoader]() type AnyClass = Class[_] @@ -155,40 +122,26 @@ object Plugin { paths: List[List[Path]], dirs: List[Path], ignoring: List[String], - disableClassLoaderCache: Boolean): List[Try[AnyClass]] = + findPluginClassloader: (Seq[Path] => ClassLoader)): List[Try[AnyClass]] = { - // List[(jar, Try(descriptor))] in dir - def scan(d: Directory) = - d.files.toList sortBy (_.name) filter (Jar isJarOrZip _) map (j => (j, loadDescriptionFromJar(j))) - type PDResults = List[Try[(PluginDescription, ScalaClassLoader)]] - // scan plugin dirs for jars containing plugins, ignoring dirs with none and other jars - val fromDirs: PDResults = dirs filter (_.isDirectory) flatMap { d => - scan(d.toDirectory) collect { - case (j, Success(pd)) => Success((pd, loaderFor(Seq(j), disableClassLoaderCache))) + val fromLoaders = paths.map {path => + val loader = findPluginClassloader(path) + loader.getResource(PluginXML) match { + case null => Failure(new MissingPluginException(path)) + case url => + val inputStream = url.openStream + try { + Try((PluginDescription.fromXML(inputStream), loader)) + } finally { + inputStream.close() + } } } - // scan jar paths for plugins, taking the first plugin you find. - // a path element can be either a plugin.jar or an exploded dir. 
- def findDescriptor(ps: List[Path]) = { - def loop(qs: List[Path]): Try[PluginDescription] = qs match { - case Nil => Failure(new MissingPluginException(ps)) - case p :: rest => - if (p.isDirectory) loadDescriptionFromFile(p.toDirectory / PluginXML) orElse loop(rest) - else if (p.isFile) loadDescriptionFromJar(p.toFile) orElse loop(rest) - else loop(rest) - } - loop(ps) - } - val fromPaths: PDResults = paths map (p => (p, findDescriptor(p))) map { - case (p, Success(pd)) => Success((pd, loaderFor(p, disableClassLoaderCache))) - case (_, Failure(e)) => Failure(e) - } - val seen = mutable.HashSet[String]() - val enabled = (fromPaths ::: fromDirs) map { + val enabled = fromLoaders map { case Success((pd, loader)) if seen(pd.classname) => // a nod to scala/bug#7494, take the plugin classes distinctly Failure(new PluginLoadException(pd.name, s"Ignoring duplicate plugin ${pd.name} (${pd.classname})")) diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala index bba855ba541a..386bdc4ab1a8 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala @@ -13,7 +13,14 @@ package scala.tools.nsc package plugins +import java.net.URL + +import scala.reflect.internal.util.ScalaClassLoader import scala.reflect.io.Path +import scala.tools.nsc +import scala.tools.nsc.io.Jar +import scala.tools.nsc.plugins.Plugin.pluginClassLoadersCache +import scala.tools.nsc.typechecker.Macros import scala.tools.nsc.util.ClassPath import scala.tools.util.PathResolver.Defaults @@ -37,7 +44,7 @@ trait Plugins { global: Global => def injectDefault(s: String) = if (s.isEmpty) Defaults.scalaPluginPath else s asPath(settings.pluginsDir.value) map injectDefault map Path.apply } - val maybes = Plugin.loadAllFrom(paths, dirs, settings.disable.value, settings.YcachePluginClassLoader.value == settings.CachePolicy.None.name) + val maybes = Plugin.loadAllFrom(paths, dirs, settings.disable.value, findPluginClassLoader(_)) val (goods, errors) = maybes partition (_.isSuccess) // Explicit parameterization of recover to avoid -Xlint warning about inferred Any errors foreach (_.recover[Any] { @@ -53,6 +60,43 @@ trait Plugins { global: Global => classes map (Plugin.instantiate(_, this)) } + /** + * Locate or create the classloader to load a compiler plugin with `classpath`. + * + * Subclasses may override to customise the behaviour. + * + * @param classpath + * @return + */ + protected def findPluginClassLoader(classpath: Seq[Path]): ClassLoader = { + val policy = settings.YcachePluginClassLoader.value + val disableCache = policy == settings.CachePolicy.None.name + def newLoader = () => { + val compilerLoader = classOf[Plugin].getClassLoader + val urls = classpath map (_.toURL) + ScalaClassLoader fromURLs (urls, compilerLoader) + } + + // Create a class loader with the specified locations plus + // the loader that loaded the Scala compiler. + // + // If the class loader has already been created before and the + // file stamps are the same, the previous loader is returned to + // mitigate the cost of dynamic classloading as it has been + // measured in https://github.com/scala/scala-dev/issues/458. 
+ + val cache = pluginClassLoadersCache + val checkStamps = policy == settings.CachePolicy.LastModified.name + cache.checkCacheability(classpath.map(_.toURL), checkStamps, disableCache) match { + case Left(msg) => + val loader = newLoader() + closeableRegistry.registerClosable(loader) + loader + case Right(paths) => + cache.getOrCreate(classpath.map(_.jfile.toPath()), newLoader, closeableRegistry, checkStamps) + } + } + protected lazy val roughPluginsList: List[Plugin] = loadRoughPluginsList() /** Load all available plugins. Skips plugins that @@ -123,4 +167,38 @@ trait Plugins { global: Global => (for (plug <- roughPluginsList ; help <- plug.optionsHelp) yield { "\nOptions for plugin '%s':\n%s\n".format(plug.name, help) }).mkString + + /** Obtains a `ClassLoader` instance used for macro expansion. + * + * By default a new `ScalaClassLoader` is created using the classpath + * from global and the classloader of self as parent. + * + * Mirrors with runtime definitions (e.g. Repl) need to adjust this method. + */ + protected[scala] def findMacroClassLoader(): ClassLoader = { + val classpath: Seq[URL] = if (settings.YmacroClasspath.isSetByUser) { + for { + file <- scala.tools.nsc.util.ClassPath.expandPath(settings.YmacroClasspath.value, true) + af <- Option(nsc.io.AbstractFile getDirectory file) + } yield af.file.toURI.toURL + } else global.classPath.asURLs + def newLoader: () => ScalaClassLoader.URLClassLoader = () => { + analyzer.macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath)) + ScalaClassLoader.fromURLs(classpath, getClass.getClassLoader) + } + + val policy = settings.YcacheMacroClassLoader.value + val cache = Macros.macroClassLoadersCache + val disableCache = policy == settings.CachePolicy.None.name + val checkStamps = policy == settings.CachePolicy.LastModified.name + cache.checkCacheability(classpath, checkStamps, disableCache) match { + case Left(msg) => + analyzer.macroLogVerbose(s"macro classloader: $msg.") + val loader = newLoader() + closeableRegistry.registerClosable(loader) + loader + case Right(paths) => + cache.getOrCreate(paths, newLoader, closeableRegistry, checkStamps) + } + } } diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 5f46d0606710..804481ef709a 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -243,23 +243,27 @@ trait ScalaSettings extends AbsScalaSettings val YcacheMacroClassLoader = CachePolicy.setting("macro", "macros") val YpartialUnification = BooleanSetting ("-Ypartial-unification", "Enable partial unification in type constructor inference") val Yvirtpatmat = BooleanSetting ("-Yvirtpatmat", "Enable pattern matcher virtualization") + val Youtline = BooleanSetting ("-Youtline", "Don't compile method bodies. 
Use together with `-Ystop-after:pickler` to generate the pickled signatures for all source files.").internalOnly()
   val exposeEmptyPackage = BooleanSetting ("-Yexpose-empty-package", "Internal only: expose the empty package.").internalOnly()
   val Ydelambdafy       = ChoiceSetting  ("-Ydelambdafy", "strategy", "Strategy used for translating lambdas into JVM code.",
                                             List("inline", "method"), "method")
+  val YmacroClasspath = PathSetting ("-Ymacro-classpath", "The classpath used to reflectively load macro implementations, default is the compilation classpath.", "")

   val YaddBackendThreads = IntSetting    ("-Ybackend-parallelism", "maximum worker threads for backend", 1, Some((1,16)), (x: String) => None )
   val YmaxQueue = IntSetting   ("-Ybackend-worker-queue", "backend threads worker queue size", 0, Some((0,1000)), (x: String) => None )
   val YjarCompressionLevel = IntSetting("-Yjar-compression-level", "compression level to use when writing jar files",
     Deflater.DEFAULT_COMPRESSION,
     Some((Deflater.DEFAULT_COMPRESSION,Deflater.BEST_COMPRESSION)),
     (x: String) => None)
+  val YpickleJava = BooleanSetting("-Ypickle-java", "Pickler phase should compute pickles for .java defined symbols for use by build tools").internalOnly()

   sealed abstract class CachePolicy(val name: String, val help: String)
   object CachePolicy {
     def setting(style: String, styleLong: String) = ChoiceSetting(s"-Ycache-$style-class-loader", "policy", s"Policy for caching class loaders for $styleLong that are dynamically loaded.", values.map(_.name), None.name, values.map(_.help))
     object None extends CachePolicy("none", "Don't cache class loader")
     object LastModified extends CachePolicy("last-modified", "Cache class loader, using file last-modified time to invalidate")
+    object Always extends CachePolicy("always", "Cache class loader with no invalidation")
     // TODO Jorge to add new policy. Think about whether there is a benefit to the user on offering this as a separate policy or unifying with the previous one.
// object ZipMetadata extends CachePolicy("zip-metadata", "Cache classloade, using file last-modified time, then ZIP file metadata to invalidate") - def values: List[CachePolicy] = List(None, LastModified) + def values: List[CachePolicy] = List(None, LastModified, Always) } object optChoices extends MultiChoiceEnumeration { diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala index a8d673663e8d..19be00dd686a 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala @@ -25,11 +25,8 @@ import scala.tools.nsc.io.AbstractFile * @author Philippe Altherr * @version 1.0, 23/03/2004 */ -class AbstractFileReader(val file: AbstractFile) { - - /** the buffer containing the file - */ - val buf: Array[Byte] = file.toByteArray +class AbstractFileReader(val file: AbstractFile, val buf: Array[Byte]) { + def this(file: AbstractFile) = this(file, file.toByteArray) /** the current input pointer */ diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 935a100effe8..c855f1c11bb6 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -152,14 +152,21 @@ abstract class ClassfileParser { def parse(file: AbstractFile, clazz: ClassSymbol, module: ModuleSymbol): Unit = { this.file = file pushBusy(clazz) { - this.in = new AbstractFileReader(file) this.clazz = clazz this.staticModule = module this.isScala = false - parseHeader() - this.pool = newConstantPool - parseClass() + this.in = new AbstractFileReader(file) + val magic = in.getInt(in.bp) + if (magic != JAVA_MAGIC && file.name.endsWith(".sig")) { + currentClass = TermName(clazz.javaClassName) + isScala = true + unpickler.unpickle(in.buf, 0, clazz, staticModule, file.name) + } else { + parseHeader() + this.pool = newConstantPool + parseClass() + } } } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 947b95f57baa..1fd7690763e5 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -90,6 +90,8 @@ abstract class Pickler extends SubComponent { throw e } } + + override protected def shouldSkipThisPhaseForJava: Boolean = !settings.YpickleJava.value } private class Pickle(root: Symbol) extends PickleBuffer(new Array[Byte](4096), -1, 0) { @@ -213,7 +215,7 @@ abstract class Pickler extends SubComponent { // initially, but seems not to work, as the bug shows). // Adding the LOCAL_CHILD is necessary to retain exhaustivity warnings under separate // compilation. See test neg/aladdin1055. 
- val parents = (if (sym.isTrait) List(definitions.ObjectTpe) else Nil) ::: List(sym.tpe) + val parents = if (sym.isTrait) List(definitions.ObjectTpe, sym.tpe) else List(sym.tpe) globals + sym.newClassWithInfo(tpnme.LOCAL_CHILD, parents, EmptyScope, pos = sym.pos) } diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala index b068e43d1ad4..bc5ffd0ccd7c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala @@ -112,11 +112,13 @@ trait Analyzer extends AnyRef try { val typer = newTyper(rootContext(unit)) unit.body = typer.typed(unit.body) - for (workItem <- unit.toCheck) workItem() - if (settings.warnUnusedImport) - warnUnusedImports(unit) - if (settings.warnUnused.isSetByUser) - new checkUnused(typer).apply(unit) + if (!settings.Youtline.value) { + for (workItem <- unit.toCheck) workItem() + if (settings.warnUnusedImport) + warnUnusedImports(unit) + if (settings.warnUnused.isSetByUser) + new checkUnused(typer).apply(unit) + } } finally { unit.toCheck.clear() diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 5b543c8f494b..5b970fe7e79e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -50,6 +50,7 @@ trait Contexts { self: Analyzer => val javaAndScalaList = JavaLangPackage :: ScalaPackage :: Nil val completeList = JavaLangPackage :: ScalaPackage :: PredefModule :: Nil } + private lazy val NoJavaMemberFound = (NoType, NoSymbol) def ambiguousImports(imp1: ImportInfo, imp2: ImportInfo) = LookupAmbiguous(s"it is imported twice in the same scope by\n$imp1\nand $imp2") @@ -1024,7 +1025,7 @@ trait Contexts { self: Analyzer => imp.importedSymbol(name, requireExplicit, record) filter (s => isAccessible(s, imp.qual.tpe, superAccess = false)) private[Contexts] def requiresQualifier(s: Symbol): Boolean = ( - s.owner.isClass + s.owner.isClass && !s.owner.isPackageClass && !s.isTypeParameterOrSkolem && !s.isExistentiallyBound @@ -1074,6 +1075,31 @@ trait Contexts { self: Analyzer => } } + final def javaFindMember(pre: Type, name: Name, qualifies: Symbol => Boolean): (Type, Symbol) = { + val sym = pre.member(name).filter(qualifies) + val preSym = pre.typeSymbol + if (sym.exists || preSym.isPackageClass || !preSym.isClass) (pre, sym) + else { + // In Java code, static innner classes, which we model as members of the companion object, + // can be referenced from an ident in a subclass or by a selection prefixed by the subclass. + val toSearch = if (preSym.isModuleClass) companionSymbolOf(pre.typeSymbol.sourceModule, this).baseClasses else preSym.baseClasses + toSearch.iterator.map { bc => + val pre1 = bc.typeOfThis + val found = pre1.decl(name) + found.filter(qualifies) match { + case NoSymbol => + val pre2 = companionSymbolOf(pre1.typeSymbol, this).typeOfThis + val found = pre2.decl(name).filter(qualifies) + found match { + case NoSymbol => NoJavaMemberFound + case sym => (pre2, sym) + } + case sym => (pre1, sym) + } + }.find(_._2 ne NoSymbol).getOrElse(NoJavaMemberFound) + } + } + } //class Context /** Find the symbol of a simple name starting from this context. 
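
(A minimal sketch, not part of the patch, of the lookup order the new `javaFindMember` helper above follows, expressed over hypothetical toy types rather than the compiler's `Type` and `Symbol`; the names `Sym`, `Tpe` and `find` are illustrative only.)

    final case class Sym(name: String)
    final case class Tpe(decls: Map[String, Sym], companion: Option[Tpe] = None, bases: List[Tpe] = Nil)

    // 1. try the prefix's own members; 2. otherwise walk the base classes, and for each base
    //    try its own members and then its companion's members, which is where Java static
    //    nested classes are modelled.
    def find(pre: Tpe, name: String): Option[(Tpe, Sym)] =
      pre.decls.get(name).map(sym => (pre, sym)).orElse {
        pre.bases.iterator.map { base =>
          base.decls.get(name).map(sym => (base, sym))
            .orElse(base.companion.flatMap(c => c.decls.get(name).map(sym => (c, sym))))
        }.collectFirst { case Some(hit) => hit }
      }
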
@@ -1107,7 +1133,7 @@ trait Contexts { self: Analyzer => } ) def finishDefSym(sym: Symbol, pre0: Type): NameLookup = - if (thisContext.requiresQualifier(sym)) + if (!thisContext.unit.isJava && thisContext.requiresQualifier(sym)) finish(gen.mkAttributedQualifier(pre0), sym) else finish(EmptyTree, sym) @@ -1119,15 +1145,19 @@ trait Contexts { self: Analyzer => ) ) def lookupInPrefix(name: Name) = { - val sym = pre.member(name).filter(qualifies) - def isNonPackageNoModuleClass(sym: Symbol) = - sym.isClass && !sym.isModuleClass && !sym.isPackageClass - if (!sym.exists && thisContext.unit.isJava && isNonPackageNoModuleClass(pre.typeSymbol)) { - // TODO factor out duplication with Typer::inCompanionForJavaStatic - val pre1 = companionSymbolOf(pre.typeSymbol, thisContext).typeOfThis - pre1.member(name).filter(qualifies).andAlso(_ => pre = pre1) - } else sym + if (thisContext.unit.isJava) { + thisContext.javaFindMember(pre, name, qualifies) match { + case (_, NoSymbol) => + NoSymbol + case (pre1, sym) => + pre = pre1 + sym + } + } else { + pre.member(name).filter(qualifies) + } } + def accessibleInPrefix(s: Symbol) = thisContext.isAccessible(s, pre, superAccess = false) @@ -1237,8 +1267,7 @@ trait Contexts { self: Analyzer => } // At this point only one or the other of defSym and impSym might be set. - if (defSym.exists) - finishDefSym(defSym, pre) + if (defSym.exists) finishDefSym(defSym, pre) else if (impSym.exists) { // If we find a competitor imp2 which imports the same name, possible outcomes are: // diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 10382720089b..5d0e51cd2ea9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -64,49 +64,6 @@ trait Macros extends MacroRuntimes with Traces with Helpers { def globalSettings = global.settings - private final val macroClassLoadersCache = - new scala.tools.nsc.classpath.FileBasedCache[ScalaClassLoader]() - - /** Obtains a `ClassLoader` instance used for macro expansion. - * - * By default a new `ScalaClassLoader` is created using the classpath - * from global and the classloader of self as parent. - * - * Mirrors with runtime definitions (e.g. Repl) need to adjust this method. 
- */ - protected def findMacroClassLoader(): ClassLoader = { - val classpath = global.classPath.asURLs - def newLoader = () => { - macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath)) - ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader) - } - - val disableCache = settings.YcacheMacroClassLoader.value == settings.CachePolicy.None.name - if (disableCache) newLoader() - else { - import scala.tools.nsc.io.Jar - import scala.reflect.io.{AbstractFile, Path} - - val urlsAndFiles = classpath.map(u => u -> AbstractFile.getURL(u)) - val hasNullURL = urlsAndFiles.filter(_._2 eq null) - if (hasNullURL.nonEmpty) { - // TODO if the only null is jrt:// we can still cache - // TODO filter out classpath elements pointing to non-existing files before we get here, that's another source of null - macroLogVerbose(s"macro classloader: caching is disabled because `AbstractFile.getURL` returned `null` for ${hasNullURL.map(_._1).mkString(", ")}.") - newLoader() - } else { - val locations = urlsAndFiles.map(t => Path(t._2.file)) - val nonJarZips = locations.filterNot(Jar.isJarOrZip(_)) - if (nonJarZips.nonEmpty) { - macroLogVerbose(s"macro classloader: caching is disabled because the following paths are not supported: ${nonJarZips.mkString(",")}.") - newLoader() - } else { - macroClassLoadersCache.getOrCreate(locations.map(_.jfile.toPath()), newLoader) - } - } - } - } - /** `MacroImplBinding` and its companion module are responsible for * serialization/deserialization of macro def -> impl bindings. * @@ -974,6 +931,11 @@ trait Macros extends MacroRuntimes with Traces with Helpers { }.transform(expandee) } +object Macros { + final val macroClassLoadersCache = + new scala.tools.nsc.classpath.FileBasedCache[ScalaClassLoader.URLClassLoader]() +} + trait MacrosStats { self: TypesStats with Statistics => val macroExpandCount = newCounter ("#macro expansions", "typer") diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 1fa8add09001..002c098b1ae8 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -131,7 +131,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * (`owner` tells where the type occurs). */ def privates[T <: Tree](typer: Typer, owner: Symbol, tree: T): T = - check(typer, owner, EmptyScope, WildcardType, tree) + if (owner.isJavaDefined) tree else check(typer, owner, EmptyScope, WildcardType, tree) private def check[T <: Tree](typer: Typer, owner: Symbol, scope: Scope, pt: Type, tree: T): T = { this.owner = owner @@ -554,7 +554,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * @return modified tree and new prefix type */ private def makeAccessible(tree: Tree, sym: Symbol, pre: Type, site: Tree): (Tree, Type) = - if (context.isInPackageObject(sym, pre.typeSymbol)) { + if (!unit.isJava && context.isInPackageObject(sym, pre.typeSymbol)) { if (pre.typeSymbol == ScalaPackageClass && sym.isTerm) { // short cut some aliases. 
It seems pattern matching needs this
        // to notice exhaustiveness and to generate good code when
@@ -671,16 +671,16 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
       }
     }

-    /** The member with given name of given qualifier tree */
-    def member(qual: Tree, name: Name) = {
+    /** The member with given name of given qualifier type */
+    def member(qual: Type, name: Name): Symbol = {
       def callSiteWithinClass(clazz: Symbol) = context.enclClass.owner hasTransOwner clazz
-      val includeLocals = qual.tpe match {
+      val includeLocals = qual match {
         case ThisType(clazz) if callSiteWithinClass(clazz) => true
         case SuperType(clazz, _) if callSiteWithinClass(clazz.typeSymbol) => true
         case _ => phase.next.erasedTypes
       }
-      if (includeLocals) qual.tpe member name
-      else qual.tpe nonLocalMember name
+      if (includeLocals) qual member name
+      else qual nonLocalMember name
     }

     def silent[T](op: Typer => T,
@@ -1160,7 +1160,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper

       def vanillaAdapt(tree: Tree) = {
         def applyPossible = {
-          def applyMeth = member(adaptToName(tree, nme.apply), nme.apply)
+          def applyMeth = member(adaptToName(tree, nme.apply).tpe, nme.apply)
           def hasPolymorphicApply = applyMeth.alternatives exists (_.tpe.typeParams.nonEmpty)
           def hasMonomorphicApply = applyMeth.alternatives exists (_.tpe.paramSectionCount > 0)
@@ -1364,7 +1364,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
     *  If no conversion is found, return `qual` unchanged.
     */
    def adaptToName(qual: Tree, name: Name) =
-      if (member(qual, name) != NoSymbol) qual
+      if (member(qual.tpe, name) != NoSymbol) qual
      else adaptToMember(qual, HasMember(name))

    private def validateNoCaseAncestor(clazz: Symbol) = {
@@ -1759,7 +1759,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
          checkStablePrefixClassType(parent)

          if (psym != superclazz) {
-            if (psym.isTrait) {
+            if (context.unit.isJava && psym.isJavaAnnotation) {
+              // allowed
+            } else if (psym.isTrait) {
              val ps = psym.info.parents
              if (!ps.isEmpty && !superclazz.isSubClass(ps.head.typeSymbol))
                pending += ParentSuperSubclassError(parent, superclazz, ps.head.typeSymbol, psym)
@@ -1845,7 +1847,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
      if (clazz.isTrait && clazz.info.parents.nonEmpty && clazz.info.firstParent.typeSymbol == AnyClass)
        checkEphemeral(clazz, impl2.body)

-      if ((clazz isNonBottomSubClass ClassfileAnnotationClass) && (clazz != ClassfileAnnotationClass)) {
+      if (!clazz.isJavaDefined && (clazz isNonBottomSubClass ClassfileAnnotationClass) && (clazz != ClassfileAnnotationClass)) {
        if (!clazz.owner.isPackageClass)
          context.error(clazz.pos, "inner classes cannot be classfile annotations")
        // Ignore @SerialVersionUID, because it is special-cased and handled completely differently.
@@ -2008,7 +2010,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (clazz.isTrait && hasSuperArgs(parents1.head)) ConstrArgsInParentOfTraitError(parents1.head, clazz) - if ((clazz isSubClass ClassfileAnnotationClass) && !clazz.isTopLevel) + if (!clazz.isJavaDefined && (clazz isSubClass ClassfileAnnotationClass) && !clazz.isTopLevel) context.error(clazz.pos, "inner classes cannot be classfile annotations") if (!phase.erasedTypes && !clazz.info.resultType.isError) // @S: prevent crash for duplicated type members @@ -2073,7 +2075,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } // use typedValDef instead. this version is called after creating a new context for the ValDef - private def typedValDefImpl(vdef: ValDef) = { + private def typedValDefImpl(vdef: ValDef): ValDef = { val sym = vdef.symbol.initialize val typedMods = if (nme.isLocalName(sym.name) && sym.isPrivateThis && !vdef.mods.isPrivateLocal) { // scala/bug#10009 This tree has been given a field symbol by `enterGetterSetter`, patch up the @@ -3380,6 +3382,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (!context.owner.isPackageClass) checkNoDoubleDefs(scope) + // Note that Java units don't have synthetics, but there's no point in making a special case (for performance or correctness), + // as we only type check Java units when running Scaladoc on Java sources. addSynthetics(stats1, scope) } } @@ -5009,11 +5013,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // For Java, instance and static members are in the same scope, but we put the static ones in the companion object // so, when we can't find a member in the class scope, check the companion - def inCompanionForJavaStatic(pre: Type, cls: Symbol, name: Name): Symbol = - if (!(context.unit.isJava && cls.isClass && !cls.isModuleClass)) NoSymbol else { - val companion = companionSymbolOf(cls, context) - if (!companion.exists) NoSymbol - else member(gen.mkAttributedRef(pre, companion), name) // assert(res.isStatic, s"inCompanionForJavaStatic($pre, $cls, $name) = $res ${res.debugFlagString}") + def inCompanionForJavaStatic(cls: Symbol, name: Name): Symbol = + if (!(context.unit.isJava && cls.isClass)) NoSymbol else { + context.javaFindMember(cls.typeOfThis, name, _ => true)._2 } /* Attribute a selection where `tree` is `qual.name`. @@ -5032,7 +5034,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper wrapErrors(t, (_.typed1(t, mode, pt))) } - val sym = tree.symbol orElse member(qual, name) orElse inCompanionForJavaStatic(qual.tpe.prefix, qual.symbol, name) + val sym = tree.symbol orElse member(qual.tpe, name) orElse inCompanionForJavaStatic(qual.symbol, name) if ((sym eq NoSymbol) && name != nme.CONSTRUCTOR && mode.inAny(EXPRmode | PATTERNmode)) { // symbol not found? --> try to convert implicitly to a type that does have the required // member. 
Added `| PATTERNmode` to allow enrichment in patterns (so we can add e.g., an @@ -5149,7 +5151,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (name.isTypeName) { val qualTyped = typedTypeSelectionQualifier(tree.qualifier, WildcardType) val qualStableOrError = - if (qualTyped.isErrorTyped || treeInfo.admitsTypeSelection(qualTyped)) qualTyped + if (qualTyped.isErrorTyped || unit.isJava || treeInfo.admitsTypeSelection(qualTyped)) qualTyped else UnstableTreeError(qualTyped) typedSelect(tree, qualStableOrError, name) } else { @@ -5203,6 +5205,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } // ignore current variable scope in patterns to enforce linearity val startContext = if (mode.typingPatternOrTypePat) context.outer else context + + def asTypeName = if (mode.inAll(MonoQualifierModes) && unit.isJava && name.isTermName) { + startContext.lookupSymbol(name.toTypeName, qualifies).symbol + } else NoSymbol + val nameLookup = tree.symbol match { case NoSymbol => startContext.lookupSymbol(name, qualifies) case sym => LookupSucceeded(EmptyTree, sym) @@ -5212,7 +5219,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case LookupAmbiguous(msg) => issue(AmbiguousIdentError(tree, name, msg)) case LookupInaccessible(sym, msg) => issue(AccessError(tree, sym, context, msg)) case LookupNotFound => - inEmptyPackage orElse lookupInRoot(name) match { + asTypeName orElse inEmptyPackage orElse lookupInRoot(name) match { case NoSymbol => issue(SymbolNotFoundError(tree, name, context.owner, startContext)) case sym => typed1(tree setSymbol sym, mode, pt) } @@ -5939,7 +5946,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper final def transformedOrTyped(tree: Tree, mode: Mode, pt: Type): Tree = { lookupTransformed(tree) match { case Some(tree1) => tree1 - case _ => typed(tree, mode, pt) + case _ => if (settings.Youtline.value) EmptyTree else typed(tree, mode, pt) } } final def lookupTransformed(tree: Tree): Option[Tree] = diff --git a/src/compiler/scala/tools/reflect/ReflectGlobal.scala b/src/compiler/scala/tools/reflect/ReflectGlobal.scala index e1cf834c6fb6..2efd699e9f44 100644 --- a/src/compiler/scala/tools/reflect/ReflectGlobal.scala +++ b/src/compiler/scala/tools/reflect/ReflectGlobal.scala @@ -25,18 +25,14 @@ import scala.tools.nsc.typechecker.Analyzer class ReflectGlobal(currentSettings: Settings, reporter: Reporter, override val rootClassLoader: ClassLoader) extends Global(currentSettings, reporter) with scala.tools.reflect.ReflectSetup with scala.reflect.runtime.SymbolTable { - override lazy val analyzer = new { - val global: ReflectGlobal.this.type = ReflectGlobal.this - } with Analyzer { - /** Obtains the classLoader used for runtime macro expansion. - * - * Macro expansion can use everything available in [[global.classPath]] or [[rootClassLoader]]. - * The [[rootClassLoader]] is used to obtain runtime defined macros. - */ - override protected def findMacroClassLoader(): ClassLoader = { - val classpath = global.classPath.asURLs - ScalaClassLoader.fromURLs(classpath, rootClassLoader) - } + /** Obtains the classLoader used for runtime macro expansion. + * + * Macro expansion can use everything available in `global.classPath` or `rootClassLoader`. + * The `rootClassLoader` is used to obtain runtime defined macros. 
+ */ + override protected[scala] def findMacroClassLoader(): ClassLoader = { + val classpath = classPath.asURLs + perRunCaches.recordClassloader(ScalaClassLoader.fromURLs(classpath, rootClassLoader)) } override def transformedType(sym: Symbol) = diff --git a/src/compiler/scala/tools/reflect/ReflectMain.scala b/src/compiler/scala/tools/reflect/ReflectMain.scala index 3abd5f390761..a290c6bfafc8 100644 --- a/src/compiler/scala/tools/reflect/ReflectMain.scala +++ b/src/compiler/scala/tools/reflect/ReflectMain.scala @@ -14,15 +14,13 @@ package scala.tools package reflect import scala.reflect.internal.util.ScalaClassLoader -import scala.tools.nsc.Driver -import scala.tools.nsc.Global -import scala.tools.nsc.Settings +import scala.tools.nsc.{Driver, Global, CloseableRegistry, Settings} import scala.tools.util.PathResolver object ReflectMain extends Driver { private def classloaderFromSettings(settings: Settings) = { - val classPathURLs = new PathResolver(settings).resultAsURLs + val classPathURLs = new PathResolver(settings, new CloseableRegistry).resultAsURLs ScalaClassLoader.fromURLs(classPathURLs, getClass.getClassLoader) } diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala index 1ad471e40f8b..cf454d5854f8 100644 --- a/src/compiler/scala/tools/util/PathResolver.scala +++ b/src/compiler/scala/tools/util/PathResolver.scala @@ -15,8 +15,9 @@ package tools package util import java.net.URL + import scala.tools.reflect.WrappedProperties.AccessControl -import scala.tools.nsc.Settings +import scala.tools.nsc.{CloseableRegistry, Settings} import scala.tools.nsc.util.ClassPath import scala.reflect.io.{Directory, File, Path} import PartialFunction.condOpt @@ -189,19 +190,24 @@ object PathResolver { } else { val settings = new Settings() val rest = settings.processArguments(args.toList, processAll = false)._2 - val pr = new PathResolver(settings) - println("COMMAND: 'scala %s'".format(args.mkString(" "))) - println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" "))) - - pr.result match { - case cp: AggregateClassPath => - println(s"ClassPath has ${cp.aggregates.size} entries and results in:\n${cp.asClassPathStrings}") + val registry = new CloseableRegistry + try { + val pr = new PathResolver(settings, registry) + println("COMMAND: 'scala %s'".format(args.mkString(" "))) + println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" "))) + + pr.result match { + case cp: AggregateClassPath => + println(s"ClassPath has ${cp.aggregates.size} entries and results in:\n${cp.asClassPathStrings}") + } + } finally { + registry.close() } } } -final class PathResolver(settings: Settings) { - private val classPathFactory = new ClassPathFactory(settings) +final class PathResolver(settings: Settings, closeableRegistry: CloseableRegistry) { + private val classPathFactory = new ClassPathFactory(settings, closeableRegistry) import PathResolver.{ AsLines, Defaults, ppcp } @@ -250,7 +256,7 @@ final class PathResolver(settings: Settings) { // Assemble the elements! def basis = List[Traversable[ClassPath]]( - JrtClassPath.apply(settings.releaseValue), // 0. The Java 9 classpath (backed by the jrt:/ virtual system, if available) + jrt, // 0. The Java 9+ classpath (backed by the ct.sym or jrt:/ virtual system, if available) classesInPath(javaBootClassPath), // 1. The Java bootstrap class path. contentsOfDirsInPath(javaExtDirs), // 2. The Java extension class path. classesInExpandedPath(javaUserClassPath), // 3. The Java application class path. 
@@ -261,6 +267,8 @@ final class PathResolver(settings: Settings) { sourcesInPath(sourcePath) // 7. The Scala source path. ) + private def jrt: Option[ClassPath] = JrtClassPath.apply(settings.releaseValue, closeableRegistry) + lazy val containers = basis.flatten.distinct override def toString = s""" diff --git a/src/partest-extras/scala/tools/partest/BytecodeTest.scala b/src/partest-extras/scala/tools/partest/BytecodeTest.scala index b016778bf428..309a6d49c482 100644 --- a/src/partest-extras/scala/tools/partest/BytecodeTest.scala +++ b/src/partest-extras/scala/tools/partest/BytecodeTest.scala @@ -18,6 +18,7 @@ import scala.tools.asm.tree._ import java.io.{InputStream, File => JFile} import AsmNode._ +import scala.tools.nsc.CloseableRegistry /** * Provides utilities for inspecting bytecode using ASM library. @@ -144,7 +145,7 @@ abstract class BytecodeTest { import scala.tools.nsc.Settings // logic inspired by scala.tools.util.PathResolver implementation // `Settings` is used to check YdisableFlatCpCaching in ZipArchiveFlatClassPath - val factory = new ClassPathFactory(new Settings()) + val factory = new ClassPathFactory(new Settings(), new CloseableRegistry) val containers = factory.classesInExpandedPath(sys.props("partest.output") + java.io.File.pathSeparator + Defaults.javaUserClassPath) new AggregateClassPath(containers) } diff --git a/src/reflect/mima-filters/2.12.0.backwards.excludes b/src/reflect/mima-filters/2.12.0.backwards.excludes index ffa7f91a7eb8..ed9dc507eea4 100644 --- a/src/reflect/mima-filters/2.12.0.backwards.excludes +++ b/src/reflect/mima-filters/2.12.0.backwards.excludes @@ -14,3 +14,5 @@ ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.IOStats") ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.IOStats$") ProblemFilters.exclude[MissingTypesProblem]("scala.reflect.runtime.JavaUniverse") + +ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.reflect.io.ZipArchive.close") diff --git a/src/reflect/mima-filters/2.12.0.forwards.excludes b/src/reflect/mima-filters/2.12.0.forwards.excludes index ee7ce7fb19e8..0f3b81cd3cc7 100644 --- a/src/reflect/mima-filters/2.12.0.forwards.excludes +++ b/src/reflect/mima-filters/2.12.0.forwards.excludes @@ -24,4 +24,11 @@ ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.Settin ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.FileZipArchive.this") ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.ZipArchive.this") ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.ZipArchive.getDir") -ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.FileZipArchive.allDirsByDottedName") \ No newline at end of file +ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.FileZipArchive.allDirsByDottedName") + +ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.RootPath") +ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.RootPath$") +ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.URLZipArchive.close") +ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.FileZipArchive.close") +ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.ManifestResources.close") +ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.ZipArchive.close") \ No newline at end of file diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 31a54e35f4d1..b27bca907b08 100644 --- 
a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -1236,7 +1236,7 @@ trait Definitions extends api.StandardDefinitions { lazy val AnnotationDefaultAttr: ClassSymbol = { val sym = RuntimePackageClass.newClassSymbol(tpnme.AnnotationDefaultATTR, NoPosition, 0L) - sym setInfo ClassInfoType(List(AnnotationClass.tpe), newScope, sym) + sym setInfo ClassInfoType(List(StaticAnnotationClass.tpe), newScope, sym) markAllCompleted(sym) RuntimePackageClass.info.decls.toList.filter(_.name == sym.name) match { case existing :: _ => diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index aba70c2449f6..fe18347d15a7 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -14,6 +14,8 @@ package scala package reflect package internal +import java.net.URLClassLoader + import scala.annotation.elidable import scala.collection.mutable import util._ @@ -429,6 +431,22 @@ abstract class SymbolTable extends macros.Universe cache } + /** Closes the provided classloader at the conclusion of this Run */ + final def recordClassloader(loader: ClassLoader): ClassLoader = { + def attemptClose(loader: ClassLoader): Unit = { + loader match { + case u: URLClassLoader => debuglog("Closing classloader " + u); u.close() + case _ => + } + } + caches ::= new WeakReference((new Clearable { + def clear(): Unit = { + attemptClose(loader) + } + })) + loader + } + /** * Removes a cache from the per-run caches. This is useful for testing: it allows running the * compiler and then inspect the state of a cache. diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala index 7b82aa3e9f24..c1fc858cef11 100644 --- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala +++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala @@ -249,6 +249,9 @@ abstract class UnPickler { else NoSymbol } + if (owner == definitions.ScalaPackageClass && name == tpnme.AnyRef) + return definitions.AnyRefClass + // (1) Try name. localDummy orElse fromName(name) orElse { // (2) Try with expanded name. Can happen if references to private diff --git a/src/reflect/scala/reflect/io/RootPath.scala b/src/reflect/scala/reflect/io/RootPath.scala new file mode 100644 index 000000000000..6634d323481e --- /dev/null +++ b/src/reflect/scala/reflect/io/RootPath.scala @@ -0,0 +1,51 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.reflect.io + +import java.io.Closeable +import java.nio +import java.nio.file.{FileSystems, Files} + + +abstract class RootPath extends Closeable { + def root: nio.file.Path +} + +object RootPath { + def apply(path: nio.file.Path, writable: Boolean): RootPath = { + if (path.getFileName.toString.endsWith(".jar")) { + import java.net.URI + val zipFile = URI.create("jar:file:" + path.toUri.getPath) + val env = new java.util.HashMap[String, String]() + if (!Files.exists(path.getParent)) + Files.createDirectories(path.getParent) + if (writable) { + env.put("create", "true") + if (Files.exists(path)) + Files.delete(path) + } + val zipfs = FileSystems.newFileSystem(zipFile, env) + new RootPath { + def root = zipfs.getRootDirectories.iterator().next() + def close(): Unit = { + zipfs.close() + } + } + } else { + new RootPath { + override def root: nio.file.Path = path + override def close(): Unit = () + } + } + } +} \ No newline at end of file diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala index ee109799f3d5..05c591b9d535 100644 --- a/src/reflect/scala/reflect/io/ZipArchive.scala +++ b/src/reflect/scala/reflect/io/ZipArchive.scala @@ -149,6 +149,7 @@ abstract class ZipArchive(override val file: JFile, release: Option[String]) ext if (entry.isDirectory) ensureDir(dirs, entry.getName, entry) else ensureDir(dirs, dirName(entry.getName), null) } + def close(): Unit } /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArchive(file, release) { @@ -232,6 +233,7 @@ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArch } } finally { if (ZipArchive.closeZipFile) zipFile.close() + else closeables ::= zipFile } root } @@ -259,6 +261,10 @@ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArch case x: FileZipArchive => file.getAbsoluteFile == x.file.getAbsoluteFile case _ => false } + private[this] var closeables: List[java.io.Closeable] = Nil + override def close(): Unit = { + closeables.foreach(_.close) + } } /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ final class URLZipArchive(val url: URL) extends ZipArchive(null) { @@ -266,6 +272,7 @@ final class URLZipArchive(val url: URL) extends ZipArchive(null) { val root = new DirEntry("/") val dirs = mutable.HashMap[String, DirEntry]("" -> root) val in = new ZipInputStream(new ByteArrayInputStream(Streamable.bytes(input))) + closeables ::= in @tailrec def loop() { val zipEntry = in.getNextEntry() @@ -327,6 +334,10 @@ final class URLZipArchive(val url: URL) extends ZipArchive(null) { case x: URLZipArchive => url == x.url case _ => false } + private[this] var closeables: List[java.io.Closeable] = Nil + def close(): Unit = { + closeables.foreach(_.close()) + } } final class ManifestResources(val url: URL) extends ZipArchive(null) { @@ -334,6 +345,8 @@ final class ManifestResources(val url: URL) extends ZipArchive(null) { val root = new DirEntry("/") val dirs = mutable.HashMap[String, DirEntry]("" -> root) val manifest = new Manifest(input) + closeables ::= input + val iter = manifest.getEntries().keySet().iterator().asScala.filter(_.endsWith(".class")).map(new ZipEntry(_)) for (zipEntry <- iter) { @@ -385,4 +398,8 @@ final class ManifestResources(val url: URL) extends ZipArchive(null) { } } } + private[this] var closeables: 
List[java.io.Closeable] = Nil + override def close(): Unit = { + closeables.foreach(_.close()) + } } diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index 764bb4d48543..73cbc828eea4 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -28,6 +28,7 @@ import scala.tools.nsc.util._ import ScalaClassLoader.URLClassLoader import scala.tools.nsc.util.Exceptional.unwrap import java.net.URL +import java.io.Closeable import scala.tools.util.PathResolver import scala.util.{Try => Trying} @@ -63,7 +64,7 @@ import scala.util.{Try => Trying} * @author Moez A. Abdel-Gawad * @author Lex Spoon */ -class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends Imports with PresentationCompilation { +class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends Imports with PresentationCompilation with Closeable { imain => def this(initialSettings: Settings) = this(initialSettings, IMain.defaultOut) @@ -100,7 +101,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def compilerClasspath: Seq[java.net.URL] = ( if (isInitializeComplete) global.classPath.asURLs - else new PathResolver(settings).resultAsURLs // the compiler's classpath + else new PathResolver(settings, global.closeableRegistry).resultAsURLs // the compiler's classpath ) def settings = initialSettings // Run the code body with the given boolean settings flipped to true. @@ -683,6 +684,9 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends */ def close() { reporter.flush() + if (isInitializeComplete) { + global.close() + } } /** Here is where we: diff --git a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala index 106e649ac69f..7a601ab65750 100644 --- a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala +++ b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala @@ -16,7 +16,7 @@ import scala.reflect.internal.util.RangePosition import scala.reflect.io.AbstractFile import scala.tools.nsc.backend.JavaPlatform import scala.tools.nsc.util.ClassPath -import scala.tools.nsc.{interactive, Settings} +import scala.tools.nsc.{interactive, CloseableRegistry, Settings} import scala.tools.nsc.reporters.StoreReporter import scala.tools.nsc.classpath._ @@ -63,10 +63,6 @@ trait PresentationCompilation { * You may downcast the `reporter` to `StoreReporter` to access type errors. 
*/ def newPresentationCompiler(): interactive.Global = { - def mergedFlatClasspath = { - val replOutClasspath = ClassPathFactory.newClassPath(replOutput.dir, settings) - AggregateClassPath(replOutClasspath :: global.platform.classPath :: Nil) - } def copySettings: Settings = { val s = new Settings(_ => () /* ignores "bad option -nc" errors, etc */) s.processArguments(global.settings.recreateArgs, processAll = false) @@ -75,6 +71,11 @@ trait PresentationCompilation { } val storeReporter: StoreReporter = new StoreReporter val interactiveGlobal = new interactive.Global(copySettings, storeReporter) { self => + def mergedFlatClasspath = { + val replOutClasspath = ClassPathFactory.newClassPath(replOutput.dir, settings, closeableRegistry) + AggregateClassPath(replOutClasspath :: global.platform.classPath :: Nil) + } + override lazy val platform: ThisPlatform = { new JavaPlatform { lazy val global: self.type = self diff --git a/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala index 1273d6ac92fb..72b5a7424ceb 100644 --- a/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala +++ b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala @@ -30,16 +30,11 @@ trait ReplGlobal extends Global { super.abort(msg) } - override lazy val analyzer = new { - val global: ReplGlobal.this.type = ReplGlobal.this - } with Analyzer { - - override protected def findMacroClassLoader(): ClassLoader = { - val loader = super.findMacroClassLoader - macroLogVerbose("macro classloader: initializing from a REPL classloader: %s".format(global.classPath.asURLs)) - val virtualDirectory = globalSettings.outputDirs.getSingleOutput.get - new util.AbstractFileClassLoader(virtualDirectory, loader) {} - } + override protected[scala] def findMacroClassLoader(): ClassLoader = { + val loader = super.findMacroClassLoader + analyzer.macroLogVerbose("macro classloader: initializing from a REPL classloader: %s".format(classPath.asURLs)) + val virtualDirectory = analyzer.globalSettings.outputDirs.getSingleOutput.get + new util.AbstractFileClassLoader(virtualDirectory, loader) {} } override def optimizerClassPath(base: ClassPath): ClassPath = { @@ -47,7 +42,7 @@ trait ReplGlobal extends Global { case None => base case Some(out) => // Make bytecode of previous lines available to the inliner - val replOutClasspath = ClassPathFactory.newClassPath(settings.outputDirs.getSingleOutput.get, settings) + val replOutClasspath = ClassPathFactory.newClassPath(settings.outputDirs.getSingleOutput.get, settings, closeableRegistry) AggregateClassPath.createAggregate(platform.classPath, replOutClasspath) } } diff --git a/src/scalap/scala/tools/scalap/Main.scala b/src/scalap/scala/tools/scalap/Main.scala index 42b0fdfb236a..5e3d633d429f 100644 --- a/src/scalap/scala/tools/scalap/Main.scala +++ b/src/scalap/scala/tools/scalap/Main.scala @@ -14,8 +14,9 @@ package scala package tools.scalap import java.io.{ByteArrayOutputStream, OutputStreamWriter, PrintStream} + import scala.reflect.NameTransformer -import scala.tools.nsc.Settings +import scala.tools.nsc.{CloseableRegistry, Settings} import scala.tools.nsc.classpath.{AggregateClassPath, ClassPathFactory} import scala.tools.nsc.util.ClassPath import scala.tools.util.PathResolver @@ -185,14 +186,18 @@ object Main extends Main { settings.YdisableFlatCpCaching.value = arguments contains opts.disableFlatClassPathCaching settings.Ylogcp.value = arguments contains opts.logClassPath - val path = createClassPath(cpArg, settings) - - // print the classpath if 
output is verbose - if (verbose) - Console.println(Console.BOLD + "CLASSPATH" + Console.RESET + " = " + path.asClassPathString) - - // process all given classes - arguments.getOthers foreach process(arguments, path) + val registry = new CloseableRegistry + try { + val path = createClassPath(cpArg, settings, registry) + // print the classpath if output is verbose + if (verbose) + Console.println(Console.BOLD + "CLASSPATH" + Console.RESET + " = " + path.asClassPathString) + + // process all given classes + arguments.getOthers foreach process(arguments, path) + } finally { + registry.close() + } } private def parseArguments(args: Array[String]) = @@ -208,11 +213,11 @@ object Main extends Main { .withOption(opts.logClassPath) .parse(args) - private def createClassPath(cpArg: Option[String], settings: Settings) = cpArg match { + private def createClassPath(cpArg: Option[String], settings: Settings, closeableRegistry: CloseableRegistry) = cpArg match { case Some(cp) => - AggregateClassPath(new ClassPathFactory(settings).classesInExpandedPath(cp)) + AggregateClassPath(new ClassPathFactory(settings, closeableRegistry).classesInExpandedPath(cp)) case _ => settings.classpath.value = "." // include '.' in the default classpath scala/bug#6669 - new PathResolver(settings).result + new PathResolver(settings, closeableRegistry).result } } diff --git a/test/files/pos/java-inherited-type/Client.scala b/test/files/pos/java-inherited-type/Client.scala new file mode 100644 index 000000000000..a644363cdd4c --- /dev/null +++ b/test/files/pos/java-inherited-type/Client.scala @@ -0,0 +1,19 @@ +object Client { + def test= { + Test.Outer.Nested.sig + Test.Outer.Nested.sig1 + Test.Outer.Nested.sig2 + val o = new Test.Outer + new o.Nested1().sig + new o.Nested1().sig1 + new o.Nested1().sig2 + } + + def test1 = { + val t = new Test + val o = new t.Outer1 + new o.Nested1().sig + new o.Nested1().sig1 + new o.Nested1().sig2 + } +} diff --git a/test/files/pos/java-inherited-type/Test.java b/test/files/pos/java-inherited-type/Test.java new file mode 100644 index 000000000000..ae89a6559a67 --- /dev/null +++ b/test/files/pos/java-inherited-type/Test.java @@ -0,0 +1,30 @@ +public class Test { + static class OuterBase implements OuterBaseInterface { + static class StaticInner {} + class Inner {} + } + interface OuterBaseInterface { + interface InnerFromInterface {} + } + public static class Outer extends OuterBase { + public static class Nested { + public static P sig; // was: "type StaticInner", "not found: type Inner", "not found: type InnerFromInterface" + public static P sig1; // was: "type StaticInner is not a member of Test.Outer" + public static P sig2; + + } + public class Nested1 { + public P sig; // was: "not found: type StaticInner" + public P sig1; // was: "type StaticInner is not a member of Test.Outer" + public P sig2; + } + } + public class Outer1 extends OuterBase { + public class Nested1 { + public P sig; // was: "not found: type StaticInner" + public P sig1; // was: "type StaticInner is not a member of Test.Outer" + public P sig2; + } + } + public static class P{} +} diff --git a/test/files/pos/java-inherited-type1/J.java b/test/files/pos/java-inherited-type1/J.java new file mode 100644 index 000000000000..ba9963104699 --- /dev/null +++ b/test/files/pos/java-inherited-type1/J.java @@ -0,0 +1,9 @@ +class J extends S { + // These references all work in Javac because `object O { class I }` erases to `O$I` + + void select1(S1.Inner1 i) { new S1.Inner1(); } + void ident(Inner i) {} + + void ident1(Inner1 i) {} + 
void select(S.Inner i) { new S.Inner(); } +} diff --git a/test/files/pos/java-inherited-type1/S.scala b/test/files/pos/java-inherited-type1/S.scala new file mode 100644 index 000000000000..155efc0e06ba --- /dev/null +++ b/test/files/pos/java-inherited-type1/S.scala @@ -0,0 +1,9 @@ +class S extends S1 +object S { + class Inner +} + +class S1 +object S1 { + class Inner1 +} diff --git a/test/files/pos/java-inherited-type1/Test.scala b/test/files/pos/java-inherited-type1/Test.scala new file mode 100644 index 000000000000..75a171b592af --- /dev/null +++ b/test/files/pos/java-inherited-type1/Test.scala @@ -0,0 +1,8 @@ +object Test { + val j = new J + // force completion of these signatures + j.ident(null); + j.ident1(null); + j.select(null); + j.select1(null); +} diff --git a/test/files/run/t10490-2.check b/test/files/run/t10490-2.check new file mode 100644 index 000000000000..473ecde25dba --- /dev/null +++ b/test/files/run/t10490-2.check @@ -0,0 +1 @@ +Foo$Bar was instantiated! diff --git a/test/files/run/t10490-2/JavaClass.java b/test/files/run/t10490-2/JavaClass.java new file mode 100644 index 000000000000..9774c05a0d91 --- /dev/null +++ b/test/files/run/t10490-2/JavaClass.java @@ -0,0 +1,4 @@ +public class JavaClass { + // This is defined in ScalaClass + public static final Foo.Bar bar = new Foo.Bar(); +} \ No newline at end of file diff --git a/test/files/run/t10490-2/ScalaClass.scala b/test/files/run/t10490-2/ScalaClass.scala new file mode 100644 index 000000000000..0528133cbf2c --- /dev/null +++ b/test/files/run/t10490-2/ScalaClass.scala @@ -0,0 +1,18 @@ +/* Similar to t10490 -- but defines `Foo` in the object. + * Placing this test within t10490 makes it work without a fix, that's why it's independent. + * Note that this was already working, we add it to make sure we don't regress + */ + +class Foo +object Foo { + class Bar { + override def toString: String = "Foo$Bar was instantiated!" + } +} + +object Test { + def main(args: Array[String]): Unit = { + // JavaClass is the user of the Scala defined classes + println(JavaClass.bar) + } +} \ No newline at end of file diff --git a/test/files/run/t10490.check b/test/files/run/t10490.check new file mode 100644 index 000000000000..473ecde25dba --- /dev/null +++ b/test/files/run/t10490.check @@ -0,0 +1 @@ +Foo$Bar was instantiated! diff --git a/test/files/run/t10490/JavaClass.java b/test/files/run/t10490/JavaClass.java new file mode 100644 index 000000000000..08b9e0bd55d4 --- /dev/null +++ b/test/files/run/t10490/JavaClass.java @@ -0,0 +1,4 @@ +public class JavaClass { + // This is defined in ScalaClass + public static final Foo.Bar bar = (new Foo()).new Bar(); +} \ No newline at end of file diff --git a/test/files/run/t10490/ScalaClass.scala b/test/files/run/t10490/ScalaClass.scala new file mode 100644 index 000000000000..da3c682b5033 --- /dev/null +++ b/test/files/run/t10490/ScalaClass.scala @@ -0,0 +1,13 @@ +class Foo { + class Bar { + override def toString: String = "Foo$Bar was instantiated!" 
+  }
+}
+
+object Test {
+  def main(args: Array[String]): Unit = {
+    // JavaClass is the user of the Scala defined classes
+    println(JavaClass.bar)
+    //println(JavaClass.baz)
+  }
+}
\ No newline at end of file
diff --git a/test/junit/scala/tools/nsc/GlobalCustomizeClassloaderTest.scala b/test/junit/scala/tools/nsc/GlobalCustomizeClassloaderTest.scala
new file mode 100644
index 000000000000..500379706090
--- /dev/null
+++ b/test/junit/scala/tools/nsc/GlobalCustomizeClassloaderTest.scala
@@ -0,0 +1,72 @@
+package scala.tools.nsc
+
+import org.junit.{Assert, Test}
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.reflect.internal.util.{AbstractFileClassLoader, NoSourceFile}
+import scala.reflect.io.{Path, VirtualDirectory}
+import scala.tools.nsc.plugins.{Plugin, PluginComponent}
+
+@RunWith(classOf[JUnit4])
+class GlobalCustomizeClassloaderTest {
+  // Demonstrate extension points to customise creation of the classloaders used to load compiler
+  // plugins and macro implementations.
+  //
+  // A use case could be for a build tool to take control of caching of these classloaders in a way
+  // that properly closes them before one of the elements needs to be overwritten.
+  @Test def test(): Unit = {
+    val g = new Global(new Settings) {
+      override protected[scala] def findMacroClassLoader(): ClassLoader = getClass.getClassLoader
+      override protected def findPluginClassLoader(classpath: Seq[Path]): ClassLoader = {
+        val d = new VirtualDirectory("", None)
+        val xml = d.fileNamed("scalac-plugin.xml")
+        val out = xml.bufferedOutput
+        out.write(
+          s"""<plugin>
+             |<name>sample-plugin</name>
+             |<classname>${classOf[SamplePlugin].getName}</classname>
+             |</plugin>
+             |""".stripMargin.getBytes())
+        out.close()
+        new AbstractFileClassLoader(d, getClass.getClassLoader)
+      }
+    }
+    g.settings.usejavacp.value = true
+    g.settings.plugin.value = List("sample")
+    new g.Run
+    assert(g.settings.log.value == List("typer"))
+
+    val unit = new g.CompilationUnit(NoSourceFile)
+    val context = g.analyzer.rootContext(unit)
+    val typer = g.analyzer.newTyper(context)
+    import g._
+    SampleMacro.data = "in this classloader"
+    val typed = typer.typed(q"scala.tools.nsc.SampleMacro.m")
+    assert(!reporter.hasErrors)
+    typed match {
+      case Typed(Literal(Constant(s: String)), _) => Assert.assertEquals(SampleMacro.data, s)
+      case _ => Assert.fail()
+    }
+    g.close()
+  }
+}
+
+object SampleMacro {
+  var data: String = _
+  import language.experimental.macros
+  import scala.reflect.macros.blackbox.Context
+  def m: String = macro impl
+  def impl(c: Context): c.Tree = c.universe.Literal(c.universe.Constant(data))
+}
+
+class SamplePlugin(val global: Global) extends Plugin {
+  override val name: String = "sample"
+  override val description: String = "sample"
+  override val components: List[PluginComponent] = Nil
+  override def init(options: List[String], error: String => Unit): Boolean = {
+    val result = super.init(options, error)
+    global.settings.log.value = List("typer")
+    result
+  }
+}
diff --git a/test/junit/scala/tools/nsc/classpath/JrtClassPathTest.scala b/test/junit/scala/tools/nsc/classpath/JrtClassPathTest.scala
index b46677d6d477..fdc2b9caae69 100644
--- a/test/junit/scala/tools/nsc/classpath/JrtClassPathTest.scala
+++ b/test/junit/scala/tools/nsc/classpath/JrtClassPathTest.scala
@@ -8,7 +8,7 @@ import org.junit.Test
 import org.junit.runner.RunWith
 import org.junit.runners.JUnit4
 
-import scala.tools.nsc.Settings
+import scala.tools.nsc.{CloseableRegistry, Settings}
 import scala.tools.nsc.backend.jvm.AsmUtils
 import scala.tools.nsc.util.ClassPath
 import
@@ -19,14 +19,15 @@ class JrtClassPathTest {
   @Test def lookupJavaClasses(): Unit = {
     val specVersion = scala.util.Properties.javaSpecVersion
     // Run the test using the JDK8 or 9 provider for rt.jar depending on the platform the test is running on.
+    val closeableRegistry = new CloseableRegistry
     val cp: ClassPath =
       if (specVersion == "" || specVersion == "1.8") {
         val settings = new Settings()
-        val resolver = new PathResolver(settings)
-        val elements = new ClassPathFactory(settings).classesInPath(resolver.Calculated.javaBootClassPath)
+        val resolver = new PathResolver(settings, closeableRegistry)
+        val elements = new ClassPathFactory(settings, closeableRegistry).classesInPath(resolver.Calculated.javaBootClassPath)
         AggregateClassPath(elements)
       }
-      else JrtClassPath(None).get
+      else JrtClassPath(None, closeableRegistry).get
 
     assertEquals(Nil, cp.classes(""))
     assertTrue(cp.packages("java").toString, cp.packages("java").exists(_.name == "java.lang"))
@@ -37,5 +38,7 @@ class JrtClassPathTest {
     assertTrue(cp.list("java.lang").classesAndSources.exists(_.name == "Object"))
     assertTrue(cp.findClass("java.lang.Object").isDefined)
    assertTrue(cp.findClassFile("java.lang.Object").isDefined)
+
+    closeableRegistry.close()
   }
 }
diff --git a/test/junit/scala/tools/nsc/classpath/PathResolverBaseTest.scala b/test/junit/scala/tools/nsc/classpath/PathResolverBaseTest.scala
index d3d4289d8b94..e8025ec69ec5 100644
--- a/test/junit/scala/tools/nsc/classpath/PathResolverBaseTest.scala
+++ b/test/junit/scala/tools/nsc/classpath/PathResolverBaseTest.scala
@@ -4,13 +4,15 @@ package scala.tools.nsc.classpath
 
 import java.io.File
+
 import org.junit.Assert._
 import org.junit._
 import org.junit.rules.TemporaryFolder
 import org.junit.runner.RunWith
 import org.junit.runners.JUnit4
+
 import scala.tools.nsc.util.ClassPath
-import scala.tools.nsc.Settings
+import scala.tools.nsc.{CloseableRegistry, Settings}
 import scala.tools.util.PathResolver
 
 @RunWith(classOf[JUnit4])
@@ -57,7 +59,7 @@ class PathResolverBaseTest {
   def deleteTempDir: Unit = tempDir.delete()
 
   private def createFlatClassPath(settings: Settings) =
-    new PathResolver(settings).result
+    new PathResolver(settings, new CloseableRegistry).result
 
   @Test
   def testEntriesFromListOperationAgainstSeparateMethods: Unit = {
diff --git a/test/junit/scala/tools/nsc/classpath/ZipAndJarFileLookupFactoryTest.scala b/test/junit/scala/tools/nsc/classpath/ZipAndJarFileLookupFactoryTest.scala
index f49f04d2c56d..b58effbcfa36 100644
--- a/test/junit/scala/tools/nsc/classpath/ZipAndJarFileLookupFactoryTest.scala
+++ b/test/junit/scala/tools/nsc/classpath/ZipAndJarFileLookupFactoryTest.scala
@@ -14,7 +14,8 @@ class ZipAndJarFileLookupFactoryTest {
     Files.delete(f)
     val g = new scala.tools.nsc.Global(new scala.tools.nsc.Settings())
     assert(!g.settings.YdisableFlatCpCaching.value) // we're testing with our JAR metadata caching enabled.
-    def createCp = ZipAndJarClassPathFactory.create(AbstractFile.getFile(f.toFile), g.settings)
+    val closeableRegistry = new CloseableRegistry
+    def createCp = ZipAndJarClassPathFactory.create(AbstractFile.getFile(f.toFile), g.settings, closeableRegistry)
     try {
       createZip(f, Array(), "p1/C.class")
       createZip(f, Array(), "p2/X.class")
@@ -41,7 +42,10 @@ class ZipAndJarFileLookupFactoryTest {
     // And that instance should see D, not C, in package p1.
     assert(cp3.findClass("p1.C").isEmpty)
     assert(cp3.findClass("p1.D").isDefined)
-  } finally Files.delete(f)
+  } finally {
+    Files.delete(f)
+    closeableRegistry.close()
+  }
 }
 
 def createZip(zipLocation: Path, content: Array[Byte], internalPath: String): Unit = {
diff --git a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
index cbd5634f292f..e2b11cfecd29 100644
--- a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
+++ b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
@@ -36,7 +36,7 @@ class SymbolTableForUnitTesting extends SymbolTable {
 
   def platformPhases: List[SubComponent] = Nil
 
-  private[nsc] lazy val classPath: ClassPath = new PathResolver(settings).result
+  private[nsc] lazy val classPath: ClassPath = new PathResolver(settings, new CloseableRegistry).result
 
   def isMaybeBoxed(sym: Symbol): Boolean = ???
   def needCompile(bin: AbstractFile, src: AbstractFile): Boolean = ???
diff --git a/test/scaladoc/resources/t11365.java b/test/scaladoc/resources/t11365.java
new file mode 100644
index 000000000000..ea5edf9f8d38
--- /dev/null
+++ b/test/scaladoc/resources/t11365.java
@@ -0,0 +1,18 @@
+/**
+ * A package header
+ */
+package test.scaladoc;
+
+/**
+ * Testing java comments don't flag Scala specific errors
+ */
+public class JavaComments {
+  static @interface Annot {
+  }
+
+  private class Route {}
+  final java.util.List<Route> routes = null;
+
+  abstract class AnnotImpl implements Annot {}
+
+}
diff --git a/test/scaladoc/run/t11365.check b/test/scaladoc/run/t11365.check
new file mode 100644
index 000000000000..619c56180bb9
--- /dev/null
+++ b/test/scaladoc/run/t11365.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/t11365.scala b/test/scaladoc/run/t11365.scala
new file mode 100644
index 000000000000..2de3844fab94
--- /dev/null
+++ b/test/scaladoc/run/t11365.scala
@@ -0,0 +1,18 @@
+import scala.tools.nsc.doc.Universe
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocJavaModelTest
+
+object Test extends ScaladocJavaModelTest {
+
+  override def resourceFile = "t11365.java"
+  override def scaladocSettings = ""
+
+  def testModel(rootPackage: Package) = {
+    import access._
+    val Tag = ":marker:"
+
+    val base = rootPackage._package("test")._package("scaladoc")
+    val clazz = base._class("JavaComments")
+    // Just testing that we haven't hit a compiler error.
+  }
+}
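
The GlobalCustomizeClassloaderTest above illustrates the intended use of the two extension points: a build tool subclasses Global and supplies its own classloaders from findMacroClassLoader() and findPluginClassLoader(classpath), so it can decide when those loaders are cached and when they are closed. A caching integration could look roughly like the sketch below. The CachingGlobal object, its pluginLoaders map and the choice of keying on the plugin classpath are illustrative assumptions, not part of this patch; the sketch sits in package scala.tools.nsc, like the test, so the protected[scala] override is permitted.

package scala.tools.nsc

import java.util.concurrent.ConcurrentHashMap

import scala.reflect.io.Path

// Hypothetical build-tool integration (not part of this patch): reuse classloaders
// across Global instances instead of letting every compiler instance create fresh ones.
object CachingGlobal {
  // One shared plugin loader per distinct plugin classpath (illustrative cache).
  private val pluginLoaders = new ConcurrentHashMap[Seq[Path], ClassLoader]()

  def apply(settings: Settings, macroLoader: ClassLoader): Global =
    new Global(settings) {
      // Hand back the loader the build tool already manages for macro implementations.
      override protected[scala] def findMacroClassLoader(): ClassLoader = macroLoader

      // Create a plugin loader at most once per classpath and reuse it afterwards.
      override protected def findPluginClassLoader(classpath: Seq[Path]): ClassLoader = {
        val cached = pluginLoaders.get(classpath)
        if (cached != null) cached
        else {
          val created = super.findPluginClassLoader(classpath)
          val previous = pluginLoaders.putIfAbsent(classpath, created)
          if (previous != null) previous else created
        }
      }
    }
}

With this arrangement the loaders returned from the overrides stay under the build tool's control, and closing them (for example when a plugin jar is about to be overwritten) is the tool's responsibility; resources the compiler registers internally are still released through g.close(), as the final call in the test does. The same ownership pattern appears in the updated classpath tests: they create a CloseableRegistry, pass it to PathResolver or ClassPathFactory, and call close() on it once the classpath is no longer needed.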