diff --git a/.gitignore b/.gitignore index 32a1665721af..20d700dd1282 100644 --- a/.gitignore +++ b/.gitignore @@ -39,9 +39,9 @@ # eclipse, intellij /.classpath /.project -/src/intellij/*.iml -/src/intellij/*.ipr -/src/intellij/*.iws +/src/intellij*/*.iml +/src/intellij*/*.ipr +/src/intellij*/*.iws **/.cache /.idea /.settings diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 000000000000..e90fc35267a2 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,20 @@ +# this builds the spec using jekyll +# based on http://www.paperplanes.de/2013/8/13/deploying-your-jekyll-blog-to-s3-with-travis-ci.html +language: ruby +rvm: + - 1.9.3 +script: bundle exec jekyll build -s spec/ -d build/spec +install: bundle install + +# https://gist.github.com/kzap/5819745, http://docs.travis-ci.com/user/travis-pro/ +env: + - secure: "WWU490z7DWAI8MidMyTE+i+Ppgjg46mdr7PviF6P6ulrPlRRKOtKXpLvzgJoQmluwzEK6/+iH7D5ybCUYMLdKkQM9kSqaXJ0jeqjOelaaa1LmuOQ8IbuT8O9DwHzjjp/n4Lj/KRvvN4nGxCMI7HLla4gunvPA7M6WK7FA+YKCOU=" # set PRIV_KEY_SECRET to password used to encrypt spec/id_dsa_travis.enc + +# using S3 would be simpler, but we want to upload to scala-lang.org +# after_success: bundle exec s3_website push --headless +# the key is restricted using forced commands so that it can only upload to the directory we need here +after_success: + - openssl aes-256-cbc -pass "pass:$PRIV_KEY_SECRET" -in spec/id_dsa_travis.enc -out spec/id_dsa_travis -d -a + - chmod 600 spec/id_dsa_travis + - eval "$(ssh-agent)" + - '[ "${TRAVIS_PULL_REQUEST}" = "false" ] && ssh-add -D && ssh-add spec/id_dsa_travis && rsync -e "ssh -o StrictHostKeyChecking=no" -rzv build/spec/ scalatest@chara.epfl.ch:/home/linuxsoft/archives/scala/spec/2.11/' \ No newline at end of file diff --git a/Gemfile b/Gemfile new file mode 100644 index 000000000000..53924a4381bd --- /dev/null +++ b/Gemfile @@ -0,0 +1,7 @@ +# To build the spec on Travis CI +source "https://rubygems.org" + +gem "jekyll", "2.0.0.alpha.2" +gem "rouge" +# gem 's3_website' +# gem 'redcarpet' diff --git a/bincompat-backward.whitelist.conf b/bincompat-backward.whitelist.conf index 10c1da59b8b6..637bd586e0b0 100644 --- a/bincompat-backward.whitelist.conf +++ b/bincompat-backward.whitelist.conf @@ -4,8 +4,8 @@ filter { # "scala.concurrent.impl" # "scala.reflect.runtime" ] - // see SI-8372 problems=[ + // see SI-8372 { matchName="scala.collection.mutable.ArrayOps#ofChar.unzip" problemName=IncompatibleMethTypeProblem @@ -101,6 +101,122 @@ filter { { matchName="scala.collection.mutable.ArrayOps#ofDouble.unzip3" problemName=IncompatibleMethTypeProblem + }, + // see SI-8200 + { + matchName="scala.reflect.api.StandardLiftables#StandardLiftableInstances.liftTree" + problemName=MissingMethodProblem + }, + // see SI-8331 + { + matchName="scala.reflect.api.Internals#ReificationSupportApi#SyntacticTypeAppliedExtractor.unapply" + problemName=IncompatibleResultTypeProblem + }, + { + matchName="scala.reflect.api.Internals#ReificationSupportApi#SyntacticTypeAppliedExtractor.unapply" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticSelectType" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticAppliedType" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticSelectTerm" + problemName=MissingMethodProblem + }, + // see SI-8366 + { + matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticPartialFunction" + 
problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.Mirror.symbolOf" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.Mirror.typeOf" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.Mirror.weakTypeOf" + problemName=MissingMethodProblem + }, + // see SI-8388 + { + matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticIdentExtractor" + problemName=MissingClassProblem + }, + { + matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticIdent" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticSingletonType" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticTermIdent" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticTypeIdent" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticCompoundType" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticAnnotatedType" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticTypeProjection" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticExistentialType" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.runtime.SynchronizedOps.newNestedScope" + problemName=MissingMethodProblem + }, + // see github.com/scala/scala/pull/3925, SI-8627, SI-6440 + { + matchName="scala.collection.TraversableLike.filterImpl" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.immutable.Stream.filteredTail" + problemName=MissingMethodProblem + }, + // https://github.com/scala/scala/pull/3848 -- SI-8680 + { + matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$6" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$5" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$4" + problemName=MissingMethodProblem + }, + // SI-8946 + { + matchName="scala.reflect.runtime.ThreadLocalStorage#MyThreadLocalStorage.values" + problemName=MissingMethodProblem + }, + // the below method was the unused private (sic!) 
method but the compatibility checker was complaining about it + { + matchName="scala.reflect.io.ZipArchive.scala$reflect$io$ZipArchive$$walkIterator" + problemName=MissingMethodProblem } ] -} \ No newline at end of file +} diff --git a/bincompat-forward.whitelist.conf b/bincompat-forward.whitelist.conf index 96994f8969c3..9f27600eb8f5 100644 --- a/bincompat-forward.whitelist.conf +++ b/bincompat-forward.whitelist.conf @@ -4,8 +4,8 @@ filter { # "scala.concurrent.impl" # "scala.reflect.runtime" ] - // see SI-8372 problems=[ + // see SI-8372 { matchName="scala.collection.mutable.ArrayOps#ofChar.unzip" problemName=IncompatibleMethTypeProblem @@ -101,6 +101,316 @@ filter { { matchName="scala.collection.mutable.ArrayOps#ofDouble.unzip3" problemName=IncompatibleMethTypeProblem + }, + // see SI-8200 + { + matchName="scala.reflect.api.Liftables#Liftable.liftTree" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.StandardLiftables#StandardLiftableInstances.liftTree" + problemName=MissingMethodProblem + }, + // see SI-8331 + { + matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticSelectType" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticAppliedType" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticSelectTerm" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticSelectTermExtractor" + problemName=MissingClassProblem + }, + { + matchName="scala.reflect.api.Internals#ReificationSupportApi#SyntacticTypeAppliedExtractor.unapply" + problemName=IncompatibleResultTypeProblem + }, + { + matchName="scala.reflect.api.Internals#ReificationSupportApi#SyntacticTypeAppliedExtractor.unapply" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticSelectTypeExtractor" + problemName=MissingClassProblem + }, + // see SI-8366 + { + matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticPartialFunctionExtractor" + problemName=MissingClassProblem + }, + { + matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticPartialFunction" + problemName=MissingMethodProblem + }, + // see SI-8428 + { + matchName="scala.collection.Iterator#ConcatIterator.this" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.Mirror.symbolOf" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.Mirror.typeOf" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.Mirror.weakTypeOf" + problemName=MissingMethodProblem + }, + // see SI-8388 + { + matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticSingletonType" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticTermIdent" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticTypeIdent" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticCompoundType" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticAnnotatedType" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticTypeProjection" + problemName=MissingMethodProblem + }, + { + 
matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticExistentialType" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticIdent" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticAnnotatedTypeExtractor" + problemName=MissingClassProblem + }, + { + matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticTermIdentExtractor" + problemName=MissingClassProblem + }, + { + matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacitcSingletonTypeExtractor" + problemName=MissingClassProblem + }, + { + matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticTypeIdentExtractor" + problemName=MissingClassProblem + }, + { + matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticCompoundTypeExtractor" + problemName=MissingClassProblem + }, + { + matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticExistentialTypeExtractor" + problemName=MissingClassProblem + }, + { + matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticTypeProjectionExtractor" + problemName=MissingClassProblem + }, + { + matchName="scala.reflect.runtime.JavaMirrors#JavaMirror.scala$reflect$runtime$JavaMirrors$JavaMirror$$followStatic" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.runtime.SynchronizedOps.newNestedScope" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.runtime.JavaUniverse" + problemName=MissingTypesProblem + }, + { + matchName="scala.reflect.runtime.JavaUniverse.reporter" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.runtime.JavaUniverse$PerRunReporting" + problemName=MissingClassProblem + }, + { + matchName="scala.reflect.runtime.JavaUniverse.currentRun" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.runtime.JavaUniverse.PerRunReporting" + problemName=MissingMethodProblem + }, + // see SI-5919 + { + matchName="scala.reflect.api.TypeTags$PredefTypeCreator" + problemName=MissingTypesProblem + }, + { + matchName="scala.reflect.api.TreeCreator" + problemName=MissingTypesProblem + }, + { + matchName="scala.reflect.api.TypeCreator" + problemName=MissingTypesProblem + }, + { + matchName="scala.reflect.api.PredefTypeCreator" + problemName=MissingClassProblem + }, + // see github.com/scala/scala/pull/3925, SI-8627, SI-6440 + { + matchName="scala.collection.IterableViewLike#AbstractTransformed.filterImpl" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.AbstractTraversable.filterImpl" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.TraversableViewLike#AbstractTransformed.filterImpl" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.TraversableLike.filterImpl" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.SeqViewLike#AbstractTransformed.filterImpl" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.immutable.TreeSet.filterImpl" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.immutable.Stream.filteredTail" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.immutable.Stream.filterImpl" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.immutable.Stream.filterImpl" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.immutable.StringOps.filterImpl" + problemName=MissingMethodProblem + }, + 
{ + matchName="scala.collection.immutable.TreeMap.filterImpl" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.concurrent.TrieMap.filterImpl" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.mutable.ArrayOps#ofByte.filterImpl" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.mutable.ArrayOps#ofLong.filterImpl" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.mutable.ArrayOps#ofUnit.filterImpl" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.mutable.ArrayOps#ofInt.filterImpl" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.mutable.ArrayOps#ofChar.filterImpl" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.mutable.ArrayOps#ofRef.filterImpl" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.mutable.ArrayOps#ofDouble.filterImpl" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.mutable.ArrayOps#ofFloat.filterImpl" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.mutable.ArrayOps#ofBoolean.filterImpl" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.mutable.ArrayOps#ofShort.filterImpl" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.mutable.TreeSet.filterImpl" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.io.AbstractFile.filterImpl" + problemName=MissingMethodProblem + }, + // https://github.com/scala/scala/pull/3848 -- SI-8680 + { + matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$6" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$5" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$4" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$3" + problemName=MissingMethodProblem + }, + { + matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$2" + problemName=MissingMethodProblem + }, + // changes needed by ZipArchiveFileLookup (the flat classpath representation) + { + matchName="scala.reflect.io.FileZipArchive.allDirs" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.io.FileZipArchive.root" + problemName=MissingMethodProblem + }, + // introduced the harmless method (instead of the repeated code in several places) + { + matchName="scala.reflect.runtime.Settings#MultiStringSetting.valueSetByUser" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.runtime.Settings#BooleanSetting.valueSetByUser" + problemName=MissingMethodProblem + }, + { + matchName="scala.reflect.runtime.Settings#IntSetting.valueSetByUser" + problemName=MissingMethodProblem } ] } diff --git a/build-ant-macros.xml b/build-ant-macros.xml index b90102538b46..609f106d0925 100644 --- a/build-ant-macros.xml +++ b/build-ant-macros.xml @@ -408,12 +408,44 @@ - + - + @@ -432,8 +464,14 @@ + + + + + + @@ -554,6 +592,7 @@ + @@ -711,7 +750,7 @@ - + diff --git a/build.number b/build.number index 51674b6915ab..78e98dffb3fe 100644 --- a/build.number +++ b/build.number @@ -1,9 +1,9 @@ #Tue Sep 11 19:21:09 CEST 2007 version.major=2 -version.minor=11 +version.minor=12 version.patch=0 # This is the -N part of a version. if it's 0, it's dropped from maven versions. 
version.bnum=0 -# Note: To build a release run ant with -Dbuild.release=true -# To build an RC, run ant with -Dmaven.version.suffix=-RCN +# To build a release, see scripts/jobs/scala-release-2.11.x-build +# (normally run by the eponymous job on scala-ci.typesafe.com). \ No newline at end of file diff --git a/build.xml b/build.xml index 763e2711a039..3cd44b9417dc 100755 --- a/build.xml +++ b/build.xml @@ -187,8 +187,6 @@ TODO: - - @@ -274,28 +272,37 @@ TODO: - + - + + + - + + + - - - - - + + + + - + + + + + + + + @@ -304,6 +311,7 @@ TODO: necessary cross suffix (usually something like "_2.11.0-M6". --> + @@ -367,12 +375,13 @@ TODO: - + + @@ -550,6 +559,7 @@ TODO: + @@ -557,12 +567,14 @@ TODO: + + @@ -570,6 +582,7 @@ TODO: + @@ -836,8 +849,7 @@ TODO: --> - - + @@ -966,6 +978,7 @@ TODO: + @@ -979,6 +992,16 @@ TODO: + + + + + + + + + + @@ -1346,27 +1369,45 @@ TODO: srcdir="${test.osgi.src}" jvmargs="${scalacfork.jvmargs}"> - + - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -1428,13 +1469,17 @@ TODO: - + + + - + + + @@ -1500,8 +1545,13 @@ TODO: - - + + + + + + + @@ -1518,9 +1568,10 @@ TODO: - - - + + + + + +To construct tokens, characters are distinguished according to the following +classes (Unicode general category given in parentheses): + +1. Whitespace characters. `\u0020 | \u0009 | \u000D | \u000A`. +1. Letters, which include lower case letters (`Ll`), upper case letters (`Lu`), + titlecase letters (`Lt`), other letters (`Lo`), letter numerals (`Nl`) and the + two characters `\u0024 ‘$’` and `\u005F ‘_’`, which both count as upper case + letters. +1. Digits `‘0’ | … | ‘9’`. +1. Parentheses `‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’ `. +1. Delimiter characters ``‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’ ``. +1. Operator characters. These consist of all printable ASCII characters + `\u0020` - `\u007F` which are in none of the sets above, mathematical + symbols (`Sm`) and other symbols (`So`). + +## Identifiers + +```ebnf +op ::= opchar {opchar} +varid ::= lower idrest +plainid ::= upper idrest + | varid + | op +id ::= plainid + | ‘`’ stringLiteral ‘`’ +idrest ::= {letter | digit} [‘_’ op] +``` + +There are three ways to form an identifier. First, an identifier can +start with a letter which can be followed by an arbitrary sequence of +letters and digits. This may be followed by underscore ‘_’ +characters and another string composed of either letters and digits or +of operator characters. Second, an identifier can start with an operator +character followed by an arbitrary sequence of operator characters. +The preceding two forms are called _plain_ identifiers. Finally, +an identifier may also be formed by an arbitrary string between +back-quotes (host systems may impose some restrictions on which +strings are legal for identifiers). The identifier then is composed +of all characters excluding the backquotes themselves. + +As usual, a longest match rule applies. For instance, the string + +```scala +big_bob++=`def` +``` + +decomposes into the three identifiers `big_bob`, `++=`, and +`def`. The rules for pattern matching further distinguish between +_variable identifiers_, which start with a lower case letter, and +_constant identifiers_, which do not. + +The ‘\$’ character is reserved for compiler-synthesized identifiers. +User programs should not define identifiers which contain ‘\$’ characters. + +The following names are reserved words instead of being members of the +syntactic class `id` of lexical identifiers. 
+ +```scala +abstract case catch class def +do else extends false final +finally for forSome if implicit +import lazy match new null +object override package private protected +return sealed super this throw +trait try true type val +var while with yield +_ : = => <- <: <% >: # @ +``` + +The Unicode operators `\u21D2` ‘$\Rightarrow$’ and `\u2190` ‘$\leftarrow$’, which have the ASCII +equivalents `=>` and `<-`, are also reserved. + +### Example +Here are examples of identifiers: +```scala + x Object maxIndex p2p empty_? + + `yield` αρετη _y dot_product_* + __system _MAX_LEN_ +``` + +### Example +When one needs to access Java identifiers that are reserved words in Scala, use backquote-enclosed strings. +For instance, the statement `Thread.yield()` is illegal, since +`yield` is a reserved word in Scala. However, here's a +work-around: `` Thread.`yield`() `` + +## Newline Characters + +```ebnf +semi ::= ‘;’ | nl {nl} +``` + +Scala is a line-oriented language where statements may be terminated by +semi-colons or newlines. A newline in a Scala source text is treated +as the special token “nl” if the three following criteria are satisfied: + +1. The token immediately preceding the newline can terminate a statement. +1. The token immediately following the newline can begin a statement. +1. The token appears in a region where newlines are enabled. + +The tokens that can terminate a statement are: literals, identifiers +and the following delimiters and reserved words: + +```scala +this null true false return type +_ ) ] } +``` + +The tokens that can begin a statement are all Scala tokens _except_ +the following delimiters and reserved words: + +```scala +catch else extends finally forSome match +with yield , . ; : = => <- <: <% +>: # [ ) ] } +``` + +A `case` token can begin a statement only if followed by a +`class` or `object` token. + +Newlines are enabled in: + +1. all of a Scala source file, except for nested regions where newlines + are disabled, and +1. the interval between matching `{` and `}` brace tokens, + except for nested regions where newlines are disabled. + +Newlines are disabled in: + +1. the interval between matching `(` and `)` parenthesis tokens, except for + nested regions where newlines are enabled, and +1. the interval between matching `[` and `]` bracket tokens, except for nested + regions where newlines are enabled. +1. The interval between a `case` token and its matching + `=>` token, except for nested regions where newlines are + enabled. +1. Any regions analyzed in [XML mode](#xml-mode). + +Note that the brace characters of `{...}` escapes in XML and +string literals are not tokens, +and therefore do not enclose a region where newlines +are enabled. + +Normally, only a single `nl` token is inserted between two +consecutive non-newline tokens which are on different lines, even if there are multiple lines +between the two tokens. However, if two tokens are separated by at +least one completely blank line (i.e a line which contains no +printable characters), then two `nl` tokens are inserted. + +The Scala grammar (given in full [here](13-syntax-summary.html)) +contains productions where optional `nl` tokens, but not +semicolons, are accepted. This has the effect that a newline in one of these +positions does not terminate an expression or statement. 
These positions can +be summarized as follows: + +Multiple newline tokens are accepted in the following places (note +that a semicolon in place of the newline would be illegal in every one +of these cases): + +- between the condition of a + [conditional expression](06-expressions.html#conditional-expressions) + or [while loop](06-expressions.html#while-loop-expressions) and the next + following expression, +- between the enumerators of a + [for-comprehension](06-expressions.html#for-comprehensions-and-for-loops) + and the next following expression, and +- after the initial `type` keyword in a + [type definition or declaration](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases). + +A single new line token is accepted + +- in front of an opening brace ‘{’, if that brace is a legal + continuation of the current statement or expression, +- after an [infix operator](06-expressions.html#prefix,-infix,-and-postfix-operations), + if the first token on the next line can start an expression, +- in front of a [parameter clause](04-basic-declarations-and-definitions.html#function-declarations-and-definitions), and +- after an [annotation](11-user-defined-annotations.html#user-defined-annotations). + +### Example + +The newline tokens between the two lines are not +treated as statement separators. + +```scala +if (x > 0) + x = x - 1 + +while (x > 0) + x = x / 2 + +for (x <- 1 to 10) + println(x) + +type + IntList = List[Int] +``` + +### Example + +```scala +new Iterator[Int] +{ + private var x = 0 + def hasNext = true + def next = { x += 1; x } +} +``` + +With an additional newline character, the same code is interpreted as +an object creation followed by a local block: + +```scala +new Iterator[Int] + +{ + private var x = 0 + def hasNext = true + def next = { x += 1; x } +} +``` + +### Example + +```scala + x < 0 || + x > 10 +``` + +With an additional newline character, the same code is interpreted as +two expressions: + +```scala + x < 0 || + + x > 10 +``` + +### Example + +```scala +def func(x: Int) + (y: Int) = x + y +``` + +With an additional newline character, the same code is interpreted as +an abstract function definition and a syntactically illegal statement: + +```scala +def func(x: Int) + + (y: Int) = x + y +``` + +### Example + +```scala +@serializable +protected class Data { ... } +``` + +With an additional newline character, the same code is interpreted as +an attribute and a separate statement (which is syntactically +illegal). + +```scala +@serializable + +protected class Data { ... } +``` + +## Literals + +There are literals for integer numbers, floating point numbers, +characters, booleans, symbols, strings. The syntax of these literals is in +each case as in Java. + + + +```ebnf +Literal ::= [‘-’] integerLiteral + | [‘-’] floatingPointLiteral + | booleanLiteral + | characterLiteral + | stringLiteral + | symbolLiteral + | ‘null’ +``` + +### Integer Literals + +```ebnf +integerLiteral ::= (decimalNumeral | hexNumeral) + [‘L’ | ‘l’] +decimalNumeral ::= ‘0’ | nonZeroDigit {digit} +hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit {hexDigit} +digit ::= ‘0’ | nonZeroDigit +nonZeroDigit ::= ‘1’ | … | ‘9’ +``` + +Integer literals are usually of type `Int`, or of type +`Long` when followed by a `L` or +`l` suffix. Values of type `Int` are all integer +numbers between $-2\^{31}$ and $2\^{31}-1$, inclusive. Values of +type `Long` are all integer numbers between $-2\^{63}$ and +$2\^{63}-1$, inclusive. 
A compile-time error occurs if an integer literal +denotes a number outside these ranges. + +However, if the expected type [_pt_](06-expressions.html#expression-typing) of a literal +in an expression is either `Byte`, `Short`, or `Char` +and the integer number fits in the numeric range defined by the type, +then the number is converted to type _pt_ and the literal's type +is _pt_. The numeric ranges given by these types are: + +| | | +|----------------|--------------------------| +|`Byte` | $-2\^7$ to $2\^7-1$ | +|`Short` | $-2\^{15}$ to $2\^{15}-1$| +|`Char` | $0$ to $2\^{16}-1$ | + +### Example + +```scala +0 21 0xFFFFFFFF -42L +``` + +### Floating Point Literals + +```ebnf +floatingPointLiteral ::= digit {digit} ‘.’ digit {digit} [exponentPart] [floatType] + | ‘.’ digit {digit} [exponentPart] [floatType] + | digit {digit} exponentPart [floatType] + | digit {digit} [exponentPart] floatType +exponentPart ::= (‘E’ | ‘e’) [‘+’ | ‘-’] digit {digit} +floatType ::= ‘F’ | ‘f’ | ‘D’ | ‘d’ +``` + +Floating point literals are of type `Float` when followed by +a floating point type suffix `F` or `f`, and are +of type `Double` otherwise. The type `Float` +consists of all IEEE 754 32-bit single-precision binary floating point +values, whereas the type `Double` consists of all IEEE 754 +64-bit double-precision binary floating point values. + +If a floating point literal in a program is followed by a token +starting with a letter, there must be at least one intervening +whitespace character between the two tokens. + +### Example + +```scala +0.0 1e30f 3.14159f 1.0e-100 .1 +``` + +### Example + +The phrase `1.toString` parses as three different tokens: +the integer literal `1`, a `.`, and the identifier `toString`. + +### Example + +`1.` is not a valid floating point literal because the mandatory digit after the `.` is missing. + +### Boolean Literals + +```ebnf +booleanLiteral ::= ‘true’ | ‘false’ +``` + +The boolean literals `true` and `false` are +members of type `Boolean`. + +### Character Literals + +```ebnf +characterLiteral ::= ‘'’ (printableChar | charEscapeSeq) ‘'’ +``` + +A character literal is a single character enclosed in quotes. +The character is either a printable unicode character or is described +by an [escape sequence](#escape-sequences). + +### Example + +```scala +'a' '\u0041' '\n' '\t' +``` + +Note that `'\u000A'` is _not_ a valid character literal because +Unicode conversion is done before literal parsing and the Unicode +character `\u000A` (line feed) is not a printable +character. One can use instead the escape sequence `'\n'` or +the octal escape `'\12'` ([see here](#escape-sequences)). + +### String Literals + +```ebnf +stringLiteral ::= ‘"’ {stringElement} ‘"’ +stringElement ::= printableCharNoDoubleQuote | charEscapeSeq +``` + +A string literal is a sequence of characters in double quotes. The +characters are either printable unicode character or are described by +[escape sequences](#escape-sequences). If the string literal +contains a double quote character, it must be escaped, +i.e. `"\""`. The value of a string literal is an instance of +class `String`. + +### Example + +```scala +"Hello,\nWorld!" +"This string contains a \" character." +``` + +#### Multi-Line String Literals + +```ebnf +stringLiteral ::= ‘"""’ multiLineChars ‘"""’ +multiLineChars ::= {[‘"’] [‘"’] charNoDoubleQuote} {‘"’} +``` + +A multi-line string literal is a sequence of characters enclosed in +triple quotes `""" ... """`. 
The sequence of characters is +arbitrary, except that it may contain three or more consuctive quote characters +only at the very end. Characters +must not necessarily be printable; newlines or other +control characters are also permitted. Unicode escapes work as everywhere else, but none +of the escape sequences [here](#escape-sequences) are interpreted. + +### Example + +```scala + """the present string + spans three + lines.""" +``` + +This would produce the string: + +```scala +the present string + spans three + lines. +``` + +The Scala library contains a utility method `stripMargin` +which can be used to strip leading whitespace from multi-line strings. +The expression + +```scala + """the present string + |spans three + |lines.""".stripMargin +``` + +evaluates to + +```scala +the present string +spans three +lines. +``` + +Method `stripMargin` is defined in class +[scala.collection.immutable.StringLike](http://www.scala-lang.org/api/current/#scala.collection.immutable.StringLike). +Because there is a predefined +[implicit conversion](06-expressions.html#implicit-conversions) from `String` to +`StringLike`, the method is applicable to all strings. + +### Escape Sequences + +The following escape sequences are recognized in character and string literals. + +| charEscapeSeq | unicode | name | char | +|---------------|----------|-----------------|--------| +| `‘\‘ ‘b‘` | `\u0008` | backspace | `BS` | +| `‘\‘ ‘t‘` | `\u0009` | horizontal tab | `HT` | +| `‘\‘ ‘n‘` | `\u000a` | linefeed | `LF` | +| `‘\‘ ‘f‘` | `\u000c` | form feed | `FF` | +| `‘\‘ ‘r‘` | `\u000d` | carriage return | `CR` | +| `‘\‘ ‘"‘` | `\u0022` | double quote | `"` | +| `‘\‘ ‘'‘` | `\u0027` | single quote | `'` | +| `‘\‘ ‘\‘` | `\u005c` | backslash | `\` | + +A character with Unicode between 0 and 255 may also be represented by +an octal escape, i.e. a backslash `'\'` followed by a +sequence of up to three octal characters. + +It is a compile time error if a backslash character in a character or +string literal does not start a valid escape sequence. + +### Symbol literals + +```ebnf +symbolLiteral ::= ‘'’ plainid +``` + +A symbol literal `'x` is a shorthand for the expression +`scala.Symbol("x")`. `Symbol` is a [case class](05-classes-and-objects.html#case-classes), +which is defined as follows. + +```scala +package scala +final case class Symbol private (name: String) { + override def toString: String = "'" + name +} +``` + +The `apply` method of `Symbol`'s companion object +caches weak references to `Symbol`s, thus ensuring that +identical symbol literals are equivalent with respect to reference +equality. + +## Whitespace and Comments + +Tokens may be separated by whitespace characters +and/or comments. Comments come in two forms: + +A single-line comment is a sequence of characters which starts with +`//` and extends to the end of the line. + +A multi-line comment is a sequence of characters between +`/*` and `*/`. Multi-line comments may be nested, +but are required to be properly nested. Therefore, a comment like +`/* /* */` will be rejected as having an unterminated +comment. + +## XML mode + +In order to allow literal inclusion of XML fragments, lexical analysis +switches from Scala mode to XML mode when encountering an opening +angle bracket ‘<’ in the following circumstance: The ‘<’ must be +preceded either by whitespace, an opening parenthesis or an opening +brace and immediately followed by a character starting an XML name. 
+ +```ebnf + ( whitespace | ‘(’ | ‘{’ ) ‘<’ (XNameStart | ‘!’ | ‘?’) + + XNameStart ::= ‘_’ | BaseChar | Ideographic // as in W3C XML, but without ‘:’ +``` + +The scanner switches from XML mode to Scala mode if either + +- the XML expression or the XML pattern started by the initial ‘<’ has been + successfully parsed, or if +- the parser encounters an embedded Scala expression or pattern and + forces the Scanner + back to normal mode, until the Scala expression or pattern is + successfully parsed. In this case, since code and XML fragments can be + nested, the parser has to maintain a stack that reflects the nesting + of XML and Scala expressions adequately. + +Note that no Scala tokens are constructed in XML mode, and that comments are interpreted +as text. + +### Example + +The following value definition uses an XML literal with two embedded +Scala expressions: + +```scala +val b = + The Scala Language Specification + {scalaBook.version} + {scalaBook.authors.mkList("", ", ", "")} + +``` diff --git a/spec/02-identifiers-names-and-scopes.md b/spec/02-identifiers-names-and-scopes.md new file mode 100644 index 000000000000..62d326934fd4 --- /dev/null +++ b/spec/02-identifiers-names-and-scopes.md @@ -0,0 +1,111 @@ +--- +title: Identifiers, Names and Scopes +layout: default +chapter: 2 +--- + +# Identifiers, Names and Scopes + +Names in Scala identify types, values, methods, and classes which are +collectively called _entities_. Names are introduced by local +[definitions and declarations](04-basic-declarations-and-definitions.html#basic-declarations-and-definitions), +[inheritance](05-classes-and-objects.html#class-members), +[import clauses](04-basic-declarations-and-definitions.html#import-clauses), or +[package clauses](09-top-level-definitions.html#packagings) +which are collectively called _bindings_. + +Bindings of different kinds have a precedence defined on them: + +1. Definitions and declarations that are local, inherited, or made + available by a package clause in the same compilation unit where the + definition occurs have highest precedence. +1. Explicit imports have next highest precedence. +1. Wildcard imports have next highest precedence. +1. Definitions made available by a package clause not in the + compilation unit where the definition occurs have lowest precedence. + +There are two different name spaces, one for [types](03-types.html#types) +and one for [terms](06-expressions.html#expressions). The same name may designate a +type and a term, depending on the context where the name is used. + +A binding has a _scope_ in which the entity defined by a single +name can be accessed using a simple name. Scopes are nested. A binding +in some inner scope _shadows_ bindings of lower precedence in the +same scope as well as bindings of the same or lower precedence in outer +scopes. + + + +A reference to an unqualified (type- or term-) identifier $x$ is bound +by the unique binding, which + +- defines an entity with name $x$ in the same namespace as the identifier, and +- shadows all other bindings that define entities with name $x$ in that + namespace. + +It is an error if no such binding exists. If $x$ is bound by an +import clause, then the simple name $x$ is taken to be equivalent to +the qualified name to which $x$ is mapped by the import clause. If $x$ +is bound by a definition or declaration, then $x$ refers to the entity +introduced by that binding. In that case, the type of $x$ is the type +of the referenced entity. 
+ +A reference to a qualified (type- or term-) identifier $e.x$ refers to +the member of the type $T$ of $e$ which has the name $x$ in the same +namespace as the identifier. It is an error if $T$ is not a [value type](03-types.html#value-types). +The type of $e.x$ is the member type of the referenced entity in $T$. + +### Example + +Assume the following two definitions of objects named `X` in packages `P` and `Q`. + +```scala +package P { + object X { val x = 1; val y = 2 } +} + +package Q { + object X { val x = true; val y = "" } +} +``` + +The following program illustrates different kinds of bindings and +precedences between them. + +```scala +package P { // `X' bound by package clause +import Console._ // `println' bound by wildcard import +object A { + println("L4: "+X) // `X' refers to `P.X' here + object B { + import Q._ // `X' bound by wildcard import + println("L7: "+X) // `X' refers to `Q.X' here + import X._ // `x' and `y' bound by wildcard import + println("L8: "+x) // `x' refers to `Q.X.x' here + object C { + val x = 3 // `x' bound by local definition + println("L12: "+x) // `x' refers to constant `3' here + { import Q.X._ // `x' and `y' bound by wildcard import +// println("L14: "+x) // reference to `x' is ambiguous here + import X.y // `y' bound by explicit import + println("L16: "+y) // `y' refers to `Q.X.y' here + { val x = "abc" // `x' bound by local definition + import P.X._ // `x' and `y' bound by wildcard import +// println("L19: "+y) // reference to `y' is ambiguous here + println("L20: "+x) // `x' refers to string "abc" here +}}}}}} +``` diff --git a/spec/03-types.md b/spec/03-types.md new file mode 100644 index 000000000000..d067d45ab2fd --- /dev/null +++ b/spec/03-types.md @@ -0,0 +1,1030 @@ +--- +title: Types +layout: default +chapter: 3 +--- + +# Types + +```ebnf + Type ::= FunctionArgTypes ‘=>’ Type + | InfixType [ExistentialClause] + FunctionArgTypes ::= InfixType + | ‘(’ [ ParamType {‘,’ ParamType } ] ‘)’ + ExistentialClause ::= ‘forSome’ ‘{’ ExistentialDcl + {semi ExistentialDcl} ‘}’ + ExistentialDcl ::= ‘type’ TypeDcl + | ‘val’ ValDcl + InfixType ::= CompoundType {id [nl] CompoundType} + CompoundType ::= AnnotType {‘with’ AnnotType} [Refinement] + | Refinement + AnnotType ::= SimpleType {Annotation} + SimpleType ::= SimpleType TypeArgs + | SimpleType ‘#’ id + | StableId + | Path ‘.’ ‘type’ + | ‘(’ Types ‘)’ + TypeArgs ::= ‘[’ Types ‘]’ + Types ::= Type {‘,’ Type} +``` + +We distinguish between first-order types and type constructors, which +take type parameters and yield types. A subset of first-order types +called _value types_ represents sets of (first-class) values. +Value types are either _concrete_ or _abstract_. + +Every concrete value type can be represented as a _class type_, i.e. a +[type designator](#type-designators) that refers to a +[class or a trait](05-classes-and-objects.html#class-definitions) [^1], or as a +[compound type](#compound-types) representing an +intersection of types, possibly with a [refinement](#compound-types) +that further constrains the types of its members. + +Abstract value types are introduced by [type parameters](04-basic-declarations-and-definitions.html#type-parameters) +and [abstract type bindings](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases). +Parentheses in types can be used for grouping. + +[^1]: We assume that objects and packages also implicitly + define a class (of the same name as the object or package, but + inaccessible to user programs). 
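+### Example
+
+The following fragment is a small illustrative sketch (the names `Fruit`, `Basket`, `pick`, and `Snack` are invented for this example and are not library entities). It contains both concrete and abstract value types:
+
+```scala
+object ValueTypeExamples {
+  class Fruit                                // Fruit is a concrete value type (a class type)
+  trait Basket { type Content <: Fruit }     // Content is an abstract value type (abstract type binding)
+  def pick[A <: Fruit](x: A): A = x          // A is an abstract value type (type parameter)
+  type Snack = Fruit with Serializable       // a compound value type
+}
+```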
+ +Non-value types capture properties of identifiers that +[are not values](#non-value-types). For example, a +[type constructor](#type-constructors) does not directly specify a type of +values. However, when a type constructor is applied to the correct type +arguments, it yields a first-order type, which may be a value type. + +Non-value types are expressed indirectly in Scala. E.g., a method type is +described by writing down a method signature, which in itself is not a real +type, although it gives rise to a corresponding [method type](#method-types). +Type constructors are another example, as one can write +`type Swap[m[_, _], a,b] = m[b, a]`, but there is no syntax to write +the corresponding anonymous type function directly. + +## Paths + +```ebnf +Path ::= StableId + | [id ‘.’] this +StableId ::= id + | Path ‘.’ id + | [id ‘.’] ‘super’ [ClassQualifier] ‘.’ id +ClassQualifier ::= ‘[’ id ‘]’ +``` + +Paths are not types themselves, but they can be a part of named types +and in that function form a central role in Scala's type system. + +A path is one of the following. + +- The empty path ε (which cannot be written explicitly in user programs). +- $C.$`this`, where $C$ references a class. + The path `this` is taken as a shorthand for $C.$`this` where + $C$ is the name of the class directly enclosing the reference. +- $p.x$ where $p$ is a path and $x$ is a stable member of $p$. + _Stable members_ are packages or members introduced by object definitions or + by value definitions of [non-volatile types](#volatile-types). +- $C.$`super`$.x$ or $C.$`super`$[M].x$ + where $C$ references a class and $x$ references a + stable member of the super class or designated parent class $M$ of $C$. + The prefix `super` is taken as a shorthand for $C.$`super` where + $C$ is the name of the class directly enclosing the reference. + +A _stable identifier_ is a path which ends in an identifier. + +## Value Types + +Every value in Scala has a type which is of one of the following +forms. + +### Singleton Types + +```ebnf +SimpleType ::= Path ‘.’ type +``` + +A singleton type is of the form $p.$`type`, where $p$ is a +path pointing to a value expected to [conform](06-expressions.html#expression-typing) +to `scala.AnyRef`. The type denotes the set of values +consisting of `null` and the value denoted by $p$. + +A _stable type_ is either a singleton type or a type which is +declared to be a subtype of trait `scala.Singleton`. + +### Type Projection + +```ebnf +SimpleType ::= SimpleType ‘#’ id +``` + +A type projection $T$#$x$ references the type member named +$x$ of type $T$. + + + +### Type Designators + +```ebnf +SimpleType ::= StableId +``` + +A type designator refers to a named value type. It can be simple or +qualified. All such type designators are shorthands for type projections. + +Specifically, the unqualified type name $t$ where $t$ is bound in some +class, object, or package $C$ is taken as a shorthand for +$C.$`this.type#`$t$. If $t$ is +not bound in a class, object, or package, then $t$ is taken as a +shorthand for ε`.type#`$t$. + +A qualified type designator has the form `p.t` where `p` is +a [path](#paths) and _t_ is a type name. Such a type designator is +equivalent to the type projection `p.type#t`. + +### Example + +Some type designators and their expansions are listed below. 
We assume +a local type parameter $t$, a value `maintable` +with a type member `Node` and the standard class `scala.Int`, + +| Designator | Expansion | +|-------------------- | --------------------------| +|t | ε.type#t | +|Int | scala.type#Int | +|scala.Int | scala.type#Int | +|data.maintable.Node | data.maintable.type#Node | + +### Parameterized Types + +```ebnf +SimpleType ::= SimpleType TypeArgs +TypeArgs ::= ‘[’ Types ‘]’ +``` + +A parameterized type $T[ U_1 , \ldots , U_n ]$ consists of a type +designator $T$ and type parameters $U_1 , \ldots , U_n$ where +$n \geq 1$. $T$ must refer to a type constructor which takes $n$ type +parameters $a_1 , \ldots , a_n$. + +Say the type parameters have lower bounds $L_1 , \ldots , L_n$ and +upper bounds $U_1, \ldots, U_n$. The parameterized type is +well-formed if each actual type parameter +_conforms to its bounds_, i.e. $\sigma L_i <: T_i <: \sigma U_i$ where $\sigma$ is the +substitution $[ a_1 := T_1 , \ldots , a_n := T_n ]$. + +### Example Parameterized Types +Given the partial type definitions: + +```scala +class TreeMap[A <: Comparable[A], B] { … } +class List[A] { … } +class I extends Comparable[I] { … } + +class F[M[_], X] { … } +class S[K <: String] { … } +class G[M[ Z <: I ], I] { … } +``` + +the following parameterized types are well formed: + +```scala +TreeMap[I, String] +List[I] +List[List[Boolean]] + +F[List, Int] +G[S, String] +``` + +### Example + +Given the [above type definitions](#example-parameterized-types), +the following types are ill-formed: + +```scala +TreeMap[I] // illegal: wrong number of parameters +TreeMap[List[I], Int] // illegal: type parameter not within bound + +F[Int, Boolean] // illegal: Int is not a type constructor +F[TreeMap, Int] // illegal: TreeMap takes two parameters, + // F expects a constructor taking one +G[S, Int] // illegal: S constrains its parameter to + // conform to String, + // G expects type constructor with a parameter + // that conforms to Int +``` + +### Tuple Types + +```ebnf +SimpleType ::= ‘(’ Types ‘)’ +``` + +A tuple type $(T_1 , \ldots , T_n)$ is an alias for the +class `scala.Tuple$n$[$T_1$, … , $T_n$]`, where $n \geq 2$. + +Tuple classes are case classes whose fields can be accessed using +selectors `_1` , … , `_n`. Their functionality is +abstracted in a corresponding `Product` trait. The _n_-ary tuple +class and product trait are defined at least as follows in the +standard Scala library (they might also add other methods and +implement other traits). + +```scala +case class Tuple$n$[+$T_1$, … , +$T_n$](_1: $T_1$, … , _n: $T_n$) +extends Product_n[$T_1$, … , $T_n$] + +trait Product_n[+$T_1$, … , +$T_n$] { + override def productArity = $n$ + def _1: $T_1$ + … + def _n: $T_n$ +} +``` + +### Annotated Types + +```ebnf +AnnotType ::= SimpleType {Annotation} +``` + +An annotated type $T$ $a_1, \ldots, a_n$ +attaches [annotations](11-user-defined-annotations.html#user-defined-annotations) +$a_1 , \ldots , a_n$ to the type $T$. + +### Example + +The following type adds the `@suspendable` annotation to the type `String`: + +```scala +String @suspendable +``` + +### Compound Types + +```ebnf +CompoundType ::= AnnotType {‘with’ AnnotType} [Refinement] + | Refinement +Refinement ::= [nl] ‘{’ RefineStat {semi RefineStat} ‘}’ +RefineStat ::= Dcl + | ‘type’ TypeDef + | +``` + +A compound type $T_1$ `with` … `with` $T_n \\{ R \\}$ +represents objects with members as given in the component types +$T_1 , \ldots , T_n$ and the refinement $\\{ R \\}$. 
A refinement +$\\{ R \\}$ contains declarations and type definitions. +If a declaration or definition overrides a declaration or definition in +one of the component types $T_1 , \ldots , T_n$, the usual rules for +[overriding](05-classes-and-objects.html#overriding) apply; otherwise the declaration +or definition is said to be “structural” [^2]. + +[^2]: A reference to a structurally defined member (method call or access + to a value or variable) may generate binary code that is significantly + slower than an equivalent code to a non-structural member. + +Within a method declaration in a structural refinement, the type of +any value parameter may only refer to type parameters or abstract +types that are contained inside the refinement. That is, it must refer +either to a type parameter of the method itself, or to a type +definition within the refinement. This restriction does not apply to +the method's result type. + +If no refinement is given, the empty refinement is implicitly added, +i.e. $T_1$ `with` … `with` $T_n$ is a shorthand for $T_1$ `with` … `with` $T_n \\{\\}$. + +A compound type may also consist of just a refinement +$\\{ R \\}$ with no preceding component types. Such a type is +equivalent to `AnyRef` $\\{ R \\}$. + +### Example + +The following example shows how to declare and use a method which +a parameter type that contains a refinement with structural declarations. + +```scala +case class Bird (val name: String) extends Object { + def fly(height: Int) = … +… +} +case class Plane (val callsign: String) extends Object { + def fly(height: Int) = … +… +} +def takeoff( + runway: Int, + r: { val callsign: String; def fly(height: Int) }) = { + tower.print(r.callsign + " requests take-off on runway " + runway) + tower.read(r.callsign + " is clear for take-off") + r.fly(1000) +} +val bird = new Bird("Polly the parrot"){ val callsign = name } +val a380 = new Plane("TZ-987") +takeoff(42, bird) +takeoff(89, a380) +``` + +Although `Bird` and `Plane` do not share any parent class other than +`Object`, the parameter _r_ of method `takeoff` is defined using a +refinement with structural declarations to accept any object that declares +a value `callsign` and a `fly` method. + +### Infix Types + +```ebnf +InfixType ::= CompoundType {id [nl] CompoundType} +``` + +An infix type $T_1$ `op` $T_2$ consists of an infix +operator `op` which gets applied to two type operands $T_1$ and +$T_2$. The type is equivalent to the type application +`op`$[T_1, T_2]$. The infix operator `op` may be an +arbitrary identifier. + +All type infix operators have the same precedence; parentheses have to +be used for grouping. The [associativity](06-expressions.html#prefix,-infix,-and-postfix-operations) +of a type operator is determined as for term operators: type operators +ending in a colon ‘:’ are right-associative; all other +operators are left-associative. + +In a sequence of consecutive type infix operations +$t_0 \, \mathit{op} \, t_1 \, \mathit{op_2} \, \ldots \, \mathit{op_n} \, t_n$, +all operators $\mathit{op}\_1 , \ldots , \mathit{op}\_n$ must have the same +associativity. If they are all left-associative, the sequence is +interpreted as +$(\ldots (t_0 \mathit{op_1} t_1) \mathit{op_2} \ldots) \mathit{op_n} t_n$, +otherwise it is interpreted as +$t_0 \mathit{op_1} (t_1 \mathit{op_2} ( \ldots \mathit{op_n} t_n) \ldots)$. 
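+### Example
+
+The following sketch illustrates how infix types expand into type applications and how associativity is determined; the type constructors `Or` and `||:` are invented here purely for illustration and are not library types:
+
+```scala
+object InfixTypeExamples {
+  class Or[A, B]                          // an ordinary binary type constructor
+  class ||:[A, B]                         // its name ends in ':', so it is right-associative
+
+  type T1 = Int Or String                 // equivalent to Or[Int, String]
+  type T2 = Int Or String Or Boolean      // left-associative: Or[Or[Int, String], Boolean]
+  type T3 = Int ||: String ||: Boolean    // right-associative: ||:[Int, ||:[String, Boolean]]
+  // type T4 = Int Or String ||: Boolean  // illegal: operators of different associativity in one sequence
+}
+```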
+ +### Function Types + +```ebnf +Type ::= FunctionArgs ‘=>’ Type +FunctionArgs ::= InfixType + | ‘(’ [ ParamType {‘,’ ParamType } ] ‘)’ +``` + +The type $(T_1 , \ldots , T_n) \Rightarrow U$ represents the set of function +values that take arguments of types $T1 , \ldots , Tn$ and yield +results of type $U$. In the case of exactly one argument type +$T \Rightarrow U$ is a shorthand for $(T) \Rightarrow U$. +An argument type of the form $\Rightarrow T$ +represents a [call-by-name parameter](04-basic-declarations-and-definitions.html#by-name-parameters) of type $T$. + +Function types associate to the right, e.g. +$S \Rightarrow T \Rightarrow U$ is the same as +$S \Rightarrow (T \Rightarrow U)$. + +Function types are shorthands for class types that define `apply` +functions. Specifically, the $n$-ary function type +$(T_1 , \ldots , T_n) \Rightarrow U$ is a shorthand for the class type +`Function$_n$[T1 , … , $T_n$, U]`. Such class +types are defined in the Scala library for $n$ between 0 and 9 as follows. + +```scala +package scala +trait Function_n[-T1 , … , -T$_n$, +R] { + def apply(x1: T1 , … , x$_n$: T$_n$): R + override def toString = "" +} +``` + +Hence, function types are [covariant](04-basic-declarations-and-definitions.html#variance-annotations) in their +result type and contravariant in their argument types. + +### Existential Types + +```ebnf +Type ::= InfixType ExistentialClauses +ExistentialClauses ::= ‘forSome’ ‘{’ ExistentialDcl + {semi ExistentialDcl} ‘}’ +ExistentialDcl ::= ‘type’ TypeDcl + | ‘val’ ValDcl +``` + +An existential type has the form `$T$ forSome { $Q$ }` +where $Q$ is a sequence of +[type declarations](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases). + +Let +$t_1[\mathit{tps}\_1] >: L_1 <: U_1 , \ldots , t_n[\mathit{tps}\_n] >: L_n <: U_n$ +be the types declared in $Q$ (any of the +type parameter sections `[ $\mathit{tps}_i$ ]` might be missing). +The scope of each type $t_i$ includes the type $T$ and the existential clause +$Q$. +The type variables $t_i$ are said to be _bound_ in the type +`$T$ forSome { $Q$ }`. +Type variables which occur in a type $T$ but which are not bound in $T$ are said +to be _free_ in $T$. + +A _type instance_ of `$T$ forSome { $Q$ }` +is a type $\sigma T$ where $\sigma$ is a substitution over $t_1 , \ldots , t_n$ +such that, for each $i$, $\sigma L_i <: \sigma t_i <: \sigma U_i$. +The set of values denoted by the existential type `$T$ forSome {$\,Q\,$}` +is the union of the set of values of all its type instances. + +A _skolemization_ of `$T$ forSome { $Q$ }` is +a type instance $\sigma T$, where $\sigma$ is the substitution +$[t_1'/t_1 , \ldots , t_n'/t_n]$ and each $t_i'$ is a fresh abstract type +with lower bound $\sigma L_i$ and upper bound $\sigma U_i$. + +#### Simplification Rules + +Existential types obey the following four equivalences: + +1. Multiple for-clauses in an existential type can be merged. E.g., +`$T$ forSome { $Q$ } forSome { $Q'$ }` +is equivalent to +`$T$ forSome { $Q$ ; $Q'$}`. +1. Unused quantifications can be dropped. E.g., +`$T$ forSome { $Q$ ; $Q'$}` +where none of the types defined in $Q'$ are referred to by $T$ or $Q$, +is equivalent to +`$T$ forSome {$ Q $}`. +1. An empty quantification can be dropped. E.g., +`$T$ forSome { }` is equivalent to $T$. +1. 
An existential type `$T$ forSome { $Q$ }` where $Q$ contains +a clause `type $t[\mathit{tps}] >: L <: U$` is equivalent +to the type `$T'$ forSome { $Q$ }` where $T'$ results from $T$ by replacing +every [covariant occurrence](04-basic-declarations-and-definitions.html#variance-annotations) of $t$ in $T$ by $U$ and by +replacing every contravariant occurrence of $t$ in $T$ by $L$. + +#### Existential Quantification over Values + +As a syntactic convenience, the bindings clause +in an existential type may also contain +value declarations `val $x$: $T$`. +An existential type `$T$ forSome { $Q$; val $x$: $S\,$;$\,Q'$ }` +is treated as a shorthand for the type +`$T'$ forSome { $Q$; type $t$ <: $S$ with Singleton; $Q'$ }`, where $t$ is a +fresh type name and $T'$ results from $T$ by replacing every occurrence of +`$x$.type` with $t$. + +#### Placeholder Syntax for Existential Types + +```ebnf +WildcardType ::= ‘_’ TypeBounds +``` + +Scala supports a placeholder syntax for existential types. +A _wildcard type_ is of the form `_$\;$>:$\,L\,$<:$\,U$`. Both bound +clauses may be omitted. If a lower bound clause `>:$\,L$` is missing, +`>:$\,$scala.Nothing` +is assumed. If an upper bound clause `<:$\,U$` is missing, +`<:$\,$scala.Any` is assumed. A wildcard type is a shorthand for an +existentially quantified type variable, where the existential quantification is +implicit. + +A wildcard type must appear as type argument of a parameterized type. +Let $T = p.c[\mathit{targs},T,\mathit{targs}']$ be a parameterized type where +$\mathit{targs}, \mathit{targs}'$ may be empty and +$T$ is a wildcard type `_$\;$>:$\,L\,$<:$\,U$`. Then $T$ is equivalent to the +existential +type + +```scala +$p.c[\mathit{targs},t,\mathit{targs}']$ forSome { type $t$ >: $L$ <: $U$ } +``` + +where $t$ is some fresh type variable. +Wildcard types may also appear as parts of [infix types](#infix-types) +, [function types](#function-types), +or [tuple types](#tuple-types). +Their expansion is then the expansion in the equivalent parameterized +type. + +### Example + +Assume the class definitions + +```scala +class Ref[T] +abstract class Outer { type T } . +``` + +Here are some examples of existential types: + +```scala +Ref[T] forSome { type T <: java.lang.Number } +Ref[x.T] forSome { val x: Outer } +Ref[x_type # T] forSome { type x_type <: Outer with Singleton } +``` + +The last two types in this list are equivalent. +An alternative formulation of the first type above using wildcard syntax is: + +```scala +Ref[_ <: java.lang.Number] +``` + +### Example + +The type `List[List[_]]` is equivalent to the existential type + +```scala +List[List[t] forSome { type t }] . +``` + +### Example + +Assume a covariant type + +```scala +class List[+T] +``` + +The type + +```scala +List[T] forSome { type T <: java.lang.Number } +``` + +is equivalent (by simplification rule 4 above) to + +```scala +List[java.lang.Number] forSome { type T <: java.lang.Number } +``` + +which is in turn equivalent (by simplification rules 2 and 3 above) to +`List[java.lang.Number]`. + +## Non-Value Types + +The types explained in the following do not denote sets of values, nor +do they appear explicitly in programs. They are introduced in this +report as the internal types of defined identifiers. + +### Method Types + +A method type is denoted internally as $(\mathit{Ps})U$, where $(\mathit{Ps})$ +is a sequence of parameter names and types $(p_1:T_1 , \ldots , p_n:T_n)$ +for some $n \geq 0$ and $U$ is a (value or method) type. 
This type +represents named methods that take arguments named $p_1 , \ldots , p_n$ +of types $T_1 , \ldots , T_n$ +and that return a result of type $U$. + +Method types associate to the right: $(\mathit{Ps}\_1)(\mathit{Ps}\_2)U$ is +treated as $(\mathit{Ps}\_1)((\mathit{Ps}\_2)U)$. + +A special case are types of methods without any parameters. They are +written here `=> T`. Parameterless methods name expressions +that are re-evaluated each time the parameterless method name is +referenced. + +Method types do not exist as types of values. If a method name is used +as a value, its type is [implicitly converted](06-expressions.html#implicit-conversions) to a +corresponding function type. + +###### Example + +The declarations + +``` +def a: Int +def b (x: Int): Boolean +def c (x: Int) (y: String, z: String): String +``` + +produce the typings + +```scala +a: => Int +b: (Int) Boolean +c: (Int) (String, String) String +``` + +### Polymorphic Method Types + +A polymorphic method type is denoted internally as `[$\mathit{tps}\,$]$T$` where +`[$\mathit{tps}\,$]` is a type parameter section +`[$a_1$ >: $L_1$ <: $U_1 , \ldots , a_n$ >: $L_n$ <: $U_n$]` +for some $n \geq 0$ and $T$ is a +(value or method) type. This type represents named methods that +take type arguments `$S_1 , \ldots , S_n$` which +[conform](#parameterized-types) to the lower bounds +`$L_1 , \ldots , L_n$` and the upper bounds +`$U_1 , \ldots , U_n$` and that yield results of type $T$. + +###### Example + +The declarations + +```scala +def empty[A]: List[A] +def union[A <: Comparable[A]] (x: Set[A], xs: Set[A]): Set[A] +``` + +produce the typings + +```scala +empty : [A >: Nothing <: Any] List[A] +union : [A >: Nothing <: Comparable[A]] (x: Set[A], xs: Set[A]) Set[A] +``` + +### Type Constructors + +A type constructor is represented internally much like a polymorphic method type. +`[$\pm$ $a_1$ >: $L_1$ <: $U_1 , \ldots , \pm a_n$ >: $L_n$ <: $U_n$] $T$` +represents a type that is expected by a +[type constructor parameter](04-basic-declarations-and-definitions.html#type-parameters) or an +[abstract type constructor binding](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases) with +the corresponding type parameter clause. + +###### Example + +Consider this fragment of the `Iterable[+X]` class: + +``` +trait Iterable[+X] { + def flatMap[newType[+X] <: Iterable[X], S](f: X => newType[S]): newType[S] +} +``` + +Conceptually, the type constructor `Iterable` is a name for the +anonymous type `[+X] Iterable[X]`, which may be passed to the +`newType` type constructor parameter in `flatMap`. + + + +## Base Types and Member Definitions + +Types of class members depend on the way the members are referenced. +Central here are three notions, namely: +1. the notion of the set of base types of a type $T$, +1. the notion of a type $T$ in some class $C$ seen from some + prefix type $S$, +1. the notion of the set of member bindings of some type $T$. + +These notions are defined mutually recursively as follows. + +1. The set of _base types_ of a type is a set of class types, + given as follows. + - The base types of a class type $C$ with parents $T_1 , \ldots , T_n$ are + $C$ itself, as well as the base types of the compound type + `$T_1$ with … with $T_n$ { $R$ }`. + - The base types of an aliased type are the base types of its alias. + - The base types of an abstract type are the base types of its upper bound. 
+ - The base types of a parameterized type + `$C$[$T_1 , \ldots , T_n$]` are the base types + of type $C$, where every occurrence of a type parameter $a_i$ + of $C$ has been replaced by the corresponding parameter type $T_i$. + - The base types of a singleton type `$p$.type` are the base types of + the type of $p$. + - The base types of a compound type + `$T_1$ with $\ldots$ with $T_n$ { $R$ }` + are the _reduced union_ of the base + classes of all $T_i$'s. This means: + Let the multi-set $\mathscr{S}$ be the multi-set-union of the + base types of all $T_i$'s. + If $\mathscr{S}$ contains several type instances of the same class, say + `$S^i$#$C$[$T^i_1 , \ldots , T^i_n$]` $(i \in I)$, then + all those instances + are replaced by one of them which conforms to all + others. It is an error if no such instance exists. It follows that the + reduced union, if it exists, + produces a set of class types, where different types are instances of + different classes. + - The base types of a type selection `$S$#$T$` are + determined as follows. If $T$ is an alias or abstract type, the + previous clauses apply. Otherwise, $T$ must be a (possibly + parameterized) class type, which is defined in some class $B$. Then + the base types of `$S$#$T$` are the base types of $T$ + in $B$ seen from the prefix type $S$. + - The base types of an existential type `$T$ forSome { $Q$ }` are + all types `$S$ forSome { $Q$ }` where $S$ is a base type of $T$. + +1. The notion of a type $T$ _in class $C$ seen from some prefix type $S$_ + makes sense only if the prefix type $S$ + has a type instance of class $C$ as a base type, say + `$S'$#$C$[$T_1 , \ldots , T_n$]`. Then we define as follows. + - If `$S$ = $\epsilon$.type`, then $T$ in $C$ seen from $S$ is + $T$ itself. + - Otherwise, if $S$ is an existential type `$S'$ forSome { $Q$ }`, and + $T$ in $C$ seen from $S'$ is $T'$, + then $T$ in $C$ seen from $S$ is `$T'$ forSome {$\,Q\,$}`. + - Otherwise, if $T$ is the $i$'th type parameter of some class $D$, then + - If $S$ has a base type `$D$[$U_1 , \ldots , U_n$]`, for some type + parameters `[$U_1 , \ldots , U_n$]`, then $T$ in $C$ seen from $S$ + is $U_i$. + - Otherwise, if $C$ is defined in a class $C'$, then + $T$ in $C$ seen from $S$ is the same as $T$ in $C'$ seen from $S'$. + - Otherwise, if $C$ is not defined in another class, then + $T$ in $C$ seen from $S$ is $T$ itself. + - Otherwise, if $T$ is the singleton type `$D$.this.type` for some class $D$ + then + - If $D$ is a subclass of $C$ and $S$ has a type instance of class $D$ + among its base types, then $T$ in $C$ seen from $S$ is $S$. + - Otherwise, if $C$ is defined in a class $C'$, then + $T$ in $C$ seen from $S$ is the same as $T$ in $C'$ seen from $S'$. + - Otherwise, if $C$ is not defined in another class, then + $T$ in $C$ seen from $S$ is $T$ itself. + - If $T$ is some other type, then the described mapping is performed + to all its type components. + + If $T$ is a possibly parameterized class type, where $T$'s class + is defined in some other class $D$, and $S$ is some prefix type, + then we use "$T$ seen from $S$" as a shorthand for + "$T$ in $D$ seen from $S$". + +1. The _member bindings_ of a type $T$ are + 1. all bindings $d$ such that there exists a type instance of some class $C$ among the base types of $T$ + and there exists a definition or declaration $d'$ in $C$ + such that $d$ results from $d'$ by replacing every + type $T'$ in $d'$ by $T'$ in $C$ seen from $T$, and + 2. 
all bindings of the type's [refinement](#compound-types), if it has one. + + The _definition_ of a type projection `S#T` is the member + binding $d_T$ of the type `T` in `S`. In that case, we also say + that `S#T` _is defined by_ $d_T$. + +## Relations between types + +We define two relations between types. + +|Name | Symbolically |Interpretation | +|-----------------|----------------|-------------------------------------------------| +|Equivalence |$T \equiv U$ |$T$ and $U$ are interchangeable in all contexts. | +|Conformance |$T <: U$ |Type $T$ conforms to type $U$. | + +### Equivalence + +Equivalence $(\equiv)$ between types is the smallest congruence [^congruence] such that +the following holds: + +- If $t$ is defined by a type alias `type $t$ = $T$`, then $t$ is + equivalent to $T$. +- If a path $p$ has a singleton type `$q$.type`, then + `$p$.type $\equiv q$.type`. +- If $O$ is defined by an object definition, and $p$ is a path + consisting only of package or object selectors and ending in $O$, then + `$O$.this.type $\equiv p$.type`. +- Two [compound types](#compound-types) are equivalent if the sequences + of their component are pairwise equivalent, and occur in the same order, and + their refinements are equivalent. Two refinements are equivalent if they + bind the same names and the modifiers, types and bounds of every + declared entity are equivalent in both refinements. +- Two [method types](#method-types) are equivalent if: + - neither are implicit, or they both are [^implicit]; + - they have equivalent result types; + - they have the same number of parameters; and + - corresponding parameters have equivalent types. + Note that the names of parameters do not matter for method type equivalence. +- Two [polymorphic method types](#polymorphic-method-types) are equivalent if + they have the same number of type parameters, and, after renaming one set of + type parameters by another, the result types as well as lower and upper bounds + of corresponding type parameters are equivalent. +- Two [existential types](#existential-types) + are equivalent if they have the same number of + quantifiers, and, after renaming one list of type quantifiers by + another, the quantified types as well as lower and upper bounds of + corresponding quantifiers are equivalent. +- Two [type constructors](#type-constructors) are equivalent if they have the + same number of type parameters, and, after renaming one list of type + parameters by another, the result types as well as variances, lower and upper + bounds of corresponding type parameters are equivalent. + +[^congruence]: A congruence is an equivalence relation which is closed under formation of contexts. +[^implicit]: A method type is implicit if the parameter section that defines it starts with the `implicit` keyword. + +### Conformance + +The conformance relation $(<:)$ is the smallest +transitive relation that satisfies the following conditions. + +- Conformance includes equivalence. If $T \equiv U$ then $T <: U$. +- For every value type $T$, `scala.Nothing <: $T$ <: scala.Any`. +- For every type constructor $T$ (with any number of type parameters), + `scala.Nothing <: $T$ <: scala.Any`. + +- For every class type $T$ such that `$T$ <: scala.AnyRef` and not + `$T$ <: scala.NotNull` one has `scala.Null <: $T$`. +- A type variable or abstract type $t$ conforms to its upper bound and + its lower bound conforms to $t$. +- A class type or parameterized type conforms to any of its base-types. 
+- A singleton type `$p$.type` conforms to the type of the path $p$. +- A singleton type `$p$.type` conforms to the type `scala.Singleton`. +- A type projection `$T$#$t$` conforms to `$U$#$t$` if $T$ conforms to $U$. +- A parameterized type `$T$[$T_1$ , … , $T_n$]` conforms to + `$T$[$U_1$ , … , $U_n$]` if + the following three conditions hold for $i \in \{ 1 , \ldots , n \}$: + 1. If the $i$'th type parameter of $T$ is declared covariant, then + $T_i <: U_i$. + 1. If the $i$'th type parameter of $T$ is declared contravariant, then + $U_i <: T_i$. + 1. If the $i$'th type parameter of $T$ is declared neither covariant + nor contravariant, then $U_i \equiv T_i$. +- A compound type `$T_1$ with $\ldots$ with $T_n$ {$R\,$}` conforms to + each of its component types $T_i$. +- If $T <: U_i$ for $i \in \{ 1 , \ldots , n \}$ and for every + binding $d$ of a type or value $x$ in $R$ there exists a member + binding of $x$ in $T$ which subsumes $d$, then $T$ conforms to the + compound type `$U_1$ with $\ldots$ with $U_n$ {$R\,$}`. +- The existential type `$T$ forSome {$\,Q\,$}` conforms to + $U$ if its [skolemization](#existential-types) + conforms to $U$. +- The type $T$ conforms to the existential type `$U$ forSome {$\,Q\,$}` + if $T$ conforms to one of the [type instances](#existential-types) + of `$U$ forSome {$\,Q\,$}`. +- If + $T_i \equiv T_i'$ for $i \in \{ 1 , \ldots , n\}$ and $U$ conforms to $U'$ + then the method type $(p_1:T_1 , \ldots , p_n:T_n) U$ conforms to + $(p_1':T_1' , \ldots , p_n':T_n') U'$. +- The polymorphic type + $[a_1 >: L_1 <: U_1 , \ldots , a_n >: L_n <: U_n] T$ conforms to the + polymorphic type + $[a_1 >: L_1' <: U_1' , \ldots , a_n >: L_n' <: U_n'] T'$ if, assuming + $L_1' <: a_1 <: U_1' , \ldots , L_n' <: a_n <: U_n'$ + one has $T <: T'$ and $L_i <: L_i'$ and $U_i' <: U_i$ + for $i \in \{ 1 , \ldots , n \}$. +- Type constructors $T$ and $T'$ follow a similar discipline. We characterize + $T$ and $T'$ by their type parameter clauses + $[a_1 , \ldots , a_n]$ and + $[a_1' , \ldots , a_n']$, where an $a_i$ or $a_i'$ may include a variance + annotation, a higher-order type parameter clause, and bounds. Then, $T$ + conforms to $T'$ if any list $[t_1 , \ldots , t_n]$ -- with declared + variances, bounds and higher-order type parameter clauses -- of valid type + arguments for $T'$ is also a valid list of type arguments for $T$ and + $T[t_1 , \ldots , t_n] <: T'[t_1 , \ldots , t_n]$. Note that this entails + that: + - The bounds on $a_i$ must be weaker than the corresponding bounds declared + for $a'_i$. + - The variance of $a_i$ must match the variance of $a'_i$, where covariance + matches covariance, contravariance matches contravariance and any variance + matches invariance. + - Recursively, these restrictions apply to the corresponding higher-order + type parameter clauses of $a_i$ and $a'_i$. + +A declaration or definition in some compound type of class type $C$ +_subsumes_ another declaration of the same name in some compound type or class +type $C'$, if one of the following holds. + +- A value declaration or definition that defines a name $x$ with type $T$ + subsumes a value or method declaration that defines $x$ with type $T'$, provided + $T <: T'$. +- A method declaration or definition that defines a name $x$ with type $T$ + subsumes a method declaration that defines $x$ with type $T'$, provided + $T <: T'$. +- A type alias + `type $t$[$T_1$ , … , $T_n$] = $T$` subsumes a type alias + `type $t$[$T_1$ , … , $T_n$] = $T'$` if $T \equiv T'$. 
+- A type declaration `type $t$[$T_1$ , … , $T_n$] >: $L$ <: $U$` subsumes
+  a type declaration `type $t$[$T_1$ , … , $T_n$] >: $L'$ <: $U'$` if
+  $L' <: L$ and $U <: U'$.
+- A type or class definition that binds a type name $t$ subsumes an abstract
+  type declaration `type $t$[$T_1$ , … , $T_n$] >: $L$ <: $U$` if
+  $L <: t <: U$.
+
+The $(<:)$ relation forms a pre-order between types,
+i.e. it is transitive and reflexive. _Least upper bounds_ and
+_greatest lower bounds_ of a set of types
+are understood to be relative to that order.
+
+###### Note
+The least upper bound or greatest lower bound
+of a set of types does not always exist. For instance, consider
+the class definitions
+
+```scala
+class A[+T] {}
+class B extends A[B]
+class C extends A[C]
+```
+
+Then the types `A[Any], A[A[Any]], A[A[A[Any]]], ...` form
+a descending sequence of upper bounds for `B` and `C`. The
+least upper bound would be the infinite limit of that sequence, which
+does not exist as a Scala type. Since cases like this are in general
+impossible to detect, a Scala compiler is free to reject a term
+which has a type specified as a least upper or greatest lower bound,
+if that bound would be more complex than some compiler-set
+limit [^4].
+
+The least upper bound or greatest lower bound might also not be
+unique. For instance `A with B` and `B with A` are both
+greatest lower bounds of `A` and `B`. If there are several
+least upper bounds or greatest lower bounds, the Scala compiler is
+free to pick any one of them.
+
+[^4]: The current Scala compiler limits the nesting level
+      of parameterization in such bounds to be at most two deeper than the
+      maximum nesting level of the operand types.
+
+### Weak Conformance
+
+In some situations Scala uses a more general conformance relation. A
+type $S$ _weakly conforms_
+to a type $T$, written $S <:_w
+T$, if $S <: T$ or both $S$ and $T$ are primitive number types
+and $S$ precedes $T$ in the following ordering.
+
+```scala
+Byte  $<:_w$ Short
+Short $<:_w$ Int
+Char  $<:_w$ Int
+Int   $<:_w$ Long
+Long  $<:_w$ Float
+Float $<:_w$ Double
+```
+
+A _weak least upper bound_ is a least upper bound with respect to
+weak conformance.
+
+## Volatile Types
+
+Type volatility approximates the possibility that a type parameter or abstract
+type instance
+of a type does not have any non-null values. A value member of a volatile type
+cannot appear in a [path](#paths).
+
+A type is _volatile_ if it falls into one of four categories:
+
+A compound type `$T_1$ with … with $T_n$ {$R\,$}`
+is volatile if one of the following three conditions holds.
+
+1. One of $T_2 , \ldots , T_n$ is a type parameter or abstract type, or
+1. $T_1$ is an abstract type and either the refinement $R$
+   or a type $T_j$ for $j > 1$ contributes an abstract member
+   to the compound type, or
+1. one of $T_1 , \ldots , T_n$ is a singleton type.
+
+Here, a type $S$ _contributes an abstract member_ to a type $T$ if
+$S$ contains an abstract member that is also a member of $T$.
+A refinement $R$ contributes an abstract member to a type $T$ if $R$
+contains an abstract declaration which is also a member of $T$.
+
+A type designator is volatile if it is an alias of a volatile type, or
+if it designates a type parameter or abstract type that has a volatile type as
+its upper bound.
+
+A singleton type `$p$.type` is volatile if the underlying
+type of path $p$ is volatile.
+
+An existential type `$T$ forSome {$\,Q\,$}` is volatile if
+$T$ is volatile.
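+
+For illustration only (this sketch is not part of the specification text, and the
+names are made up), the following compound type is volatile by the first condition
+above, which in turn restricts how its values may be used in paths:
+
+```scala
+trait A { type S }
+
+class Holder {
+  type T <: A                  // abstract type member
+  val u: AnyRef with T = ???   // `AnyRef with T` is volatile: its second component is an abstract type
+  // Since the type of `u` is volatile, `u` may not appear in a path,
+  // so a type selection such as `u.S` would be rejected:
+  // type X = u.S
+}
+```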
+ +## Type Erasure + +A type is called _generic_ if it contains type arguments or type variables. +_Type erasure_ is a mapping from (possibly generic) types to +non-generic types. We write $|T|$ for the erasure of type $T$. +The erasure mapping is defined as follows. + +- The erasure of an alias type is the erasure of its right-hand side. +- The erasure of an abstract type is the erasure of its upper bound. +- The erasure of the parameterized type `scala.Array$[T_1]$` is + `scala.Array$[|T_1|]$`. +- The erasure of every other parameterized type $T[T_1 , \ldots , T_n]$ is $|T|$. +- The erasure of a singleton type `$p$.type` is the + erasure of the type of $p$. +- The erasure of a type projection `$T$#$x$` is `|$T$|#$x$`. +- The erasure of a compound type + `$T_1$ with $\ldots$ with $T_n$ {$R\,$}` is the erasure of the intersection + dominator of $T_1 , \ldots , T_n$. +- The erasure of an existential type `$T$ forSome {$\,Q\,$}` is $|T|$. + +The _intersection dominator_ of a list of types $T_1 , \ldots , T_n$ is computed +as follows. +Let $T_{i_1} , \ldots , T_{i_m}$ be the subsequence of types $T_i$ +which are not supertypes of some other type $T_j$. +If this subsequence contains a type designator $T_c$ that refers to a class +which is not a trait, +the intersection dominator is $T_c$. Otherwise, the intersection +dominator is the first element of the subsequence, $T_{i_1}$. diff --git a/spec/04-basic-declarations-and-definitions.md b/spec/04-basic-declarations-and-definitions.md new file mode 100644 index 000000000000..65d79dd5f455 --- /dev/null +++ b/spec/04-basic-declarations-and-definitions.md @@ -0,0 +1,926 @@ +--- +title: Basic Declarations and Definitions +layout: default +chapter: 4 +--- + +# Basic Declarations and Definitions + +```ebnf +Dcl ::= ‘val’ ValDcl + | ‘var’ VarDcl + | ‘def’ FunDcl + | ‘type’ {nl} TypeDcl +PatVarDef ::= ‘val’ PatDef + | ‘var’ VarDef +Def ::= PatVarDef + | ‘def’ FunDef + | ‘type’ {nl} TypeDef + | TmplDef +``` + +A _declaration_ introduces names and assigns them types. It can +form part of a [class definition](05-classes-and-objects.html#templates) or of a +refinement in a [compound type](03-types.html#compound-types). + +A _definition_ introduces names that denote terms or types. It can +form part of an object or class definition or it can be local to a +block. Both declarations and definitions produce _bindings_ that +associate type names with type definitions or bounds, and that +associate term names with types. + +The scope of a name introduced by a declaration or definition is the +whole statement sequence containing the binding. However, there is a +restriction on forward references in blocks: In a statement sequence +$s_1 \ldots s_n$ making up a block, if a simple name in $s_i$ refers +to an entity defined by $s_j$ where $j \geq i$, then for all $s_k$ +between and including $s_i$ and $s_j$, + +- $s_k$ cannot be a variable definition. +- If $s_k$ is a value definition, it must be lazy. + + + +## Value Declarations and Definitions + +```ebnf +Dcl ::= ‘val’ ValDcl +ValDcl ::= ids ‘:’ Type +PatVarDef ::= ‘val’ PatDef +PatDef ::= Pattern2 {‘,’ Pattern2} [‘:’ Type] ‘=’ Expr +ids ::= id {‘,’ id} +``` + +A value declaration `val $x$: $T$` introduces $x$ as a name of a value of +type $T$. + +A value definition `val $x$: $T$ = $e$` defines $x$ as a +name of the value that results from the evaluation of $e$. 
+If the value definition is not recursive, the type +$T$ may be omitted, in which case the [packed type](06-expressions.html#expression-typing) of +expression $e$ is assumed. If a type $T$ is given, then $e$ is expected to +conform to it. + +Evaluation of the value definition implies evaluation of its +right-hand side $e$, unless it has the modifier `lazy`. The +effect of the value definition is to bind $x$ to the value of $e$ +converted to type $T$. A `lazy` value definition evaluates +its right hand side $e$ the first time the value is accessed. + +A _constant value definition_ is of the form + +```scala +final val x = e +``` + +where `e` is a [constant expression](06-expressions.html#constant-expressions). +The `final` modifier must be +present and no type annotation may be given. References to the +constant value `x` are themselves treated as constant expressions; in the +generated code they are replaced by the definition's right-hand side `e`. + +Value definitions can alternatively have a [pattern](08-pattern-matching.html#patterns) +as left-hand side. If $p$ is some pattern other +than a simple name or a name followed by a colon and a type, then the +value definition `val $p$ = $e$` is expanded as follows: + +1. If the pattern $p$ has bound variables $x_1 , \ldots , x_n$, where $n > 1$: + +```scala +val $\$ x$ = $e$ match {case $p$ => ($x_1 , \ldots , x_n$)} +val $x_1$ = $\$ x$._1 +$\ldots$ +val $x_n$ = $\$ x$._n . +``` + +Here, $\$ x$ is a fresh name. + +2. If $p$ has a unique bound variable $x$: + +```scala +val $x$ = $e$ match { case $p$ => $x$ } +``` + +3. If $p$ has no bound variables: + +```scala +$e$ match { case $p$ => ()} +``` + +###### Example + +The following are examples of value definitions + +```scala +val pi = 3.1415 +val pi: Double = 3.1415 // equivalent to first definition +val Some(x) = f() // a pattern definition +val x :: xs = mylist // an infix pattern definition +``` + +The last two definitions have the following expansions. + +```scala +val x = f() match { case Some(x) => x } + +val x$\$$ = mylist match { case x :: xs => (x, xs) } +val x = x$\$$._1 +val xs = x$\$$._2 +``` + +The name of any declared or defined value may not end in `_=`. + +A value declaration `val $x_1 , \ldots , x_n$: $T$` is a shorthand for the +sequence of value declarations `val $x_1$: $T$; ...; val $x_n$: $T$`. +A value definition `val $p_1 , \ldots , p_n$ = $e$` is a shorthand for the +sequence of value definitions `val $p_1$ = $e$; ...; val $p_n$ = $e$`. +A value definition `val $p_1 , \ldots , p_n: T$ = $e$` is a shorthand for the +sequence of value definitions `val $p_1: T$ = $e$; ...; val $p_n: T$ = $e$`. + +## Variable Declarations and Definitions + +```ebnf +Dcl ::= ‘var’ VarDcl +PatVarDef ::= ‘var’ VarDef +VarDcl ::= ids ‘:’ Type +VarDef ::= PatDef + | ids ‘:’ Type ‘=’ ‘_’ +``` + +A variable declaration `var $x$: $T$` is equivalent to the declarations +of both a _getter function_ $x$ *and* a _setter function_ `$x$_=`: + +```scala +def $x$: $T$ +def $x$_= ($y$: $T$): Unit +``` + +An implementation of a class may _define_ a declared variable +using a variable definition, or by defining the corresponding setter and getter methods. + +A variable definition `var $x$: $T$ = $e$` introduces a +mutable variable with type $T$ and initial value as given by the +expression $e$. The type $T$ can be omitted, in which case the type of +$e$ is assumed. If $T$ is given, then $e$ is expected to +[conform to it](06-expressions.html#expression-typing). 
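+
+As a small illustration (this sketch is not part of the specification text; the
+object name is made up), both forms of variable definition look as follows:
+
+```scala
+object VarForms {
+  var count: Int = 0     // type given: the initializer 0 must conform to Int
+  var label = "start"    // type omitted: the type of the initializer, String, is assumed
+}
+```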
+ +Variable definitions can alternatively have a [pattern](08-pattern-matching.html#patterns) +as left-hand side. A variable definition + `var $p$ = $e$` where $p$ is a pattern other +than a simple name or a name followed by a colon and a type is expanded in the same way +as a [value definition](#value-declarations-and-definitions) +`val $p$ = $e$`, except that +the free names in $p$ are introduced as mutable variables, not values. + +The name of any declared or defined variable may not end in `_=`. + +A variable definition `var $x$: $T$ = _` can appear only as a member of a template. +It introduces a mutable field with type $T$ and a default initial value. +The default value depends on the type $T$ as follows: + +| default | type $T$ | +|----------|------------------------------------| +|`0` | `Int` or one of its subrange types | +|`0L` | `Long` | +|`0.0f` | `Float` | +|`0.0d` | `Double` | +|`false` | `Boolean` | +|`()` | `Unit` | +|`null` | all other types | + +When they occur as members of a template, both forms of variable +definition also introduce a getter function $x$ which returns the +value currently assigned to the variable, as well as a setter function +`$x$_=` which changes the value currently assigned to the variable. +The functions have the same signatures as for a variable declaration. +The template then has these getter and setter functions as +members, whereas the original variable cannot be accessed directly as +a template member. + +###### Example + +The following example shows how _properties_ can be +simulated in Scala. It defines a class `TimeOfDayVar` of time +values with updatable integer fields representing hours, minutes, and +seconds. Its implementation contains tests that allow only legal +values to be assigned to these fields. The user code, on the other +hand, accesses these fields just like normal variables. + +```scala +class TimeOfDayVar { + private var h: Int = 0 + private var m: Int = 0 + private var s: Int = 0 + + def hours = h + def hours_= (h: Int) = if (0 <= h && h < 24) this.h = h + else throw new DateError() + + def minutes = m + def minutes_= (m: Int) = if (0 <= m && m < 60) this.m = m + else throw new DateError() + + def seconds = s + def seconds_= (s: Int) = if (0 <= s && s < 60) this.s = s + else throw new DateError() +} +val d = new TimeOfDayVar +d.hours = 8; d.minutes = 30; d.seconds = 0 +d.hours = 25 // throws a DateError exception +``` + +A variable declaration `var $x_1 , \ldots , x_n$: $T$` is a shorthand for the +sequence of variable declarations `var $x_1$: $T$; ...; var $x_n$: $T$`. +A variable definition `var $x_1 , \ldots , x_n$ = $e$` is a shorthand for the +sequence of variable definitions `var $x_1$ = $e$; ...; var $x_n$ = $e$`. +A variable definition `var $x_1 , \ldots , x_n: T$ = $e$` is a shorthand for +the sequence of variable definitions +`var $x_1: T$ = $e$; ...; var $x_n: T$ = $e$`. + +## Type Declarations and Type Aliases + + + +```ebnf +Dcl ::= ‘type’ {nl} TypeDcl +TypeDcl ::= id [TypeParamClause] [‘>:’ Type] [‘<:’ Type] +Def ::= type {nl} TypeDef +TypeDef ::= id [TypeParamClause] ‘=’ Type +``` + +A _type declaration_ `type $t$[$\mathit{tps}\,$] >: $L$ <: $U$` declares +$t$ to be an abstract type with lower bound type $L$ and upper bound +type $U$. If the type parameter clause `[$\mathit{tps}\,$]` is omitted, $t$ abstracts over a first-order type, otherwise $t$ stands for a type constructor that accepts type arguments as described by the type parameter clause. 
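+
+For instance, the following sketch (not part of the specification text; the names
+are illustrative) declares both a first-order abstract type and an abstract type
+constructor:
+
+```scala
+trait Repository {
+  type Id <: AnyRef               // first-order abstract type with an upper bound
+  type Coll[X] <: Iterable[X]     // abstract type constructor taking one type argument
+}
+```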
+ +If a type declaration appears as a member declaration of a +type, implementations of the type may implement $t$ with any type $T$ +for which $L <: T <: U$. It is a compile-time error if +$L$ does not conform to $U$. Either or both bounds may be omitted. +If the lower bound $L$ is absent, the bottom type +`scala.Nothing` is assumed. If the upper bound $U$ is absent, +the top type `scala.Any` is assumed. + +A type constructor declaration imposes additional restrictions on the +concrete types for which $t$ may stand. Besides the bounds $L$ and +$U$, the type parameter clause may impose higher-order bounds and +variances, as governed by the [conformance of type constructors](03-types.html#conformance). + +The scope of a type parameter extends over the bounds `>: $L$ <: $U$` and the type parameter clause $\mathit{tps}$ itself. A +higher-order type parameter clause (of an abstract type constructor +$tc$) has the same kind of scope, restricted to the declaration of the +type parameter $tc$. + +To illustrate nested scoping, these declarations are all equivalent: `type t[m[x] <: Bound[x], Bound[x]]`, `type t[m[x] <: Bound[x], Bound[y]]` and `type t[m[x] <: Bound[x], Bound[_]]`, as the scope of, e.g., the type parameter of $m$ is limited to the declaration of $m$. In all of them, $t$ is an abstract type member that abstracts over two type constructors: $m$ stands for a type constructor that takes one type parameter and that must be a subtype of $Bound$, $t$'s second type constructor parameter. `t[MutableList, Iterable]` is a valid use of $t$. + +A _type alias_ `type $t$ = $T$` defines $t$ to be an alias +name for the type $T$. The left hand side of a type alias may +have a type parameter clause, e.g. `type $t$[$\mathit{tps}\,$] = $T$`. The scope +of a type parameter extends over the right hand side $T$ and the +type parameter clause $\mathit{tps}$ itself. + +The scope rules for [definitions](#basic-declarations-and-definitions) +and [type parameters](#function-declarations-and-definitions) +make it possible that a type name appears in its +own bound or in its right-hand side. However, it is a static error if +a type alias refers recursively to the defined type constructor itself. +That is, the type $T$ in a type alias `type $t$[$\mathit{tps}\,$] = $T$` may not +refer directly or indirectly to the name $t$. It is also an error if +an abstract type is directly or indirectly its own upper or lower bound. + +###### Example + +The following are legal type declarations and definitions: + +```scala +type IntList = List[Integer] +type T <: Comparable[T] +type Two[A] = Tuple2[A, A] +type MyCollection[+X] <: Iterable[X] +``` + +The following are illegal: + +```scala +type Abs = Comparable[Abs] // recursive type alias + +type S <: T // S, T are bounded by themselves. +type T <: S + +type T >: Comparable[T.That] // Cannot select from T. + // T is a type, not a value +type MyCollection <: Iterable // Type constructor members must explicitly + // state their type parameters. +``` + +If a type alias `type $t$[$\mathit{tps}\,$] = $S$` refers to a class type +$S$, the name $t$ can also be used as a constructor for +objects of type $S$. 
+ +###### Example + +The `Predef` object contains a definition which establishes `Pair` +as an alias of the parameterized class `Tuple2`: + +```scala +type Pair[+A, +B] = Tuple2[A, B] +object Pair { + def apply[A, B](x: A, y: B) = Tuple2(x, y) + def unapply[A, B](x: Tuple2[A, B]): Option[Tuple2[A, B]] = Some(x) +} +``` + +As a consequence, for any two types $S$ and $T$, the type +`Pair[$S$, $T\,$]` is equivalent to the type `Tuple2[$S$, $T\,$]`. +`Pair` can also be used as a constructor instead of `Tuple2`, as in: + +```scala +val x: Pair[Int, String] = new Pair(1, "abc") +``` + +## Type Parameters + +```ebnf +TypeParamClause ::= ‘[’ VariantTypeParam {‘,’ VariantTypeParam} ‘]’ +VariantTypeParam ::= {Annotation} [‘+’ | ‘-’] TypeParam +TypeParam ::= (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type] [‘:’ Type] +``` + +Type parameters appear in type definitions, class definitions, and +function definitions. In this section we consider only type parameter +definitions with lower bounds `>: $L$` and upper bounds +`<: $U$` whereas a discussion of context bounds +`: $U$` and view bounds `<% $U$` +is deferred to [here](07-implicit-parameters-and-views.html#context-bounds-and-view-bounds). + +The most general form of a first-order type parameter is +`$@a_1 \ldots @a_n$ $\pm$ $t$ >: $L$ <: $U$`. +Here, $L$, and $U$ are lower and upper bounds that +constrain possible type arguments for the parameter. It is a +compile-time error if $L$ does not conform to $U$. $\pm$ is a _variance_, i.e. an optional prefix of either `+`, or +`-`. One or more annotations may precede the type parameter. + + + + + +The names of all type parameters must be pairwise different in their enclosing type parameter clause. The scope of a type parameter includes in each case the whole type parameter clause. Therefore it is possible that a type parameter appears as part of its own bounds or the bounds of other type parameters in the same clause. However, a type parameter may not be bounded directly or indirectly by itself. + +A type constructor parameter adds a nested type parameter clause to the type parameter. The most general form of a type constructor parameter is `$@a_1\ldots@a_n$ $\pm$ $t[\mathit{tps}\,]$ >: $L$ <: $U$`. + +The above scoping restrictions are generalized to the case of nested type parameter clauses, which declare higher-order type parameters. Higher-order type parameters (the type parameters of a type parameter $t$) are only visible in their immediately surrounding parameter clause (possibly including clauses at a deeper nesting level) and in the bounds of $t$. Therefore, their names must only be pairwise different from the names of other visible parameters. Since the names of higher-order type parameters are thus often irrelevant, they may be denoted with a `‘_’`, which is nowhere visible. + +###### Example +Here are some well-formed type parameter clauses: + +```scala +[S, T] +[@specialized T, U] +[Ex <: Throwable] +[A <: Comparable[B], B <: A] +[A, B >: A, C >: A <: B] +[M[X], N[X]] +[M[_], N[_]] // equivalent to previous clause +[M[X <: Bound[X]], Bound[_]] +[M[+X] <: Iterable[X]] +``` + +The following type parameter clauses are illegal: + +```scala +[A >: A] // illegal, `A' has itself as bound +[A <: B, B <: C, C <: A] // illegal, `A' has itself as bound +[A, B, C >: A <: B] // illegal lower bound `A' of `C' does + // not conform to upper bound `B'. 
+``` + +## Variance Annotations + +Variance annotations indicate how instances of parameterized types +vary with respect to [subtyping](03-types.html#conformance). A +‘+’ variance indicates a covariant dependency, a +‘-’ variance indicates a contravariant dependency, and a +missing variance indication indicates an invariant dependency. + +A variance annotation constrains the way the annotated type variable +may appear in the type or class which binds the type parameter. In a +type definition `type $T$[$\mathit{tps}\,$] = $S$`, or a type +declaration `type $T$[$\mathit{tps}\,$] >: $L$ <: $U$` type parameters labeled +‘+’ must only appear in covariant position whereas +type parameters labeled ‘-’ must only appear in contravariant +position. Analogously, for a class definition +`class $C$[$\mathit{tps}\,$]($\mathit{ps}\,$) extends $T$ { $x$: $S$ => ...}`, +type parameters labeled +‘+’ must only appear in covariant position in the +self type $S$ and the template $T$, whereas type +parameters labeled ‘-’ must only appear in contravariant +position. + +The variance position of a type parameter in a type or template is +defined as follows. Let the opposite of covariance be contravariance, +and the opposite of invariance be itself. The top-level of the type +or template is always in covariant position. The variance position +changes at the following constructs. + +- The variance position of a method parameter is the opposite of the + variance position of the enclosing parameter clause. +- The variance position of a type parameter is the opposite of the + variance position of the enclosing type parameter clause. +- The variance position of the lower bound of a type declaration or type parameter + is the opposite of the variance position of the type declaration or parameter. +- The type of a mutable variable is always in invariant position. +- The right-hand side of a type alias is always in invariant position. +- The prefix $S$ of a type selection `$S$#$T$` is always in invariant position. +- For a type argument $T$ of a type `$S$[$\ldots T \ldots$ ]`: If the + corresponding type parameter is invariant, then $T$ is in + invariant position. If the corresponding type parameter is + contravariant, the variance position of $T$ is the opposite of + the variance position of the enclosing type `$S$[$\ldots T \ldots$ ]`. + + + +References to the type parameters in +[object-private or object-protected values, types, variables, or methods](05-classes-and-objects.html#modifiers) of the class are not +checked for their variance position. In these members the type parameter may +appear anywhere without restricting its legal variance annotations. + +###### Example +The following variance annotation is legal. + +```scala +abstract class P[+A, +B] { + def fst: A; def snd: B +} +``` + +With this variance annotation, type instances +of $P$ subtype covariantly with respect to their arguments. +For instance, + +```scala +P[IOException, String] <: P[Throwable, AnyRef] +``` + +If the members of $P$ are mutable variables, +the same variance annotation becomes illegal. + +```scala +abstract class Q[+A, +B](x: A, y: B) { + var fst: A = x // **** error: illegal variance: + var snd: B = y // `A', `B' occur in invariant position. 
+} +``` + +If the mutable variables are object-private, the class definition +becomes legal again: + +```scala +abstract class R[+A, +B](x: A, y: B) { + private[this] var fst: A = x // OK + private[this] var snd: B = y // OK +} +``` + +###### Example + +The following variance annotation is illegal, since $a$ appears +in contravariant position in the parameter of `append`: + +```scala +abstract class Sequence[+A] { + def append(x: Sequence[A]): Sequence[A] + // **** error: illegal variance: + // `A' occurs in contravariant position. +} +``` + +The problem can be avoided by generalizing the type of `append` +by means of a lower bound: + +```scala +abstract class Sequence[+A] { + def append[B >: A](x: Sequence[B]): Sequence[B] +} +``` + +### Example + +```scala +abstract class OutputChannel[-A] { + def write(x: A): Unit +} +``` + +With that annotation, we have that +`OutputChannel[AnyRef]` conforms to `OutputChannel[String]`. +That is, a +channel on which one can write any object can substitute for a channel +on which one can write only strings. + +## Function Declarations and Definitions + +```ebnf +Dcl ::= ‘def’ FunDcl +FunDcl ::= FunSig ‘:’ Type +Def ::= ‘def’ FunDef +FunDef ::= FunSig [‘:’ Type] ‘=’ Expr +FunSig ::= id [FunTypeParamClause] ParamClauses +FunTypeParamClause ::= ‘[’ TypeParam {‘,’ TypeParam} ‘]’ +ParamClauses ::= {ParamClause} [[nl] ‘(’ ‘implicit’ Params ‘)’] +ParamClause ::= [nl] ‘(’ [Params] ‘)’} +Params ::= Param {‘,’ Param} +Param ::= {Annotation} id [‘:’ ParamType] [‘=’ Expr] +ParamType ::= Type + | ‘=>’ Type + | Type ‘*’ +``` + +A function declaration has the form `def $f\,\mathit{psig}$: $T$`, where +$f$ is the function's name, $\mathit{psig}$ is its parameter +signature and $T$ is its result type. A function definition +`def $f\,\mathit{psig}$: $T$ = $e$` also includes a _function body_ $e$, +i.e. an expression which defines the function's result. A parameter +signature consists of an optional type parameter clause `[$\mathit{tps}\,$]`, +followed by zero or more value parameter clauses +`($\mathit{ps}_1$)$\ldots$($\mathit{ps}_n$)`. Such a declaration or definition +introduces a value with a (possibly polymorphic) method type whose +parameter types and result type are as given. + +The type of the function body is expected to [conform](06-expressions.html#expression-typing) +to the function's declared +result type, if one is given. If the function definition is not +recursive, the result type may be omitted, in which case it is +determined from the packed type of the function body. + +A type parameter clause $\mathit{tps}$ consists of one or more +[type declarations](#type-declarations-and-type-aliases), which introduce type +parameters, possibly with bounds. The scope of a type parameter includes +the whole signature, including any of the type parameter bounds as +well as the function body, if it is present. + +A value parameter clause $\mathit{ps}$ consists of zero or more formal +parameter bindings such as `$x$: $T$` or `$x: T = e$`, which bind value +parameters and associate them with their types. Each value parameter +declaration may optionally define a default argument. The default argument +expression $e$ is type-checked with an expected type $T'$ obtained +by replacing all occurences of the function's type parameters in $T$ by +the undefined type. + +For every parameter $p_{i,j}$ with a default argument a method named +`$f\$$default$\$$n` is generated which computes the default argument +expression. 
Here, $n$ denotes the parameter's position in the method +declaration. These methods are parametrized by the type parameter clause +`[$\mathit{tps}\,$]` and all value parameter clauses +`($\mathit{ps}_1$)$\ldots$($\mathit{ps}_{i-1}$)` preceding $p_{i,j}$. +The `$f\$$default$\$$n` methods are inaccessible for +user programs. + +The scope of a formal value parameter name $x$ comprises all subsequent +parameter clauses, as well as the method return type and the function body, if +they are given. Both type parameter names and value parameter names must +be pairwise distinct. + +###### Example +In the method + +```scala +def compare[T](a: T = 0)(b: T = a) = (a == b) +``` + +the default expression `0` is type-checked with an undefined expected +type. When applying `compare()`, the default value `0` is inserted +and `T` is instantiated to `Int`. The methods computing the default +arguments have the form: + +```scala +def compare$\$$default$\$$1[T]: Int = 0 +def compare$\$$default$\$$2[T](a: T): T = a +``` + +### By-Name Parameters + +```ebnf +ParamType ::= ‘=>’ Type +``` + +The type of a value parameter may be prefixed by `=>`, e.g. +`$x$: => $T$`. The type of such a parameter is then the +parameterless method type `=> $T$`. This indicates that the +corresponding argument is not evaluated at the point of function +application, but instead is evaluated at each use within the +function. That is, the argument is evaluated using _call-by-name_. + +The by-name modifier is disallowed for parameters of classes that +carry a `val` or `var` prefix, including parameters of case +classes for which a `val` prefix is implicitly generated. The +by-name modifier is also disallowed for +[implicit parameters](07-implicit-parameters-and-views.html#implicit-parameters). + +###### Example +The declaration + +```scala +def whileLoop (cond: => Boolean) (stat: => Unit): Unit +``` + +indicates that both parameters of `whileLoop` are evaluated using +call-by-name. + +### Repeated Parameters + +```ebnf +ParamType ::= Type ‘*’ +``` + +The last value parameter of a parameter section may be suffixed by +`'*'`, e.g. `(..., $x$:$T$*)`. The type of such a +_repeated_ parameter inside the method is then the sequence type +`scala.Seq[$T$]`. Methods with repeated parameters +`$T$*` take a variable number of arguments of type $T$. +That is, if a method $m$ with type +`($p_1:T_1 , \ldots , p_n:T_n, p_s:S$*)$U$` is applied to arguments +$(e_1 , \ldots , e_k)$ where $k \geq n$, then $m$ is taken in that application +to have type $(p_1:T_1 , \ldots , p_n:T_n, p_s:S , \ldots , p_{s'}S)U$, with +$k - n$ occurrences of type +$S$ where any parameter names beyond $p_s$ are fresh. The only exception to +this rule is if the last argument is +marked to be a _sequence argument_ via a `_*` type +annotation. If $m$ above is applied to arguments +`($e_1 , \ldots , e_n, e'$: _*)`, then the type of $m$ in +that application is taken to be +`($p_1:T_1, \ldots , p_n:T_n,p_{s}:$scala.Seq[$S$])`. + +It is not allowed to define any default arguments in a parameter section +with a repeated parameter. + +###### Example +The following method definition computes the sum of the squares of a +variable number of integer arguments. + +```scala +def sum(args: Int*) = { + var result = 0 + for (arg <- args) result += arg + result +} +``` + +The following applications of this method yield `0`, `1`, +`6`, in that order. 
+ +```scala +sum() +sum(1) +sum(1, 2, 3) +``` + +Furthermore, assume the definition: + +```scala +val xs = List(1, 2, 3) +``` + +The following application of method `sum` is ill-formed: + +```scala +sum(xs) // ***** error: expected: Int, found: List[Int] +``` + +By contrast, the following application is well formed and yields again +the result `6`: + +```scala +sum(xs: _*) +``` + +### Procedures + +```ebnf +FunDcl ::= FunSig +FunDef ::= FunSig [nl] ‘{’ Block ‘}’ +``` + +Special syntax exists for procedures, i.e. functions that return the +`Unit` value `()`. +A procedure declaration is a function declaration where the result type +is omitted. The result type is then implicitly completed to the +`Unit` type. E.g., `def $f$($\mathit{ps}$)` is equivalent to +`def $f$($\mathit{ps}$): Unit`. + +A procedure definition is a function definition where the result type +and the equals sign are omitted; its defining expression must be a block. +E.g., `def $f$($\mathit{ps}$) {$\mathit{stats}$}` is equivalent to +`def $f$($\mathit{ps}$): Unit = {$\mathit{stats}$}`. + +###### Example +Here is a declaration and a definition of a procedure named `write`: + +```scala +trait Writer { + def write(str: String) +} +object Terminal extends Writer { + def write(str: String) { System.out.println(str) } +} +``` + +The code above is implicitly completed to the following code: + +```scala +trait Writer { + def write(str: String): Unit +} +object Terminal extends Writer { + def write(str: String): Unit = { System.out.println(str) } +} +``` + +### Method Return Type Inference + +A class member definition $m$ that overrides some other function $m'$ +in a base class of $C$ may leave out the return type, even if it is +recursive. In this case, the return type $R'$ of the overridden +function $m'$, seen as a member of $C$, is taken as the return type of +$m$ for each recursive invocation of $m$. That way, a type $R$ for the +right-hand side of $m$ can be determined, which is then taken as the +return type of $m$. Note that $R$ may be different from $R'$, as long +as $R$ conforms to $R'$. + +###### Example +Assume the following definitions: + +```scala +trait I { + def factorial(x: Int): Int +} +class C extends I { + def factorial(x: Int) = if (x == 0) 1 else x * factorial(x - 1) +} +``` + +Here, it is OK to leave out the result type of `factorial` +in `C`, even though the method is recursive. + + + +## Import Clauses + +```ebnf +Import ::= ‘import’ ImportExpr {‘,’ ImportExpr} +ImportExpr ::= StableId ‘.’ (id | ‘_’ | ImportSelectors) +ImportSelectors ::= ‘{’ {ImportSelector ‘,’} + (ImportSelector | ‘_’) ‘}’ +ImportSelector ::= id [‘=>’ id | ‘=>’ ‘_’] +``` + +An import clause has the form `import $p$.$I$` where $p$ is a +[stable identifier](03-types.html#paths) and $I$ is an import expression. +The import expression determines a set of names of importable members of $p$ +which are made available without qualification. A member $m$ of $p$ is +_importable_ if it is not [object-private](05-classes-and-objects.html#modifiers). +The most general form of an import expression is a list of _import selectors_ + +```scala +{ $x_1$ => $y_1 , \ldots , x_n$ => $y_n$, _ } +``` + +for $n \geq 0$, where the final wildcard `‘_’` may be absent. It +makes available each importable member `$p$.$x_i$` under the unqualified name +$y_i$. I.e. every import selector `$x_i$ => $y_i$` renames +`$p$.$x_i$` to +$y_i$. 
If a final wildcard is present, all importable members $z$ of +$p$ other than `$x_1 , \ldots , x_n,y_1 , \ldots , y_n$` are also made available +under their own unqualified names. + +Import selectors work in the same way for type and term members. For +instance, an import clause `import $p$.{$x$ => $y\,$}` renames the term +name `$p$.$x$` to the term name $y$ and the type name `$p$.$x$` +to the type name $y$. At least one of these two names must +reference an importable member of $p$. + +If the target in an import selector is a wildcard, the import selector +hides access to the source member. For instance, the import selector +`$x$ => _` “renames” $x$ to the wildcard symbol (which is +unaccessible as a name in user programs), and thereby effectively +prevents unqualified access to $x$. This is useful if there is a +final wildcard in the same import selector list, which imports all +members not mentioned in previous import selectors. + +The scope of a binding introduced by an import-clause starts +immediately after the import clause and extends to the end of the +enclosing block, template, package clause, or compilation unit, +whichever comes first. + +Several shorthands exist. An import selector may be just a simple name +$x$. In this case, $x$ is imported without renaming, so the +import selector is equivalent to `$x$ => $x$`. Furthermore, it is +possible to replace the whole import selector list by a single +identifier or wildcard. The import clause `import $p$.$x$` is +equivalent to `import $p$.{$x\,$}`, i.e. it makes available without +qualification the member $x$ of $p$. The import clause +`import $p$._` is equivalent to +`import $p$.{_}`, +i.e. it makes available without qualification all members of $p$ +(this is analogous to `import $p$.*` in Java). + +An import clause with multiple import expressions +`import $p_1$.$I_1 , \ldots , p_n$.$I_n$` is interpreted as a +sequence of import clauses +`import $p_1$.$I_1$; $\ldots$; import $p_n$.$I_n$`. + +###### Example +Consider the object definition: + +```scala +object M { + def z = 0, one = 1 + def add(x: Int, y: Int): Int = x + y +} +``` + +Then the block + +```scala +{ import M.{one, z => zero, _}; add(zero, one) } +``` + +is equivalent to the block + +```scala +{ M.add(M.z, M.one) } +``` diff --git a/spec/05-classes-and-objects.md b/spec/05-classes-and-objects.md new file mode 100644 index 000000000000..fd20d6ae2c6f --- /dev/null +++ b/spec/05-classes-and-objects.md @@ -0,0 +1,1153 @@ +--- +title: Classes and Objects +layout: default +chapter: 5 +--- + +# Classes and Objects + +```ebnf +TmplDef ::= [`case'] `class' ClassDef + | [`case'] `object' ObjectDef + | `trait' TraitDef +``` + +[Classes](#class-definitions) and [objects](#object-definitions) +are both defined in terms of _templates_. + +## Templates + +```ebnf +ClassTemplate ::= [EarlyDefs] ClassParents [TemplateBody] +TraitTemplate ::= [EarlyDefs] TraitParents [TemplateBody] +ClassParents ::= Constr {`with' AnnotType} +TraitParents ::= AnnotType {`with' AnnotType} +TemplateBody ::= [nl] `{' [SelfType] TemplateStat {semi TemplateStat} `}' +SelfType ::= id [`:' Type] `=>' + | this `:' Type `=>' +``` + +A template defines the type signature, behavior and initial state of a +trait or class of objects or of a single object. Templates form part of +instance creation expressions, class definitions, and object +definitions. 
A template +`$sc$ with $mt_1$ with $\ldots$ with $mt_n$ { $\mathit{stats}$ }` +consists of a constructor invocation $sc$ +which defines the template's _superclass_, trait references +`$mt_1 , \ldots , mt_n$` $(n \geq 0)$, which define the +template's _traits_, and a statement sequence $\mathit{stats}$ which +contains initialization code and additional member definitions for the +template. + +Each trait reference $mt_i$ must denote a [trait](#traits). +By contrast, the superclass constructor $sc$ normally refers to a +class which is not a trait. It is possible to write a list of +parents that starts with a trait reference, e.g. +`$mt_1$ with $\ldots$ with $mt_n$`. In that case the list +of parents is implicitly extended to include the supertype of $mt_1$ +as first parent type. The new supertype must have at least one +constructor that does not take parameters. In the following, we will +always assume that this implicit extension has been performed, so that +the first parent class of a template is a regular superclass +constructor, not a trait reference. + +The list of parents of a template must be well-formed. This means that +the class denoted by the superclass constructor $sc$ must be a +subclass of the superclasses of all the traits $mt_1 , \ldots , mt_n$. +In other words, the non-trait classes inherited by a template form a +chain in the inheritance hierarchy which starts with the template's +superclass. + +The _least proper supertype_ of a template is the class type or +[compound type](03-types.html#compound-types) consisting of all its parent +class types. + +The statement sequence $\mathit{stats}$ contains member definitions that +define new members or overwrite members in the parent classes. If the +template forms part of an abstract class or trait definition, the +statement part $\mathit{stats}$ may also contain declarations of abstract +members. If the template forms part of a concrete class definition, +$\mathit{stats}$ may still contain declarations of abstract type members, but +not of abstract term members. Furthermore, $\mathit{stats}$ may in any case +also contain expressions; these are executed in the order they are +given as part of the initialization of a template. + +The sequence of template statements may be prefixed with a formal +parameter definition and an arrow, e.g. `$x$ =>`, or +`$x$:$T$ =>`. If a formal parameter is given, it can be +used as an alias for the reference `this` throughout the +body of the template. +If the formal parameter comes with a type $T$, this definition affects +the _self type_ $S$ of the underlying class or object as follows: Let $C$ be the type +of the class or trait or object defining the template. +If a type $T$ is given for the formal self parameter, $S$ +is the greatest lower bound of $T$ and $C$. +If no type $T$ is given, $S$ is just $C$. +Inside the template, the type of `this` is assumed to be $S$. + +The self type of a class or object must conform to the self types of +all classes which are inherited by the template $t$. + +A second form of self type annotation reads just +`this: $S$ =>`. It prescribes the type $S$ for `this` +without introducing an alias name for it. 
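+
+The following sketch (not part of the specification text; the trait names are made
+up) shows both forms of self type annotation:
+
+```scala
+trait Greeting { def name: String }
+
+trait Greeter { self: Greeting =>   // first form: alias `self` with declared type Greeting
+  def greet: String = "Hello, " + name
+}
+
+trait Logger { this: Greeting =>    // second form: a type for `this`, no alias is introduced
+  def log(): Unit = println("greeting " + name)
+}
+```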
+ +###### Example +Consider the following class definitions: + +```scala +class Base extends Object {} +trait Mixin extends Base {} +object O extends Mixin {} +``` + +In this case, the definition of `O` is expanded to: + +```scala +object O extends Base with Mixin {} +``` + + + +**Inheriting from Java Types** A template may have a Java class as its superclass and Java interfaces as its +mixins. + +**Template Evaluation** Consider a template `$sc$ with $mt_1$ with $mt_n$ { $\mathit{stats}$ }`. + +If this is the template of a [trait](#traits) then its _mixin-evaluation_ +consists of an evaluation of the statement sequence $\mathit{stats}$. + +If this is not a template of a trait, then its _evaluation_ +consists of the following steps. + +- First, the superclass constructor $sc$ is + [evaluated](#constructor-invocations). +- Then, all base classes in the template's [linearization](#class-linearization) + up to the template's superclass denoted by $sc$ are + mixin-evaluated. Mixin-evaluation happens in reverse order of + occurrence in the linearization. +- Finally the statement sequence $\mathit{stats}\,$ is evaluated. + +###### Delayed Initialization +The initialization code of an object or class (but not a trait) that follows +the superclass +constructor invocation and the mixin-evaluation of the template's base +classes is passed to a special hook, which is inaccessible from user +code. Normally, that hook simply executes the code that is passed to +it. But templates inheriting the `scala.DelayedInit` trait +can override the hook by re-implementing the `delayedInit` +method, which is defined as follows: + +```scala +def delayedInit(body: => Unit) +``` + +### Constructor Invocations + +```ebnf +Constr ::= AnnotType {`(' [Exprs] `)'} +``` + +Constructor invocations define the type, members, and initial state of +objects created by an instance creation expression, or of parts of an +object's definition which are inherited by a class or object +definition. A constructor invocation is a function application +`$x$.$c$[$\mathit{targs}$]($\mathit{args}_1$)$\ldots$($\mathit{args}_n$)`, where $x$ is a +[stable identifier](03-types.html#paths), $c$ is a type name which either designates a +class or defines an alias type for one, $\mathit{targs}$ is a type argument +list, $\mathit{args}_1 , \ldots , \mathit{args}_n$ are argument lists, and there is a +constructor of that class which is [applicable](06-expressions.html#function-applications) +to the given arguments. If the constructor invocation uses named or +default arguments, it is transformed into a block expression using the +same transformation as described [here](sec:named-default). + +The prefix `$x$.` can be omitted. A type argument list +can be given only if the class $c$ takes type parameters. Even then +it can be omitted, in which case a type argument list is synthesized +using [local type inference](06-expressions.html#local-type-inference). If no explicit +arguments are given, an empty list `()` is implicitly supplied. + +An evaluation of a constructor invocation +`$x$.$c$[$\mathit{targs}$]($\mathit{args}_1$)$\ldots$($\mathit{args}_n$)` +consists of the following steps: + +- First, the prefix $x$ is evaluated. +- Then, the arguments $\mathit{args}_1 , \ldots , \mathit{args}_n$ are evaluated from + left to right. +- Finally, the class being constructed is initialized by evaluating the + template of the class referred to by $c$. 
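+
+As an illustration of the shape
+`$x$.$c$[$\mathit{targs}$]($\mathit{args}_1$)$\ldots$($\mathit{args}_n$)` and of the
+evaluation steps above (this sketch is not part of the specification text; the names
+are made up):
+
+```scala
+object boxes {
+  class Box[A](val value: A)(val label: String)
+}
+
+// The prefix `boxes` is evaluated first, then the arguments 42 and "answer"
+// from left to right, and finally the template of class Box is evaluated.
+val b = new boxes.Box[Int](42)("answer")
+```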
+ +### Class Linearization + +The classes reachable through transitive closure of the direct +inheritance relation from a class $C$ are called the _base classes_ of $C$. Because of mixins, the inheritance relationship +on base classes forms in general a directed acyclic graph. A +linearization of this graph is defined as follows. + +###### Definition: linearization +Let $C$ be a class with template +`$C_1$ with ... with $C_n$ { $\mathit{stats}$ }`. +The _linearization_ of $C$, $\mathcal{L}(C)$ is defined as follows: + +$$\mathcal{L}(C) = C, \mathcal{L}(C_n) \; \vec{+} \; \ldots \; \vec{+} \; \mathcal{L}(C_1)$$ + +Here $\vec{+}$ denotes concatenation where elements of the right operand +replace identical elements of the left operand: + +$$ +\begin{array}{lcll} +\{a, A\} \;\vec{+}\; B &=& a, (A \;\vec{+}\; B) &{\bf if} \; a \not\in B \\\\ + &=& A \;\vec{+}\; B &{\bf if} \; a \in B +\end{array} +$$ + +###### Example +Consider the following class definitions. + +```scala +abstract class AbsIterator extends AnyRef { ... } +trait RichIterator extends AbsIterator { ... } +class StringIterator extends AbsIterator { ... } +class Iter extends StringIterator with RichIterator { ... } +``` + +Then the linearization of class `Iter` is + +```scala +{ Iter, RichIterator, StringIterator, AbsIterator, AnyRef, Any } +``` + +Note that the linearization of a class refines the inheritance +relation: if $C$ is a subclass of $D$, then $C$ precedes $D$ in any +linearization where both $C$ and $D$ occur. +[Linearization](#definition:-linearization) also satisfies the property that +a linearization of a class always contains the linearization of its direct superclass as a suffix. + +For instance, the linearization of `StringIterator` is + +```scala +{ StringIterator, AbsIterator, AnyRef, Any } +``` + +which is a suffix of the linearization of its subclass `Iter`. +The same is not true for the linearization of mixins. +For instance, the linearization of `RichIterator` is + +```scala +{ RichIterator, AbsIterator, AnyRef, Any } +``` + +which is not a suffix of the linearization of `Iter`. + +### Class Members + +A class $C$ defined by a template `$C_1$ with $\ldots$ with $C_n$ { $\mathit{stats}$ }` +can define members in its statement sequence +$\mathit{stats}$ and can inherit members from all parent classes. Scala +adopts Java and C\#'s conventions for static overloading of +methods. It is thus possible that a class defines and/or inherits +several methods with the same name. To decide whether a defined +member of a class $C$ overrides a member of a parent class, or whether +the two co-exist as overloaded variants in $C$, Scala uses the +following definition of _matching_ on members: + +###### Definition: matching +A member definition $M$ _matches_ a member definition $M'$, if $M$ +and $M'$ bind the same name, and one of following holds. + +1. Neither $M$ nor $M'$ is a method definition. +2. $M$ and $M'$ define both monomorphic methods with equivalent argument types. +3. $M$ defines a parameterless method and $M'$ defines a method + with an empty parameter list `()` or _vice versa_. +4. $M$ and $M'$ define both polymorphic methods with + equal number of argument types $\overline T$, $\overline T'$ + and equal numbers of type parameters + $\overline t$, $\overline t'$, say, and $\overline T' = [\overline t'/\overline t]\overline T$. + + + +Member definitions fall into two categories: concrete and abstract. +Members of class $C$ are either _directly defined_ (i.e. 
they appear in +$C$'s statement sequence $\mathit{stats}$) or they are _inherited_. There are two rules +that determine the set of members of a class, one for each category: + +A _concrete member_ of a class $C$ is any concrete definition $M$ in +some class $C_i \in \mathcal{L}(C)$, except if there is a preceding class +$C_j \in \mathcal{L}(C)$ where $j < i$ which directly defines a concrete +member $M'$ matching $M$. + +An _abstract member_ of a class $C$ is any abstract definition $M$ +in some class $C_i \in \mathcal{L}(C)$, except if $C$ contains already a +concrete member $M'$ matching $M$, or if there is a preceding class +$C_j \in \mathcal{L}(C)$ where $j < i$ which directly defines an abstract +member $M'$ matching $M$. + +This definition also determines the [overriding](#overriding) relationships +between matching members of a class $C$ and its parents. +First, a concrete definition always overrides an abstract definition. +Second, for definitions $M$ and $M$' which are both concrete or both abstract, +$M$ overrides $M'$ if $M$ appears in a class that precedes (in the +linearization of $C$) the class in which $M'$ is defined. + +It is an error if a template directly defines two matching members. It +is also an error if a template contains two members (directly defined +or inherited) with the same name and the same [erased type](03-types.html#type-erasure). +Finally, a template is not allowed to contain two methods (directly +defined or inherited) with the same name which both define default arguments. + +###### Example +Consider the trait definitions: + +```scala +trait A { def f: Int } +trait B extends A { def f: Int = 1 ; def g: Int = 2 ; def h: Int = 3 } +trait C extends A { override def f: Int = 4 ; def g: Int } +trait D extends B with C { def h: Int } +``` + +Then trait `D` has a directly defined abstract member `h`. It +inherits member `f` from trait `C` and member `g` from +trait `B`. + +### Overriding + + + +A member $M$ of class $C$ that [matches](#class-members) +a non-private member $M'$ of a +base class of $C$ is said to _override_ that member. In this case +the binding of the overriding member $M$ must [subsume](03-types.html#conformance) +the binding of the overridden member $M'$. +Furthermore, the following restrictions on modifiers apply to $M$ and +$M'$: + +- $M'$ must not be labeled `final`. +- $M$ must not be [`private`](#modifiers). +- If $M$ is labeled `private[$C$]` for some enclosing class or package $C$, + then $M'$ must be labeled `private[$C'$]` for some class or package $C'$ where + $C'$ equals $C$ or $C'$ is contained in $C$. + + +- If $M$ is labeled `protected`, then $M'$ must also be + labeled `protected`. +- If $M'$ is not an abstract member, then $M$ must be labeled `override`. + Furthermore, one of two possibilities must hold: + - either $M$ is defined in a subclass of the class where is $M'$ is defined, + - or both $M$ and $M'$ override a third member $M''$ which is defined + in a base class of both the classes containing $M$ and $M'$ +- If $M'$ is [incomplete](#modifiers) in $C$ then $M$ must be + labeled `abstract override`. +- If $M$ and $M'$ are both concrete value definitions, then either none + of them is marked `lazy` or both must be marked `lazy`. + +A stable member can only be overridden by a stable member. 
+For example, this is not allowed: + +```scala +class X { val stable = 1} +class Y extends X { override var stable = 1 } // error +``` + +Another restriction applies to abstract type members: An abstract type +member with a [volatile type](03-types.html#volatile-types) as its upper +bound may not override an abstract type member which does not have a +volatile upper bound. + +A special rule concerns parameterless methods. If a parameterless +method defined as `def $f$: $T$ = ...` or `def $f$ = ...` overrides a method of +type $()T'$ which has an empty parameter list, then $f$ is also +assumed to have an empty parameter list. + +An overriding method inherits all default arguments from the definition +in the superclass. By specifying default arguments in the overriding method +it is possible to add new defaults (if the corresponding parameter in the +superclass does not have a default) or to override the defaults of the +superclass (otherwise). + +### Example + +Consider the definitions: + +```scala +trait Root { type T <: Root } +trait A extends Root { type T <: A } +trait B extends Root { type T <: B } +trait C extends A with B +``` + +Then the class definition `C` is not well-formed because the +binding of `T` in `C` is +`type T <: B`, +which fails to subsume the binding `type T <: A` of `T` +in type `A`. The problem can be solved by adding an overriding +definition of type `T` in class `C`: + +```scala +class C extends A with B { type T <: C } +``` + +### Inheritance Closure + +Let $C$ be a class type. The _inheritance closure_ of $C$ is the +smallest set $\mathscr{S}$ of types such that + +- If $T$ is in $\mathscr{S}$, then every type $T'$ which forms syntactically + a part of $T$ is also in $\mathscr{S}$. +- If $T$ is a class type in $\mathscr{S}$, then all [parents](#templates) + of $T$ are also in $\mathscr{S}$. + +It is a static error if the inheritance closure of a class type +consists of an infinite number of types. (This restriction is +necessary to make subtyping decidable[^kennedy]). + +[^kennedy]: Kennedy, Pierce. [On Decidability of Nominal Subtyping with Variance.]( http://research.microsoft.com/pubs/64041/fool2007.pdf) in FOOL 2007 + +### Early Definitions + +```ebnf +EarlyDefs ::= `{' [EarlyDef {semi EarlyDef}] `}' `with' +EarlyDef ::= {Annotation} {Modifier} PatVarDef +``` + +A template may start with an _early field definition_ clause, +which serves to define certain field values before the supertype +constructor is called. In a template + +```scala +{ val $p_1$: $T_1$ = $e_1$ + ... + val $p_n$: $T_n$ = $e_n$ +} with $sc$ with $mt_1$ with $mt_n$ { $\mathit{stats}$ } +``` + +The initial pattern definitions of $p_1 , \ldots , p_n$ are called +_early definitions_. They define fields +which form part of the template. Every early definition must define +at least one variable. + +An early definition is type-checked and evaluated in the scope which +is in effect just before the template being defined, augmented by any +type parameters of the enclosing class and by any early definitions +preceding the one being defined. In particular, any reference to +`this` in the right-hand side of an early definition refers +to the identity of `this` just outside the template. Consequently, it +is impossible that an early definition refers to the object being +constructed by the template, or refers to one of its fields and +methods, except for any other preceding early definition in the same +section. 
Furthermore, references to preceding early definitions
+always refer to the value that's defined there, and do not take into account
+overriding definitions. In other words, a block of early definitions
+is evaluated exactly as if it was a local block containing a number of value
+definitions.
+
+Early definitions are evaluated in the order they are being defined
+before the superclass constructor of the template is called.
+
+###### Example
+Early definitions are particularly useful for
+traits, which do not have normal constructor parameters. Example:
+
+```scala
+trait Greeting {
+  val name: String
+  val msg = "How are you, "+name
+}
+class C extends {
+  val name = "Bob"
+} with Greeting {
+  println(msg)
+}
+```
+
+In the code above, the field `name` is initialized before the
+constructor of `Greeting` is called. Therefore, field `msg` in
+class `Greeting` is properly initialized to `"How are you, Bob"`.
+
+If `name` had been initialized instead in `C`'s normal class
+body, it would be initialized after the constructor of
+`Greeting`. In that case, `msg` would be initialized to
+`"How are you, "`.
+
+## Modifiers
+
+```ebnf
+Modifier        ::= LocalModifier
+                  | AccessModifier
+                  | `override'
+LocalModifier   ::= `abstract'
+                  | `final'
+                  | `sealed'
+                  | `implicit'
+                  | `lazy'
+AccessModifier  ::= (`private' | `protected') [AccessQualifier]
+AccessQualifier ::= `[' (id | `this') `]'
+```
+
+Member definitions may be preceded by modifiers which affect the
+accessibility and usage of the identifiers bound by them. If several
+modifiers are given, their order does not matter, but the same
+modifier may not occur more than once. Modifiers preceding a repeated
+definition apply to all constituent definitions. The rules governing
+the validity and meaning of a modifier are as follows.
+
+### `private`
+The `private` modifier can be used with any definition or
+declaration in a template. Such members can be accessed only from
+within the directly enclosing template and its companion module or
+[companion class](#object-definitions). They
+are not inherited by subclasses and they may not override definitions
+in parent classes.
+
+The modifier can be _qualified_ with an identifier $C$ (e.g.
+`private[$C$]`) that must denote a class or package
+enclosing the definition. Members labeled with such a modifier are
+accessible respectively only from code inside the package $C$ or only
+from code inside the class $C$ and its
+[companion module](#object-definitions).
+
+A different form of qualification is `private[this]`. A member
+$M$ marked with this modifier is called _object-private_; it can be accessed only from within
+the object in which it is defined. That is, a selection $p.M$ is only
+legal if the prefix is `this` or `$O$.this`, for some
+class $O$ enclosing the reference. In addition, the restrictions for
+unqualified `private` apply.
+
+Members marked private without a qualifier are called _class-private_,
+whereas members labeled with `private[this]`
+are called _object-private_. A member _is private_ if it is
+either class-private or object-private, but not if it is marked
+`private[$C$]` where $C$ is an identifier; in the latter
+case the member is called _qualified private_.
+
+Class-private or object-private members may not be abstract, and may
+not have `protected` or `override` modifiers.
+
+#### `protected`
+The `protected` modifier applies to class member definitions.
+Protected members of a class can be accessed from within + - the template of the defining class, + - all templates that have the defining class as a base class, + - the companion module of any of those classes. + +A `protected` modifier can be qualified with an +identifier $C$ (e.g. `protected[$C$]`) that must denote a +class or package enclosing the definition. Members labeled with such +a modifier are also accessible respectively from all code inside the +package $C$ or from all code inside the class $C$ and its +[companion module](#object-definitions). + +A protected identifier $x$ may be used as a member name in a selection +`$r$.$x$` only if one of the following applies: + - The access is within the template defining the member, or, if + a qualification $C$ is given, inside the package $C$, + or the class $C$, or its companion module, or + - $r$ is one of the reserved words `this` and + `super`, or + - $r$'s type conforms to a type-instance of the + class which contains the access. + +A different form of qualification is `protected[this]`. A member +$M$ marked with this modifier is called _object-protected_; it can be accessed only from within +the object in which it is defined. That is, a selection $p.M$ is only +legal if the prefix is `this` or `$O$.this`, for some +class $O$ enclosing the reference. In addition, the restrictions for +unqualified `protected` apply. + +#### `override` +The `override` modifier applies to class member definitions or declarations. +It is mandatory for member definitions or declarations that override some +other concrete member definition in a parent class. If an `override` +modifier is given, there must be at least one overridden member +definition or declaration (either concrete or abstract). + +#### `abstract override` +The `override` modifier has an additional significance when +combined with the `abstract` modifier. That modifier combination +is only allowed for value members of traits. + +We call a member $M$ of a template _incomplete_ if it is either +abstract (i.e. defined by a declaration), or it is labeled +`abstract` and `override` and +every member overridden by $M$ is again incomplete. + +Note that the `abstract override` modifier combination does not +influence the concept whether a member is concrete or abstract. A +member is _abstract_ if only a declaration is given for it; +it is _concrete_ if a full definition is given. + +#### `abstract` +The `abstract` modifier is used in class definitions. It is +redundant for traits, and mandatory for all other classes which have +incomplete members. Abstract classes cannot be +[instantiated](06-expressions.html#instance-creation-expressions) with a constructor invocation +unless followed by mixins and/or a refinement which override all +incomplete members of the class. Only abstract classes and traits can have +abstract term members. + +The `abstract` modifier can also be used in conjunction with +`override` for class member definitions. In that case the +previous discussion applies. + +#### `final` +The `final` modifier applies to class member definitions and to +class definitions. A `final` class member definition may not be +overridden in subclasses. A `final` class may not be inherited by +a template. `final` is redundant for object definitions. Members +of final classes or objects are implicitly also final, so the +`final` modifier is generally redundant for them, too. 
Note, however, that +[constant value definitions](04-basic-declarations-and-definitions.html#value-declarations-and-definitions) do require +an explicit `final` modifier, even if they are defined in a final class or +object. `final` may not be applied to incomplete members, and it may not be +combined in one modifier list with `sealed`. + +#### `sealed` +The `sealed` modifier applies to class definitions. A +`sealed` class may not be directly inherited, except if the inheriting +template is defined in the same source file as the inherited class. +However, subclasses of a sealed class can be inherited anywhere. + +#### `lazy` +The `lazy` modifier applies to value definitions. A `lazy` +value is initialized the first time it is accessed (which might never +happen at all). Attempting to access a lazy value during its +initialization might lead to looping behavior. If an exception is +thrown during initialization, the value is considered uninitialized, +and a later access will retry to evaluate its right hand side. + +###### Example +The following code illustrates the use of qualified private: + +```scala +package outerpkg.innerpkg +class Outer { + class Inner { + private[Outer] def f() + private[innerpkg] def g() + private[outerpkg] def h() + } +} +``` + +Here, accesses to the method `f` can appear anywhere within +`OuterClass`, but not outside it. Accesses to method +`g` can appear anywhere within the package +`outerpkg.innerpkg`, as would be the case for +package-private methods in Java. Finally, accesses to method +`h` can appear anywhere within package `outerpkg`, +including packages contained in it. + +###### Example +A useful idiom to prevent clients of a class from +constructing new instances of that class is to declare the class +`abstract` and `sealed`: + +```scala +object m { + abstract sealed class C (x: Int) { + def nextC = new C(x + 1) {} + } + val empty = new C(0) {} +} +``` + +For instance, in the code above clients can create instances of class +`m.C` only by calling the `nextC` method of an existing `m.C` +object; it is not possible for clients to create objects of class +`m.C` directly. Indeed the following two lines are both in error: + +```scala +new m.C(0) // **** error: C is abstract, so it cannot be instantiated. +new m.C(0) {} // **** error: illegal inheritance from sealed class. +``` + +A similar access restriction can be achieved by marking the primary +constructor `private` ([example](#example-private-constructor)). + +## Class Definitions + +```ebnf +TmplDef ::= `class' ClassDef +ClassDef ::= id [TypeParamClause] {Annotation} + [AccessModifier] ClassParamClauses ClassTemplateOpt +ClassParamClauses ::= {ClassParamClause} + [[nl] `(' implicit ClassParams `)'] +ClassParamClause ::= [nl] `(' [ClassParams] ')' +ClassParams ::= ClassParam {`,' ClassParam} +ClassParam ::= {Annotation} {Modifier} [(`val' | `var')] + id [`:' ParamType] [`=' Expr] +ClassTemplateOpt ::= `extends' ClassTemplate | [[`extends'] TemplateBody] +``` + +The most general form of class definition is + +```scala +class $c$[$\mathit{tps}\,$] $as$ $m$($\mathit{ps}_1$)$\ldots$($\mathit{ps}_n$) extends $t$ $\quad(n \geq 0)$. +``` + +Here, + + - $c$ is the name of the class to be defined. + - $\mathit{tps}$ is a non-empty list of type parameters of the class + being defined. The scope of a type parameter is the whole class + definition including the type parameter section itself. It is + illegal to define two type parameters with the same name. 
The type + parameter section `[$\mathit{tps}\,$]` may be omitted. A class with a type + parameter section is called _polymorphic_, otherwise it is called + _monomorphic_. + - $as$ is a possibly empty sequence of + [annotations](11-user-defined-annotations.html#user-defined-annotations). + If any annotations are given, they apply to the primary constructor of the + class. + - $m$ is an [access modifier](#modifiers) such as + `private` or `protected`, possibly with a qualification. + If such an access modifier is given it applies to the primary constructor of the class. + - $(\mathit{ps}\_1)\ldots(\mathit{ps}\_n)$ are formal value parameter clauses for + the _primary constructor_ of the class. The scope of a formal value parameter includes + all subsequent parameter sections and the template $t$. However, a formal + value parameter may not form part of the types of any of the parent classes or members of the class template $t$. + It is illegal to define two formal value parameters with the same name. + + If no formal parameter sections are given, an empty parameter section `()` is assumed. + + If a formal parameter declaration $x: T$ is preceded by a `val` + or `var` keyword, an accessor (getter) [definition](04-basic-declarations-and-definitions.html#variable-declarations-and-definitions) + for this parameter is implicitly added to the class. + + The getter introduces a value member $x$ of class $c$ that is defined as an alias of the parameter. + If the introducing keyword is `var`, a setter accessor [`$x$_=`](04-basic-declarations-and-definitions.html#variable-declarations-and-definitions) is also implicitly added to the class. + In invocation of that setter `$x$_=($e$)` changes the value of the parameter to the result of evaluating $e$. + + The formal parameter declaration may contain modifiers, which then carry over to the accessor definition(s). + When access modifiers are given for a parameter, but no `val` or `var` keyword, `val` is assumed. + A formal parameter prefixed by `val` or `var` may not at the same time be a [call-by-name parameter](04-basic-declarations-and-definitions.html#by-name-parameters). + + - $t$ is a [template](#templates) of the form + + ``` + $sc$ with $mt_1$ with $\ldots$ with $mt_m$ { $\mathit{stats}$ } // $m \geq 0$ + ``` + + which defines the base classes, behavior and initial state of objects of + the class. The extends clause + `extends $sc$ with $mt_1$ with $\ldots$ with $mt_m$` + can be omitted, in which case + `extends scala.AnyRef` is assumed. The class body + `{ $\mathit{stats}$ }` may also be omitted, in which case the empty body + `{}` is assumed. + +This class definition defines a type `$c$[$\mathit{tps}\,$]` and a constructor +which when applied to parameters conforming to types $\mathit{ps}$ +initializes instances of type `$c$[$\mathit{tps}\,$]` by evaluating the template +$t$. + +### Example +The following example illustrates `val` and `var` parameters of a class `C`: + +```scala +class C(x: Int, val y: String, var z: List[String]) +val c = new C(1, "abc", List()) +c.z = c.y :: c.z +``` + +### Example Private Constructor +The following class can be created only from its companion module. + +```scala +object Sensitive { + def makeSensitive(credentials: Certificate): Sensitive = + if (credentials == Admin) new Sensitive() + else throw new SecurityViolationException +} +class Sensitive private () { + ... 
+}
+```
+
+### Constructor Definitions
+
+```ebnf
+FunDef         ::= `this' ParamClause ParamClauses
+                   (`=' ConstrExpr | [nl] ConstrBlock)
+ConstrExpr     ::= SelfInvocation
+                 | ConstrBlock
+ConstrBlock    ::= `{' SelfInvocation {semi BlockStat} `}'
+SelfInvocation ::= `this' ArgumentExprs {ArgumentExprs}
+```
+
+A class may have additional constructors besides the primary
+constructor. These are defined by constructor definitions of the form
+`def this($\mathit{ps}_1$)$\ldots$($\mathit{ps}_n$) = $e$`. Such a
+definition introduces an additional constructor for the enclosing
+class, with parameters as given in the formal parameter lists $\mathit{ps}_1
+, \ldots , \mathit{ps}_n$, and whose evaluation is defined by the constructor
+expression $e$. The scope of each formal parameter is the subsequent
+parameter sections and the constructor
+expression $e$. A constructor expression is either a self constructor
+invocation `this($\mathit{args}_1$)$\ldots$($\mathit{args}_n$)` or a block
+which begins with a self constructor invocation. The self constructor
+invocation must construct a generic instance of the class. I.e. if the
+class in question has name $C$ and type parameters
+`[$\mathit{tps}\,$]`, then a self constructor invocation must
+generate an instance of `$C$[$\mathit{tps}\,$]`; it is not permitted
+to instantiate formal type parameters.
+
+The signature and the self constructor invocation of a constructor
+definition are type-checked and evaluated in the scope which is in
+effect at the point of the enclosing class definition, augmented by
+any type parameters of the enclosing class and by any
+[early definitions](#early-definitions) of the enclosing template.
+The rest of the
+constructor expression is type-checked and evaluated as a function
+body in the current class.
+
+If there are auxiliary constructors of a class $C$, they form together
+with $C$'s primary [constructor](#class-definitions)
+an overloaded constructor
+definition. The usual rules for
+[overloading resolution](06-expressions.html#overloading-resolution)
+apply for constructor invocations of $C$,
+including for the self constructor invocations in the constructor
+expressions themselves. However, unlike other methods, constructors
+are never inherited. To prevent infinite cycles of constructor
+invocations, there is the restriction that every self constructor
+invocation must refer to a constructor definition which precedes it
+(i.e. it must refer to either a preceding auxiliary constructor or the
+primary constructor of the class).
+
+###### Example
+Consider the class definition
+
+```scala
+class LinkedList[A]() {
+  var head: A = _
+  var tail: List[A] = null
+  def isEmpty = tail == null
+  def this(head: A) = { this(); this.head = head }
+  def this(head: A, tail: List[A]) = { this(head); this.tail = tail }
+}
+```
+
+This defines a class `LinkedList` with three constructors. The
+second constructor constructs a singleton list, while the
+third one constructs a list with a given head and tail.
+
+## Case Classes
+
+```ebnf
+TmplDef ::= `case' `class' ClassDef
+```
+
+If a class definition is prefixed with `case`, the class is said
+to be a _case class_.
+
+The formal parameters in the first parameter section of a case class
+are called _elements_; they are treated
+specially. First, the value of such a parameter can be extracted as a
+field of a constructor pattern. Second, a `val` prefix is
+implicitly added to such a parameter, unless the parameter carries
+already a `val` or `var` modifier.
Hence, an accessor +definition for the parameter is [generated](#class-definitions). + +A case class definition of `$c$[$\mathit{tps}\,$]($\mathit{ps}_1\,$)$\ldots$($\mathit{ps}_n$)` with type +parameters $\mathit{tps}$ and value parameters $\mathit{ps}$ implicitly +generates an [extractor object](08-pattern-matching.html#extractor-patterns) which is +defined as follows: + +```scala +object $c$ { + def apply[$\mathit{tps}\,$]($\mathit{ps}_1\,$)$\ldots$($\mathit{ps}_n$): $c$[$\mathit{tps}\,$] = new $c$[$\mathit{Ts}\,$]($\mathit{xs}_1\,$)$\ldots$($\mathit{xs}_n$) + def unapply[$\mathit{tps}\,$]($x$: $c$[$\mathit{tps}\,$]) = + if (x eq null) scala.None + else scala.Some($x.\mathit{xs}_{11}, \ldots , x.\mathit{xs}_{1k}$) +} +``` + +Here, $\mathit{Ts}$ stands for the vector of types defined in the type +parameter section $\mathit{tps}$, +each $\mathit{xs}\_i$ denotes the parameter names of the parameter +section $\mathit{ps}\_i$, and +$\mathit{xs}\_{11}, \ldots , \mathit{xs}\_{1k}$ denote the names of all parameters +in the first parameter section $\mathit{xs}\_1$. +If a type parameter section is missing in the +class, it is also missing in the `apply` and +`unapply` methods. +The definition of `apply` is omitted if class $c$ is +`abstract`. + +If the case class definition contains an empty value parameter list, the +`unapply` method returns a `Boolean` instead of an `Option` type and +is defined as follows: + +```scala +def unapply[$\mathit{tps}\,$]($x$: $c$[$\mathit{tps}\,$]) = x ne null +``` + +The name of the `unapply` method is changed to `unapplySeq` if the first +parameter section $\mathit{ps}_1$ of $c$ ends in a +[repeated parameter](04-basic-declarations-and-definitions.html#repeated-parameters). +If a companion object $c$ exists already, no new object is created, +but the `apply` and `unapply` methods are added to the existing +object instead. + +A method named `copy` is implicitly added to every case class unless the +class already has a member (directly defined or inherited) with that name, or the +class has a repeated parameter. The method is defined as follows: + +```scala +def copy[$\mathit{tps}\,$]($\mathit{ps}'_1\,$)$\ldots$($\mathit{ps}'_n$): $c$[$\mathit{tps}\,$] = new $c$[$\mathit{Ts}\,$]($\mathit{xs}_1\,$)$\ldots$($\mathit{xs}_n$) +``` + +Again, `$\mathit{Ts}$` stands for the vector of types defined in the type parameter section `$\mathit{tps}$` +and each `$xs_i$` denotes the parameter names of the parameter section `$ps'_i$`. The value +parameters `$ps'_{1,j}$` of first parameter list have the form `$x_{1,j}$:$T_{1,j}$=this.$x_{1,j}$`, +the other parameters `$ps'_{i,j}$` of the `copy` method are defined as `$x_{i,j}$:$T_{i,j}$`. +In all cases `$x_{i,j}$` and `$T_{i,j}$` refer to the name and type of the corresponding class parameter +`$\mathit{ps}_{i,j}$`. + +Every case class implicitly overrides some method definitions of class +[`scala.AnyRef`](12-the-scala-standard-library.html#root-classes) unless a definition of the same +method is already given in the case class itself or a concrete +definition of the same method is given in some base class of the case +class different from `AnyRef`. In particular: + +- Method `equals: (Any)Boolean` is structural equality, where two + instances are equal if they both belong to the case class in question and they + have equal (with respect to `equals`) constructor arguments (restricted to the class's _elements_, i.e., the first parameter section). +- Method `hashCode: Int` computes a hash-code. 
If the hashCode methods + of the data structure members map equal (with respect to equals) + values to equal hash-codes, then the case class hashCode method does + too. +- Method `toString: String` returns a string representation which + contains the name of the class and its elements. + +###### Example +Here is the definition of abstract syntax for lambda calculus: + +```scala +class Expr +case class Var (x: String) extends Expr +case class Apply (f: Expr, e: Expr) extends Expr +case class Lambda(x: String, e: Expr) extends Expr +``` + +This defines a class `Expr` with case classes +`Var`, `Apply` and `Lambda`. A call-by-value evaluator +for lambda expressions could then be written as follows. + +```scala +type Env = String => Value +case class Value(e: Expr, env: Env) + +def eval(e: Expr, env: Env): Value = e match { + case Var (x) => + env(x) + case Apply(f, g) => + val Value(Lambda (x, e1), env1) = eval(f, env) + val v = eval(g, env) + eval (e1, (y => if (y == x) v else env1(y))) + case Lambda(_, _) => + Value(e, env) +} +``` + +It is possible to define further case classes that extend type +`Expr` in other parts of the program, for instance + +```scala +case class Number(x: Int) extends Expr +``` + +This form of extensibility can be excluded by declaring the base class +`Expr` `sealed`; in this case, all classes that +directly extend `Expr` must be in the same source file as +`Expr`. + +### Traits + +```ebnf +TmplDef ::= `trait' TraitDef +TraitDef ::= id [TypeParamClause] TraitTemplateOpt +TraitTemplateOpt ::= `extends' TraitTemplate | [[`extends'] TemplateBody] +``` + +A trait is a class that is meant to be added to some other class +as a mixin. Unlike normal classes, traits cannot have +constructor parameters. Furthermore, no constructor arguments are +passed to the superclass of the trait. This is not necessary as traits are +initialized after the superclass is initialized. + +Assume a trait $D$ defines some aspect of an instance $x$ of type $C$ (i.e. $D$ is a base class of $C$). +Then the _actual supertype_ of $D$ in $x$ is the compound type consisting of all the +base classes in $\mathcal{L}(C)$ that succeed $D$. The actual supertype gives +the context for resolving a [`super` reference](06-expressions.html#this-and-super) in a trait. +Note that the actual supertype depends on the type to which the trait is added in a mixin composition; +it is not statically known at the time the trait is defined. + +If $D$ is not a trait, then its actual supertype is simply its +least proper supertype (which is statically known). + +### Example +The following trait defines the property +of being comparable to objects of some type. It contains an abstract +method `<` and default implementations of the other +comparison operators `<=`, `>`, and +`>=`. + +```scala +trait Comparable[T <: Comparable[T]] { self: T => + def < (that: T): Boolean + def <=(that: T): Boolean = this < that || this == that + def > (that: T): Boolean = that < this + def >=(that: T): Boolean = that <= this +} +``` + +###### Example +Consider an abstract class `Table` that implements maps +from a type of keys `A` to a type of values `B`. The class +has a method `set` to enter a new key / value pair into the table, +and a method `get` that returns an optional value matching a +given key. Finally, there is a method `apply` which is like +`get`, except that it returns a given default value if the table +is undefined for the given key. This class is implemented as follows. 
+
+```scala
+abstract class Table[A, B](defaultValue: B) {
+  def get(key: A): Option[B]
+  def set(key: A, value: B)
+  def apply(key: A) = get(key) match {
+    case Some(value) => value
+    case None => defaultValue
+  }
+}
+```
+
+Here is a concrete implementation of the `Table` class.
+
+```scala
+class ListTable[A, B](defaultValue: B) extends Table[A, B](defaultValue) {
+  private var elems: List[(A, B)] = List()
+  def get(key: A) = elems.find(_._1 == key).map(_._2)
+  def set(key: A, value: B) = { elems = (key, value) :: elems }
+}
+```
+
+Here is a trait that prevents concurrent access to the
+`get` and `set` operations of its parent class:
+
+```scala
+trait SynchronizedTable[A, B] extends Table[A, B] {
+  abstract override def get(key: A): Option[B] =
+    synchronized { super.get(key) }
+  abstract override def set(key: A, value: B) =
+    synchronized { super.set(key, value) }
+}
+```
+
+Note that `SynchronizedTable` does not pass an argument to
+its superclass, `Table`, even though `Table` is defined with a
+formal parameter. Note also that the `super` calls
+in `SynchronizedTable`'s `get` and `set` methods
+statically refer to abstract methods in class `Table`. This is
+legal, as long as the calling method is labeled
+[`abstract override`](#modifiers).
+
+Finally, the following mixin composition creates a synchronized list
+table with strings as keys and integers as values and with a default
+value `0`:
+
+```scala
+object MyTable extends ListTable[String, Int](0) with SynchronizedTable[String, Int]
+```
+
+The object `MyTable` inherits its `get` and `set`
+methods from `SynchronizedTable`. The `super` calls in these
+methods are re-bound to refer to the corresponding implementations in
+`ListTable`, which is the actual supertype of `SynchronizedTable`
+in `MyTable`.
+
+## Object Definitions
+
+```ebnf
+ObjectDef ::= id ClassTemplate
+```
+
+An object definition defines a single object of a new class. Its
+most general form is
+`object $m$ extends $t$`. Here,
+$m$ is the name of the object to be defined, and
+$t$ is a [template](#templates) of the form
+
+```scala
+$sc$ with $mt_1$ with $\ldots$ with $mt_n$ { $\mathit{stats}$ }
+```
+
+which defines the base classes, behavior and initial state of $m$.
+The extends clause `extends $sc$ with $mt_1$ with $\ldots$ with $mt_n$`
+can be omitted, in which case
+`extends scala.AnyRef` is assumed. The class body
+`{ $\mathit{stats}$ }` may also be omitted, in which case the empty body
+`{}` is assumed.
+
+The object definition defines a single object (or: _module_)
+conforming to the template $t$. It is roughly equivalent to the
+following definition of a lazy value:
+
+```scala
+lazy val $m$ = new $sc$ with $mt_1$ with $\ldots$ with $mt_n$ { this: $m.type$ => $\mathit{stats}$ }
+```
+
+Note that the value defined by an object definition is instantiated
+lazily. The `new $m$\$cls` constructor is evaluated
+not at the point of the object definition, but is instead evaluated
+the first time $m$ is dereferenced during execution of the program
+(which might be never at all). An attempt to dereference $m$ again in
+the course of evaluation of the constructor leads to an infinite loop
+or run-time error.
+Other threads trying to dereference $m$ while the
+constructor is being evaluated block until evaluation is complete.
+
+The expansion given above is not accurate for top-level objects. It
+cannot be because variable and method definitions cannot appear on the
+top-level outside of a [package object](09-top-level-definitions.html#package-objects).
Instead, +top-level objects are translated to static fields. + +###### Example +Classes in Scala do not have static members; however, an equivalent +effect can be achieved by an accompanying object definition +E.g. + +```scala +abstract class Point { + val x: Double + val y: Double + def isOrigin = (x == 0.0 && y == 0.0) +} +object Point { + val origin = new Point() { val x = 0.0; val y = 0.0 } +} +``` + +This defines a class `Point` and an object `Point` which +contains `origin` as a member. Note that the double use of the +name `Point` is legal, since the class definition defines the +name `Point` in the type name space, whereas the object +definition defines a name in the term namespace. + +This technique is applied by the Scala compiler when interpreting a +Java class with static members. Such a class $C$ is conceptually seen +as a pair of a Scala class that contains all instance members of $C$ +and a Scala object that contains all static members of $C$. + +Generally, a _companion module_ of a class is an object which has +the same name as the class and is defined in the same scope and +compilation unit. Conversely, the class is called the _companion class_ +of the module. + +Very much like a concrete class definition, an object definition may +still contain declarations of abstract type members, but not of +abstract term members. diff --git a/spec/06-expressions.md b/spec/06-expressions.md new file mode 100644 index 000000000000..bb6cc2a89a70 --- /dev/null +++ b/spec/06-expressions.md @@ -0,0 +1,1788 @@ +--- +title: Expressions +layout: default +chapter: 6 +--- + +# Expressions + +```ebnf +Expr ::= (Bindings | id | `_') `=>' Expr + | Expr1 +Expr1 ::= `if' `(' Expr `)' {nl} Expr [[semi] `else' Expr] + | `while' `(' Expr `)' {nl} Expr + | `try' (`{' Block `}' | Expr) [`catch' `{' CaseClauses `}'] [`finally' Expr] + | `do' Expr [semi] `while' `(' Expr ')' + | `for' (`(' Enumerators `)' | `{' Enumerators `}') {nl} [`yield'] Expr + | `throw' Expr + | `return' [Expr] + | [SimpleExpr `.'] id `=' Expr + | SimpleExpr1 ArgumentExprs `=' Expr + | PostfixExpr + | PostfixExpr Ascription + | PostfixExpr `match' `{' CaseClauses `}' +PostfixExpr ::= InfixExpr [id [nl]] +InfixExpr ::= PrefixExpr + | InfixExpr id [nl] InfixExpr +PrefixExpr ::= [`-' | `+' | `~' | `!'] SimpleExpr +SimpleExpr ::= `new' (ClassTemplate | TemplateBody) + | BlockExpr + | SimpleExpr1 [`_'] +SimpleExpr1 ::= Literal + | Path + | `_' + | `(' [Exprs] `)' + | SimpleExpr `.' id s + | SimpleExpr TypeArgs + | SimpleExpr1 ArgumentExprs + | XmlExpr +Exprs ::= Expr {`,' Expr} +BlockExpr ::= ‘{’ CaseClauses ‘}’ + | ‘{’ Block ‘}’ +Block ::= BlockStat {semi BlockStat} [ResultExpr] +ResultExpr ::= Expr1 + | (Bindings | ([`implicit'] id | `_') `:' CompoundType) `=>' Block +Ascription ::= `:' InfixType + | `:' Annotation {Annotation} + | `:' `_' `*' +``` + +Expressions are composed of operators and operands. Expression forms are +discussed subsequently in decreasing order of precedence. + +## Expression Typing + +The typing of expressions is often relative to some _expected type_ (which might be undefined). When we write "expression $e$ is expected to conform to type $T$", we mean: + 1. the expected type of $e$ is $T$, and + 2. the type of expression $e$ must conform to $T$. + +The following skolemization rule is applied universally for every +expression: If the type of an expression would be an existential type +$T$, then the type of the expression is assumed instead to be a +[skolemization](03-types.html#existential-types) of $T$. 
+ +Skolemization is reversed by type packing. Assume an expression $e$ of +type $T$ and let $t_1[\mathit{tps}\_1] >: L_1 <: U_1 , \ldots , t_n[\mathit{tps}\_n] >: L_n <: U_n$ be +all the type variables created by skolemization of some part of $e$ which are free in $T$. +Then the _packed type_ of $e$ is + +```scala +$T$ forSome { type $t_1[\mathit{tps}\_1] >: L_1 <: U_1$; $\ldots$; type $t_n[\mathit{tps}\_n] >: L_n <: U_n$ }. +``` + +## Literals + +```ebnf +SimpleExpr ::= Literal +``` + +Typing of literals is as described [here](01-lexical-syntax.html#literals); their +evaluation is immediate. + +## The _Null_ Value + +The `null` value is of type `scala.Null`, and is thus +compatible with every reference type. It denotes a reference value +which refers to a special “`null`” object. This object +implements methods in class `scala.AnyRef` as follows: + +- `eq($x\,$)` and `==($x\,$)` return `true` iff the + argument $x$ is also the "null" object. +- `ne($x\,$)` and `!=($x\,$)` return true iff the + argument x is not also the "null" object. +- `isInstanceOf[$T\,$]` always returns `false`. +- `asInstanceOf[$T\,$]` returns the [default value](04-basic-declarations-and-definitions.html#value-declarations-and-definitions) of type $T$. +- `##` returns ``0``. + +A reference to any other member of the "null" object causes a +`NullPointerException` to be thrown. + +## Designators + +```ebnf +SimpleExpr ::= Path + | SimpleExpr `.' id +``` + +A designator refers to a named term. It can be a _simple name_ or +a _selection_. + +A simple name $x$ refers to a value as specified +[here](02-identifiers-names-and-scopes.html#identifiers,-names-and-scopes). +If $x$ is bound by a definition or declaration in an enclosing class +or object $C$, it is taken to be equivalent to the selection +`$C$.this.$x$` where $C$ is taken to refer to the class containing $x$ +even if the type name $C$ is [shadowed](02-identifiers-names-and-scopes.html#identifiers,-names-and-scopes) at the +occurrence of $x$. + +If $r$ is a [stable identifier](03-types.html#paths) of type $T$, the selection $r.x$ refers +statically to a term member $m$ of $r$ that is identified in $T$ by +the name $x$. + + + +For other expressions $e$, $e.x$ is typed as +if it was `{ val $y$ = $e$; $y$.$x$ }`, for some fresh name +$y$. + +The expected type of a designator's prefix is always undefined. The +type of a designator is the type $T$ of the entity it refers to, with +the following exception: The type of a [path](03-types.html#paths) $p$ +which occurs in a context where a [stable type](03-types.html#singleton-types) +is required is the singleton type `$p$.type`. + +The contexts where a stable type is required are those that satisfy +one of the following conditions: + +1. The path $p$ occurs as the prefix of a selection and it does not +designate a constant, or +1. The expected type $\mathit{pt}$ is a stable type, or +1. The expected type $\mathit{pt}$ is an abstract type with a stable type as lower + bound, and the type $T$ of the entity referred to by $p$ does not + conform to $\mathit{pt}$, or +1. The path $p$ designates a module. + +The selection $e.x$ is evaluated by first evaluating the qualifier +expression $e$, which yields an object $r$, say. The selection's +result is then the member of $r$ that is either defined by $m$ or defined +by a definition overriding $m$. +If that member has a type which +conforms to `scala.NotNull`, the member's value must be initialized +to a value different from `null`, otherwise a `scala.UnitializedError` +is thrown. 
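+
+As a small non-normative illustration of simple names and selections (the
+names `Config` and `Server` are invented for the example):
+
+```scala
+object Config {
+  val port: Int = 8080
+}
+
+class Server {
+  private val name = "server"
+  // The simple name `name` is equivalent to the selection `Server.this.name`;
+  // `Config.port` is a selection whose prefix is the stable identifier `Config`.
+  def describe: String = name + "@" + Config.port
+}
+```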
+ +## This and Super + +```ebnf +SimpleExpr ::= [id `.'] `this' + | [id '.'] `super' [ClassQualifier] `.' id +``` + +The expression `this` can appear in the statement part of a +template or compound type. It stands for the object being defined by +the innermost template or compound type enclosing the reference. If +this is a compound type, the type of `this` is that compound type. +If it is a template of a +class or object definition with simple name $C$, the type of this +is the same as the type of `$C$.this`. + +The expression `$C$.this` is legal in the statement part of an +enclosing class or object definition with simple name $C$. It +stands for the object being defined by the innermost such definition. +If the expression's expected type is a stable type, or +`$C$.this` occurs as the prefix of a selection, its type is +`$C$.this.type`, otherwise it is the self type of class $C$. + +A reference `super.$m$` refers statically to a method or type $m$ +in the least proper supertype of the innermost template containing the +reference. It evaluates to the member $m'$ in the actual supertype of +that template which is equal to $m$ or which overrides $m$. The +statically referenced member $m$ must be a type or a +method. + +If it is +a method, it must be concrete, or the template +containing the reference must have a member $m'$ which overrides $m$ +and which is labeled `abstract override`. + +A reference `$C$.super.$m$` refers statically to a method +or type $m$ in the least proper supertype of the innermost enclosing class or +object definition named $C$ which encloses the reference. It evaluates +to the member $m'$ in the actual supertype of that class or object +which is equal to $m$ or which overrides $m$. The +statically referenced member $m$ must be a type or a +method. If the statically +referenced member $m$ is a method, it must be concrete, or the innermost enclosing +class or object definition named $C$ must have a member $m'$ which +overrides $m$ and which is labeled `abstract override`. + +The `super` prefix may be followed by a trait qualifier +`[$T\,$]`, as in `$C$.super[$T\,$].$x$`. This is +called a _static super reference_. In this case, the reference is +to the type or method of $x$ in the parent trait of $C$ whose simple +name is $T$. That member must be uniquely defined. If it is a method, +it must be concrete. + +### Example +Consider the following class definitions + +```scala +class Root { def x = "Root" } +class A extends Root { override def x = "A" ; def superA = super.x } +trait B extends Root { override def x = "B" ; def superB = super.x } +class C extends Root with B { + override def x = "C" ; def superC = super.x +} +class D extends A with B { + override def x = "D" ; def superD = super.x +} +``` + +The linearization of class `C` is `{C, B, Root}` and +the linearization of class `D` is `{D, B, A, Root}`. +Then we have: + +```scala +(new A).superA == "Root", + (new C).superB = "Root", (new C).superC = "B", +(new D).superA == "Root", (new D).superB = "A", (new D).superD = "B", +``` + +Note that the `superB` function returns different results +depending on whether `B` is mixed in with class `Root` or `A`. + +## Function Applications + +```ebnf +SimpleExpr ::= SimpleExpr1 ArgumentExprs +ArgumentExprs ::= `(' [Exprs] `)' + | `(' [Exprs `,'] PostfixExpr `:' `_' `*' ')' + | [nl] BlockExpr +Exprs ::= Expr {`,' Expr} +``` + +An application `$f$($e_1 , \ldots , e_m$)` applies the +function $f$ to the argument expressions $e_1 , \ldots , e_m$. 
If $f$
+has a method type `($p_1$:$T_1 , \ldots , p_n$:$T_n$)$U$`, each argument
+expression $e_i$ is typed with the
+corresponding parameter type $T_i$ as expected type. Let $S_i$ be the
+type of argument $e_i$ $(i = 1 , \ldots , m)$. If $f$ is a polymorphic method,
+[local type inference](#local-type-inference) is used to determine
+type arguments for $f$. If $f$ has some value type, the application is taken to
+be equivalent to `$f$.apply($e_1 , \ldots , e_m$)`,
+i.e. the application of an `apply` method defined by $f$.
+
+The function $f$ must be _applicable_ to its arguments $e_1
+, \ldots , e_n$ of types $S_1 , \ldots , S_n$.
+
+If $f$ has a method type $(p_1:T_1 , \ldots , p_n:T_n)U$
+we say that an argument expression $e_i$ is a _named_ argument if
+it has the form $x_i=e'_i$ and $x_i$ is one of the parameter names
+$p_1 , \ldots , p_n$. The function $f$ is applicable if all of the following conditions
+hold:
+
+- For every named argument $x_i=e_i'$ the type $S_i$
+  is compatible with the parameter type $T_j$ whose name $p_j$ matches $x_i$.
+- For every positional argument $e_i$ the type $S_i$
+  is compatible with $T_i$.
+- If the expected type is defined, the result type $U$ is
+  compatible with it.
+
+If $f$ is a polymorphic method it is applicable if
+[local type inference](#local-type-inference) can
+determine type arguments so that the instantiated method is applicable. If
+$f$ has some value type it is applicable if it has a method member named
+`apply` which is applicable.
+
+Evaluation of `$f$($e_1 , \ldots , e_n$)` usually entails evaluation of
+$f$ and $e_1 , \ldots , e_n$ in that order. Each argument expression
+is converted to the type of its corresponding formal parameter. After
+that, the application is rewritten to the function's right hand side,
+with actual arguments substituted for formal parameters. The result
+of evaluating the rewritten right-hand side is finally converted to
+the function's declared result type, if one is given.
+
+The case of a formal parameter with a parameterless
+method type `=>$T$` is treated specially. In this case, the
+corresponding actual argument expression $e$ is not evaluated before the
+application. Instead, every use of the formal parameter on the
+right-hand side of the rewrite rule entails a re-evaluation of $e$.
+In other words, the evaluation order for
+`=>`-parameters is _call-by-name_ whereas the evaluation
+order for normal parameters is _call-by-value_.
+Furthermore, it is required that $e$'s [packed type](#expression-typing)
+conforms to the parameter type $T$.
+The behavior of by-name parameters is preserved if the application is
+transformed into a block due to named or default arguments. In this case,
+the local value for that parameter has the form `val $y_i$ = () => $e$`
+and the argument passed to the function is `$y_i$()`.
+
+The last argument in an application may be marked as a sequence
+argument, e.g. `$e$: _*`. Such an argument must correspond
+to a [repeated parameter](04-basic-declarations-and-definitions.html#repeated-parameters) of type
+`$S$*` and it must be the only argument matching this
+parameter (i.e. the number of formal parameters and actual arguments
+must be the same). Furthermore, the type of $e$ must conform to
+`scala.Seq[$T$]`, for some type $T$ which conforms to
+$S$. In this case, the argument list is transformed by replacing the
+sequence $e$ with its elements. When the application uses named
+arguments, the vararg parameter has to be specified exactly once.
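+
+A minimal non-normative sketch of the call-by-name evaluation order described
+above (the method `twice` is invented for the example):
+
+```scala
+object ByNameDemo {
+  // `body` has the parameterless method type `=> Unit`, so the argument
+  // expression is re-evaluated at every use of the parameter.
+  def twice(body: => Unit): Unit = { body; body }
+
+  def main(args: Array[String]): Unit = {
+    var n = 0
+    twice { n += 1 }
+    println(n)  // prints 2: the argument was evaluated once per use
+  }
+}
+```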
+
+A function application usually allocates a new frame on the program's
+run-time stack. However, if a local function or a final method calls
+itself as its last action, the call is executed using the stack-frame
+of the caller.
+
+###### Example
+Assume the following function which computes the sum of a
+variable number of arguments:
+
+```scala
+def sum(xs: Int*) = (0 /: xs) ((x, y) => x + y)
+```
+
+Then
+
+```scala
+sum(1, 2, 3, 4)
+sum(List(1, 2, 3, 4): _*)
+```
+
+both yield `10` as result. On the other hand,
+
+```scala
+sum(List(1, 2, 3, 4))
+```
+
+would not typecheck.
+
+### Named and Default Arguments
+
+If an application uses named arguments $p = e$ or default
+arguments, the following conditions must hold.
+
+- For every named argument $p_i = e_i$ which appears left of a positional argument
+  in the argument list $e_1 \ldots e_m$, the argument position $i$ coincides with
+  the position of parameter $p_i$ in the parameter list of the applied function.
+- The names $x_i$ of all named arguments are pairwise distinct and no named
+  argument defines a parameter which is already specified by a
+  positional argument.
+- Every formal parameter $p_j:T_j$ which is not specified by either a positional
+  or a named argument has a default argument.
+
+If the application uses named or default
+arguments the following transformation is applied to convert it into
+an application without named or default arguments.
+
+If the function $f$
+has the form `$p.m$[$\mathit{targs}$]` it is transformed into the
+block
+
+```scala
+{ val q = $p$
+  q.$m$[$\mathit{targs}$]
+}
+```
+
+If the function $f$ is itself an application expression the transformation
+is applied recursively on $f$. The result of transforming $f$ is a block of
+the form
+
+```scala
+{ val q = $p$
+  val $x_1$ = expr$_1$
+  $\ldots$
+  val $x_k$ = expr$_k$
+  q.$m$[$\mathit{targs}$]($\mathit{args}_1$)$, \ldots ,$($\mathit{args}_l$)
+}
+```
+
+where every argument in $(\mathit{args}\_1) , \ldots , (\mathit{args}\_l)$ is a reference to
+one of the values $x_1 , \ldots , x_k$. To integrate the current application
+into the block, first a value definition using a fresh name $y_i$ is created
+for every argument in $e_1 , \ldots , e_m$, which is initialized to $e_i$ for
+positional arguments and to $e'_i$ for named arguments of the form
+`$x_i=e'_i$`. Then, for every parameter which is not specified
+by the argument list, a value definition using a fresh name $z_i$ is created,
+which is initialized using the method computing the
+[default argument](04-basic-declarations-and-definitions.html#function-declarations-and-definitions) of
+this parameter.
+
+Let $\mathit{args}$ be a permutation of the generated names $y_i$ and $z_i$ such
+that the position of each name matches the position of its corresponding
+parameter in the method type `($p_1:T_1 , \ldots , p_n:T_n$)$U$`.
+The final result of the transformation is a block of the form
+
+```scala
+{ val q = $p$
+  val $x_1$ = expr$_1$
+  $\ldots$
+  val $x_k$ = expr$_k$
+  val $y_1$ = $e_1$
+  $\ldots$
+  val $y_m$ = $e_m$
+  val $z_1$ = $q.m\$default\$i[\mathit{targs}](\mathit{args}_1), \ldots ,(\mathit{args}_l)$
+  $\ldots$
+  val $z_d$ = $q.m\$default\$j[\mathit{targs}](\mathit{args}_1), \ldots ,(\mathit{args}_l)$
+  q.$m$[$\mathit{targs}$]($\mathit{args}_1$)$, \ldots ,$($\mathit{args}_l$)($\mathit{args}$)
+}
+```
+
+### Signature Polymorphic Methods
+
+For invocations of signature polymorphic methods of the target platform `$f$($e_1 , \ldots , e_m$)`,
+the invoked function has a different method type `($p_1$:$T_1 , \ldots , p_n$:$T_n$)$U$` at each call
+site. The parameter types `$T_1 , \ldots , T_n$` are the types of the argument expressions
+`$e_1 , \ldots , e_m$` and `$U$` is the expected type at the call site. If the expected type is
+undefined then `$U$` is `scala.AnyRef`. The parameter names `$p_1 , \ldots , p_n$` are fresh.
+
+###### Note
+
+On the Java platform version 7 and later, the methods `invoke` and `invokeExact` in class
+`java.lang.invoke.MethodHandle` are signature polymorphic.
+
+## Method Values
+
+```ebnf
+SimpleExpr ::= SimpleExpr1 `_'
+```
+
+The expression `$e$ _` is well-formed if $e$ is of method
+type or if $e$ is a call-by-name parameter. If $e$ is a method with
+parameters, `$e$ _` represents $e$ converted to a function
+type by [eta expansion](#eta-expansion). If $e$ is a
+parameterless method or call-by-name parameter of type
+`=>$T$`, `$e$ _` represents the function of type
+`() => $T$`, which evaluates $e$ when it is applied to the empty
+parameter list `()`.
+
+###### Example
+The method values in the left column are each equivalent to the [eta-expanded expressions](#eta-expansion) on the right.
+
+| method value                  | eta-expansion                                                               |
+|------------------------------ | ----------------------------------------------------------------------------|
+|`math.sin _`                   | `x => math.sin(x)`                                                          |
+|`math.pow _`                   | `(x1, x2) => math.pow(x1, x2)`                                              |
+|`val vs = 1 to 9; vs.fold _`   | `(z) => (op) => vs.fold(z)(op)`                                             |
+|`(1 to 9).fold(z)_`            | `{ val eta1 = z; val eta2 = 1 to 9; op => eta2.fold(eta1)(op) }`            |
+|`Some(1).fold(??? : Int)_`     | `{ val eta1 = () => ???; val eta2 = Some(1); op => eta2.fold(eta1())(op) }` |
+
+Note that a space is necessary between a method name and the trailing underscore
+because otherwise the underscore would be considered part of the name.
+
+## Type Applications
+
+```ebnf
+SimpleExpr ::= SimpleExpr TypeArgs
+```
+
+A type application `$e$[$T_1 , \ldots , T_n$]` instantiates
+a polymorphic value $e$ of type
+`[$a_1$ >: $L_1$ <: $U_1, \ldots , a_n$ >: $L_n$ <: $U_n$]$S$`
+with argument types
+`$T_1 , \ldots , T_n$`. Every argument type $T_i$ must obey
+the corresponding bounds $L_i$ and $U_i$. That is, for each $i = 1
+, \ldots , n$, we must have $\sigma L_i <: T_i <: \sigma
+U_i$, where $\sigma$ is the substitution $[a_1 := T_1 , \ldots , a_n
+:= T_n]$. The type of the application is $\sigma S$.
+
+If the function part $e$ is of some value type, the type application
+is taken to be equivalent to
+`$e$.apply[$T_1 , \ldots , T_n$]`, i.e. the application of an `apply` method defined by
+$e$.
+
+Type applications can be omitted if
+[local type inference](#local-type-inference) can infer best type parameters
+for a polymorphic function from the types of the actual function arguments
+and the expected result type.
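+
+A non-normative sketch of explicit and inferred type applications, together
+with a method value (the method `first` is invented for the example):
+
+```scala
+object TypeApplicationDemo {
+  def first[A](xs: List[A]): A = xs.head
+
+  val a = first[Int](List(1, 2, 3))  // explicit type application
+  val b = first(List(1, 2, 3))       // type argument inferred by local type inference
+  val f = first[Int] _               // method value of type List[Int] => Int
+}
+```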
+
+## Tuples
+
+```ebnf
+SimpleExpr ::= `(' [Exprs] `)'
+```
+
+A tuple expression `($e_1 , \ldots , e_n$)` is an alias
+for the class instance creation
+`scala.Tuple$n$($e_1 , \ldots , e_n$)`, where $n \geq 2$.
+The empty tuple
+`()` is the unique value of type `scala.Unit`.
+
+## Instance Creation Expressions
+
+```ebnf
+SimpleExpr ::= `new' (ClassTemplate | TemplateBody)
+```
+
+A simple instance creation expression is of the form
+`new $c$`
+where $c$ is a [constructor invocation](05-classes-and-objects.html#constructor-invocations). Let $T$ be
+the type of $c$. Then $T$ must
+denote (a type instance of) a non-abstract subclass of
+`scala.AnyRef`. Furthermore, the _concrete self type_ of the
+expression must conform to the [self type](05-classes-and-objects.html#templates) of the class denoted by
+$T$. The concrete self type is normally
+$T$, except if the expression `new $c$` appears as the
+right hand side of a value definition
+
+```scala
+val $x$: $S$ = new $c$
+```
+
+(where the type annotation `: $S$` may be missing).
+In the latter case, the concrete self type of the expression is the
+compound type `$T$ with $x$.type`.
+
+The expression is evaluated by creating a fresh
+object of type $T$ which is initialized by evaluating $c$. The
+type of the expression is $T$.
+
+A general instance creation expression is of the form
+`new $t$` for some [class template](05-classes-and-objects.html#templates) $t$.
+Such an expression is equivalent to the block
+
+```scala
+{ class $a$ extends $t$; new $a$ }
+```
+
+where $a$ is a fresh name of an _anonymous class_ which is
+inaccessible to user programs.
+
+There is also a shorthand form for creating values of structural
+types: If `{$D$}` is a class body, then
+`new {$D$}` is equivalent to the general instance creation expression
+`new AnyRef{$D$}`.
+
+###### Example
+Consider the following structural instance creation expression:
+
+```scala
+new { def getName() = "aaron" }
+```
+
+This is a shorthand for the general instance creation expression
+
+```scala
+new AnyRef{ def getName() = "aaron" }
+```
+
+The latter is in turn a shorthand for the block
+
+```scala
+{ class anon\$X extends AnyRef{ def getName() = "aaron" }; new anon\$X }
+```
+
+where `anon\$X` is some freshly created name.
+
+## Blocks
+
+```ebnf
+BlockExpr ::= ‘{’ CaseClauses ‘}’
+            | ‘{’ Block ‘}’
+Block ::= BlockStat {semi BlockStat} [ResultExpr]
+```
+
+A block expression `{$s_1$; $\ldots$; $s_n$; $e\,$}` is
+constructed from a sequence of block statements $s_1 , \ldots , s_n$
+and a final expression $e$. The statement sequence may not contain
+two definitions or declarations that bind the same name in the same
+namespace. The final expression can be omitted, in which
+case the unit value `()` is assumed.
+
+The expected type of the final expression $e$ is the expected
+type of the block. The expected type of all preceding statements is
+undefined.
+
+The type of a block `$s_1$; $\ldots$; $s_n$; $e$` is
+`$T$ forSome {$\,Q\,$}`, where $T$ is the type of $e$ and $Q$
+contains [existential clauses](03-types.html#existential-types)
+for every value or type name which is free in $T$
+and which is defined locally in one of the statements $s_1 , \ldots , s_n$.
+We say the existential clause _binds_ the occurrence of the value or type name.
+Specifically,
+
+- A locally defined type definition `type$\;t = T$`
+  is bound by the existential clause `type$\;t >: T <: T$`.
+  It is an error if $t$ carries type parameters.
+- A locally defined value definition `val$\;x: T = e$` is + bound by the existential clause `val$\;x: T$`. +- A locally defined class definition `class$\;c$ extends$\;t$` + is bound by the existential clause `type$\;c <: T$` where + $T$ is the least class type or refinement type which is a proper + supertype of the type $c$. It is an error if $c$ carries type parameters. +- A locally defined object definition `object$\;x\;$extends$\;t$` + is bound by the existential clause `val$\;x: T$` where + $T$ is the least class type or refinement type which is a proper supertype of the type + `$x$.type`. + +Evaluation of the block entails evaluation of its +statement sequence, followed by an evaluation of the final expression +$e$, which defines the result of the block. + +###### Example +Assuming a class `Ref[T](x: T)`, the block + +```scala +{ class C extends B {$\ldots$} ; new Ref(new C) } +``` + +has the type `Ref[_1] forSome { type _1 <: B }`. +The block + +```scala +{ class C extends B {$\ldots$} ; new C } +``` + +simply has type `B`, because with the rules [here](03-types.html#simplification-rules) +the existentially quantified type +`_1 forSome { type _1 <: B }` can be simplified to `B`. + +## Prefix, Infix, and Postfix Operations + +```ebnf +PostfixExpr ::= InfixExpr [id [nl]] +InfixExpr ::= PrefixExpr + | InfixExpr id [nl] InfixExpr +PrefixExpr ::= [`-' | `+' | `!' | `~'] SimpleExpr +``` + +Expressions can be constructed from operands and operators. + +### Prefix Operations + +A prefix operation $\mathit{op};e$ consists of a prefix operator $\mathit{op}$, which +must be one of the identifiers ‘`+`’, ‘`-`’, +‘`!`’ or ‘`~`’. The expression $\mathit{op};e$ is +equivalent to the postfix method application +`e.unary_$\mathit{op}$`. + + + +Prefix operators are different from normal function applications in +that their operand expression need not be atomic. For instance, the +input sequence `-sin(x)` is read as `-(sin(x))`, whereas the +function application `negate sin(x)` would be parsed as the +application of the infix operator `sin` to the operands +`negate` and `(x)`. + +### Postfix Operations + +A postfix operator can be an arbitrary identifier. The postfix +operation $e;\mathit{op}$ is interpreted as $e.\mathit{op}$. + +### Infix Operations + +An infix operator can be an arbitrary identifier. Infix operators have +precedence and associativity defined as follows: + +The _precedence_ of an infix operator is determined by the operator's first +character. Characters are listed below in increasing order of +precedence, with characters on the same line having the same precedence. + +```scala +(all letters) +| +^ +& += ! +< > +: ++ - +* / % +(all other special characters) +``` + +That is, operators starting with a letter have lowest precedence, +followed by operators starting with ``|`', etc. + +There's one exception to this rule, which concerns +[_assignment operators_](#assignment-operators). +The precedence of an assignment operator is the same as the one +of simple assignment `(=)`. That is, it is lower than the +precedence of any other operator. + +The _associativity_ of an operator is determined by the operator's +last character. Operators ending in a colon ``:`' are +right-associative. All other operators are left-associative. + +Precedence and associativity of operators determine the grouping of +parts of an expression as follows. + +- If there are several infix operations in an + expression, then operators with higher precedence bind more closely + than operators with lower precedence. 
+- If there are consecutive infix + operations $e_0; \mathit{op}\_1; e_1; \mathit{op}\_2 \ldots \mathit{op}\_n; e_n$ + with operators $\mathit{op}\_1 , \ldots , \mathit{op}\_n$ of the same precedence, + then all these operators must + have the same associativity. If all operators are left-associative, + the sequence is interpreted as + $(\ldots(e_0;\mathit{op}\_1;e_1);\mathit{op}\_2\ldots);\mathit{op}\_n;e_n$. + Otherwise, if all operators are right-associative, the + sequence is interpreted as + $e_0;\mathit{op}\_1;(e_1;\mathit{op}\_2;(\ldots \mathit{op}\_n;e_n)\ldots)$. +- Postfix operators always have lower precedence than infix + operators. E.g. $e_1;\mathit{op}\_1;e_2;\mathit{op}\_2$ is always equivalent to + $(e_1;\mathit{op}\_1;e_2);\mathit{op}\_2$. + +The right-hand operand of a left-associative operator may consist of +several arguments enclosed in parentheses, e.g. $e;\mathit{op};(e_1,\ldots,e_n)$. +This expression is then interpreted as $e.\mathit{op}(e_1,\ldots,e_n)$. + +A left-associative binary +operation $e_1;\mathit{op};e_2$ is interpreted as $e_1.\mathit{op}(e_2)$. If $\mathit{op}$ is +right-associative, the same operation is interpreted as +`{ val $x$=$e_1$; $e_2$.$\mathit{op}$($x\,$) }`, where $x$ is a fresh +name. + +### Assignment Operators + +An assignment operator is an operator symbol (syntax category +`op` in [Identifiers](01-lexical-syntax.html#identifiers)) that ends in an equals character +“`=`”, with the exception of operators for which one of +the following conditions holds: + +1. the operator also starts with an equals character, or +1. the operator is one of `(<=)`, `(>=)`, `(!=)`. + +Assignment operators are treated specially in that they +can be expanded to assignments if no other interpretation is valid. + +Let's consider an assignment operator such as `+=` in an infix +operation `$l$ += $r$`, where $l$, $r$ are expressions. +This operation can be re-interpreted as an operation which corresponds +to the assignment + +```scala +$l$ = $l$ + $r$ +``` + +except that the operation's left-hand-side $l$ is evaluated only once. + +The re-interpretation occurs if the following two conditions are fulfilled. + +1. The left-hand-side $l$ does not have a member named + `+=`, and also cannot be converted by an + [implicit conversion](#implicit-conversions) + to a value with a member named `+=`. +1. The assignment `$l$ = $l$ + $r$` is type-correct. + In particular this implies that $l$ refers to a variable or object + that can be assigned to, and that is convertible to a value with a member + named `+`. + +## Typed Expressions + +```ebnf +Expr1 ::= PostfixExpr `:' CompoundType +``` + +The typed expression $e: T$ has type $T$. The type of +expression $e$ is expected to conform to $T$. The result of +the expression is the value of $e$ converted to type $T$. + +###### Example +Here are examples of well-typed and ill-typed expressions. + +```scala +1: Int // legal, of type Int +1: Long // legal, of type Long +// 1: string // ***** illegal +``` + +## Annotated Expressions + +```ebnf +Expr1 ::= PostfixExpr `:' Annotation {Annotation} +``` + +An annotated expression `$e$: @$a_1$ $\ldots$ @$a_n$` +attaches [annotations](11-user-defined-annotations.html#user-defined-annotations) $a_1 , \ldots , a_n$ to the +expression $e$. + +## Assignments + +```ebnf +Expr1 ::= [SimpleExpr `.'] id `=' Expr + | SimpleExpr1 ArgumentExprs `=' Expr +``` + +The interpretation of an assignment to a simple variable `$x$ = $e$` +depends on the definition of $x$. 
If $x$ denotes a mutable +variable, then the assignment changes the current value of $x$ to be +the result of evaluating the expression $e$. The type of $e$ is +expected to conform to the type of $x$. If $x$ is a parameterless +function defined in some template, and the same template contains a +setter function `$x$_=` as member, then the assignment +`$x$ = $e$` is interpreted as the invocation +`$x$_=($e\,$)` of that setter function. Analogously, an +assignment `$f.x$ = $e$` to a parameterless function $x$ +is interpreted as the invocation `$f.x$_=($e\,$)`. + +An assignment `$f$($\mathit{args}\,$) = $e$` with a function application to the +left of the ‘`=`’ operator is interpreted as +`$f.$update($\mathit{args}$, $e\,$)`, i.e. +the invocation of an `update` function defined by $f$. + +###### Example +Here are some assignment expressions and their equivalent expansions. + +| assignment | expansion | +|--------------------------|----------------------| +|`x.f = e` | `x.f_=(e)` | +|`x.f() = e` | `x.f.update(e)` | +|`x.f(i) = e` | `x.f.update(i, e)` | +|`x.f(i, j) = e` | `x.f.update(i, j, e)`| + +### Example Imperative Matrix Multiplication + +Here is the usual imperative code for matrix multiplication. + +```scala +def matmul(xss: Array[Array[Double]], yss: Array[Array[Double]]) = { + val zss: Array[Array[Double]] = new Array(xss.length, yss(0).length) + var i = 0 + while (i < xss.length) { + var j = 0 + while (j < yss(0).length) { + var acc = 0.0 + var k = 0 + while (k < yss.length) { + acc = acc + xss(i)(k) * yss(k)(j) + k += 1 + } + zss(i)(j) = acc + j += 1 + } + i += 1 + } + zss +} +``` + +Desugaring the array accesses and assignments yields the following +expanded version: + +```scala +def matmul(xss: Array[Array[Double]], yss: Array[Array[Double]]) = { + val zss: Array[Array[Double]] = new Array(xss.length, yss.apply(0).length) + var i = 0 + while (i < xss.length) { + var j = 0 + while (j < yss.apply(0).length) { + var acc = 0.0 + var k = 0 + while (k < yss.length) { + acc = acc + xss.apply(i).apply(k) * yss.apply(k).apply(j) + k += 1 + } + zss.apply(i).update(j, acc) + j += 1 + } + i += 1 + } + zss +} +``` + +## Conditional Expressions + +```ebnf +Expr1 ::= `if' `(' Expr `)' {nl} Expr [[semi] `else' Expr] +``` + +The conditional expression `if ($e_1$) $e_2$ else $e_3$` chooses +one of the values of $e_2$ and $e_3$, depending on the +value of $e_1$. The condition $e_1$ is expected to +conform to type `Boolean`. The then-part $e_2$ and the +else-part $e_3$ are both expected to conform to the expected +type of the conditional expression. The type of the conditional +expression is the [weak least upper bound](03-types.html#weak-conformance) +of the types of $e_2$ and +$e_3$. A semicolon preceding the `else` symbol of a +conditional expression is ignored. + +The conditional expression is evaluated by evaluating first +$e_1$. If this evaluates to `true`, the result of +evaluating $e_2$ is returned, otherwise the result of +evaluating $e_3$ is returned. + +A short form of the conditional expression eliminates the +else-part. The conditional expression `if ($e_1$) $e_2$` is +evaluated as if it was `if ($e_1$) $e_2$ else ()`. + +## While Loop Expressions + +```ebnf +Expr1 ::= `while' `(' Expr ')' {nl} Expr +``` + +The while loop expression `while ($e_1$) $e_2$` is typed and +evaluated as if it was an application of `whileLoop ($e_1$) ($e_2$)` where +the hypothetical function `whileLoop` is defined as follows. 
+ +```scala +def whileLoop(cond: => Boolean)(body: => Unit): Unit = + if (cond) { body ; whileLoop(cond)(body) } else {} +``` + +## Do Loop Expressions + +```ebnf +Expr1 ::= `do' Expr [semi] `while' `(' Expr ')' +``` + +The do loop expression `do $e_1$ while ($e_2$)` is typed and +evaluated as if it was the expression `($e_1$ ; while ($e_2$) $e_1$)`. +A semicolon preceding the `while` symbol of a do loop expression is ignored. + +## For Comprehensions and For Loops + +```ebnf +Expr1 ::= `for' (`(' Enumerators `)' | `{' Enumerators `}') + {nl} [`yield'] Expr +Enumerators ::= Generator {semi Generator} +Generator ::= Pattern1 `<-' Expr {[semi] Guard | semi Pattern1 `=' Expr} +Guard ::= `if' PostfixExpr +``` + +A for loop `for ($\mathit{enums}\,$) $e$` executes expression $e$ +for each binding generated by the enumerators $\mathit{enums}$. A for +comprehension `for ($\mathit{enums}\,$) yield $e$` evaluates +expression $e$ for each binding generated by the enumerators $\mathit{enums}$ +and collects the results. An enumerator sequence always starts with a +generator; this can be followed by further generators, value +definitions, or guards. A _generator_ `$p$ <- $e$` +produces bindings from an expression $e$ which is matched in some way +against pattern $p$. A _value definition_ `$p$ = $e$` +binds the value name $p$ (or several names in a pattern $p$) to +the result of evaluating the expression $e$. A _guard_ +`if $e$` contains a boolean expression which restricts +enumerated bindings. The precise meaning of generators and guards is +defined by translation to invocations of four methods: `map`, +`withFilter`, `flatMap`, and `foreach`. These methods can +be implemented in different ways for different carrier types. + +The translation scheme is as follows. In a first step, every +generator `$p$ <- $e$`, where $p$ is not [irrefutable](08-pattern-matching.html#patterns) +for the type of $e$ is replaced by + +```scala +$p$ <- $e$.withFilter { case $p$ => true; case _ => false } +``` + +Then, the following rules are applied repeatedly until all +comprehensions have been eliminated. + + - A for comprehension + `for ($p$ <- $e\,$) yield $e'$` + is translated to + `$e$.map { case $p$ => $e'$ }`. + - A for loop + `for ($p$ <- $e\,$) $e'$` + is translated to + `$e$.foreach { case $p$ => $e'$ }`. + - A for comprehension + + ``` + for ($p$ <- $e$; $p'$ <- $e'; \ldots$) yield $e''$ + ``` + + where `$\ldots$` is a (possibly empty) + sequence of generators, definitions, or guards, + is translated to + + ``` + $e$.flatMap { case $p$ => for ($p'$ <- $e'; \ldots$) yield $e''$ } + ``` + + - A for loop + + ``` + for ($p$ <- $e$; $p'$ <- $e'; \ldots$) $e''$ + ``` + + where `$\ldots$` is a (possibly empty) + sequence of generators, definitions, or guards, + is translated to + + ``` + $e$.foreach { case $p$ => for ($p'$ <- $e'; \ldots$) $e''$ } + ``` + + - A generator `$p$ <- $e$` followed by a guard + `if $g$` is translated to a single generator + `$p$ <- $e$.withFilter(($x_1 , \ldots , x_n$) => $g\,$)` where + $x_1 , \ldots , x_n$ are the free variables of $p$. + + - A generator `$p$ <- $e$` followed by a value definition + `$p'$ = $e'$` is translated to the following generator of pairs of values, where + $x$ and $x'$ are fresh names: + + ``` + ($p$, $p'$) <- for ($x @ p$ <- $e$) yield { val $x' @ p'$ = $e'$; ($x$, $x'$) } + ``` + +###### Example +The following code produces all pairs of numbers between $1$ and $n-1$ +whose sums are prime. 
+ +```scala +for { i <- 1 until n + j <- 1 until i + if isPrime(i+j) +} yield (i, j) +``` + +The for comprehension is translated to: + +```scala +(1 until n) + .flatMap { + case i => (1 until i) + .withFilter { j => isPrime(i+j) } + .map { case j => (i, j) } } +``` + +###### Example +For comprehensions can be used to express vector +and matrix algorithms concisely. +For instance, here is a function to compute the transpose of a given matrix: + + + +```scala +def transpose[A](xss: Array[Array[A]]) = { + for (i <- Array.range(0, xss(0).length)) yield + for (xs <- xss) yield xs(i) +} +``` + +Here is a function to compute the scalar product of two vectors: + +```scala +def scalprod(xs: Array[Double], ys: Array[Double]) = { + var acc = 0.0 + for ((x, y) <- xs zip ys) acc = acc + x * y + acc +} +``` + +Finally, here is a function to compute the product of two matrices. +Compare with the [imperative version](#example-imperative-matrix-multiplication). + +```scala +def matmul(xss: Array[Array[Double]], yss: Array[Array[Double]]) = { + val ysst = transpose(yss) + for (xs <- xss) yield + for (yst <- ysst) yield + scalprod(xs, yst) +} +``` + +The code above makes use of the fact that `map`, `flatMap`, +`withFilter`, and `foreach` are defined for instances of class +`scala.Array`. + +## Return Expressions + +```ebnf +Expr1 ::= `return' [Expr] +``` + +A return expression `return $e$` must occur inside the body of some +enclosing named method or function. The innermost enclosing named +method or function in a source program, $f$, must have an explicitly declared result type, +and the type of $e$ must conform to it. +The return expression +evaluates the expression $e$ and returns its value as the result of +$f$. The evaluation of any statements or +expressions following the return expression is omitted. The type of +a return expression is `scala.Nothing`. + +The expression $e$ may be omitted. The return expression +`return` is type-checked and evaluated as if it was `return ()`. + +An `apply` method which is generated by the compiler as an +expansion of an anonymous function does not count as a named function +in the source program, and therefore is never the target of a return +expression. + +Returning from a nested anonymous function is implemented by throwing +and catching a `scala.runtime.NonLocalReturnException`. Any +exception catches between the point of return and the enclosing +methods might see the exception. A key comparison makes sure that +these exceptions are only caught by the method instance which is +terminated by the return. + +If the return expression is itself part of an anonymous function, it +is possible that the enclosing instance of $f$ has already returned +before the return expression is executed. In that case, the thrown +`scala.runtime.NonLocalReturnException` will not be caught, +and will propagate up the call stack. + +## Throw Expressions + +```ebnf +Expr1 ::= `throw' Expr +``` + +A throw expression `throw $e$` evaluates the expression +$e$. The type of this expression must conform to +`Throwable`. If $e$ evaluates to an exception +reference, evaluation is aborted with the thrown exception. If $e$ +evaluates to `null`, evaluation is instead aborted with a +`NullPointerException`. If there is an active +[`try` expression](#try-expressions) which handles the thrown +exception, evaluation resumes with the handler; otherwise the thread +executing the `throw` is aborted. The type of a throw expression +is `scala.Nothing`. 
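+
+###### Example
+Because a throw expression has type `scala.Nothing`, which conforms to every
+other type, it can appear wherever a value is expected, for instance in one
+branch of a conditional. The following method is an illustrative sketch:
+
+```scala
+def sqrt(x: Double): Double =
+  if (x >= 0) math.sqrt(x)
+  else throw new IllegalArgumentException("negative argument: " + x)
+```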
+
+## Try Expressions
+
+```ebnf
+Expr1 ::= `try' `{' Block `}' [`catch' `{' CaseClauses `}']
+          [`finally' Expr]
+```
+
+A try expression is of the form `try { $b$ } catch $h$`
+where the handler $h$ is a
+[pattern matching anonymous function](08-pattern-matching.html#pattern-matching-anonymous-functions)
+
+```scala
+{ case $p_1$ => $b_1$ $\ldots$ case $p_n$ => $b_n$ }
+```
+
+This expression is evaluated by evaluating the block
+$b$. If evaluation of $b$ does not cause an exception to be
+thrown, the result of $b$ is returned. Otherwise the
+handler $h$ is applied to the thrown exception.
+If the handler contains a case matching the thrown exception,
+the first such case is invoked. If the handler contains
+no case matching the thrown exception, the exception is
+re-thrown.
+
+Let $\mathit{pt}$ be the expected type of the try expression. The block
+$b$ is expected to conform to $\mathit{pt}$. The handler $h$
+is expected to conform to type
+`scala.PartialFunction[scala.Throwable, $\mathit{pt}\,$]`. The
+type of the try expression is the [weak least upper bound](03-types.html#weak-conformance)
+of the type of $b$
+and the result type of $h$.
+
+A try expression `try { $b$ } finally $e$` evaluates the block
+$b$. If evaluation of $b$ does not cause an exception to be
+thrown, the expression $e$ is evaluated. If an exception is thrown
+during evaluation of $e$, the evaluation of the try expression is
+aborted with the thrown exception. If no exception is thrown during
+evaluation of $e$, the result of $b$ is returned as the
+result of the try expression.
+
+If an exception is thrown during evaluation of $b$, the finally block
+$e$ is also evaluated. If another exception is thrown
+during evaluation of $e$, evaluation of the try expression is
+aborted with the thrown exception. If no exception is thrown during
+evaluation of $e$, the original exception thrown in $b$ is
+re-thrown once evaluation of $e$ has completed. The block
+$b$ is expected to conform to the expected type of the try
+expression. The finally expression $e$ is expected to conform to
+type `Unit`.
+
+A try expression `try { $b$ } catch $e_1$ finally $e_2$`
+is a shorthand
+for `try { try { $b$ } catch $e_1$ } finally $e_2$`.
+
+## Anonymous Functions
+
+```ebnf
+Expr ::= (Bindings | [`implicit'] id | `_') `=>' Expr
+ResultExpr ::= (Bindings | ([`implicit'] id | `_') `:' CompoundType) `=>' Block
+Bindings ::= `(' Binding {`,' Binding} `)'
+Binding ::= (id | `_') [`:' Type]
+```
+
+The anonymous function `($x_1$: $T_1 , \ldots , x_n$: $T_n$) => $e$`
+maps parameters $x_i$ of types $T_i$ to a result given
+by expression $e$. The scope of each formal parameter
+$x_i$ is $e$. Formal parameters must have pairwise distinct names.
+
+If the expected type of the anonymous function is of the form
+`scala.Function$n$[$S_1 , \ldots , S_n$, $R\,$]`, the
+expected type of $e$ is $R$ and the type $T_i$ of any of the
+parameters $x_i$ can be omitted, in which
+case `$T_i$ = $S_i$` is assumed.
+If the expected type of the anonymous function is
+some other type, all formal parameter types must be explicitly given,
+and the expected type of $e$ is undefined. The type of the anonymous
+function
+is `scala.Function$n$[$S_1 , \ldots , S_n$, $T\,$]`,
+where $T$ is the [packed type](#expression-typing)
+of $e$. $T$ must be equivalent to a
+type which does not refer to any of the formal parameters $x_i$.
+ +The anonymous function is evaluated as the instance creation expression + +```scala +new scala.Function$n$[$T_1 , \ldots , T_n$, $T$] { + def apply($x_1$: $T_1 , \ldots , x_n$: $T_n$): $T$ = $e$ +} +``` + +In the case of a single untyped formal parameter, +`($x\,$) => $e$` +can be abbreviated to `$x$ => $e$`. If an +anonymous function `($x$: $T\,$) => $e$` with a single +typed parameter appears as the result expression of a block, it can be +abbreviated to `$x$: $T$ => e`. + +A formal parameter may also be a wildcard represented by an underscore `_`. +In that case, a fresh name for the parameter is chosen arbitrarily. + +A named parameter of an anonymous function may be optionally preceded +by an `implicit` modifier. In that case the parameter is +labeled [`implicit`](07-implicit-parameters-and-views.html#implicit-parameters-and-views); however the +parameter section itself does not count as an implicit parameter +section in the sense defined [here](07-implicit-parameters-and-views.html#implicit-parameters). Hence, arguments to +anonymous functions always have to be given explicitly. + +###### Example +Examples of anonymous functions: + +```scala +x => x // The identity function + +f => g => x => f(g(x)) // Curried function composition + +(x: Int,y: Int) => x + y // A summation function + +() => { count += 1; count } // The function which takes an + // empty parameter list $()$, + // increments a non-local variable + // `count' and returns the new value. + +_ => 5 // The function that ignores its argument + // and always returns 5. +``` + +### Placeholder Syntax for Anonymous Functions + +```ebnf +SimpleExpr1 ::= `_' +``` + +An expression (of syntactic category `Expr`) +may contain embedded underscore symbols `_` at places where identifiers +are legal. Such an expression represents an anonymous function where subsequent +occurrences of underscores denote successive parameters. + +Define an _underscore section_ to be an expression of the form +`_:$T$` where $T$ is a type, or else of the form `_`, +provided the underscore does not appear as the expression part of a +type ascription `_:$T$`. + +An expression $e$ of syntactic category `Expr` _binds_ an underscore section +$u$, if the following two conditions hold: (1) $e$ properly contains $u$, and +(2) there is no other expression of syntactic category `Expr` +which is properly contained in $e$ and which itself properly contains $u$. + +If an expression $e$ binds underscore sections $u_1 , \ldots , u_n$, in this order, it is equivalent to +the anonymous function `($u'_1$, ... $u'_n$) => $e'$` +where each $u_i'$ results from $u_i$ by replacing the underscore with a fresh identifier and +$e'$ results from $e$ by replacing each underscore section $u_i$ by $u_i'$. + +###### Example +The anonymous functions in the left column use placeholder +syntax. Each of these is equivalent to the anonymous function on its right. + +| | | +|---------------------------|----------------------------| +|`_ + 1` | `x => x + 1` | +|`_ * _` | `(x1, x2) => x1 * x2` | +|`(_: Int) * 2` | `(x: Int) => (x: Int) * 2` | +|`if (_) x else y` | `z => if (z) x else y` | +|`_.map(f)` | `x => x.map(f)` | +|`_.map(_ + 1)` | `x => x.map(y => y + 1)` | + +## Constant Expressions + +Constant expressions are expressions that the Scala compiler can evaluate to a constant. 
+The definition of "constant expression" depends on the platform, but they +include at least the expressions of the following forms: + +- A literal of a value class, such as an integer +- A string literal +- A class constructed with [`Predef.classOf`](12-the-scala-standard-library.html#the-predef-object) +- An element of an enumeration from the underlying platform +- A literal array, of the form + `Array$(c_1 , \ldots , c_n)$`, + where all of the $c_i$'s are themselves constant expressions +- An identifier defined by a + [constant value definition](04-basic-declarations-and-definitions.html#value-declarations-and-definitions). + +## Statements + +```ebnf +BlockStat ::= Import + | {Annotation} [‘implicit’ | ‘lazy’] Def + | {Annotation} {LocalModifier} TmplDef + | Expr1 + | +TemplateStat ::= Import + | {Annotation} {Modifier} Def + | {Annotation} {Modifier} Dcl + | Expr + | +``` + +Statements occur as parts of blocks and templates. A statement can be +an import, a definition or an expression, or it can be empty. +Statements used in the template of a class definition can also be +declarations. An expression that is used as a statement can have an +arbitrary value type. An expression statement $e$ is evaluated by +evaluating $e$ and discarding the result of the evaluation. + + + +Block statements may be definitions which bind local names in the +block. The only modifier allowed in all block-local definitions is +`implicit`. When prefixing a class or object definition, +modifiers `abstract`, `final`, and `sealed` are also +permitted. + +Evaluation of a statement sequence entails evaluation of the +statements in the order they are written. + +## Implicit Conversions + +Implicit conversions can be applied to expressions whose type does not +match their expected type, to qualifiers in selections, and to unapplied methods. The +available implicit conversions are given in the next two sub-sections. + +We say, a type $T$ is _compatible_ to a type $U$ if $T$ weakly conforms +to $U$ after applying [eta-expansion](#eta-expansion) and +[view applications](07-implicit-parameters-and-views.html#views). + +### Value Conversions + +The following five implicit conversions can be applied to an +expression $e$ which has some value type $T$ and which is type-checked with +some expected type $\mathit{pt}$. + +###### Static Overloading Resolution +If an expression denotes several possible members of a class, +[overloading resolution](#overloading-resolution) +is applied to pick a unique member. + +###### Type Instantiation +An expression $e$ of polymorphic type + +```scala +[$a_1$ >: $L_1$ <: $U_1 , \ldots , a_n$ >: $L_n$ <: $U_n$]$T$ +``` + +which does not appear as the function part of +a type application is converted to a type instance of $T$ +by determining with [local type inference](#local-type-inference) +instance types `$T_1 , \ldots , T_n$` +for the type variables `$a_1 , \ldots , a_n$` and +implicitly embedding $e$ in the [type application](#type-applications) +`$e$[$T_1 , \ldots , T_n$]`. + +###### Numeric Widening +If $e$ has a primitive number type which [weakly conforms](03-types.html#weak-conformance) +to the expected type, it is widened to +the expected type using one of the numeric conversion methods +`toShort`, `toChar`, `toInt`, `toLong`, +`toFloat`, `toDouble` defined [here](12-the-scala-standard-library.html#numeric-value-types). 
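+
+For instance, an `Int` expression in a position where a `Long` or `Double` is
+expected is widened with the corresponding conversion method; a small
+illustrative sketch:
+
+```scala
+val n: Int = 42
+val l: Long = n        // converted as if by n.toLong
+val d: Double = n + 1  // converted as if by (n + 1).toDouble
+```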
+ +###### Numeric Literal Narrowing +If the expected type is `Byte`, `Short` or `Char`, and +the expression $e$ is an integer literal fitting in the range of that +type, it is converted to the same literal in that type. + +###### Value Discarding +If $e$ has some value type and the expected type is `Unit`, +$e$ is converted to the expected type by embedding it in the +term `{ $e$; () }`. + +###### View Application +If none of the previous conversions applies, and $e$'s type +does not conform to the expected type $\mathit{pt}$, it is attempted to convert +$e$ to the expected type with a [view](07-implicit-parameters-and-views.html#views). + +###### Dynamic Member Selection +If none of the previous conversions applies, and $e$ is a prefix +of a selection $e.x$, and $e$'s type conforms to class `scala.Dynamic`, +then the selection is rewritten according to the rules for +[dynamic member selection](#dynamic-member-selection). + +### Method Conversions + +The following four implicit conversions can be applied to methods +which are not applied to some argument list. + +###### Evaluation +A parameterless method $m$ of type `=> $T$` is always converted to +type $T$ by evaluating the expression to which $m$ is bound. + +###### Implicit Application +If the method takes only implicit parameters, implicit +arguments are passed following the rules [here](07-implicit-parameters-and-views.html#implicit-parameters). + +###### Eta Expansion +Otherwise, if the method is not a constructor, +and the expected type $\mathit{pt}$ is a function type +$(\mathit{Ts}') \Rightarrow T'$, [eta-expansion](#eta-expansion) +is performed on the expression $e$. + +###### Empty Application +Otherwise, if $e$ has method type $()T$, it is implicitly applied to the empty +argument list, yielding $e()$. + +### Overloading Resolution + +If an identifier or selection $e$ references several members of a +class, the context of the reference is used to identify a unique +member. The way this is done depends on whether or not $e$ is used as +a function. Let $\mathscr{A}$ be the set of members referenced by $e$. + +Assume first that $e$ appears as a function in an application, as in +`$e$($e_1 , \ldots , e_m$)`. + +One first determines the set of functions that is potentially +applicable based on the _shape_ of the arguments. + +The shape of an argument expression $e$, written $\mathit{shape}(e)$, is +a type that is defined as follows: + +- For a function expression `($p_1$: $T_1 , \ldots , p_n$: $T_n$) => $b$`: + `(Any $, \ldots ,$ Any) => $\mathit{shape}(b)$`, where `Any` occurs $n$ times + in the argument type. +- For a named argument `$n$ = $e$`: $\mathit{shape}(e)$. +- For all other expressions: `Nothing`. + +Let $\mathscr{B}$ be the set of alternatives in $\mathscr{A}$ that are +[_applicable_](#function-applications) +to expressions $(e_1 , \ldots , e_n)$ of types +$(\mathit{shape}(e_1) , \ldots , \mathit{shape}(e_n))$. +If there is precisely one +alternative in $\mathscr{B}$, that alternative is chosen. + +Otherwise, let $S_1 , \ldots , S_m$ be the vector of types obtained by +typing each argument with an undefined expected type. For every +member $m$ in $\mathscr{B}$ one determines whether it is +applicable to expressions ($e_1 , \ldots , e_m$) of types $S_1 +, \ldots , S_m$. +It is an error if none of the members in $\mathscr{B}$ is applicable. If there is one +single applicable alternative, that alternative is chosen. 
Otherwise, let $\mathscr{CC}$ +be the set of applicable alternatives which don't employ any default argument +in the application to $e_1 , \ldots , e_m$. It is again an error if $\mathscr{CC}$ is empty. +Otherwise, one chooses the _most specific_ alternative among the alternatives +in $\mathscr{CC}$, according to the following definition of being "as specific as", and +"more specific than": + + + +- A parameterized method $m$ of type `($p_1:T_1, \ldots , p_n:T_n$)$U$` is _as specific as_ some other + member $m'$ of type $S$ if $m'$ is applicable to arguments + `($p_1 , \ldots , p_n\,$)` of + types $T_1 , \ldots , T_n$. +- A polymorphic method of type + `[$a_1$ >: $L_1$ <: $U_1 , \ldots , a_n$ >: $L_n$ <: $U_n$]$T$` is + as specific as some other member of type $S$ if $T$ is as + specific as $S$ under the assumption that for + $i = 1 , \ldots , n$ each $a_i$ is an abstract type name + bounded from below by $L_i$ and from above by $U_i$. +- A member of any other type is always as specific as a parameterized method + or a polymorphic method. +- Given two members of types $T$ and $U$ which are + neither parameterized nor polymorphic method types, the member of type $T$ is as specific as + the member of type $U$ if the existential dual of $T$ conforms to the existential dual of $U$. + Here, the existential dual of a polymorphic type + `[$a_1$ >: $L_1$ <: $U_1 , \ldots , a_n$ >: $L_n$ <: $U_n$]$T$` is + `$T$ forSome { type $a_1$ >: $L_1$ <: $U_1$ $, \ldots ,$ type $a_n$ >: $L_n$ <: $U_n$}`. + The existential dual of every other type is the type itself. + +The _relative weight_ of an alternative $A$ over an alternative $B$ is a +number from 0 to 2, defined as the sum of + +- 1 if $A$ is as specific as $B$, 0 otherwise, and +- 1 if $A$ is defined in a class or object which is derived + from the class or object defining $B$, 0 otherwise. + +A class or object $C$ is _derived_ from a class or object $D$ if one of +the following holds: + +- $C$ is a subclass of $D$, or +- $C$ is a companion object of a class derived from $D$, or +- $D$ is a companion object of a class from which $C$ is derived. + +An alternative $A$ is _more specific_ than an alternative $B$ if +the relative weight of $A$ over $B$ is greater than the relative +weight of $B$ over $A$. + +It is an error if there is no alternative in $\mathscr{CC}$ which is more +specific than all other alternatives in $\mathscr{CC}$. + +Assume next that $e$ appears as a function in a type application, as +in `$e$[$\mathit{targs}\,$]`. Then all alternatives in +$\mathscr{A}$ which take the same number of type parameters as there are type +arguments in $\mathit{targs}$ are chosen. It is an error if no such alternative exists. +If there are several such alternatives, overloading resolution is +applied again to the whole expression `$e$[$\mathit{targs}\,$]`. + +Assume finally that $e$ does not appear as a function in either +an application or a type application. If an expected type is given, +let $\mathscr{B}$ be the set of those alternatives in $\mathscr{A}$ which are +[compatible](#implicit-conversions) to it. Otherwise, let $\mathscr{B}$ be the same +as $\mathscr{A}$. +We choose in this case the most specific alternative among all +alternatives in $\mathscr{B}$. It is an error if there is no +alternative in $\mathscr{B}$ which is more specific than all other +alternatives in $\mathscr{B}$. 
+ +###### Example +Consider the following definitions: + +```scala +class A extends B {} +def f(x: B, y: B) = $\ldots$ +def f(x: A, y: B) = $\ldots$ +val a: A +val b: B +``` + +Then the application `f(b, b)` refers to the first +definition of $f$ whereas the application `f(a, a)` +refers to the second. Assume now we add a third overloaded definition + +```scala +def f(x: B, y: A) = $\ldots$ +``` + +Then the application `f(a, a)` is rejected for being ambiguous, since +no most specific applicable signature exists. + +### Local Type Inference + +Local type inference infers type arguments to be passed to expressions +of polymorphic type. Say $e$ is of type [$a_1$ >: $L_1$ <: $U_1 +, \ldots , a_n$ >: $L_n$ <: $U_n$]$T$ and no explicit type parameters +are given. + +Local type inference converts this expression to a type +application `$e$[$T_1 , \ldots , T_n$]`. The choice of the +type arguments $T_1 , \ldots , T_n$ depends on the context in which +the expression appears and on the expected type $\mathit{pt}$. +There are three cases. + +###### Case 1: Selections +If the expression appears as the prefix of a selection with a name +$x$, then type inference is _deferred_ to the whole expression +$e.x$. That is, if $e.x$ has type $S$, it is now treated as having +type [$a_1$ >: $L_1$ <: $U_1 , \ldots , a_n$ >: $L_n$ <: $U_n$]$S$, +and local type inference is applied in turn to infer type arguments +for $a_1 , \ldots , a_n$, using the context in which $e.x$ appears. + +###### Case 2: Values +If the expression $e$ appears as a value without being applied to +value arguments, the type arguments are inferred by solving a +constraint system which relates the expression's type $T$ with the +expected type $\mathit{pt}$. Without loss of generality we can assume that +$T$ is a value type; if it is a method type we apply +[eta-expansion](#eta-expansion) to convert it to a function type. Solving +means finding a substitution $\sigma$ of types $T_i$ for the type +parameters $a_i$ such that + +- None of the inferred types $T_i$ is a [singleton type](03-types.html#singleton-types) +- All type parameter bounds are respected, i.e. + $\sigma L_i <: \sigma a_i$ and $\sigma a_i <: \sigma U_i$ for $i = 1 , \ldots , n$. +- The expression's type conforms to the expected type, i.e. + $\sigma T <: \sigma \mathit{pt}$. + +It is a compile time error if no such substitution exists. +If several substitutions exist, local-type inference will choose for +each type variable $a_i$ a minimal or maximal type $T_i$ of the +solution space. A _maximal_ type $T_i$ will be chosen if the type +parameter $a_i$ appears [contravariantly](04-basic-declarations-and-definitions.html#variance-annotations) in the +type $T$ of the expression. A _minimal_ type $T_i$ will be chosen +in all other situations, i.e. if the variable appears covariantly, +non-variantly or not at all in the type $T$. We call such a substitution +an _optimal solution_ of the given constraint system for the type $T$. + +###### Case 3: Methods +The last case applies if the expression +$e$ appears in an application $e(d_1 , \ldots , d_m)$. In that case +$T$ is a method type $(p_1:R_1 , \ldots , p_m:R_m)T'$. Without loss of +generality we can assume that the result type $T'$ is a value type; if +it is a method type we apply [eta-expansion](#eta-expansion) to +convert it to a function type. One computes first the types $S_j$ of +the argument expressions $d_j$, using two alternative schemes. 
Each +argument expression $d_j$ is typed first with the expected type $R_j$, +in which the type parameters $a_1 , \ldots , a_n$ are taken as type +constants. If this fails, the argument $d_j$ is typed instead with an +expected type $R_j'$ which results from $R_j$ by replacing every type +parameter in $a_1 , \ldots , a_n$ with _undefined_. + +In a second step, type arguments are inferred by solving a constraint +system which relates the method's type with the expected type +$\mathit{pt}$ and the argument types $S_1 , \ldots , S_m$. Solving the +constraint system means +finding a substitution $\sigma$ of types $T_i$ for the type parameters +$a_i$ such that + +- None of the inferred types $T_i$ is a [singleton type](03-types.html#singleton-types) +- All type parameter bounds are respected, i.e. $\sigma L_i <: \sigma a_i$ and + $\sigma a_i <: \sigma U_i$ for $i = 1 , \ldots , n$. +- The method's result type $T'$ conforms to the expected type, i.e. $\sigma T' <: \sigma \mathit{pt}$. +- Each argument type [weakly conforms](03-types.html#weak-conformance) + to the corresponding formal parameter + type, i.e. $\sigma S_j <:_w \sigma R_j$ for $j = 1 , \ldots , m$. + +It is a compile time error if no such substitution exists. If several +solutions exist, an optimal one for the type $T'$ is chosen. + +All or parts of an expected type $\mathit{pt}$ may be undefined. The rules for +[conformance](03-types.html#conformance) are extended to this case by adding +the rule that for any type $T$ the following two statements are always +true: $\mathit{undefined} <: T$ and $T <: \mathit{undefined}$ + +It is possible that no minimal or maximal solution for a type variable +exists, in which case a compile-time error results. Because $<:$ is a +pre-order, it is also possible that a solution set has several optimal +solutions for a type. In that case, a Scala compiler is free to pick +any one of them. + +###### Example +Consider the two methods: + +```scala +def cons[A](x: A, xs: List[A]): List[A] = x :: xs +def nil[B]: List[B] = Nil +``` + +and the definition + +```scala +val xs = cons(1, nil) +``` + +The application of `cons` is typed with an undefined expected +type. This application is completed by local type inference to +`cons[Int](1, nil)`. +Here, one uses the following +reasoning to infer the type argument `Int` for the type +parameter `a`: + +First, the argument expressions are typed. The first argument `1` +has type `Int` whereas the second argument `nil` is +itself polymorphic. One tries to type-check `nil` with an +expected type `List[a]`. This leads to the constraint system + +```scala +List[b?] <: List[a] +``` + +where we have labeled `b?` with a question mark to indicate +that it is a variable in the constraint system. +Because class `List` is covariant, the optimal +solution of this constraint is + +```scala +b = scala.Nothing +``` + +In a second step, one solves the following constraint system for +the type parameter `a` of `cons`: + +```scala +Int <: a? +List[scala.Nothing] <: List[a?] +List[a?] <: $\mathit{undefined}$ +``` + +The optimal solution of this constraint system is + +```scala +a = Int +``` + +so `Int` is the type inferred for `a`. + +###### Example + +Consider now the definition + +```scala +val ys = cons("abc", xs) +``` + +where `xs` is defined of type `List[Int]` as before. +In this case local type inference proceeds as follows. + +First, the argument expressions are typed. The first argument +`"abc"` has type `String`. 
The second argument `xs` is +first tried to be typed with expected type `List[a]`. This fails, +as `List[Int]` is not a subtype of `List[a]`. Therefore, +the second strategy is tried; `xs` is now typed with expected type +`List[$\mathit{undefined}$]`. This succeeds and yields the argument type +`List[Int]`. + +In a second step, one solves the following constraint system for +the type parameter `a` of `cons`: + +```scala +String <: a? +List[Int] <: List[a?] +List[a?] <: $\mathit{undefined}$ +``` + +The optimal solution of this constraint system is + +```scala +a = scala.Any +``` + +so `scala.Any` is the type inferred for `a`. + +### Eta Expansion + +_Eta-expansion_ converts an expression of method type to an +equivalent expression of function type. It proceeds in two steps. + +First, one identifes the maximal sub-expressions of $e$; let's +say these are $e_1 , \ldots , e_m$. For each of these, one creates a +fresh name $x_i$. Let $e'$ be the expression resulting from +replacing every maximal subexpression $e_i$ in $e$ by the +corresponding fresh name $x_i$. Second, one creates a fresh name $y_i$ +for every argument type $T_i$ of the method ($i = 1 , \ldots , +n$). The result of eta-conversion is then: + +```scala +{ val $x_1$ = $e_1$; + $\ldots$ + val $x_m$ = $e_m$; + ($y_1: T_1 , \ldots , y_n: T_n$) => $e'$($y_1 , \ldots , y_n$) +} +``` + +The behavior of [call-by-name parameters](#function-applications) +is preserved under eta-expansion: the corresponding actual argument expression, +a sub-expression of parameterless method type, is not evaluated in the expanded block. + +### Dynamic Member Selection + +The standard Scala library defines a trait `scala.Dynamic` which defines a member +`applyDynamic` as follows: + +```scala +package scala +trait Dynamic { + def applyDynamic (name: String, args: Any*): Any + ... +} +``` + +Assume a selection of the form $e.x$ where the type of $e$ conforms to `scala.Dynamic`. +Further assuming the selection is not followed by any function arguments, such an expression can be rewritten under the conditions given [here](#implicit-conversions) to: + +```scala +$e$.applyDynamic("$x$") +``` + +If the selection is followed by some arguments, e.g. $e.x(\mathit{args})$, then that expression +is rewritten to + +```scala +$e$.applyDynamic("$x$", $\mathit{args}$) +``` diff --git a/spec/07-implicit-parameters-and-views.md b/spec/07-implicit-parameters-and-views.md new file mode 100644 index 000000000000..27a50cf0586d --- /dev/null +++ b/spec/07-implicit-parameters-and-views.md @@ -0,0 +1,432 @@ +--- +title: Implicit Parameters and Views +layout: default +chapter: 7 +--- + +# Implicit Parameters and Views + +## The Implicit Modifier + +```ebnf +LocalModifier ::= ‘implicit’ +ParamClauses ::= {ParamClause} [nl] ‘(’ ‘implicit’ Params ‘)’ +``` + +Template members and parameters labeled with an `implicit` +modifier can be passed to [implicit parameters](#implicit-parameters) +and can be used as implicit conversions called [views](#views). +The `implicit` modifier is illegal for all +type members, as well as for [top-level objects](09-top-level-definitions.html#packagings). + +### Example Monoid +The following code defines an abstract class of monoids and +two concrete implementations, `StringMonoid` and +`IntMonoid`. The two implementations are marked implicit. 
+
+```scala
+abstract class Monoid[A] extends SemiGroup[A] {
+  def unit: A
+  def add(x: A, y: A): A
+}
+object Monoids {
+  implicit object stringMonoid extends Monoid[String] {
+    def add(x: String, y: String): String = x.concat(y)
+    def unit: String = ""
+  }
+  implicit object intMonoid extends Monoid[Int] {
+    def add(x: Int, y: Int): Int = x + y
+    def unit: Int = 0
+  }
+}
+```
+
+## Implicit Parameters
+
+An implicit parameter list
+`(implicit $p_1$,$\ldots$,$p_n$)` of a method marks the parameters $p_1 , \ldots , p_n$ as
+implicit. A method or constructor can have only one implicit parameter
+list, and it must be the last parameter list given.
+
+A method with implicit parameters can be applied to arguments just
+like a normal method. In this case the `implicit` label has no
+effect. However, if such a method misses arguments for its implicit
+parameters, such arguments will be automatically provided.
+
+The actual arguments that are eligible to be passed to an implicit
+parameter of type $T$ fall into two categories. First, eligible are
+all identifiers $x$ that can be accessed at the point of the method
+call without a prefix and that denote an
+[implicit definition](#the-implicit-modifier)
+or an implicit parameter. An eligible
+identifier may thus be a local name, or a member of an enclosing
+template, or it may have been made accessible without a prefix
+through an [import clause](04-basic-declarations-and-definitions.html#import-clauses). If there are no eligible
+identifiers under this rule, then, second, eligible are also all
+`implicit` members of some object that belongs to the implicit
+scope of the implicit parameter's type, $T$.
+
+The _implicit scope_ of a type $T$ consists of all [companion modules](05-classes-and-objects.html#object-definitions) of classes that are associated with the implicit parameter's type.
+Here, we say a class $C$ is _associated_ with a type $T$ if it is a [base class](05-classes-and-objects.html#class-linearization) of some part of $T$.
+
+The _parts_ of a type $T$ are:
+
+- if $T$ is a compound type `$T_1$ with $\ldots$ with $T_n$`,
+  the union of the parts of $T_1 , \ldots , T_n$, as well as $T$ itself;
+- if $T$ is a parameterized type `$S$[$T_1 , \ldots , T_n$]`,
+  the union of the parts of $S$ and $T_1 , \ldots , T_n$;
+- if $T$ is a singleton type `$p$.type`,
+  the parts of the type of $p$;
+- if $T$ is a type projection `$S$#$U$`,
+  the parts of $S$ as well as $T$ itself;
+- if $T$ is a type alias, the parts of its expansion;
+- if $T$ is an abstract type, the parts of its upper bound;
+- if $T$ denotes an implicit conversion to a type with a method with argument types $T_1 , \ldots , T_n$ and result type $U$,
+  the union of the parts of $T_1 , \ldots , T_n$ and $U$;
+- the parts of quantified (existential or universal) and annotated types are defined as the parts of the underlying types (e.g., the parts of `T forSome { ... }` are the parts of `T`);
+- in all other cases, just $T$ itself.
+
+Note that packages are internally represented as classes with companion modules to hold the package members.
+Thus, implicits defined in a package object are part of the implicit scope of a type prefixed by that package.
+
+If there are several eligible arguments which match the implicit
+parameter's type, a most specific one will be chosen using the rules
+of static [overloading resolution](06-expressions.html#overloading-resolution).
+If the parameter has a default argument and no implicit argument can
+be found, the default argument is used.
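+
+###### Example
+A sketch of the default-argument fallback described above (the definitions
+are illustrative):
+
+```scala
+def greet(name: String)(implicit greeting: String = "Hello"): String =
+  greeting + ", " + name
+
+greet("Scala")          // no eligible implicit value: the default "Hello" is used
+
+{
+  implicit val hi: String = "Hi"
+  greet("Scala")        // the implicit value hi is found and passed instead
+}
+```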
+ +###### Example +Assuming the classes from the [`Monoid` example](#example-monoid), here is a +method which computes the sum of a list of elements using the +monoid's `add` and `unit` operations. + +```scala +def sum[A](xs: List[A])(implicit m: Monoid[A]): A = + if (xs.isEmpty) m.unit + else m.add(xs.head, sum(xs.tail)) +``` + +The monoid in question is marked as an implicit parameter, and can therefore +be inferred based on the type of the list. +Consider for instance the call `sum(List(1, 2, 3))` +in a context where `stringMonoid` and `intMonoid` +are visible. We know that the formal type parameter `a` of +`sum` needs to be instantiated to `Int`. The only +eligible object which matches the implicit formal parameter type +`Monoid[Int]` is `intMonoid` so this object will +be passed as implicit parameter. + +This discussion also shows that implicit parameters are inferred after +any type arguments are [inferred](06-expressions.html#local-type-inference). + +Implicit methods can themselves have implicit parameters. An example +is the following method from module `scala.List`, which injects +lists into the `scala.Ordered` class, provided the element +type of the list is also convertible to this type. + +```scala +implicit def list2ordered[A](x: List[A]) + (implicit elem2ordered: A => Ordered[A]): Ordered[List[A]] = + ... +``` + +Assume in addition a method + +```scala +implicit def int2ordered(x: Int): Ordered[Int] +``` + +that injects integers into the `Ordered` class. We can now +define a `sort` method over ordered lists: + +```scala +def sort[A](xs: List[A])(implicit a2ordered: A => Ordered[A]) = ... +``` + +We can apply `sort` to a list of lists of integers +`yss: List[List[Int]]` +as follows: + +```scala +sort(yss) +``` + +The call above will be completed by passing two nested implicit arguments: + +```scala +sort(yss)(xs: List[Int] => list2ordered[Int](xs)(int2ordered)) . +``` + +The possibility of passing implicit arguments to implicit arguments +raises the possibility of an infinite recursion. For instance, one +might try to define the following method, which injects _every_ type into the +`Ordered` class: + +```scala +implicit def magic[A](x: A)(implicit a2ordered: A => Ordered[A]): Ordered[A] = + a2ordered(x) +``` + +Now, if one tried to apply +`sort` to an argument `arg` of a type that did not have +another injection into the `Ordered` class, one would obtain an infinite +expansion: + +```scala +sort(arg)(x => magic(x)(x => magic(x)(x => ... ))) +``` + +To prevent such infinite expansions, the compiler keeps track of +a stack of “open implicit types” for which implicit arguments are currently being +searched. Whenever an implicit argument for type $T$ is searched, the +“core type” of $T$ is added to the stack. Here, the _core type_ +of $T$ is $T$ with aliases expanded, top-level type [annotations](11-user-defined-annotations.html#user-defined-annotations) and +[refinements](03-types.html#compound-types) removed, and occurrences +of top-level existentially bound variables replaced by their upper +bounds. The core type is removed from the stack once the search for +the implicit argument either definitely fails or succeeds. Everytime a +core type is added to the stack, it is checked that this type does not +dominate any of the other types in the set. + +Here, a core type $T$ _dominates_ a type $U$ if $T$ is +[equivalent](03-types.html#equivalence) +to $U$, or if the top-level type constructors of $T$ and $U$ have a +common element and $T$ is more complex than $U$. 
+ +The set of _top-level type constructors_ $\mathit{ttcs}(T)$ of a type $T$ depends on the form of +the type: + +- For a type designator, $\mathit{ttcs}(p.c) ~=~ \{c\}$; +- For a parameterized type, $\mathit{ttcs}(p.c[\mathit{targs}]) ~=~ \{c\}$; +- For a singleton type, $\mathit{ttcs}(p.type) ~=~ \mathit{ttcs}(T)$, provided $p$ has type $T$; +- For a compound type, `$\mathit{ttcs}(T_1$ with $\ldots$ with $T_n)$` $~=~ \mathit{ttcs}(T_1) \cup \ldots \cup \mathit{ttcs}(T_n)$. + +The _complexity_ $\operatorname{complexity}(T)$ of a core type is an integer which also depends on the form of +the type: + +- For a type designator, $\operatorname{complexity}(p.c) ~=~ 1 + \operatorname{complexity}(p)$ +- For a parameterized type, $\operatorname{complexity}(p.c[\mathit{targs}]) ~=~ 1 + \Sigma \operatorname{complexity}(\mathit{targs})$ +- For a singleton type denoting a package $p$, $\operatorname{complexity}(p.type) ~=~ 0$ +- For any other singleton type, $\operatorname{complexity}(p.type) ~=~ 1 + \operatorname{complexity}(T)$, provided $p$ has type $T$; +- For a compound type, `$\operatorname{complexity}(T_1$ with $\ldots$ with $T_n)$` $= \Sigma\operatorname{complexity}(T_i)$ + +###### Example +When typing `sort(xs)` for some list `xs` of type `List[List[List[Int]]]`, +the sequence of types for +which implicit arguments are searched is + +```scala +List[List[Int]] => Ordered[List[List[Int]]], +List[Int] => Ordered[List[Int]] +Int => Ordered[Int] +``` + +All types share the common type constructor `scala.Function1`, +but the complexity of the each new type is lower than the complexity of the previous types. +Hence, the code typechecks. + +###### Example +Let `ys` be a list of some type which cannot be converted +to `Ordered`. For instance: + +```scala +val ys = List(new IllegalArgumentException, new ClassCastException, new Error) +``` + +Assume that the definition of `magic` above is in scope. Then the sequence +of types for which implicit arguments are searched is + +```scala +Throwable => Ordered[Throwable], +Throwable => Ordered[Throwable], +... +``` + +Since the second type in the sequence is equal to the first, the compiler +will issue an error signalling a divergent implicit expansion. + +## Views + +Implicit parameters and methods can also define implicit conversions +called views. A _view_ from type $S$ to type $T$ is +defined by an implicit value which has function type +`$S$=>$T$` or `(=>$S$)=>$T$` or by a method convertible to a value of that +type. + +Views are applied in three situations: + +1. If an expression $e$ is of type $T$, and $T$ does not conform to the + expression's expected type $\mathit{pt}$. In this case an implicit $v$ is + searched which is applicable to $e$ and whose result type conforms to + $\mathit{pt}$. The search proceeds as in the case of implicit parameters, + where the implicit scope is the one of `$T$ => $\mathit{pt}$`. If + such a view is found, the expression $e$ is converted to + `$v$($e$)`. +1. In a selection $e.m$ with $e$ of type $T$, if the selector $m$ does + not denote an accessible member of $T$. In this case, a view $v$ is searched + which is applicable to $e$ and whose result contains a member named + $m$. The search proceeds as in the case of implicit parameters, where + the implicit scope is the one of $T$. If such a view is found, the + selection $e.m$ is converted to `$v$($e$).$m$`. +1. 
In a selection $e.m(\mathit{args})$ with $e$ of type $T$, if the selector + $m$ denotes some member(s) of $T$, but none of these members is applicable to the arguments + $\mathit{args}$. In this case a view $v$ is searched which is applicable to $e$ + and whose result contains a method $m$ which is applicable to $\mathit{args}$. + The search proceeds as in the case of implicit parameters, where + the implicit scope is the one of $T$. If such a view is found, the + selection $e.m$ is converted to `$v$($e$).$m(\mathit{args})$`. + +The implicit view, if it is found, can accept is argument $e$ as a +call-by-value or as a call-by-name parameter. However, call-by-value +implicits take precedence over call-by-name implicits. + +As for implicit parameters, overloading resolution is applied +if there are several possible candidates (of either the call-by-value +or the call-by-name category). + +### Example Ordered +Class `scala.Ordered[A]` contains a method + +```scala + def <= [B >: A](that: B)(implicit b2ordered: B => Ordered[B]): Boolean . +``` + +Assume two lists `xs` and `ys` of type `List[Int]` +and assume that the `list2ordered` and `int2ordered` +methods defined [here](#implicit-parameters) are in scope. +Then the operation + +```scala + xs <= ys +``` + +is legal, and is expanded to: + +```scala + list2ordered(xs)(int2ordered).<= + (ys) + (xs => list2ordered(xs)(int2ordered)) +``` + +The first application of `list2ordered` converts the list +`xs` to an instance of class `Ordered`, whereas the second +occurrence is part of an implicit parameter passed to the `<=` +method. + +## Context Bounds and View Bounds + +```ebnf + TypeParam ::= (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type] + {‘<%’ Type} {‘:’ Type} +``` + +A type parameter $A$ of a method or non-trait class may have one or more view +bounds `$A$ <% $T$`. In this case the type parameter may be +instantiated to any type $S$ which is convertible by application of a +view to the bound $T$. + +A type parameter $A$ of a method or non-trait class may also have one +or more context bounds `$A$ : $T$`. In this case the type parameter may be +instantiated to any type $S$ for which _evidence_ exists at the +instantiation point that $S$ satisfies the bound $T$. Such evidence +consists of an implicit value with type $T[S]$. + +A method or class containing type parameters with view or context bounds is treated as being +equivalent to a method with implicit parameters. Consider first the case of a +single parameter with view and/or context bounds such as: + +```scala +def $f$[$A$ <% $T_1$ ... <% $T_m$ : $U_1$ : $U_n$]($\mathit{ps}$): $R$ = ... +``` + +Then the method definition above is expanded to + +```scala +def $f$[$A$]($\mathit{ps}$)(implicit $v_1$: $A$ => $T_1$, ..., $v_m$: $A$ => $T_m$, + $w_1$: $U_1$[$A$], ..., $w_n$: $U_n$[$A$]): $R$ = ... +``` + +where the $v_i$ and $w_j$ are fresh names for the newly introduced implicit parameters. These +parameters are called _evidence parameters_. + +If a class or method has several view- or context-bounded type parameters, each +such type parameter is expanded into evidence parameters in the order +they appear and all the resulting evidence parameters are concatenated +in one implicit parameter section. Since traits do not take +constructor parameters, this translation does not work for them. +Consequently, type-parameters in traits may not be view- or context-bounded. +Also, a method or class with view- or context bounds may not define any +additional implicit parameters. 
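+
+As an illustration of this translation, here is a hedged sketch (the type class
+`Show`, the methods `describe` and `describeExpanded`, and the evidence name `ev`
+are made up for this example, not part of the standard library): a single
+context bound corresponds to one extra implicit evidence parameter.
+
+```scala
+// Hypothetical type class, used only to illustrate the expansion.
+trait Show[A] { def show(x: A): String }
+
+// Context-bounded form:
+def describe[A: Show](x: A): String =
+  implicitly[Show[A]].show(x)
+
+// Roughly equivalent expansion with an explicit evidence parameter:
+def describeExpanded[A](x: A)(implicit ev: Show[A]): String =
+  ev.show(x)
+```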
+ +###### Example +The `<=` method from the [`Ordered` example](#example-ordered) can be declared +more concisely as follows: + +```scala +def <= [B >: A <% Ordered[B]](that: B): Boolean +``` + +## Manifests + +Manifests are type descriptors that can be automatically generated by +the Scala compiler as arguments to implicit parameters. The Scala +standard library contains a hierarchy of four manifest classes, +with `OptManifest` +at the top. Their signatures follow the outline below. + +```scala +trait OptManifest[+T] +object NoManifest extends OptManifest[Nothing] +trait ClassManifest[T] extends OptManifest[T] +trait Manifest[T] extends ClassManifest[T] +``` + +If an implicit parameter of a method or constructor is of a subtype $M[T]$ of +class `OptManifest[T]`, _a manifest is determined for $M[S]$_, +according to the following rules. + +First if there is already an implicit argument that matches $M[T]$, this +argument is selected. + +Otherwise, let $\mathit{Mobj}$ be the companion object `scala.reflect.Manifest` +if $M$ is trait `Manifest`, or be +the companion object `scala.reflect.ClassManifest` otherwise. Let $M'$ be the trait +`Manifest` if $M$ is trait `Manifest`, or be the trait `OptManifest` otherwise. +Then the following rules apply. + +1. If $T$ is a value class or one of the classes `Any`, `AnyVal`, `Object`, + `Null`, or `Nothing`, + a manifest for it is generated by selecting + the corresponding manifest value `Manifest.$T$`, which exists in the + `Manifest` module. +1. If $T$ is an instance of `Array[$S$]`, a manifest is generated + with the invocation `$\mathit{Mobj}$.arrayType[S](m)`, where $m$ is the manifest + determined for $M[S]$. +1. If $T$ is some other class type $S$#$C[U_1, \ldots, U_n]$ where the prefix + type $S$ cannot be statically determined from the class $C$, + a manifest is generated with the invocation `$\mathit{Mobj}$.classType[T]($m_0$, classOf[T], $ms$)` + where $m_0$ is the manifest determined for $M'[S]$ and $ms$ are the + manifests determined for $M'[U_1], \ldots, M'[U_n]$. +1. If $T$ is some other class type with type arguments $U_1 , \ldots , U_n$, + a manifest is generated + with the invocation `$\mathit{Mobj}$.classType[T](classOf[T], $ms$)` + where $ms$ are the + manifests determined for $M'[U_1] , \ldots , M'[U_n]$. +1. If $T$ is a singleton type `$p$.type`, a manifest is generated with + the invocation `$\mathit{Mobj}$.singleType[T]($p$)` +1. If $T$ is a refined type $T' \{ R \}$, a manifest is generated for $T'$. + (That is, refinements are never reflected in manifests). +1. If $T$ is an intersection type + `$T_1$ with $, \ldots ,$ with $T_n$` + where $n > 1$, the result depends on whether a full manifest is + to be determined or not. + If $M$ is trait `Manifest`, then + a manifest is generated with the invocation + `Manifest.intersectionType[T]($ms$)` where $ms$ are the manifests + determined for $M[T_1] , \ldots , M[T_n]$. + Otherwise, if $M$ is trait `ClassManifest`, + then a manifest is generated for the [intersection dominator](03-types.html#type-erasure) + of the types $T_1 , \ldots , T_n$. +1. If $T$ is some other type, then if $M$ is trait `OptManifest`, + a manifest is generated from the designator `scala.reflect.NoManifest`. + If $M$ is a type different from `OptManifest`, a static error results. 
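+
+For instance, a manifest can be requested simply by declaring an implicit
+parameter of type `Manifest[T]`; the compiler then synthesizes the argument
+according to the rules above. The helper `typeName` below is a hypothetical
+sketch, not a standard library method:
+
+```scala
+// Hypothetical helper: the compiler determines and passes the manifest.
+def typeName[T](x: T)(implicit m: Manifest[T]): String = m.toString
+
+typeName(List(1, 2, 3))     // e.g. "scala.collection.immutable.List[Int]"
+typeName(Array(1.0, 2.0))   // e.g. "Array[Double]"
+```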
diff --git a/spec/08-pattern-matching.md b/spec/08-pattern-matching.md new file mode 100644 index 000000000000..e75bddc09640 --- /dev/null +++ b/spec/08-pattern-matching.md @@ -0,0 +1,716 @@ +--- +title: Pattern Matching +layout: default +chapter: 8 +--- + +# Pattern Matching + +## Patterns + +```ebnf + Pattern ::= Pattern1 { ‘|’ Pattern1 } + Pattern1 ::= varid ‘:’ TypePat + | ‘_’ ‘:’ TypePat + | Pattern2 + Pattern2 ::= varid [‘@’ Pattern3] + | Pattern3 + Pattern3 ::= SimplePattern + | SimplePattern {id [nl] SimplePattern} + SimplePattern ::= ‘_’ + | varid + | Literal + | StableId + | StableId ‘(’ [Patterns] ‘)’ + | StableId ‘(’ [Patterns ‘,’] [varid ‘@’] ‘_’ ‘*’ ‘)’ + | ‘(’ [Patterns] ‘)’ + | XmlPattern + Patterns ::= Pattern {‘,’ Patterns} +``` + +A pattern is built from constants, constructors, variables and type +tests. Pattern matching tests whether a given value (or sequence of values) +has the shape defined by a pattern, and, if it does, binds the +variables in the pattern to the corresponding components of the value +(or sequence of values). The same variable name may not be bound more +than once in a pattern. + +###### Example +Some examples of patterns are: + 1. The pattern `ex: IOException` matches all instances of class + `IOException`, binding variable `ex` to the instance. + 1. The pattern `Some(x)` matches values of the form `Some($v$)`, + binding `x` to the argument value $v$ of the `Some` constructor. + 1. The pattern `(x, _)` matches pairs of values, binding `x` to + the first component of the pair. The second component is matched + with a wildcard pattern. + 1. The pattern `x :: y :: xs` matches lists of length $\geq 2$, + binding `x` to the list's first element, `y` to the list's + second element, and `xs` to the remainder. + 1. The pattern `1 | 2 | 3` matches the integers between 1 and 3. + +Pattern matching is always done in a context which supplies an +expected type of the pattern. We distinguish the following kinds of +patterns. + +### Variable Patterns + +```ebnf + SimplePattern ::= `_' + | varid +``` + +A variable pattern $x$ is a simple identifier which starts with a +lower case letter. It matches any value, and binds the variable name +to that value. The type of $x$ is the expected type of the pattern as +given from outside. A special case is the wild-card pattern `_` +which is treated as if it was a fresh variable on each occurrence. + +### Typed Patterns + +```ebnf + Pattern1 ::= varid `:' TypePat + | `_' `:' TypePat +``` + +A typed pattern $x: T$ consists of a pattern variable $x$ and a +type pattern $T$. The type of $x$ is the type pattern $T$, where +each type variable and wildcard is replaced by a fresh, unknown type. +This pattern matches any value matched by the [type pattern](#type-patterns) +$T$; it binds the variable name to +that value. + +### Pattern Binders + +```ebnf + Pattern2 ::= varid `@' Pattern3 +``` + +A pattern binder `$x$@$p$` consists of a pattern variable $x$ and a +pattern $p$. The type of the variable $x$ is the static type $T$ of the pattern $p$. +This pattern matches any value $v$ matched by the pattern $p$, +provided the run-time type of $v$ is also an instance of $T$, +and it binds the variable name to that value. + +### Literal Patterns + +```ebnf + SimplePattern ::= Literal +``` + +A literal pattern $L$ matches any value that is equal (in terms of +`==`) to the literal $L$. The type of $L$ must conform to the +expected type of the pattern. 
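+
+###### Example
+The following hedged sketch (the method name `classify` is made up) combines
+literal, typed, and variable patterns:
+
+```scala
+def classify(x: Any): String = x match {
+  case 0         => "the literal zero"               // literal pattern
+  case n: Int    => "some other Int: " + n           // typed pattern
+  case s: String => "a String of length " + s.length // typed pattern
+  case other     => "something else: " + other       // variable pattern
+}
+```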
+ +### Stable Identifier Patterns + +```ebnf + SimplePattern ::= StableId +``` + +A stable identifier pattern is a [stable identifier](03-types.html#paths) $r$. +The type of $r$ must conform to the expected +type of the pattern. The pattern matches any value $v$ such that +`$r$ == $v$` (see [here](12-the-scala-standard-library.html#root-classes)). + +To resolve the syntactic overlap with a variable pattern, a +stable identifier pattern may not be a simple name starting with a lower-case +letter. However, it is possible to enclose such a variable name in +backquotes; then it is treated as a stable identifier pattern. + +###### Example +Consider the following function definition: + +```scala +def f(x: Int, y: Int) = x match { + case y => ... +} +``` + +Here, `y` is a variable pattern, which matches any value. +If we wanted to turn the pattern into a stable identifier pattern, this +can be achieved as follows: + +```scala +def f(x: Int, y: Int) = x match { + case `y` => ... +} +``` + +Now, the pattern matches the `y` parameter of the enclosing function `f`. +That is, the match succeeds only if the `x` argument and the `y` +argument of `f` are equal. + +### Constructor Patterns + +```ebnf +SimplePattern ::= StableId `(' [Patterns] `) +``` + +A constructor pattern is of the form $c(p_1 , \ldots , p_n)$ where $n +\geq 0$. It consists of a stable identifier $c$, followed by element +patterns $p_1 , \ldots , p_n$. The constructor $c$ is a simple or +qualified name which denotes a [case class](05-classes-and-objects.html#case-classes). +If the case class is monomorphic, then it +must conform to the expected type of the pattern, and the formal +parameter types of $x$'s [primary constructor](05-classes-and-objects.html#class-definitions) +are taken as the expected types of the element patterns $p_1, \ldots , +p_n$. If the case class is polymorphic, then its type parameters are +instantiated so that the instantiation of $c$ conforms to the expected +type of the pattern. The instantiated formal parameter types of $c$'s +primary constructor are then taken as the expected types of the +component patterns $p_1, \ldots , p_n$. The pattern matches all +objects created from constructor invocations $c(v_1 , \ldots , v_n)$ +where each element pattern $p_i$ matches the corresponding value +$v_i$. + +A special case arises when $c$'s formal parameter types end in a +repeated parameter. This is further discussed [here](#pattern-sequences). + +### Tuple Patterns + +```ebnf + SimplePattern ::= `(' [Patterns] `)' +``` + +A tuple pattern `($p_1 , \ldots , p_n$)` is an alias +for the constructor pattern `scala.Tuple$n$($p_1 , \ldots , p_n$)`, +where $n \geq 2$. The empty tuple +`()` is the unique value of type `scala.Unit`. + +### Extractor Patterns + +```ebnf + SimplePattern ::= StableId `(' [Patterns] `)' +``` + +An extractor pattern $x(p_1 , \ldots , p_n)$ where $n \geq 0$ is of +the same syntactic form as a constructor pattern. However, instead of +a case class, the stable identifier $x$ denotes an object which has a +member method named `unapply` or `unapplySeq` that matches +the pattern. + +An `unapply` method in an object $x$ _matches_ the pattern +$x(p_1 , \ldots , p_n)$ if it takes exactly one argument and one of +the following applies: + +* $n=0$ and `unapply`'s result type is `Boolean`. In this case + the extractor pattern matches all values $v$ for which + `$x$.unapply($v$)` yields `true`. +* $n=1$ and `unapply`'s result type is `Option[$T$]`, for some + type $T$. 
In this case, the (only) argument pattern $p_1$ is typed in + turn with expected type $T$. The extractor pattern matches then all + values $v$ for which `$x$.unapply($v$)` yields a value of form + `Some($v_1$)`, and $p_1$ matches $v_1$. +* $n>1$ and `unapply`'s result type is + `Option[($T_1 , \ldots , T_n$)]`, for some + types $T_1 , \ldots , T_n$. In this case, the argument patterns $p_1 + , \ldots , p_n$ are typed in turn with expected types $T_1 , \ldots , + T_n$. The extractor pattern matches then all values $v$ for which + `$x$.unapply($v$)` yields a value of form + `Some(($v_1 , \ldots , v_n$))`, and each pattern + $p_i$ matches the corresponding value $v_i$. + +An `unapplySeq` method in an object $x$ matches the pattern +$x(q_1 , \ldots , q_m, p_1 , \ldots , p_n)$ if it takes exactly one argument +and its result type is of the form `Option[($T_1 , \ldots , T_m$, Seq[S])]` (if `m = 0`, the type `Option[Seq[S]]` is also accepted). +This case is further discussed [below](#pattern-sequences). + +###### Example +The `Predef` object contains a definition of an +extractor object `Pair`: + +```scala +object Pair { + def apply[A, B](x: A, y: B) = Tuple2(x, y) + def unapply[A, B](x: Tuple2[A, B]): Option[Tuple2[A, B]] = Some(x) +} +``` + +This means that the name `Pair` can be used in place of `Tuple2` for tuple +formation as well as for deconstruction of tuples in patterns. +Hence, the following is possible: + +```scala +val x = (1, 2) +val y = x match { + case Pair(i, s) => Pair(s + i, i * i) +} +``` + +### Pattern Sequences + +```ebnf +SimplePattern ::= StableId `(' [Patterns `,'] [varid `@'] `_' `*' `)' +``` + +A pattern sequence $p_1 , \ldots , p_n$ appears in two contexts. +First, in a constructor pattern $c(q_1 , \ldots , q_m, p_1 , \ldots , p_n)$, where $c$ is a case class which has $m+1$ primary constructor parameters, ending in a [repeated parameter](04-basic-declarations-and-definitions.html#repeated-parameters) of type `S*`. +Second, in an extractor pattern $x(q_1 , \ldots , q_m, p_1 , \ldots , p_n)$ if the extractor object $x$ does not have an `unapply` method, +but it does define an `unapplySeq` method with a result type conforming to `Option[(T_1, ... , T_m, Seq[S])]` (if `m = 0`, the type `Option[Seq[S]]` is also accepted). The expected type for the patterns $p_i$ is $S$. + +The last pattern in a pattern sequence may be a _sequence wildcard_ `_*`. +Each element pattern $p_i$ is type-checked with +$S$ as expected type, unless it is a sequence wildcard. If a final +sequence wildcard is present, the pattern matches all values $v$ that +are sequences which start with elements matching patterns +$p_1 , \ldots , p_{n-1}$. If no final sequence wildcard is given, the +pattern matches all values $v$ that are sequences of +length $n$ which consist of elements matching patterns $p_1 , \ldots , +p_n$. + +### Infix Operation Patterns + +```ebnf + Pattern3 ::= SimplePattern {id [nl] SimplePattern} +``` + +An infix operation pattern $p;\mathit{op};q$ is a shorthand for the +constructor or extractor pattern $\mathit{op}(p, q)$. The precedence and +associativity of operators in patterns is the same as in +[expressions](06-expressions.html#prefix,-infix,-and-postfix-operations). + +An infix operation pattern $p;\mathit{op};(q_1 , \ldots , q_n)$ is a +shorthand for the constructor or extractor pattern $\mathit{op}(p, q_1 +, \ldots , q_n)$. 
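+
+###### Example
+As a hedged illustration (the method name `describeList` is made up), the
+familiar cons pattern on lists is simply the infix form of the constructor
+pattern for class `::`:
+
+```scala
+def describeList(xs: List[Int]): String = xs match {
+  case x :: rest => "head " + x + ", tail of length " + rest.length // infix form
+  case Nil       => "empty list"
+}
+// The first case is equivalent to the prefix form `case ::(x, rest) => ...`.
+```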
+
+### Pattern Alternatives
+
+```ebnf
+  Pattern  ::=  Pattern1 { `|' Pattern1 }
+```
+
+A pattern alternative `$p_1$ | $\ldots$ | $p_n$`
+consists of a number of alternative patterns $p_i$. All alternative
+patterns are type checked with the expected type of the pattern. They
+may not bind variables other than wildcards. The alternative pattern
+matches a value $v$ if at least one of its alternatives matches $v$.
+
+### XML Patterns
+
+XML patterns are treated [here](10-xml-expressions-and-patterns.html#xml-patterns).
+
+### Regular Expression Patterns
+
+Regular expression patterns have been discontinued in Scala from version 2.0.
+
+Later versions of Scala provide a much simplified version of regular
+expression patterns that cover most scenarios of non-text sequence
+processing. A _sequence pattern_ is a pattern that stands in a
+position where either (1) a pattern of a type `T` which conforms to
+`Seq[A]` for some `A` is expected, or (2) a case
+class constructor that has an iterated formal parameter
+`A*`. A wildcard star pattern `_*` in the
+rightmost position stands for arbitrarily long sequences. It can be
+bound to variables using `@`, as usual, in which case the variable will have the
+type `Seq[A]`.
+
+### Irrefutable Patterns
+
+A pattern $p$ is _irrefutable_ for a type $T$, if one of the following applies:
+
+1. $p$ is a variable pattern,
+1. $p$ is a typed pattern $x: T'$, and $T <: T'$,
+1. $p$ is a constructor pattern $c(p_1 , \ldots , p_n)$, the type $T$
+   is an instance of class $c$, the [primary constructor](05-classes-and-objects.html#class-definitions)
+   of type $T$ has argument types $T_1 , \ldots , T_n$, and each $p_i$ is
+   irrefutable for $T_i$.
+
+## Type Patterns
+
+```ebnf
+  TypePat  ::=  Type
+```
+
+Type patterns consist of types, type variables, and wildcards.
+A type pattern $T$ is of one of the following forms:
+
+* A reference to a class $C$, $p.C$, or `$T$#$C$`. This
+  type pattern matches any non-null instance of the given class.
+  Note that the prefix of the class, if it is given, is relevant for determining
+  class instances. For instance, the pattern $p.C$ matches only
+  instances of classes $C$ which were created with the path $p$ as
+  prefix.
+
+  The bottom types `scala.Nothing` and `scala.Null` cannot
+  be used as type patterns, because they would match nothing in any case.
+
+* A singleton type `$p$.type`. This type pattern matches only the value
+  denoted by the path $p$ (that is, a pattern match involves a
+  comparison of the matched value with $p$ using method `eq` in class
+  `AnyRef`).
+* A compound type pattern `$T_1$ with $\ldots$ with $T_n$` where each $T_i$ is a
+  type pattern. This type pattern matches all values that are matched by each of
+  the type patterns $T_i$.
+
+* A parameterized type pattern $T[a_1 , \ldots , a_n]$, where the $a_i$
+  are type variable patterns or wildcards `_`.
+  This type pattern matches all values which match $T$ for
+  some arbitrary instantiation of the type variables and wildcards. The
+  bounds or alias type of these type variables are determined as
+  described [here](#type-parameter-inference-in-patterns).
+
+* A parameterized type pattern `scala.Array$[T_1]$`, where
+  $T_1$ is a type pattern. This type pattern matches any non-null instance
+  of type `scala.Array$[U_1]$`, where $U_1$ is a type matched by $T_1$.
+
+Types which are not of one of the forms described above are also
+accepted as type patterns. However, such type patterns will be translated to their
+[erasure](03-types.html#type-erasure).
The Scala +compiler will issue an "unchecked" warning for these patterns to +flag the possible loss of type-safety. + +A _type variable pattern_ is a simple identifier which starts with +a lower case letter. + +## Type Parameter Inference in Patterns + +Type parameter inference is the process of finding bounds for the +bound type variables in a typed pattern or constructor +pattern. Inference takes into account the expected type of the +pattern. + +### Type parameter inference for typed patterns. + +Assume a typed pattern $p: T'$. Let $T$ result from $T'$ where all wildcards in +$T'$ are renamed to fresh variable names. Let $a_1 , \ldots , a_n$ be +the type variables in $T$. These type variables are considered bound +in the pattern. Let the expected type of the pattern be $\mathit{pt}$. + +Type parameter inference constructs first a set of subtype constraints over +the type variables $a_i$. The initial constraints set $\mathcal{C}\_0$ reflects +just the bounds of these type variables. That is, assuming $T$ has +bound type variables $a_1 , \ldots , a_n$ which correspond to class +type parameters $a_1' , \ldots , a_n'$ with lower bounds $L_1, \ldots , L_n$ +and upper bounds $U_1 , \ldots , U_n$, $\mathcal{C}_0$ contains the constraints + +$$ +\begin{cases} +a_i &<: \sigma U_i & \quad (i = 1, \ldots , n) \\\\ +\sigma L_i &<: a_i & \quad (i = 1, \ldots , n) +\end{cases} +$$ + +where $\sigma$ is the substitution $[a_1' := a_1 , \ldots , a_n' :=a_n]$. + +The set $\mathcal{C}_0$ is then augmented by further subtype constraints. There are two +cases. + +###### Case 1 +If there exists a substitution $\sigma$ over the type variables $a_i , \ldots , a_n$ such that $\sigma T$ conforms to $\mathit{pt}$, one determines the weakest subtype constraints +$\mathcal{C}\_1$ over the type variables $a_1, \ldots , a_n$ such that $\mathcal{C}\_0 \wedge \mathcal{C}_1$ implies that $T$ conforms to $\mathit{pt}$. + +###### Case 2 +Otherwise, if $T$ can not be made to conform to $\mathit{pt}$ by +instantiating its type variables, one determines all type variables in +$\mathit{pt}$ which are defined as type parameters of a method enclosing +the pattern. Let the set of such type parameters be $b_1 , \ldots , +b_m$. Let $\mathcal{C}\_0'$ be the subtype constraints reflecting the bounds of the +type variables $b_i$. If $T$ denotes an instance type of a final +class, let $\mathcal{C}\_2$ be the weakest set of subtype constraints over the type +variables $a_1 , \ldots , a_n$ and $b_1 , \ldots , b_m$ such that +$\mathcal{C}\_0 \wedge \mathcal{C}\_0' \wedge \mathcal{C}\_2$ implies that $T$ conforms to +$\mathit{pt}$. If $T$ does not denote an instance type of a final class, +let $\mathcal{C}\_2$ be the weakest set of subtype constraints over the type variables +$a_1 , \ldots , a_n$ and $b_1 , \ldots , b_m$ such that $\mathcal{C}\_0 \wedge +\mathcal{C}\_0' \wedge \mathcal{C}\_2$ implies that it is possible to construct a type +$T'$ which conforms to both $T$ and $\mathit{pt}$. It is a static error if +there is no satisfiable set of constraints $\mathcal{C}\_2$ with this property. + +The final step consists in choosing type bounds for the type +variables which imply the established constraint system. The process +is different for the two cases above. + +###### Case 1 +We take $a_i >: L_i <: U_i$ where each $L_i$ is minimal and each $U_i$ is maximal wrt $<:$ such that $a_i >: L_i <: U_i$ for $i = 1, \ldots, n$ implies $\mathcal{C}\_0 \wedge \mathcal{C}\_1$. 
+ +###### Case 2 +We take $a_i >: L_i <: U_i$ and $b\_i >: L_i' <: U_i' $ where each $L_i$ +and $L_j'$ is minimal and each $U_i$ and $U_j'$ is maximal such that +$a_i >: L_i <: U_i$ for $i = 1 , \ldots , n$ and +$b_j >: L_j' <: U_j'$ for $j = 1 , \ldots , m$ +implies $\mathcal{C}\_0 \wedge \mathcal{C}\_0' \wedge \mathcal{C}_2$. + +In both cases, local type inference is permitted to limit the +complexity of inferred bounds. Minimality and maximality of types have +to be understood relative to the set of types of acceptable +complexity. + +#### Type parameter inference for constructor patterns. +Assume a constructor pattern $C(p_1 , \ldots , p_n)$ where class $C$ +has type type parameters $a_1 , \ldots , a_n$. These type parameters +are inferred in the same way as for the typed pattern +`(_: $C[a_1 , \ldots , a_n]$)`. + +###### Example +Consider the program fragment: + +```scala +val x: Any +x match { + case y: List[a] => ... +} +``` + +Here, the type pattern `List[a]` is matched against the +expected type `Any`. The pattern binds the type variable +`a`. Since `List[a]` conforms to `Any` +for every type argument, there are no constraints on `a`. +Hence, `a` is introduced as an abstract type with no +bounds. The scope of `a` is right-hand side of its case clause. + +On the other hand, if `x` is declared as + +```scala +val x: List[List[String]], +``` + +this generates the constraint +`List[a] <: List[List[String]]`, which simplifies to +`a <: List[String]`, because `List` is covariant. Hence, +`a` is introduced with upper bound +`List[String]`. + +###### Example +Consider the program fragment: + +```scala +val x: Any +x match { + case y: List[String] => ... +} +``` + +Scala does not maintain information about type arguments at run-time, +so there is no way to check that `x` is a list of strings. +Instead, the Scala compiler will [erase](03-types.html#type-erasure) the +pattern to `List[_]`; that is, it will only test whether the +top-level runtime-class of the value `x` conforms to +`List`, and the pattern match will succeed if it does. This +might lead to a class cast exception later on, in the case where the +list `x` contains elements other than strings. The Scala +compiler will flag this potential loss of type-safety with an +"unchecked" warning message. + +###### Example +Consider the program fragment + +```scala +class Term[A] +class Number(val n: Int) extends Term[Int] +def f[B](t: Term[B]): B = t match { + case y: Number => y.n +} +``` + +The expected type of the pattern `y: Number` is +`Term[B]`. The type `Number` does not conform to +`Term[B]`; hence Case 2 of the rules above +applies. This means that `b` is treated as another type +variable for which subtype constraints are inferred. In our case the +applicable constraint is `Number <: Term[B]`, which +entails `B = Int`. Hence, `B` is treated in +the case clause as an abstract type with lower and upper bound +`Int`. Therefore, the right hand side of the case clause, +`y.n`, of type `Int`, is found to conform to the +function's declared result type, `Number`. + +## Pattern Matching Expressions + +```ebnf + Expr ::= PostfixExpr `match' `{' CaseClauses `}' + CaseClauses ::= CaseClause {CaseClause} + CaseClause ::= `case' Pattern [Guard] `=>' Block +``` + +A pattern matching expression + +```scala +e match { case $p_1$ => $b_1$ $\ldots$ case $p_n$ => $b_n$ } +``` + +consists of a selector expression $e$ and a number $n > 0$ of +cases. Each case consists of a (possibly guarded) pattern $p_i$ and a +block $b_i$. 
Each $p_i$ might be complemented by a guard
+`if $e$` where $e$ is a boolean expression.
+The scope of the pattern
+variables in $p_i$ comprises the pattern's guard and the corresponding block $b_i$.
+
+Let $T$ be the type of the selector expression $e$ and let $a_1
+, \ldots , a_m$ be the type parameters of all methods enclosing
+the pattern matching expression. For every $a_i$, let $L_i$ be its
+lower bound and $U_i$ be its upper bound. Every pattern $p \in \{p_1 , \ldots , p_n\}$
+can be typed in two ways. First, it is attempted
+to type $p$ with $T$ as its expected type. If this fails, $p$ is
+instead typed with a modified expected type $T'$ which results from
+$T$ by replacing every occurrence of a type parameter $a_i$ by
+_undefined_. If this second step fails also, a compile-time
+error results. If the second step succeeds, let $T_p$ be the type of
+pattern $p$ seen as an expression. One then determines minimal bounds
+$L_1' , \ldots , L_m'$ and maximal bounds $U_1' , \ldots , U_m'$ such
+that for all $i$, $L_i <: L_i'$ and $U_i' <: U_i$ and the following
+constraint system is satisfied:
+
+$$L_1' <: a_1 <: U_1'\;\wedge\;\ldots\;\wedge\;L_m' <: a_m <: U_m' \ \Rightarrow\ T_p <: T$$
+
+If no such bounds can be found, a compile-time error results. If such
+bounds are found, the pattern matching clause starting with $p$ is
+then typed under the assumption that each $a_i$ has lower bound $L_i'$
+instead of $L_i$ and has upper bound $U_i'$ instead of $U_i$.
+
+The expected type of every block $b_i$ is the expected type of the
+whole pattern matching expression. The type of the pattern matching
+expression is then the [weak least upper bound](03-types.html#weak-conformance)
+of the types of all blocks
+$b_i$.
+
+When applying a pattern matching expression to a selector value,
+patterns are tried in sequence until one is found which matches the
+[selector value](#patterns). Say this case is `case $p_i \Rightarrow b_i$`.
+The result of the whole expression is the result of evaluating $b_i$,
+where all pattern variables of $p_i$ are bound to
+the corresponding parts of the selector value. If no matching pattern
+is found, a `scala.MatchError` exception is thrown.
+
+The pattern in a case may also be followed by a guard suffix
+`if e` with a boolean expression $e$. The guard expression is
+evaluated if the preceding pattern in the case matches. If the guard
+expression evaluates to `true`, the pattern match succeeds as
+normal. If the guard expression evaluates to `false`, the pattern
+in the case is considered not to match and the search for a matching
+pattern continues.
+
+In the interest of efficiency the evaluation of a pattern matching
+expression may try patterns in some other order than textual
+sequence. This might affect evaluation through
+side effects in guards. However, it is guaranteed that a guard
+expression is evaluated only if the pattern it guards matches.
+
+If the selector of a pattern match is an instance of a
+[`sealed` class](05-classes-and-objects.html#modifiers),
+the compilation of pattern matching can emit warnings which diagnose
+that a given set of patterns is not exhaustive, i.e. that there is a
+possibility of a `MatchError` being raised at run-time.
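+
+###### Example
+The following hedged sketch (the `Color` hierarchy is made up) illustrates the
+exhaustiveness check for sealed classes:
+
+```scala
+sealed abstract class Color
+case object Red   extends Color
+case object Green extends Color
+case object Blue  extends Color
+
+def name(c: Color): String = c match {
+  case Red   => "red"
+  case Green => "green"
+  // No case for Blue: the compiler can warn that this match is not exhaustive,
+  // and evaluating name(Blue) throws a scala.MatchError at run time.
+}
+```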
+ +### Example + +Consider the following definitions of arithmetic terms: + +```scala +abstract class Term[T] +case class Lit(x: Int) extends Term[Int] +case class Succ(t: Term[Int]) extends Term[Int] +case class IsZero(t: Term[Int]) extends Term[Boolean] +case class If[T](c: Term[Boolean], + t1: Term[T], + t2: Term[T]) extends Term[T] +``` + +There are terms to represent numeric literals, incrementation, a zero +test, and a conditional. Every term carries as a type parameter the +type of the expression it represents (either `Int` or `Boolean`). + +A type-safe evaluator for such terms can be written as follows. + +```scala +def eval[T](t: Term[T]): T = t match { + case Lit(n) => n + case Succ(u) => eval(u) + 1 + case IsZero(u) => eval(u) == 0 + case If(c, u1, u2) => eval(if (eval(c)) u1 else u2) +} +``` + +Note that the evaluator makes crucial use of the fact that type +parameters of enclosing methods can acquire new bounds through pattern +matching. + +For instance, the type of the pattern in the second case, +`Succ(u)`, is `Int`. It conforms to the selector type +`T` only if we assume an upper and lower bound of `Int` for `T`. +Under the assumption `Int <: T <: Int` we can also +verify that the type right hand side of the second case, `Int` +conforms to its expected type, `T`. + +## Pattern Matching Anonymous Functions + +```ebnf + BlockExpr ::= `{' CaseClauses `}' +``` + +An anonymous function can be defined by a sequence of cases + +```scala +{ case $p_1$ => $b_1$ $\ldots$ case $p_n$ => $b_n$ } +``` + +which appear as an expression without a prior `match`. The +expected type of such an expression must in part be defined. It must +be either `scala.Function$k$[$S_1 , \ldots , S_k$, $R$]` for some $k > 0$, +or `scala.PartialFunction[$S_1$, $R$]`, where the +argument type(s) $S_1 , \ldots , S_k$ must be fully determined, but the result type +$R$ may be undetermined. + +If the expected type is `scala.Function$k$[$S_1 , \ldots , S_k$, $R$]`, +the expression is taken to be equivalent to the anonymous function: + +```scala +($x_1: S_1 , \ldots , x_k: S_k$) => ($x_1 , \ldots , x_k$) match { + case $p_1$ => $b_1$ $\ldots$ case $p_n$ => $b_n$ +} +``` + +Here, each $x_i$ is a fresh name. +As was shown [here](06-expressions.html#anonymous-functions), this anonymous function is in turn +equivalent to the following instance creation expression, where + $T$ is the weak least upper bound of the types of all $b_i$. + +```scala +new scala.Function$k$[$S_1 , \ldots , S_k$, $T$] { + def apply($x_1: S_1 , \ldots , x_k: S_k$): $T$ = ($x_1 , \ldots , x_k$) match { + case $p_1$ => $b_1$ $\ldots$ case $p_n$ => $b_n$ + } +} +``` + +If the expected type is `scala.PartialFunction[$S$, $R$]`, +the expression is taken to be equivalent to the following instance creation expression: + +```scala +new scala.PartialFunction[$S$, $T$] { + def apply($x$: $S$): $T$ = x match { + case $p_1$ => $b_1$ $\ldots$ case $p_n$ => $b_n$ + } + def isDefinedAt($x$: $S$): Boolean = { + case $p_1$ => true $\ldots$ case $p_n$ => true + case _ => false + } +} +``` + +Here, $x$ is a fresh name and $T$ is the weak least upper bound of the +types of all $b_i$. The final default case in the `isDefinedAt` +method is omitted if one of the patterns $p_1 , \ldots , p_n$ is +already a variable or wildcard pattern. 
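+
+###### Example
+As a hedged sketch of the `PartialFunction` case (the value name `toName` is
+made up), a block of case clauses typed with a `PartialFunction` expected type
+provides both an `apply` and an `isDefinedAt` method:
+
+```scala
+val toName: PartialFunction[Int, String] = {
+  case 1 => "one"
+  case 2 => "two"
+}
+
+toName.isDefinedAt(1)   // true
+toName.isDefinedAt(3)   // false
+toName(2)               // "two"
+// toName(3) would throw a scala.MatchError.
+```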
+ +###### Example +Here is a method which uses a fold-left operation +`/:` to compute the scalar product of +two vectors: + +```scala +def scalarProduct(xs: Array[Double], ys: Array[Double]) = + (0.0 /: (xs zip ys)) { + case (a, (b, c)) => a + b * c + } +``` + +The case clauses in this code are equivalent to the following +anonymous function: + +```scala +(x, y) => (x, y) match { + case (a, (b, c)) => a + b * c +} +``` diff --git a/spec/09-top-level-definitions.md b/spec/09-top-level-definitions.md new file mode 100644 index 000000000000..e3185d8b7de4 --- /dev/null +++ b/spec/09-top-level-definitions.md @@ -0,0 +1,197 @@ +--- +title: Top-Level Definitions +layout: default +chapter: 9 +--- + +# Top-Level Definitions + +## Compilation Units + +```ebnf +CompilationUnit ::= {‘package’ QualId semi} TopStatSeq +TopStatSeq ::= TopStat {semi TopStat} +TopStat ::= {Annotation} {Modifier} TmplDef + | Import + | Packaging + | PackageObject + | +QualId ::= id {‘.’ id} +``` + +A compilation unit consists of a sequence of packagings, import +clauses, and class and object definitions, which may be preceded by a +package clause. + +A compilation unit + +```scala +package $p_1$; +$\ldots$ +package $p_n$; +$\mathit{stats}$ +``` + +starting with one or more package +clauses is equivalent to a compilation unit consisting of the +packaging + +```scala +package $p_1$ { $\ldots$ + package $p_n$ { + $\mathit{stats}$ + } $\ldots$ +} +``` + +Every compilation unit implicitly imports the following packages, in the given order: + 1. the package `java.lang`, + 2. the package `scala`, and + 3. the object [`scala.Predef`](12-the-scala-standard-library.html#the-predef-object), unless there is an explicit top-level import that references `scala.Predef`. + +Members of a later import in that order hide members of an earlier import. + +The exception to the implicit import of `scala.Predef` can be useful to hide, e.g., predefined implicit conversions. + +## Packagings + +```ebnf +Packaging ::= ‘package’ QualId [nl] ‘{’ TopStatSeq ‘}’ +``` + +A package is a special object which defines a set of member classes, +objects and packages. Unlike other objects, packages are not introduced +by a definition. Instead, the set of members of a package is determined by +packagings. + +A packaging `package $p$ { $\mathit{ds}$ }` injects all +definitions in $\mathit{ds}$ as members into the package whose qualified name +is $p$. Members of a package are called _top-level_ definitions. +If a definition in $\mathit{ds}$ is labeled `private`, it is +visible only for other members in the package. + +Inside the packaging, all members of package $p$ are visible under their +simple names. However this rule does not extend to members of enclosing +packages of $p$ that are designated by a prefix of the path $p$. + +```scala +package org.net.prj { + ... +} +``` + +all members of package `org.net.prj` are visible under their +simple names, but members of packages `org` or `org.net` require +explicit qualification or imports. + +Selections $p$.$m$ from $p$ as well as imports from $p$ +work as for objects. However, unlike other objects, packages may not +be used as values. It is illegal to have a package with the same fully +qualified name as a module or a class. + +Top-level definitions outside a packaging are assumed to be injected +into a special empty package. That package cannot be named and +therefore cannot be imported. However, members of the empty package +are visible to each other without qualification. 
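+
+###### Example
+As a closing, hedged sketch for packagings (the package `org.example` and the
+classes `Helper` and `Service` are made up), a top-level definition marked
+`private` is accessible from other members of the same package but not from
+outside it:
+
+```scala
+package org.example {
+  private class Helper {                // visible only inside org.example
+    def twice(i: Int): Int = i * 2
+  }
+  class Service {
+    def compute(i: Int): Int = (new Helper).twice(i)  // OK: same package
+  }
+}
+// Code outside org.example cannot refer to org.example.Helper.
+```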
+
+## Package Objects
+
+```ebnf
+PackageObject ::= ‘package’ ‘object’ ObjectDef
+```
+
+A package object `package object $p$ extends $t$` adds the
+members of template $t$ to the package $p$. There can be only one
+package object per package. The standard naming convention is to place
+the definition above in a file named `package.scala` that's
+located in the directory corresponding to package $p$.
+
+The package object should not define a member with the same name as
+one of the top-level objects or classes defined in package $p$. If
+there is a name conflict, the behavior of the program is currently
+undefined. It is expected that this restriction will be lifted in a
+future version of Scala.
+
+## Package References
+
+```ebnf
+QualId ::= id {‘.’ id}
+```
+
+A reference to a package takes the form of a qualified identifier.
+Like all other references, package references are relative. That is,
+a package reference starting in a name $p$ will be looked up in the
+closest enclosing scope that defines a member named $p$.
+
+The special predefined name `_root_` refers to the
+outermost root package which contains all top-level packages.
+
+###### Example
+Consider the following program:
+
+```scala
+package b {
+  class B
+}
+
+package a.b {
+  class A {
+    val x = new _root_.b.B
+  }
+}
+```
+
+Here, the reference `_root_.b.B` refers to class `B` in the
+toplevel package `b`. If the `_root_` prefix had been
+omitted, the name `b` would instead resolve to the package
+`a.b`, and, provided that package does not also
+contain a class `B`, a compile-time error would result.
+
+## Programs
+
+A _program_ is a top-level object that has a member method
+_main_ of type `(Array[String])Unit`. Programs can be
+executed from a command shell. The program's command arguments are
+passed to the `main` method as a parameter of type
+`Array[String]`.
+
+The `main` method of a program can be directly defined in the
+object, or it can be inherited. The Scala library defines a special class
+`scala.App` whose body acts as a `main` method.
+An object $m$ inheriting from this class is thus a program,
+which executes the initialization code of the object $m$.
+
+###### Example
+The following example will create a hello world program by defining
+a method `main` in module `test.HelloWorld`.
+
+```scala
+package test
+object HelloWorld {
+  def main(args: Array[String]) { println("Hello World") }
+}
+```
+
+This program can be started by the command
+
+```scala
+scala test.HelloWorld
+```
+
+In a Java environment, the command
+
+```scala
+java test.HelloWorld
+```
+
+would work as well.
+
+`HelloWorld` can also be defined without a `main` method
+by inheriting from `App` instead:
+
+```scala
+package test
+object HelloWorld extends App {
+  println("Hello World")
+}
+```
diff --git a/spec/10-xml-expressions-and-patterns.md b/spec/10-xml-expressions-and-patterns.md
new file mode 100644
index 000000000000..407b2b9a6767
--- /dev/null
+++ b/spec/10-xml-expressions-and-patterns.md
@@ -0,0 +1,146 @@
+---
+title: XML Expressions and Patterns
+layout: default
+chapter: 10
+---
+
+# XML Expressions and Patterns
+
+__By Burak Emir__
+
+This chapter describes the syntactic structure of XML expressions and patterns.
+It follows as closely as possible the XML 1.0 specification,
+changes being mandated by the possibility of embedding Scala code fragments.
+ +## XML expressions + +XML expressions are expressions generated by the following production, where the +opening bracket `<` of the first element must be in a position to start the lexical +[XML mode](01-lexical-syntax.html#xml-mode). + +```ebnf +XmlExpr ::= XmlContent {Element} +``` + +Well-formedness constraints of the XML specification apply, which +means for instance that start tags and end tags must match, and +attributes may only be defined once, with the exception of constraints +related to entity resolution. + +The following productions describe Scala's extensible markup language, +designed as close as possible to the W3C extensible markup language +standard. Only the productions for attribute values and character data are changed. +Scala does not support declarations, CDATA sections or processing instructions. +Entity references are not resolved at runtime. + +```ebnf +Element ::= EmptyElemTag + | STag Content ETag + +EmptyElemTag ::= ‘<’ Name {S Attribute} [S] ‘/>’ + +STag ::= ‘<’ Name {S Attribute} [S] ‘>’ +ETag ::= ‘’ +Content ::= [CharData] {Content1 [CharData]} +Content1 ::= XmlContent + | Reference + | ScalaExpr +XmlContent ::= Element + | CDSect + | PI + | Comment +``` + +If an XML expression is a single element, its value is a runtime +representation of an XML node (an instance of a subclass of +`scala.xml.Node`). If the XML expression consists of more +than one element, then its value is a runtime representation of a +sequence of XML nodes (an instance of a subclass of +`scala.Seq[scala.xml.Node]`). + +If an XML expression is an entity reference, CDATA section, processing +instruction, or a comment, it is represented by an instance of the +corresponding Scala runtime class. + +By default, beginning and trailing whitespace in element content is removed, +and consecutive occurrences of whitespace are replaced by a single space +character `\u0020`. This behavior can be changed to preserve all whitespace +with a compiler option. + +```ebnf +Attribute ::= Name Eq AttValue + +AttValue ::= ‘"’ {CharQ | CharRef} ‘"’ + | ‘'’ {CharA | CharRef} ‘'’ + | ScalaExpr + +ScalaExpr ::= Block + +CharData ::= { CharNoRef } $\textit{ without}$ {CharNoRef}`{'CharB {CharNoRef} + $\textit{ and without}$ {CharNoRef}`]]>'{CharNoRef} +``` + + +XML expressions may contain Scala expressions as attribute values or +within nodes. In the latter case, these are embedded using a single opening +brace `{` and ended by a closing brace `}`. To express a single opening braces +within XML text as generated by CharData, it must be doubled. +Thus, `{{` represents the XML text `{` and does not introduce an embedded Scala expression. + + +```ebnf +BaseChar, Char, Comment, CombiningChar, Ideographic, NameChar, S, Reference + ::= $\textit{“as in W3C XML”}$ + +Char1 ::= Char $\textit{ without}$ ‘<’ | ‘&’ +CharQ ::= Char1 $\textit{ without}$ ‘"’ +CharA ::= Char1 $\textit{ without}$ ‘'’ +CharB ::= Char1 $\textit{ without}$ ‘{’ + +Name ::= XNameStart {NameChar} + +XNameStart ::= ‘_’ | BaseChar | Ideographic + $\textit{ (as in W3C XML, but without }$ ‘:’$)$ +``` + +## XML patterns + +XML patterns are patterns generated by the following production, where +the opening bracket `<` of the element patterns must be in a position +to start the lexical [XML mode](01-lexical-syntax.html#xml-mode). + +```ebnf +XmlPattern ::= ElementPattern +``` + +Well-formedness constraints of the XML specification apply. + +An XML pattern has to be a single element pattern. 
It +matches exactly those runtime +representations of an XML tree +that have the same structure as described by the pattern. +XML patterns may contain [Scala patterns](08-pattern-matching.html#pattern-matching-expressions). + +Whitespace is treated the same way as in XML expressions. + +By default, beginning and trailing whitespace in element content is removed, +and consecutive occurrences of whitespace are replaced by a single space +character `\u0020`. This behavior can be changed to preserve all whitespace +with a compiler option. + +```ebnf +ElemPattern ::= EmptyElemTagP + | STagP ContentP ETagP + +EmptyElemTagP ::= ‘<’ Name [S] ‘/>’ +STagP ::= ‘<’ Name [S] ‘>’ +ETagP ::= ‘’ +ContentP ::= [CharData] {(ElemPattern|ScalaPatterns) [CharData]} +ContentP1 ::= ElemPattern + | Reference + | CDSect + | PI + | Comment + | ScalaPatterns +ScalaPatterns ::= ‘{’ Patterns ‘}’ +``` diff --git a/spec/11-user-defined-annotations.md b/spec/11-user-defined-annotations.md new file mode 100644 index 000000000000..2c5830c103cb --- /dev/null +++ b/spec/11-user-defined-annotations.md @@ -0,0 +1,163 @@ +--- +title: User-Defined Annotations +layout: default +chapter: 11 +--- + +# User-Defined Annotations + +```ebnf + Annotation ::= ‘@’ SimpleType {ArgumentExprs} + ConstrAnnotation ::= ‘@’ SimpleType ArgumentExprs +``` + +User-defined annotations associate meta-information with definitions. +A simple annotation has the form `@$c$` or `@$c(a_1 , \ldots , a_n)$`. +Here, $c$ is a constructor of a class $C$, which must conform +to the class `scala.Annotation`. + +Annotations may apply to definitions or declarations, types, or +expressions. An annotation of a definition or declaration appears in +front of that definition. An annotation of a type appears after +that type. An annotation of an expression $e$ appears after the +expression $e$, separated by a colon. More than one annotation clause +may apply to an entity. The order in which these annotations are given +does not matter. + +Examples: + +```scala +@deprecated("Use D", "1.0") class C { ... } // Class annotation +@transient @volatile var m: Int // Variable annotation +String @local // Type annotation +(e: @unchecked) match { ... } // Expression annotation +``` + +The meaning of annotation clauses is implementation-dependent. On the +Java platform, the following annotations have a standard meaning. + + * `@transient` Marks a field to be non-persistent; this is + equivalent to the `transient` + modifier in Java. + + * `@volatile` Marks a field which can change its value + outside the control of the program; this + is equivalent to the `volatile` + modifier in Java. + + * `@SerialVersionUID()` Attaches a serial version identifier (a + `long` constant) to a class. + This is equivalent to a the following field + definition in Java: + + ``` + private final static SerialVersionUID = + ``` + + * `@throws()` A Java compiler checks that a program contains handlers for checked exceptions + by analyzing which checked exceptions can result from execution of a method or + constructor. For each checked exception which is a possible result, the + `throws` + clause for the method or constructor must mention the class of that exception + or one of the superclasses of the class of that exception. + +## Java Beans Annotations + + * `@scala.beans.BeanProperty` When prefixed to a definition of some variable `X`, this + annotation causes getter and setter methods `getX`, `setX` + in the Java bean style to be added in the class containing the + variable. 
The first letter of the variable appears capitalized after + the `get` or `set`. When the annotation is added to the + definition of an immutable value definition `X`, only a getter is + generated. The construction of these methods is part of + code-generation; therefore, these methods become visible only once a + classfile for the containing class is generated. + + * `@scala.beans.BooleanBeanProperty` This annotation is equivalent to `scala.reflect.BeanProperty`, but + the generated getter method is named `isX` instead of `getX`. + +## Deprecation Annotations + + * `@deprecated()` Marks a definition as deprecated. Accesses to the + defined entity will then cause a deprecated warning mentioning the + message `` to be issued from the compiler. Deprecated + warnings are suppressed in code that belongs itself to a definition + that is labeled deprecated. + + * `@deprecatedName(name: )` Marks a formal parameter name as deprecated. Invocations of this entity + using named parameter syntax refering to the deprecated parameter name cause a deprecation warning. + +## Scala Compiler Annotations + + * `@unchecked` When applied to the selector of a `match` expression, + this attribute suppresses any warnings about non-exhaustive pattern + matches which would otherwise be emitted. For instance, no warnings + would be produced for the method definition below. + + ``` + def f(x: Option[Int]) = (x: @unchecked) match { + case Some(y) => y + } + ``` + + Without the `@unchecked` annotation, a Scala compiler could + infer that the pattern match is non-exhaustive, and could produce a + warning because `Option` is a `sealed` class. + + * `@uncheckedStable` When applied a value declaration or definition, it allows the defined + value to appear in a path, even if its type is [volatile](03-types.html#volatile-types). + For instance, the following member definitions are legal: + + ``` + type A { type T } + type B + @uncheckedStable val x: A with B // volatile type + val y: x.T // OK since `x' is still a path + ``` + + Without the `@uncheckedStable` annotation, the designator `x` + would not be a path since its type `A with B` is volatile. Hence, + the reference `x.T` would be malformed. + + When applied to value declarations or definitions that have non-volatile + types, the annotation has no effect. + + * `@specialized` When applied to the definition of a type parameter, this annotation causes + the compiler + to generate specialized definitions for primitive types. An optional list of + primitive + types may be given, in which case specialization takes into account only + those types. + For instance, the following code would generate specialized traits for + `Unit`, `Int` and `Double` + + ``` + trait Function0[@specialized(Unit, Int, Double) T] { + def apply: T + } + ``` + + Whenever the static type of an expression matches a specialized variant of + a definition, the compiler will instead use the specialized version. + See the [specialization sid](http://docs.scala-lang.org/sips/completed/scala-specialization.html) for more details of the implementation. + +Other annotations may be interpreted by platform- or +application-dependent tools. Class `scala.Annotation` has two +sub-traits which are used to indicate how these annotations are +retained. Instances of an annotation class inheriting from trait +`scala.ClassfileAnnotation` will be stored in the generated class +files. 
Instances of an annotation class inheriting from trait
+`scala.StaticAnnotation` will be visible to the Scala type-checker
+in every compilation unit where the annotated symbol is accessed. An
+annotation class can inherit from both `scala.ClassfileAnnotation`
+and `scala.StaticAnnotation`. If an annotation class inherits from
+neither `scala.ClassfileAnnotation` nor
+`scala.StaticAnnotation`, its instances are visible only locally
+during the compilation run that analyzes them.
+
+Classes inheriting from `scala.ClassfileAnnotation` may be
+subject to further restrictions in order to assure that they can be
+mapped to the host environment. In particular, on both the Java and
+the .NET platforms, such classes must be toplevel; i.e. they may not
+be contained in another class or object. Additionally, on both
+Java and .NET, all constructor arguments must be constant expressions.
diff --git a/spec/12-the-scala-standard-library.md b/spec/12-the-scala-standard-library.md
new file mode 100644
index 000000000000..988d9804ecff
--- /dev/null
+++ b/spec/12-the-scala-standard-library.md
@@ -0,0 +1,842 @@
+---
+title: The Scala Standard Library
+layout: default
+chapter: 12
+---
+
+# The Scala Standard Library
+
+The Scala standard library consists of the package `scala` with a
+number of classes and modules. Some of these classes are described in
+the following.
+
+![Class hierarchy of Scala](public/images/classhierarchy.pdf)
+
+## Root Classes
+
+The root of this hierarchy is formed by class `Any`.
+Every class in a Scala execution environment inherits directly or
+indirectly from this class. Class `Any` has two direct
+subclasses: `AnyRef` and `AnyVal`.
+
+The subclass `AnyRef` represents all values which are represented
+as objects in the underlying host system. Classes written in other languages
+inherit from `scala.AnyRef`.
+
+The predefined subclasses of class `AnyVal` describe
+values which are not implemented as objects in the underlying host
+system.
+
+User-defined Scala classes which do not explicitly inherit from
+`AnyVal` inherit directly or indirectly from `AnyRef`. They cannot
+inherit from both `AnyRef` and `AnyVal`.
+
+Classes `AnyRef` and `AnyVal` are required to provide only
+the members declared in class `Any`, but implementations may add
+host-specific methods to these classes (for instance, an
+implementation may identify class `AnyRef` with its own root
+class for objects).
+
+The signatures of these root classes are described by the following
+definitions.
+
+```scala
+package scala
+/** The universal root class */
+abstract class Any {
+
+  /** Defined equality; abstract here */
+  def equals(that: Any): Boolean
+
+  /** Semantic equality between values */
+  final def == (that: Any): Boolean =
+    if (null eq this) null eq that else this equals that
+
+  /** Semantic inequality between values */
+  final def != (that: Any): Boolean = !(this == that)
+
+  /** Hash code; abstract here */
+  def hashCode: Int = $\ldots$
+
+  /** Textual representation; abstract here */
+  def toString: String = $\ldots$
+
+  /** Type test; needs to be inlined to work as given */
+  def isInstanceOf[a]: Boolean
+
+  /** Type cast; needs to be inlined to work as given */
+  def asInstanceOf[A]: A = this match {
+    case x: A => x
+    case _ => if (this eq null) this
+              else throw new ClassCastException()
+  }
+}
+
+/** The root class of all value types */
+final class AnyVal extends Any
+
+/** The root class of all reference types */
+class AnyRef extends Any {
+  def equals(that: Any): Boolean = this eq that
+  final def eq(that: AnyRef): Boolean = $\ldots$ // reference equality
+  final def ne(that: AnyRef): Boolean = !(this eq that)
+
+  def hashCode: Int = $\ldots$     // hashCode computed from allocation address
+  def toString: String = $\ldots$  // toString computed from hashCode and class name
+
+  def synchronized[T](body: => T): T // execute `body` while locking `this`
+}
+```
+
+The type test `$x$.isInstanceOf[$T$]` is equivalent to a typed
+pattern match
+
+```scala
+$x$ match {
+  case _: $T'$ => true
+  case _ => false
+}
+```
+
+where the type $T'$ is the same as $T$ except if $T$ is
+of the form $D$ or $D[\mathit{tps}]$ where $D$ is a type member of some outer class $C$.
+In this case $T'$ is `$C$#$D$` (or `$C$#$D[tps]$`, respectively), whereas $T$ itself would expand to `$C$.this.$D[tps]$`.
+In other words, an `isInstanceOf` test does not check that types have the same enclosing instance.
+
+The test `$x$.asInstanceOf[$T$]` is treated specially if $T$ is a
+[numeric value type](#value-classes). In this case the cast will
+be translated to an application of a [conversion method](#numeric-value-types)
+`x.to$T$`. For non-numeric values $x$ the operation will raise a
+`ClassCastException`.
+
+## Value Classes
+
+Value classes are classes whose instances are not represented as
+objects by the underlying host system. All value classes inherit from
+class `AnyVal`. Scala implementations need to provide the
+value classes `Unit`, `Boolean`, `Double`, `Float`,
+`Long`, `Int`, `Char`, `Short`, and `Byte`
+(but are free to provide others as well).
+The signatures of these classes are defined in the following.
+
+### Numeric Value Types
+
+Classes `Double`, `Float`,
+`Long`, `Int`, `Char`, `Short`, and `Byte`
+are together called _numeric value types_. Classes `Byte`,
+`Short`, or `Char` are called _subrange types_.
+Subrange types, as well as `Int` and `Long` are called _integer types_, whereas `Float` and `Double` are called _floating point types_.
+
+Numeric value types are ranked in the following partial order:
+
+```scala
+Byte - Short
+             \
+               Int - Long - Float - Double
+             /
+        Char
+```
+
+`Byte` and `Short` are the lowest-ranked types in this order,
+whereas `Double` is the highest-ranked. Ranking does _not_
+imply a [conformance relationship](03-types.html#conformance); for
+instance `Int` is not a subtype of `Long`.
However, object
+[`Predef`](#the-predef-object) defines [views](07-implicit-parameters-and-views.html#views)
+from every numeric value type to all higher-ranked numeric value types.
+Therefore, lower-ranked types are implicitly converted to higher-ranked types
+when required by the [context](06-expressions.html#implicit-conversions).
+
+Given two numeric value types $S$ and $T$, the _operation type_ of
+$S$ and $T$ is defined as follows: If both $S$ and $T$ are subrange
+types then the operation type of $S$ and $T$ is `Int`. Otherwise
+the operation type of $S$ and $T$ is the larger of the two types with
+respect to ranking. Given two numeric values $v$ and $w$, the operation type of
+$v$ and $w$ is the operation type of their run-time types.
+
+Any numeric value type $T$ supports the following methods.
+
+  * Comparison methods for equals (`==`), not-equals (`!=`),
+    less-than (`<`), greater-than (`>`), less-than-or-equals
+    (`<=`), greater-than-or-equals (`>=`), which each exist in 7
+    overloaded alternatives. Each alternative takes a parameter of some
+    numeric value type. Its result type is type `Boolean`. The
+    operation is evaluated by converting the receiver and its argument to
+    their operation type and performing the given comparison operation of
+    that type.
+  * Arithmetic methods addition (`+`), subtraction (`-`),
+    multiplication (`*`), division (`/`), and remainder
+    (`%`), which each exist in 7 overloaded alternatives. Each
+    alternative takes a parameter of some numeric value type $U$. Its
+    result type is the operation type of $T$ and $U$. The operation is
+    evaluated by converting the receiver and its argument to their
+    operation type and performing the given arithmetic operation of that
+    type.
+  * Parameterless arithmetic methods identity (`+`) and negation
+    (`-`), with result type $T$. The first of these returns the
+    receiver unchanged, whereas the second returns its negation.
+  * Conversion methods `toByte`, `toShort`, `toChar`,
+    `toInt`, `toLong`, `toFloat`, `toDouble` which
+    convert the receiver object to the target type, using the rules of
+    Java's numeric type cast operation. The conversion might truncate the
+    numeric value (as when going from `Long` to `Int` or from
+    `Int` to `Byte`) or it might lose precision (as when going
+    from `Double` to `Float` or when converting between
+    `Long` and `Float`).
+
+In addition, integer numeric value types support the following operations:
+
+  * Bit manipulation methods bitwise-and (`&`), bitwise-or
+    (`|`), and bitwise-exclusive-or (`^`), which each exist in 5
+    overloaded alternatives. Each alternative takes a parameter of some
+    integer numeric value type $U$. Its result type is the operation type of
+    $T$ and $U$. The operation is evaluated by converting the receiver and
+    its argument to their operation type and performing the given bitwise
+    operation of that type.
+
+  * A parameterless bit-negation method (`~`). Its result type is
+    the receiver type $T$ or `Int`, whichever is larger.
+    The operation is evaluated by converting the receiver to the result
+    type and negating every bit in its value.
+  * Bit-shift methods left-shift (`<<`), arithmetic right-shift
+    (`>>`), and unsigned right-shift (`>>>`). Each of these
+    methods has two overloaded alternatives, which take a parameter $n$
+    of type `Int` and `Long`, respectively. The result type of the
+    operation is the receiver type $T$, or `Int`, whichever is larger.
+ The operation is evaluated by converting the receiver to the result + type and performing the specified shift by $n$ bits. + +Numeric value types also implement operations `equals`, +`hashCode`, and `toString` from class `Any`. + +The `equals` method tests whether the argument is a numeric value +type. If this is true, it will perform the `==` operation which +is appropriate for that type. That is, the `equals` method of a +numeric value type can be thought of being defined as follows: + +```scala +def equals(other: Any): Boolean = other match { + case that: Byte => this == that + case that: Short => this == that + case that: Char => this == that + case that: Int => this == that + case that: Long => this == that + case that: Float => this == that + case that: Double => this == that + case _ => false +} +``` + +The `hashCode` method returns an integer hashcode that maps equal +numeric values to equal results. It is guaranteed to be the identity for +for type `Int` and for all subrange types. + +The `toString` method displays its receiver as an integer or +floating point number. + +### Example + +This is the signature of the numeric value type `Int`: + +```scala +package scala +abstract sealed class Int extends AnyVal { + def == (that: Double): Boolean // double equality + def == (that: Float): Boolean // float equality + def == (that: Long): Boolean // long equality + def == (that: Int): Boolean // int equality + def == (that: Short): Boolean // int equality + def == (that: Byte): Boolean // int equality + def == (that: Char): Boolean // int equality + /* analogous for !=, <, >, <=, >= */ + + def + (that: Double): Double // double addition + def + (that: Float): Double // float addition + def + (that: Long): Long // long addition + def + (that: Int): Int // int addition + def + (that: Short): Int // int addition + def + (that: Byte): Int // int addition + def + (that: Char): Int // int addition + /* analogous for -, *, /, % */ + + def & (that: Long): Long // long bitwise and + def & (that: Int): Int // int bitwise and + def & (that: Short): Int // int bitwise and + def & (that: Byte): Int // int bitwise and + def & (that: Char): Int // int bitwise and + /* analogous for |, ^ */ + + def << (cnt: Int): Int // int left shift + def << (cnt: Long): Int // long left shift + /* analogous for >>, >>> */ + + def unary_+ : Int // int identity + def unary_- : Int // int negation + def unary_~ : Int // int bitwise negation + + def toByte: Byte // convert to Byte + def toShort: Short // convert to Short + def toChar: Char // convert to Char + def toInt: Int // convert to Int + def toLong: Long // convert to Long + def toFloat: Float // convert to Float + def toDouble: Double // convert to Double +} +``` + +### Class `Boolean` + +Class `Boolean` has only two values: `true` and +`false`. It implements operations as given in the following +class definition. + +```scala +package scala +abstract sealed class Boolean extends AnyVal { + def && (p: => Boolean): Boolean = // boolean and + if (this) p else false + def || (p: => Boolean): Boolean = // boolean or + if (this) true else p + def & (x: Boolean): Boolean = // boolean strict and + if (this) x else false + def | (x: Boolean): Boolean = // boolean strict or + if (this) true else x + def == (x: Boolean): Boolean = // boolean equality + if (this) x else x.unary_! + def != (x: Boolean): Boolean = // boolean inequality + if (this) x.unary_! 
else x + def unary_!: Boolean = // boolean negation + if (this) false else true +} +``` + +The class also implements operations `equals`, `hashCode`, +and `toString` from class `Any`. + +The `equals` method returns `true` if the argument is the +same boolean value as the receiver, `false` otherwise. The +`hashCode` method returns a fixed, implementation-specific hash-code when invoked on `true`, +and a different, fixed, implementation-specific hash-code when invoked on `false`. The `toString` method +returns the receiver converted to a string, i.e. either `"true"` or `"false"`. + +### Class `Unit` + +Class `Unit` has only one value: `()`. It implements only +the three methods `equals`, `hashCode`, and `toString` +from class `Any`. + +The `equals` method returns `true` if the argument is the +unit value `()`, `false` otherwise. The +`hashCode` method returns a fixed, implementation-specific hash-code, +The `toString` method returns `"()"`. + +## Standard Reference Classes + +This section presents some standard Scala reference classes which are +treated in a special way by the Scala compiler -- either Scala provides +syntactic sugar for them, or the Scala compiler generates special code +for their operations. Other classes in the standard Scala library are +documented in the Scala library documentation by HTML pages. + +### Class `String` + +Scala's `String` class is usually derived from the standard String +class of the underlying host system (and may be identified with +it). For Scala clients the class is taken to support in each case a +method + +```scala +def + (that: Any): String +``` + +which concatenates its left operand with the textual representation of its +right operand. + +### The `Tuple` classes + +Scala defines tuple classes `Tuple$n$` for $n = 2 , \ldots , 22$. +These are defined as follows. + +```scala +package scala +case class Tuple$n$[+T_1, ..., +T_n](_1: T_1, ..., _$n$: T_$n$) { + def toString = "(" ++ _1 ++ "," ++ $\ldots$ ++ "," ++ _$n$ ++ ")" +} +``` + +The implicitly imported [`Predef`](#the-predef-object) object defines +the names `Pair` as an alias of `Tuple2` and `Triple` +as an alias for `Tuple3`. + +### The `Function` Classes + +Scala defines function classes `Function$n$` for $n = 1 , \ldots , 22$. +These are defined as follows. + +```scala +package scala +trait Function$n$[-T_1, ..., -T_$n$, +R] { + def apply(x_1: T_1, ..., x_$n$: T_$n$): R + def toString = "" +} +``` + +The `PartialFunction` subclass of `Function1` represents functions that (indirectly) specify their domain. +Use the `isDefined` method to query whether the partial function is defined for a given input (i.e., whether the input is part of the function's domain). + +```scala +class PartialFunction[-A, +B] extends Function1[A, B] { + def isDefinedAt(x: A): Boolean +} +``` + +The implicitly imported [`Predef`](#the-predef-object) object defines the name +`Function` as an alias of `Function1`. + +### Class `Array` + +All operations on arrays desugar to the corresponding operations of the +underlying platform. 
Therefore, the following class definition is given for +informational purposes only: + +```scala +final class Array[T](_length: Int) +extends java.io.Serializable with java.lang.Cloneable { + def length: Int = $\ldots$ + def apply(i: Int): T = $\ldots$ + def update(i: Int, x: T): Unit = $\ldots$ + override def clone(): Array[T] = $\ldots$ +} +``` + +If $T$ is not a type parameter or abstract type, the type `Array[T]` +is represented as the array type `|T|[]` in the +underlying host system, where `|T|` is the erasure of `T`. +If $T$ is a type parameter or abstract type, a different representation might be +used (it is `Object` on the Java platform). + +#### Operations + +`length` returns the length of the array, `apply` means subscripting, +and `update` means element update. + +Because of the syntactic sugar for `apply` and `update` operations, +we have the following correspondences between Scala and Java code for +operations on an array `xs`: + +|_Scala_ |_Java_ | +|------------------|------------| +|`xs.length` |`xs.length` | +|`xs(i)` |`xs[i]` | +|`xs(i) = e` |`xs[i] = e` | + +Two implicit conversions exist in `Predef` that are frequently applied to arrays: +a conversion to `scala.collection.mutable.ArrayOps` and a conversion to +`scala.collection.mutable.WrappedArray` (a subtype of `scala.collection.Seq`). + +Both types make many of the standard operations found in the Scala +collections API available. The conversion to `ArrayOps` is temporary, as all operations +defined on `ArrayOps` return a value of type `Array`, while the conversion to `WrappedArray` +is permanent as all operations return a value of type `WrappedArray`. +The conversion to `ArrayOps` takes priority over the conversion to `WrappedArray`. + +Because of the tension between parametrized types in Scala and the ad-hoc +implementation of arrays in the host-languages, some subtle points +need to be taken into account when dealing with arrays. These are +explained in the following. + +#### Variance + +Unlike arrays in Java, arrays in Scala are _not_ +co-variant; That is, $S <: T$ does not imply +`Array[$S$] $<:$ Array[$T$]` in Scala. +However, it is possible to cast an array +of $S$ to an array of $T$ if such a cast is permitted in the host +environment. + +For instance `Array[String]` does not conform to +`Array[Object]`, even though `String` conforms to `Object`. +However, it is possible to cast an expression of type +`Array[String]` to `Array[Object]`, and this +cast will succeed without raising a `ClassCastException`. Example: + +```scala +val xs = new Array[String](2) +// val ys: Array[Object] = xs // **** error: incompatible types +val ys: Array[Object] = xs.asInstanceOf[Array[Object]] // OK +``` + +The instantiation of an array with a polymorphic element type $T$ requires +information about type $T$ at runtime. +This information is synthesized by adding a [context bound](07-implicit-parameters-and-views.html#context-bounds-and-view-bounds) +of `scala.reflect.ClassTag` to type $T$. +An example is the +following implementation of method `mkArray`, which creates +an array of an arbitrary type $T$, given a sequence of $T$`s which +defines its elements: + +```scala +import reflect.ClassTag +def mkArray[T : ClassTag](elems: Seq[T]): Array[T] = { + val result = new Array[T](elems.length) + var i = 0 + for (elem <- elems) { + result(i) = elem + i += 1 + } + result +} +``` + +If type $T$ is a type for which the host platform offers a specialized array +representation, this representation is used. 
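
As an illustration (a sketch that is not part of the specification text; `ClassTagSketch`, `broken`, and `ok` are invented names), the `ClassTag` context bound is what supplies this run-time type information to `mkArray`; without it, the compiler has no way to create the array. The example that follows shows the concrete representation chosen on the JVM.

```scala
import scala.reflect.ClassTag

object ClassTagSketch {
  // Without evidence of the element type, array creation is rejected:
  // def broken[T](n: Int): Array[T] = new Array[T](n)   // does not compile:
  //                                                      // no class tag for element type T

  // With the context bound, the compiler supplies a ClassTag[T] implicitly:
  def ok[T: ClassTag](n: Int): Array[T] = new Array[T](n)

  val ints: Array[Int]       = ok[Int](3)      // backed by the specialized int[] representation
  val strings: Array[String] = ok[String](3)   // backed by a String[] representation
}
```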
+ +###### Example +On the Java Virtual Machine, an invocation of `mkArray(List(1,2,3))` +will return a primitive array of `int`s, written as `int[]` in Java. + +#### Companion object + +`Array`'s companion object provides various factory methods for the +instantiation of single- and multi-dimensional arrays, an extractor method +[`unapplySeq`](08-pattern-matching.html#extractor-patterns) which enables pattern matching +over arrays and additional utility methods: + +```scala +package scala +object Array { + /** copies array elements from `src` to `dest`. */ + def copy(src: AnyRef, srcPos: Int, + dest: AnyRef, destPos: Int, length: Int): Unit = $\ldots$ + + /** Returns an array of length 0 */ + def empty[T: ClassTag]: Array[T] = + + /** Create an array with given elements. */ + def apply[T: ClassTag](xs: T*): Array[T] = $\ldots$ + + /** Creates array with given dimensions */ + def ofDim[T: ClassTag](n1: Int): Array[T] = $\ldots$ + /** Creates a 2-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int): Array[Array[T]] = $\ldots$ + $\ldots$ + + /** Concatenate all argument arrays into a single array. */ + def concat[T: ClassTag](xss: Array[T]*): Array[T] = $\ldots$ + + /** Returns an array that contains the results of some element computation a number + * of times. */ + def fill[T: ClassTag](n: Int)(elem: => T): Array[T] = $\ldots$ + /** Returns a two-dimensional array that contains the results of some element + * computation a number of times. */ + def fill[T: ClassTag](n1: Int, n2: Int)(elem: => T): Array[Array[T]] = $\ldots$ + $\ldots$ + + /** Returns an array containing values of a given function over a range of integer + * values starting from 0. */ + def tabulate[T: ClassTag](n: Int)(f: Int => T): Array[T] = $\ldots$ + /** Returns a two-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. */ + def tabulate[T: ClassTag](n1: Int, n2: Int)(f: (Int, Int) => T): Array[Array[T]] = $\ldots$ + $\ldots$ + + /** Returns an array containing a sequence of increasing integers in a range. */ + def range(start: Int, end: Int): Array[Int] = $\ldots$ + /** Returns an array containing equally spaced values in some integer interval. */ + def range(start: Int, end: Int, step: Int): Array[Int] = $\ldots$ + + /** Returns an array containing repeated applications of a function to a start value. */ + def iterate[T: ClassTag](start: T, len: Int)(f: T => T): Array[T] = $\ldots$ + + /** Enables pattern matching over arrays */ + def unapplySeq[A](x: Array[A]): Option[IndexedSeq[A]] = Some(x) +} +``` + +## Class Node + +```scala +package scala.xml + +trait Node { + + /** the label of this node */ + def label: String + + /** attribute axis */ + def attribute: Map[String, String] + + /** child axis (all children of this node) */ + def child: Seq[Node] + + /** descendant axis (all descendants of this node) */ + def descendant: Seq[Node] = child.toList.flatMap { + x => x::x.descendant.asInstanceOf[List[Node]] + } + + /** descendant axis (all descendants of this node) */ + def descendant_or_self: Seq[Node] = this::child.toList.flatMap { + x => x::x.descendant.asInstanceOf[List[Node]] + } + + override def equals(x: Any): Boolean = x match { + case that:Node => + that.label == this.label && + that.attribute.sameElements(this.attribute) && + that.child.sameElements(this.child) + case _ => false + } + + /** XPath style projection function. Returns all children of this node + * that are labeled with 'that'. The document order is preserved. 
+ */ + def \(that: Symbol): NodeSeq = { + new NodeSeq({ + that.name match { + case "_" => child.toList + case _ => + var res:List[Node] = Nil + for (x <- child.elements if x.label == that.name) { + res = x::res + } + res.reverse + } + }) + } + + /** XPath style projection function. Returns all nodes labeled with the + * name 'that' from the 'descendant_or_self' axis. Document order is preserved. + */ + def \\(that: Symbol): NodeSeq = { + new NodeSeq( + that.name match { + case "_" => this.descendant_or_self + case _ => this.descendant_or_self.asInstanceOf[List[Node]]. + filter(x => x.label == that.name) + }) + } + + /** hashcode for this XML node */ + override def hashCode = + Utility.hashCode(label, attribute.toList.hashCode, child) + + /** string representation of this node */ + override def toString = Utility.toXML(this) + +} +``` + +## The `Predef` Object + +The `Predef` object defines standard functions and type aliases +for Scala programs. It is always implicitly imported, so that all its +defined members are available without qualification. Its definition +for the JVM environment conforms to the following signature: + +```scala +package scala +object Predef { + + // classOf --------------------------------------------------------- + + /** Returns the runtime representation of a class type. */ + def classOf[T]: Class[T] = null + // this is a dummy, classOf is handled by compiler. + + // Standard type aliases --------------------------------------------- + + type String = java.lang.String + type Class[T] = java.lang.Class[T] + + // Miscellaneous ----------------------------------------------------- + + type Function[-A, +B] = Function1[A, B] + + type Map[A, +B] = collection.immutable.Map[A, B] + type Set[A] = collection.immutable.Set[A] + + val Map = collection.immutable.Map + val Set = collection.immutable.Set + + // Manifest types, companions, and incantations for summoning --------- + + type ClassManifest[T] = scala.reflect.ClassManifest[T] + type Manifest[T] = scala.reflect.Manifest[T] + type OptManifest[T] = scala.reflect.OptManifest[T] + val ClassManifest = scala.reflect.ClassManifest + val Manifest = scala.reflect.Manifest + val NoManifest = scala.reflect.NoManifest + + def manifest[T](implicit m: Manifest[T]) = m + def classManifest[T](implicit m: ClassManifest[T]) = m + def optManifest[T](implicit m: OptManifest[T]) = m + + // Minor variations on identity functions ----------------------------- + def identity[A](x: A): A = x // @see `conforms` for the implicit version + def implicitly[T](implicit e: T) = e // for summoning implicit values from the nether world + @inline def locally[T](x: T): T = x // to communicate intent and avoid unmoored statements + + // Asserts, Preconditions, Postconditions ----------------------------- + + def assert(assertion: Boolean) { + if (!assertion) + throw new java.lang.AssertionError("assertion failed") + } + + def assert(assertion: Boolean, message: => Any) { + if (!assertion) + throw new java.lang.AssertionError("assertion failed: " + message) + } + + def assume(assumption: Boolean) { + if (!assumption) + throw new IllegalArgumentException("assumption failed") + } + + def assume(assumption: Boolean, message: => Any) { + if (!assumption) + throw new IllegalArgumentException(message.toString) + } + + def require(requirement: Boolean) { + if (!requirement) + throw new IllegalArgumentException("requirement failed") + } + + def require(requirement: Boolean, message: => Any) { + if (!requirement) + throw new IllegalArgumentException("requirement 
failed: "+ message) + } +``` + +```scala + // tupling --------------------------------------------------------- + + type Pair[+A, +B] = Tuple2[A, B] + object Pair { + def apply[A, B](x: A, y: B) = Tuple2(x, y) + def unapply[A, B](x: Tuple2[A, B]): Option[Tuple2[A, B]] = Some(x) + } + + type Triple[+A, +B, +C] = Tuple3[A, B, C] + object Triple { + def apply[A, B, C](x: A, y: B, z: C) = Tuple3(x, y, z) + def unapply[A, B, C](x: Tuple3[A, B, C]): Option[Tuple3[A, B, C]] = Some(x) + } + + // Printing and reading ----------------------------------------------- + + def print(x: Any) = Console.print(x) + def println() = Console.println() + def println(x: Any) = Console.println(x) + def printf(text: String, xs: Any*) = Console.printf(text.format(xs: _*)) + + def readLine(): String = Console.readLine() + def readLine(text: String, args: Any*) = Console.readLine(text, args) + def readBoolean() = Console.readBoolean() + def readByte() = Console.readByte() + def readShort() = Console.readShort() + def readChar() = Console.readChar() + def readInt() = Console.readInt() + def readLong() = Console.readLong() + def readFloat() = Console.readFloat() + def readDouble() = Console.readDouble() + def readf(format: String) = Console.readf(format) + def readf1(format: String) = Console.readf1(format) + def readf2(format: String) = Console.readf2(format) + def readf3(format: String) = Console.readf3(format) + + // Implict conversions ------------------------------------------------ + + ... +} +``` + +### Predefined Implicit Definitions + +The `Predef` object also contains a number of implicit definitions, which are available by default (because `Predef` is implicitly imported). +Implicit definitions come in two priorities. High-priority implicits are defined in the `Predef` class itself whereas low priority implicits are defined in a class inherited by `Predef`. The rules of +static [overloading resolution](06-expressions.html#overloading-resolution) +stipulate that, all other things being equal, implicit resolution +prefers high-priority implicits over low-priority ones. + +The available low-priority implicits include definitions falling into the following categories. + +1. For every primitive type, a wrapper that takes values of that type + to instances of a `runtime.Rich*` class. For instance, values of type `Int` + can be implicitly converted to instances of class `runtime.RichInt`. + +1. For every array type with elements of primitive type, a wrapper that + takes the arrays of that type to instances of a `runtime.WrappedArray` class. For instance, values of type `Array[Float]` can be implicitly converted to instances of class `runtime.WrappedArray[Float]`. + There are also generic array wrappers that take elements + of type `Array[T]` for arbitrary `T` to `WrappedArray`s. + +1. An implicit conversion from `String` to `WrappedString`. + +The available high-priority implicits include definitions falling into the following categories. + + * An implicit wrapper that adds `ensuring` methods + with the following overloaded variants to type `Any`. + + ``` + def ensuring(cond: Boolean): A = { assert(cond); x } + def ensuring(cond: Boolean, msg: Any): A = { assert(cond, msg); x } + def ensuring(cond: A => Boolean): A = { assert(cond(x)); x } + def ensuring(cond: A => Boolean, msg: Any): A = { assert(cond(x), msg); x } + ``` + + * An implicit wrapper that adds a `->` method with the following implementation + to type `Any`. 
+ + ``` + def -> [B](y: B): (A, B) = (x, y) + ``` + + * For every array type with elements of primitive type, a wrapper that + takes the arrays of that type to instances of a `runtime.ArrayOps` + class. For instance, values of type `Array[Float]` can be implicitly + converted to instances of class `runtime.ArrayOps[Float]`. There are + also generic array wrappers that take elements of type `Array[T]` for + arbitrary `T` to `ArrayOps`s. + + * An implicit wrapper that adds `+` and `formatted` method with the following + implementations to type `Any`. + + ``` + def +(other: String) = String.valueOf(self) + other + def formatted(fmtstr: String): String = fmtstr format self + ``` + + * Numeric primitive conversions that implement the transitive closure of the + following mappings: + + ``` + Byte -> Short + Short -> Int + Char -> Int + Int -> Long + Long -> Float + Float -> Double + ``` + + * Boxing and unboxing conversions between primitive types and their boxed + versions: + + ``` + Byte <-> java.lang.Byte + Short <-> java.lang.Short + Char <-> java.lang.Character + Int <-> java.lang.Integer + Long <-> java.lang.Long + Float <-> java.lang.Float + Double <-> java.lang.Double + Boolean <-> java.lang.Boolean + ``` + + * An implicit definition that generates instances of type `T <:< T`, for + any type `T`. Here, `<:<` is a class defined as follows. + + ``` + sealed abstract class <:<[-From, +To] extends (From => To) + ``` + + Implicit parameters of `<:<` types are typically used to implement type constraints. diff --git a/spec/13-syntax-summary.md b/spec/13-syntax-summary.md new file mode 100644 index 000000000000..2b9571cc734d --- /dev/null +++ b/spec/13-syntax-summary.md @@ -0,0 +1,311 @@ +--- +title: Syntax Summary +layout: default +chapter: 13 +--- + +# Syntax Summary + +The following descriptions of Scala tokens uses literal characters `‘c’` when referring to the ASCII fragment `\u0000` – `\u007F`. 
+ +_Unicode escapes_ are used to represent the Unicode character with the given hexadecimal code: + +```ebnf +UnicodeEscape ::= ‘\‘ ‘u‘ {‘u‘} hexDigit hexDigit hexDigit hexDigit +hexDigit ::= ‘0’ | … | ‘9’ | ‘A’ | … | ‘F’ | ‘a’ | … | ‘f’ +``` + +The lexical syntax of Scala is given by the following grammar in EBNF form: + +```ebnf +whiteSpace ::= ‘\u0020’ | ‘\u0009’ | ‘\u000D’ | ‘\u000A’ +upper ::= ‘A’ | … | ‘Z’ | ‘\$’ | ‘_’ // and Unicode category Lu +lower ::= ‘a’ | … | ‘z’ // and Unicode category Ll +letter ::= upper | lower // and Unicode categories Lo, Lt, Nl +digit ::= ‘0’ | … | ‘9’ +paren ::= ‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’ +delim ::= ‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’ +opchar ::= // printableChar not matched by (whiteSpace | upper | lower | + // letter | digit | paren | delim | opchar | Unicode_Sm | Unicode_So) +printableChar ::= // all characters in [\u0020, \u007F] inclusive +charEscapeSeq ::= ‘\‘ (‘b‘ | ‘t‘ | ‘n‘ | ‘f‘ | ‘r‘ | ‘"‘ | ‘'‘ | ‘\‘) + +op ::= opchar {opchar} +varid ::= lower idrest +plainid ::= upper idrest + | varid + | op +id ::= plainid + | ‘`’ stringLiteral ‘`’ +idrest ::= {letter | digit} [‘_’ op] + +integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’] +decimalNumeral ::= ‘0’ | nonZeroDigit {digit} +hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit {hexDigit} +digit ::= ‘0’ | nonZeroDigit +nonZeroDigit ::= ‘1’ | … | ‘9’ + +floatingPointLiteral + ::= digit {digit} ‘.’ digit {digit} [exponentPart] [floatType] + | ‘.’ digit {digit} [exponentPart] [floatType] + | digit {digit} exponentPart [floatType] + | digit {digit} [exponentPart] floatType +exponentPart ::= (‘E’ | ‘e’) [‘+’ | ‘-’] digit {digit} +floatType ::= ‘F’ | ‘f’ | ‘D’ | ‘d’ + +booleanLiteral ::= ‘true’ | ‘false’ + +characterLiteral ::= ‘'’ (printableChar | charEscapeSeq) ‘'’ + +stringLiteral ::= ‘"’ {stringElement} ‘"’ + | ‘"""’ multiLineChars ‘"""’ +stringElement ::= (printableChar except ‘"’) + | charEscapeSeq +multiLineChars ::= {[‘"’] [‘"’] charNoDoubleQuote} {‘"’} + +symbolLiteral ::= ‘'’ plainid + +comment ::= ‘/*’ “any sequence of characters; nested comments are allowed” ‘*/’ + | ‘//’ “any sequence of characters up to end of line” + +nl ::= $\mathit{“new line character”}$ +semi ::= ‘;’ | nl {nl} +``` + +The context-free syntax of Scala is given by the following EBNF +grammar. 
+ +```ebnf + Literal ::= [‘-’] integerLiteral + | [‘-’] floatingPointLiteral + | booleanLiteral + | characterLiteral + | stringLiteral + | symbolLiteral + | ‘null’ + + QualId ::= id {‘.’ id} + ids ::= id {‘,’ id} + + Path ::= StableId + | [id ‘.’] ‘this’ + StableId ::= id + | Path ‘.’ id + | [id ‘.’] ‘super’ [ClassQualifier] ‘.’ id + ClassQualifier ::= ‘[’ id ‘]’ + + Type ::= FunctionArgTypes ‘=>’ Type + | InfixType [ExistentialClause] + FunctionArgTypes ::= InfixType + | ‘(’ [ ParamType {‘,’ ParamType } ] ‘)’ + ExistentialClause ::= ‘forSome’ ‘{’ ExistentialDcl {semi ExistentialDcl} ‘}’ + ExistentialDcl ::= ‘type’ TypeDcl + | ‘val’ ValDcl + InfixType ::= CompoundType {id [nl] CompoundType} + CompoundType ::= AnnotType {‘with’ AnnotType} [Refinement] + | Refinement + AnnotType ::= SimpleType {Annotation} + SimpleType ::= SimpleType TypeArgs + | SimpleType ‘#’ id + | StableId + | Path ‘.’ ‘type’ + | ‘(’ Types ‘)’ + TypeArgs ::= ‘[’ Types ‘]’ + Types ::= Type {‘,’ Type} + Refinement ::= [nl] ‘{’ RefineStat {semi RefineStat} ‘}’ + RefineStat ::= Dcl + | ‘type’ TypeDef + | + TypePat ::= Type + + Ascription ::= ‘:’ InfixType + | ‘:’ Annotation {Annotation} + | ‘:’ ‘_’ ‘*’ + + Expr ::= (Bindings | [‘implicit’] id | ‘_’) ‘=>’ Expr + | Expr1 + Expr1 ::= `if' `(' Expr `)' {nl} Expr [[semi] `else' Expr] + | `while' `(' Expr `)' {nl} Expr + | `try' (`{' Block `}' | Expr) [`catch' `{' CaseClauses `}'] [`finally' Expr] + | `do' Expr [semi] `while' `(' Expr ')' + | `for' (`(' Enumerators `)' | `{' Enumerators `}') {nl} [`yield'] Expr + | `throw' Expr + | `return' [Expr] + | [SimpleExpr `.'] id `=' Expr + | SimpleExpr1 ArgumentExprs `=' Expr + | PostfixExpr + | PostfixExpr Ascription + | PostfixExpr `match' `{' CaseClauses `}' + PostfixExpr ::= InfixExpr [id [nl]] + InfixExpr ::= PrefixExpr + | InfixExpr id [nl] InfixExpr + PrefixExpr ::= [‘-’ | ‘+’ | ‘~’ | ‘!’] SimpleExpr + SimpleExpr ::= ‘new’ (ClassTemplate | TemplateBody) + | BlockExpr + | SimpleExpr1 [‘_’] + SimpleExpr1 ::= Literal + | Path + | ‘_’ + | ‘(’ [Exprs] ‘)’ + | SimpleExpr ‘.’ id + | SimpleExpr TypeArgs + | SimpleExpr1 ArgumentExprs + | XmlExpr + Exprs ::= Expr {‘,’ Expr} + ArgumentExprs ::= ‘(’ [Exprs] ‘)’ + | ‘(’ [Exprs ‘,’] PostfixExpr ‘:’ ‘_’ ‘*’ ‘)’ + | [nl] BlockExpr + BlockExpr ::= ‘{’ CaseClauses ‘}’ + | ‘{’ Block ‘}’ + Block ::= BlockStat {semi BlockStat} [ResultExpr] + BlockStat ::= Import + | {Annotation} [‘implicit’ | ‘lazy’] Def + | {Annotation} {LocalModifier} TmplDef + | Expr1 + | + ResultExpr ::= Expr1 + | (Bindings | ([‘implicit’] id | ‘_’) ‘:’ CompoundType) ‘=>’ Block + + Enumerators ::= Generator {semi Generator} + Generator ::= Pattern1 ‘<-’ Expr {[semi] Guard | semi Pattern1 ‘=’ Expr} + + CaseClauses ::= CaseClause { CaseClause } + CaseClause ::= ‘case’ Pattern [Guard] ‘=>’ Block + Guard ::= ‘if’ PostfixExpr + + Pattern ::= Pattern1 { ‘|’ Pattern1 } + Pattern1 ::= varid ‘:’ TypePat + | ‘_’ ‘:’ TypePat + | Pattern2 + Pattern2 ::= varid [‘@’ Pattern3] + | Pattern3 + Pattern3 ::= SimplePattern + | SimplePattern { id [nl] SimplePattern } + SimplePattern ::= ‘_’ + | varid + | Literal + | StableId + | StableId ‘(’ [Patterns ‘)’ + | StableId ‘(’ [Patterns ‘,’] [varid ‘@’] ‘_’ ‘*’ ‘)’ + | ‘(’ [Patterns] ‘)’ + | XmlPattern + Patterns ::= Pattern [‘,’ Patterns] + | ‘_’ * + + TypeParamClause ::= ‘[’ VariantTypeParam {‘,’ VariantTypeParam} ‘]’ + FunTypeParamClause::= ‘[’ TypeParam {‘,’ TypeParam} ‘]’ + VariantTypeParam ::= {Annotation} [‘+’ | ‘-’] TypeParam + TypeParam ::= (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type] + 
{‘<%’ Type} {‘:’ Type} + ParamClauses ::= {ParamClause} [[nl] ‘(’ ‘implicit’ Params ‘)’] + ParamClause ::= [nl] ‘(’ [Params] ‘)’ + Params ::= Param {‘,’ Param} + Param ::= {Annotation} id [‘:’ ParamType] [‘=’ Expr] + ParamType ::= Type + | ‘=>’ Type + | Type ‘*’ + ClassParamClauses ::= {ClassParamClause} + [[nl] ‘(’ ‘implicit’ ClassParams ‘)’] + ClassParamClause ::= [nl] ‘(’ [ClassParams] ‘)’ + ClassParams ::= ClassParam {‘,’ ClassParam} + ClassParam ::= {Annotation} {Modifier} [(`val' | `var')] + id ‘:’ ParamType [‘=’ Expr] + Bindings ::= ‘(’ Binding {‘,’ Binding} ‘)’ + Binding ::= (id | ‘_’) [‘:’ Type] + + Modifier ::= LocalModifier + | AccessModifier + | ‘override’ + LocalModifier ::= ‘abstract’ + | ‘final’ + | ‘sealed’ + | ‘implicit’ + | ‘lazy’ + AccessModifier ::= (‘private’ | ‘protected’) [AccessQualifier] + AccessQualifier ::= ‘[’ (id | ‘this’) ‘]’ + + Annotation ::= ‘@’ SimpleType {ArgumentExprs} + ConstrAnnotation ::= ‘@’ SimpleType ArgumentExprs + + TemplateBody ::= [nl] ‘{’ [SelfType] TemplateStat {semi TemplateStat} ‘}’ + TemplateStat ::= Import + | {Annotation [nl]} {Modifier} Def + | {Annotation [nl]} {Modifier} Dcl + | Expr + | + SelfType ::= id [‘:’ Type] ‘=>’ + | ‘this’ ‘:’ Type ‘=>’ + + Import ::= ‘import’ ImportExpr {‘,’ ImportExpr} + ImportExpr ::= StableId ‘.’ (id | ‘_’ | ImportSelectors) + ImportSelectors ::= ‘{’ {ImportSelector ‘,’} (ImportSelector | ‘_’) ‘}’ + ImportSelector ::= id [‘=>’ id | ‘=>’ ‘_’] + + Dcl ::= ‘val’ ValDcl + | ‘var’ VarDcl + | ‘def’ FunDcl + | ‘type’ {nl} TypeDcl + + ValDcl ::= ids ‘:’ Type + VarDcl ::= ids ‘:’ Type + FunDcl ::= FunSig [‘:’ Type] + FunSig ::= id [FunTypeParamClause] ParamClauses + TypeDcl ::= id [TypeParamClause] [‘>:’ Type] [‘<:’ Type] + + PatVarDef ::= ‘val’ PatDef + | ‘var’ VarDef + Def ::= PatVarDef + | ‘def’ FunDef + | ‘type’ {nl} TypeDef + | TmplDef + PatDef ::= Pattern2 {‘,’ Pattern2} [‘:’ Type] ‘=’ Expr + VarDef ::= PatDef + | ids ‘:’ Type ‘=’ ‘_’ + FunDef ::= FunSig [‘:’ Type] ‘=’ Expr + | FunSig [nl] ‘{’ Block ‘}’ + | ‘this’ ParamClause ParamClauses + (‘=’ ConstrExpr | [nl] ConstrBlock) + TypeDef ::= id [TypeParamClause] ‘=’ Type + + TmplDef ::= [‘case’] ‘class’ ClassDef + | [‘case’] ‘object’ ObjectDef + | ‘trait’ TraitDef + ClassDef ::= id [TypeParamClause] {ConstrAnnotation} [AccessModifier] + ClassParamClauses ClassTemplateOpt + TraitDef ::= id [TypeParamClause] TraitTemplateOpt + ObjectDef ::= id ClassTemplateOpt + ClassTemplateOpt ::= ‘extends’ ClassTemplate | [[‘extends’] TemplateBody] + TraitTemplateOpt ::= ‘extends’ TraitTemplate | [[‘extends’] TemplateBody] + ClassTemplate ::= [EarlyDefs] ClassParents [TemplateBody] + TraitTemplate ::= [EarlyDefs] TraitParents [TemplateBody] + ClassParents ::= Constr {‘with’ AnnotType} + TraitParents ::= AnnotType {‘with’ AnnotType} + Constr ::= AnnotType {ArgumentExprs} + EarlyDefs ::= ‘{’ [EarlyDef {semi EarlyDef}] ‘}’ ‘with’ + EarlyDef ::= {Annotation [nl]} {Modifier} PatVarDef + + ConstrExpr ::= SelfInvocation + | ConstrBlock + ConstrBlock ::= ‘{’ SelfInvocation {semi BlockStat} ‘}’ + SelfInvocation ::= ‘this’ ArgumentExprs {ArgumentExprs} + + TopStatSeq ::= TopStat {semi TopStat} + TopStat ::= {Annotation [nl]} {Modifier} TmplDef + | Import + | Packaging + | PackageObject + | + Packaging ::= ‘package’ QualId [nl] ‘{’ TopStatSeq ‘}’ + PackageObject ::= ‘package’ ‘object’ ObjectDef + + CompilationUnit ::= {‘package’ QualId semi} TopStatSeq +``` + + diff --git a/spec/14-references.md b/spec/14-references.md new file mode 100644 index 000000000000..caae5796b248 --- /dev/null 
+++ b/spec/14-references.md @@ -0,0 +1,207 @@ +--- +title: References +layout: default +chapter: 14 +--- + +# References + +TODO (see comments in markdown source) + + diff --git a/spec/README.md b/spec/README.md new file mode 100644 index 000000000000..97c3fdf83286 --- /dev/null +++ b/spec/README.md @@ -0,0 +1,40 @@ +# Scala Language Reference + +First of all, the language specification is meant to be correct, precise and clear. + +Second, editing, previewing and generating output for the markdown should be simple and easy. + +Third, we'd like to support different output formats. An html page per chapter with MathJax seems like a good start, as it satisfies the second requirement, and enables the first one. + +## Editing + +We use redcarpet 3.1 and jekyll 2 (currently in alpha) to generate the html. Essentially, this is what github pages use. + +## Building + +Travis CI builds the spec automatically on every commit to master and publishes to http://www.scala-lang.org/files/archive/spec/2.11/. + +To preview locally, run `bundle exec jekyll serve -d build/spec/ -s spec/ -w --baseurl=""` (in the root of your checkout of scala/scala), +and open http://0.0.0.0:4000/. Jekyll will rebuild as you edit the markdown, but make sure to restart it when you change `_config.yml`. + +## General Advice for editors + +- All files must be saved as UTF-8: ensure your editors are configured appropriately. +- Use of the appropriate unicode characters instead of the latex modifiers for accents, etc. is necessary. For example, é instead of `\'e`. +- MathJAX errors will appear within the rendered DOM as span elements with class `mtext` and style attribute `color: red` applied. It is possible to search for this combination in the development tools of the browser of your choice. In chrome, CTRL+F / CMD+F within the inspect element panel allows you to do this. + +### Macro replacements: + +- While MathJAX just support LaTeX style command definition, it is recommended to not use this as it will likely cause issues with preparing the document for PDF or ebook distribution. +- `\SS` (which I could not find defined within the latex source) seems to be closest to `\mathscr{S}` +- `\TYPE` is equivalent to `\boldsymbol{type}' +- As MathJAX has no support for slanted font (latex command \sl), so in all instances this should be replaced with \mathit{} +- The macro \U{ABCD} used for unicode character references can be replaced with \\uABCD. +- The macro \URange{ABCD}{DCBA} used for unicode character ranges can be replaced with \\uABCD-\\uDBCA. +- The macro \commadots can be replaced with ` , … , `. +- There is no adequate replacement for `\textsc{...}` (small caps) in pandoc markdown. While unicode contains a number of small capital letters, it is notably missing Q and X as these glyphs are intended for phonetic spelling, therefore these cannot be reliably used. For now, the best option is to use underscore emphasis and capitalise the text manually, `_LIKE THIS_`. + +### Unicode Character replacements + +- The unicode left and right single quotation marks (‘ and ’) have been used in place of ` and ', where the quotation marks are intended to be paired. These can be typed on a mac using Option+] for a left quote and Option+Shift+] for the right quote. +- Similarly for left and right double quotation marks (“ and ”) in place of ". These can be typed on a mac using Option+[ and Option+Shift+]. 
diff --git a/spec/_config.yml b/spec/_config.yml new file mode 100644 index 000000000000..1052ddedb055 --- /dev/null +++ b/spec/_config.yml @@ -0,0 +1,10 @@ +baseurl: /files/archive/spec/2.11 +safe: true +lsi: false +highlighter: null +markdown: redcarpet +encoding: utf-8 +redcarpet: + extensions: ["no_intra_emphasis", "fenced_code_blocks", "autolink", "tables", "with_toc_data", "strikethrough", "lax_spacing", "space_after_headers", "superscript", "footnotes"] +# with_toc_data requires redcarpet 3.1 to get +# pretty ID attributes for Hn headers (https://github.com/vmg/redcarpet/pull/186) diff --git a/spec/_includes/numbering.css b/spec/_includes/numbering.css new file mode 100644 index 000000000000..8df08098bc2a --- /dev/null +++ b/spec/_includes/numbering.css @@ -0,0 +1,62 @@ +// based on http://philarcher.org/css/numberheadings.css, +h1 { + /* must reset here */ + counter-reset: chapter {{ page.chapter }}; +} +h1:before { + /* and must reset again here */ + counter-reset: chapter {{ page.chapter }}; + content: "Chapter " counter(chapter); + display: block; +} + +h2 { + /* must increment here */ + counter-increment: section; + counter-reset: subsection; +} +h2:before { + /* and must reset again here */ + counter-reset: chapter {{ page.chapter }}; + + content: counter(chapter) "." counter(section) ; + display: inline; + margin-right: 1em; +} +h2:after { + /* can only have one counter-reset per tag, so can't do it in h2/h2:before... */ + counter-reset: example; +} + +h3 { + /* must increment here */ + counter-increment: subsection; +} +h3:before { + /* and must reset again here */ + counter-reset: chapter {{ page.chapter }}; + + content: counter(chapter) "." counter(section) "." counter(subsection); + display: inline; + margin-right: 1em; +} + +h3[id*='example'] { + /* must increment here */ + counter-increment: example; + display: inline; +} +h3[id*='example']:before { + /* and must reset again here */ + counter-reset: chapter {{ page.chapter }}; + + content: "Example " counter(chapter) "." counter(section) "." counter(example); + display: inline; + margin-right: 1em; +} + +.no-numbering, .no-numbering:before, .no-numbering:after { + content: normal; + counter-reset: none; + counter-increment: none; +} diff --git a/spec/_layouts/default.yml b/spec/_layouts/default.yml new file mode 100644 index 000000000000..64ba4a1639df --- /dev/null +++ b/spec/_layouts/default.yml @@ -0,0 +1,90 @@ + + + + + + + + + + + + + + + + + + + + + + + {{ page.title }} + + + + +
+ +
+{{ content }} +
+
+ + diff --git a/spec/_layouts/toc.yml b/spec/_layouts/toc.yml new file mode 100644 index 000000000000..caf0be1a3a3a --- /dev/null +++ b/spec/_layouts/toc.yml @@ -0,0 +1,28 @@ + + + + + + + + + + + {{ page.title }} + + + + + + +{{ content }} + + + diff --git a/spec/id_dsa_travis.enc b/spec/id_dsa_travis.enc new file mode 100644 index 000000000000..a9a4036807e6 --- /dev/null +++ b/spec/id_dsa_travis.enc @@ -0,0 +1,15 @@ +U2FsdGVkX1/RKhLZeL93vFQikKRRkoa3rqt6Kbs7cJStmcTI+DohoRUidRaeSULa ++xXQCwaSDs4+l1HdW2R4ZV62AVGhvIeKEZxc449c6qT9+wUd2PKkDghuJCy1dLTo +2OdFLDeop0X32bsauzPQGWwrpb/Llck4KeKffJq2257Hu6T/HnzSfDnvXbjAsVeH +ZLeXURAyDAdK9vFmFzFiEEztLkW8E3ZVyrk7Qa3GPNpmATiBdhVM8d0JJptKVgwQ +mZfhbItLrj490sPd5zpUFKAxJjPoKIa75n/+u4butn+ON97vr7xOy6ElX7HSJUgr +FJdVJgcO7lki0j+lfJVAP0zLnH80CgOkOJSq0Sso/ofs+lQIobo8fQqIdmoqV3z2 +KpYrgnqap1U2+ekIUKsUxk4LuO8uJhwPeMJs6FoDb+O4Aauqpy9242+P05gWkQVd +KVWRcHVE7DulS8Fp/o5GXJUdw+rdxvQ/voJ8i0HbYpp6UcmQwBheQMSmqtp5+ML9 +rBiBe2sr7pahqI5NKoF3iZCkZW74ge3/GP2d6m2tpOzD+IfdFDXQ/r8DbK2Dvwvz +eutOb0zrUtua2e2zvvpVxldPVpXA7A1hE0P3lns9o+TqNhEauTQimQ8/X51BHO6E +Ap4odrf2odocacY5VC4LFYDO3vat0wSTpi6SxkemUMX5yB7euqwD3ZrMcbpPFR1B +IU5XxW20NxUo8n+WuMUNkXTgk/Cr4OUiavVv4oLsHkmgD9LN3IYI6Rj/DSCzSbDx +hyWc7R47iu9f5okQScx62DwVK3AyAuVWer94x0Kj8AcIRwU/VwiXjnZ59I89AKTN +sjZJw1FfpJPqYs7fPtEiotUdaJHzJH8tiEWFrtOTuOg3h6fy0KJTPVh0WjcGXfb6 +Uh1SEgeHtMSUVhq8nd8LGQ== diff --git a/spec/index.md b/spec/index.md new file mode 100644 index 000000000000..ee9c2a5f7880 --- /dev/null +++ b/spec/index.md @@ -0,0 +1,71 @@ +--- +title: Scala Language Reference +layout: toc +--- + +# The Scala Language Specification +# Version 2.11 + +### Maintained online at [https://github.com/scala/scala/tree/2.11.x/spec](https://github.com/scala/scala/tree/2.11.x/spec) + +### Martin Odersky, Philippe Altherr, Vincent Cremet, Gilles Dubochet, Burak Emir, Philipp Haller, Stéphane Micheloud, Nikolay Mihaylov, Adriaan Moors, Lukas Rytz, Michel Schinz, Erik Stenman, Matthias Zenger + +### Markdown Conversion by Iain McGinniss. + +## Table of Contents + +
    + {% assign sorted_pages = site.pages | sort:"name" %} + {% for post in sorted_pages %} + + {% if post.chapter >= 0 %} +
  1. + {{ post.title }} +
  2. + {% endif %} + {% endfor %} +
+ +## Preface + +Scala is a Java-like programming language which unifies +object-oriented and functional programming. It is a pure +object-oriented language in the sense that every value is an +object. Types and behavior of objects are described by +classes. Classes can be composed using mixin composition. Scala is +designed to work seamlessly with less pure but mainstream +object-oriented languages like Java. + +Scala is a functional language in the sense that every function is a +value. Nesting of function definitions and higher-order functions are +naturally supported. Scala also supports a general notion of pattern +matching which can model the algebraic types used in many functional +languages. + +Scala has been designed to interoperate seamlessly with Java. +Scala classes can call Java methods, create Java objects, inherit from Java +classes and implement Java interfaces. None of this requires interface +definitions or glue code. + +Scala has been developed from 2001 in the programming methods +laboratory at EPFL. Version 1.0 was released in November 2003. This +document describes the second version of the language, which was +released in March 2006. It acts a reference for the language +definition and some core library modules. It is not intended to teach +Scala or its concepts; for this there are [other documents](14-references.html). + +Scala has been a collective effort of many people. The design and the +implementation of version 1.0 was completed by Philippe Altherr, +Vincent Cremet, Gilles Dubochet, Burak Emir, Stéphane Micheloud, +Nikolay Mihaylov, Michel Schinz, Erik Stenman, Matthias Zenger, and +the author. Iulian Dragos, Gilles Dubochet, Philipp Haller, Sean +McDirmid, Lex Spoon, and Geoffrey Washburn joined in the effort to +develop the second version of the language and tools. Gilad Bracha, +Craig Chambers, Erik Ernst, Matthias Felleisen, Shriram Krishnamurti, +Gary Leavens, Sebastian Maneth, Erik Meijer, Klaus Ostermann, Didier +Rémy, Mads Torgersen, and Philip Wadler have shaped the design of +the language through lively and inspiring discussions and comments on +previous versions of this document. The contributors to the Scala +mailing list have also given very useful feedback that helped us +improve the language and its tools. diff --git a/spec/public/favicon.ico b/spec/public/favicon.ico new file mode 100644 index 000000000000..9eb6ef516488 Binary files /dev/null and b/spec/public/favicon.ico differ diff --git a/spec/public/highlight/LICENSE b/spec/public/highlight/LICENSE new file mode 100644 index 000000000000..422deb7350fe --- /dev/null +++ b/spec/public/highlight/LICENSE @@ -0,0 +1,24 @@ +Copyright (c) 2006, Ivan Sagalaev +All rights reserved. +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of highlight.js nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY +EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/spec/public/highlight/highlight.pack.js b/spec/public/highlight/highlight.pack.js new file mode 100644 index 000000000000..bfeca09abb51 --- /dev/null +++ b/spec/public/highlight/highlight.pack.js @@ -0,0 +1 @@ +var hljs=new function(){function j(v){return v.replace(/&/gm,"&").replace(//gm,">")}function t(v){return v.nodeName.toLowerCase()}function h(w,x){var v=w&&w.exec(x);return v&&v.index==0}function r(w){var v=(w.className+" "+(w.parentNode?w.parentNode.className:"")).split(/\s+/);v=v.map(function(x){return x.replace(/^lang(uage)?-/,"")});return v.filter(function(x){return i(x)||/no(-?)highlight/.test(x)})[0]}function o(x,y){var v={};for(var w in x){v[w]=x[w]}if(y){for(var w in y){v[w]=y[w]}}return v}function u(x){var v=[];(function w(y,z){for(var A=y.firstChild;A;A=A.nextSibling){if(A.nodeType==3){z+=A.nodeValue.length}else{if(A.nodeType==1){v.push({event:"start",offset:z,node:A});z=w(A,z);if(!t(A).match(/br|hr|img|input/)){v.push({event:"stop",offset:z,node:A})}}}}return z})(x,0);return v}function q(w,y,C){var x=0;var F="";var z=[];function B(){if(!w.length||!y.length){return w.length?w:y}if(w[0].offset!=y[0].offset){return(w[0].offset"}function E(G){F+=""}function v(G){(G.event=="start"?A:E)(G.node)}while(w.length||y.length){var D=B();F+=j(C.substr(x,D[0].offset-x));x=D[0].offset;if(D==w){z.reverse().forEach(E);do{v(D.splice(0,1)[0]);D=B()}while(D==w&&D.length&&D[0].offset==x);z.reverse().forEach(A)}else{if(D[0].event=="start"){z.push(D[0].node)}else{z.pop()}v(D.splice(0,1)[0])}}return F+j(C.substr(x))}function m(y){function v(z){return(z&&z.source)||z}function w(A,z){return RegExp(v(A),"m"+(y.cI?"i":"")+(z?"g":""))}function x(D,C){if(D.compiled){return}D.compiled=true;D.k=D.k||D.bK;if(D.k){var z={};var E=function(G,F){if(y.cI){F=F.toLowerCase()}F.split(" ").forEach(function(H){var I=H.split("|");z[I[0]]=[G,I[1]?Number(I[1]):1]})};if(typeof D.k=="string"){E("keyword",D.k)}else{Object.keys(D.k).forEach(function(F){E(F,D.k[F])})}D.k=z}D.lR=w(D.l||/\b[A-Za-z0-9_]+\b/,true);if(C){if(D.bK){D.b="\\b("+D.bK.split(" ").join("|")+")\\b"}if(!D.b){D.b=/\B|\b/}D.bR=w(D.b);if(!D.e&&!D.eW){D.e=/\B|\b/}if(D.e){D.eR=w(D.e)}D.tE=v(D.e)||"";if(D.eW&&C.tE){D.tE+=(D.e?"|":"")+C.tE}}if(D.i){D.iR=w(D.i)}if(D.r===undefined){D.r=1}if(!D.c){D.c=[]}var B=[];D.c.forEach(function(F){if(F.v){F.v.forEach(function(G){B.push(o(F,G))})}else{B.push(F=="self"?D:F)}});D.c=B;D.c.forEach(function(F){x(F,D)});if(D.starts){x(D.starts,C)}var A=D.c.map(function(F){return F.bK?"\\.?("+F.b+")\\.?":F.b}).concat([D.tE,D.i]).map(v).filter(Boolean);D.t=A.length?w(A.join("|"),true):{exec:function(F){return null}}}x(y)}function c(T,L,J,R){function v(V,W){for(var U=0;U";V+=aa+'">';return V+Y+Z}function N(){if(!I.k){return j(C)}var U="";var X=0;I.lR.lastIndex=0;var 
V=I.lR.exec(C);while(V){U+=j(C.substr(X,V.index-X));var W=E(I,V);if(W){H+=W[1];U+=w(W[0],j(V[0]))}else{U+=j(V[0])}X=I.lR.lastIndex;V=I.lR.exec(C)}return U+j(C.substr(X))}function F(){if(I.sL&&!f[I.sL]){return j(C)}var U=I.sL?c(I.sL,C,true,S):e(C);if(I.r>0){H+=U.r}if(I.subLanguageMode=="continuous"){S=U.top}return w(U.language,U.value,false,true)}function Q(){return I.sL!==undefined?F():N()}function P(W,V){var U=W.cN?w(W.cN,"",true):"";if(W.rB){D+=U;C=""}else{if(W.eB){D+=j(V)+U;C=""}else{D+=U;C=V}}I=Object.create(W,{parent:{value:I}})}function G(U,Y){C+=U;if(Y===undefined){D+=Q();return 0}var W=v(Y,I);if(W){D+=Q();P(W,Y);return W.rB?0:Y.length}var X=z(I,Y);if(X){var V=I;if(!(V.rE||V.eE)){C+=Y}D+=Q();do{if(I.cN){D+=""}H+=I.r;I=I.parent}while(I!=X.parent);if(V.eE){D+=j(Y)}C="";if(X.starts){P(X.starts,"")}return V.rE?0:Y.length}if(A(Y,I)){throw new Error('Illegal lexeme "'+Y+'" for mode "'+(I.cN||"")+'"')}C+=Y;return Y.length||1}var M=i(T);if(!M){throw new Error('Unknown language: "'+T+'"')}m(M);var I=R||M;var S;var D="";for(var K=I;K!=M;K=K.parent){if(K.cN){D=w(K.cN,"",true)+D}}var C="";var H=0;try{var B,y,x=0;while(true){I.t.lastIndex=x;B=I.t.exec(L);if(!B){break}y=G(L.substr(x,B.index-x),B[0]);x=B.index+y}G(L.substr(x));for(var K=I;K.parent;K=K.parent){if(K.cN){D+=""}}return{r:H,value:D,language:T,top:I}}catch(O){if(O.message.indexOf("Illegal")!=-1){return{r:0,value:j(L)}}else{throw O}}}function e(y,x){x=x||b.languages||Object.keys(f);var v={r:0,value:j(y)};var w=v;x.forEach(function(z){if(!i(z)){return}var A=c(z,y,false);A.language=z;if(A.r>w.r){w=A}if(A.r>v.r){w=v;v=A}});if(w.language){v.second_best=w}return v}function g(v){if(b.tabReplace){v=v.replace(/^((<[^>]+>|\t)+)/gm,function(w,z,y,x){return z.replace(/\t/g,b.tabReplace)})}if(b.useBR){v=v.replace(/\n/g,"
")}return v}function p(A){var B=r(A);if(/no(-?)highlight/.test(B)){return}var y;if(b.useBR){y=document.createElementNS("http://www.w3.org/1999/xhtml","div");y.innerHTML=A.innerHTML.replace(/\n/g,"").replace(//g,"\n")}else{y=A}var z=y.textContent;var v=B?c(B,z,true):e(z);var x=u(y);if(x.length){var w=document.createElementNS("http://www.w3.org/1999/xhtml","div");w.innerHTML=v.value;v.value=q(x,u(w),z)}v.value=g(v.value);A.innerHTML=v.value;A.className+=" hljs "+(!B&&v.language||"");A.result={language:v.language,re:v.r};if(v.second_best){A.second_best={language:v.second_best.language,re:v.second_best.r}}}var b={classPrefix:"hljs-",tabReplace:null,useBR:false,languages:undefined};function s(v){b=o(b,v)}function l(){if(l.called){return}l.called=true;var v=document.querySelectorAll("pre code");Array.prototype.forEach.call(v,p)}function a(){addEventListener("DOMContentLoaded",l,false);addEventListener("load",l,false)}var f={};var n={};function d(v,x){var w=f[v]=x(this);if(w.aliases){w.aliases.forEach(function(y){n[y]=v})}}function k(){return Object.keys(f)}function i(v){return f[v]||f[n[v]]}this.highlight=c;this.highlightAuto=e;this.fixMarkup=g;this.highlightBlock=p;this.configure=s;this.initHighlighting=l;this.initHighlightingOnLoad=a;this.registerLanguage=d;this.listLanguages=k;this.getLanguage=i;this.inherit=o;this.IR="[a-zA-Z][a-zA-Z0-9_]*";this.UIR="[a-zA-Z_][a-zA-Z0-9_]*";this.NR="\\b\\d+(\\.\\d+)?";this.CNR="(\\b0[xX][a-fA-F0-9]+|(\\b\\d+(\\.\\d*)?|\\.\\d+)([eE][-+]?\\d+)?)";this.BNR="\\b(0b[01]+)";this.RSR="!|!=|!==|%|%=|&|&&|&=|\\*|\\*=|\\+|\\+=|,|-|-=|/=|/|:|;|<<|<<=|<=|<|===|==|=|>>>=|>>=|>=|>>>|>>|>|\\?|\\[|\\{|\\(|\\^|\\^=|\\||\\|=|\\|\\||~";this.BE={b:"\\\\[\\s\\S]",r:0};this.ASM={cN:"string",b:"'",e:"'",i:"\\n",c:[this.BE]};this.QSM={cN:"string",b:'"',e:'"',i:"\\n",c:[this.BE]};this.PWM={b:/\b(a|an|the|are|I|I'm|isn't|don't|doesn't|won't|but|just|should|pretty|simply|enough|gonna|going|wtf|so|such)\b/};this.CLCM={cN:"comment",b:"//",e:"$",c:[this.PWM]};this.CBCM={cN:"comment",b:"/\\*",e:"\\*/",c:[this.PWM]};this.HCM={cN:"comment",b:"#",e:"$",c:[this.PWM]};this.NM={cN:"number",b:this.NR,r:0};this.CNM={cN:"number",b:this.CNR,r:0};this.BNM={cN:"number",b:this.BNR,r:0};this.CSSNM={cN:"number",b:this.NR+"(%|em|ex|ch|rem|vw|vh|vmin|vmax|cm|mm|in|pt|pc|px|deg|grad|rad|turn|s|ms|Hz|kHz|dpi|dpcm|dppx)?",r:0};this.RM={cN:"regexp",b:/\//,e:/\/[gim]*/,i:/\n/,c:[this.BE,{b:/\[/,e:/\]/,r:0,c:[this.BE]}]};this.TM={cN:"title",b:this.IR,r:0};this.UTM={cN:"title",b:this.UIR,r:0}}();hljs.registerLanguage("scala",function(d){var b={cN:"annotation",b:"@[A-Za-z]+"};var c={cN:"string",b:'u?r?"""',e:'"""',r:10};var a={cN:"symbol",b:"'\\w[\\w\\d_]*(?!')"};var e={cN:"type",b:"\\b[A-Z][A-Za-z0-9_]*",r:0};var h={cN:"title",b:/[^0-9\n\t "'(),.`{}\[\]:;][^\n\t "'(),.`{}\[\]:;]+|[^0-9\n\t "'(),.`{}\[\]:;=]/,r:0};var i={cN:"class",bK:"class object trait type",e:/[:={\[(\n;]/,c:[{cN:"keyword",bK:"extends with",r:10},h]};var g={cN:"function",bK:"def val",e:/[:={\[(\n;]/,c:[h]};var f={cN:"javadoc",b:"/\\*\\*",e:"\\*/",c:[{cN:"javadoctag",b:"@[A-Za-z]+"}],r:10};return{k:{literal:"true false null",keyword:"type yield lazy override def with val var sealed abstract private trait object if forSome for while throw finally protected extends import final return else break new catch super class case package default try this match continue throws implicit"},c:[d.CLCM,d.CBCM,c,d.QSM,a,e,g,i,d.CNM,b]}}); \ No newline at end of file diff --git a/spec/public/images/classhierarchy.pdf b/spec/public/images/classhierarchy.pdf new 
file mode 100644 index 000000000000..58e050174b65 Binary files /dev/null and b/spec/public/images/classhierarchy.pdf differ diff --git a/spec/public/images/scala-logo-red-spiral-dark.png b/spec/public/images/scala-logo-red-spiral-dark.png new file mode 100644 index 000000000000..09b66b5e6a33 Binary files /dev/null and b/spec/public/images/scala-logo-red-spiral-dark.png differ diff --git a/spec/public/octicons/LICENSE.txt b/spec/public/octicons/LICENSE.txt new file mode 100644 index 000000000000..259b43d14de3 --- /dev/null +++ b/spec/public/octicons/LICENSE.txt @@ -0,0 +1,9 @@ +(c) 2012-2014 GitHub + +When using the GitHub logos, be sure to follow the GitHub logo guidelines (https://github.com/logos) + +Font License: SIL OFL 1.1 (http://scripts.sil.org/OFL) +Applies to all font files + +Code License: MIT (http://choosealicense.com/licenses/mit/) +Applies to all other files diff --git a/spec/public/octicons/octicons.css b/spec/public/octicons/octicons.css new file mode 100644 index 000000000000..a5dcd153a856 --- /dev/null +++ b/spec/public/octicons/octicons.css @@ -0,0 +1,235 @@ +@font-face { + font-family: 'octicons'; + src: url('octicons.eot?#iefix') format('embedded-opentype'), + url('octicons.woff') format('woff'), + url('octicons.ttf') format('truetype'), + url('octicons.svg#octicons') format('svg'); + font-weight: normal; + font-style: normal; +} + +/* + +.octicon is optimized for 16px. +.mega-octicon is optimized for 32px but can be used larger. + +*/ +.octicon, .mega-octicon { + font: normal normal normal 16px/1 octicons; + display: inline-block; + text-decoration: none; + text-rendering: auto; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; + -webkit-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; +} +.mega-octicon { font-size: 32px; } + + +.octicon-alert:before { content: '\f02d'} /*  */ +.octicon-alignment-align:before { content: '\f08a'} /*  */ +.octicon-alignment-aligned-to:before { content: '\f08e'} /*  */ +.octicon-alignment-unalign:before { content: '\f08b'} /*  */ +.octicon-arrow-down:before { content: '\f03f'} /*  */ +.octicon-arrow-left:before { content: '\f040'} /*  */ +.octicon-arrow-right:before { content: '\f03e'} /*  */ +.octicon-arrow-small-down:before { content: '\f0a0'} /*  */ +.octicon-arrow-small-left:before { content: '\f0a1'} /*  */ +.octicon-arrow-small-right:before { content: '\f071'} /*  */ +.octicon-arrow-small-up:before { content: '\f09f'} /*  */ +.octicon-arrow-up:before { content: '\f03d'} /*  */ +.octicon-beer:before { content: '\f069'} /*  */ +.octicon-book:before { content: '\f007'} /*  */ +.octicon-bookmark:before { content: '\f07b'} /*  */ +.octicon-briefcase:before { content: '\f0d3'} /*  */ +.octicon-broadcast:before { content: '\f048'} /*  */ +.octicon-browser:before { content: '\f0c5'} /*  */ +.octicon-bug:before { content: '\f091'} /*  */ +.octicon-calendar:before { content: '\f068'} /*  */ +.octicon-check:before { content: '\f03a'} /*  */ +.octicon-checklist:before { content: '\f076'} /*  */ +.octicon-chevron-down:before { content: '\f0a3'} /*  */ +.octicon-chevron-left:before { content: '\f0a4'} /*  */ +.octicon-chevron-right:before { content: '\f078'} /*  */ +.octicon-chevron-up:before { content: '\f0a2'} /*  */ +.octicon-circle-slash:before { content: '\f084'} /*  */ +.octicon-circuit-board:before { content: '\f0d6'} /*  */ +.octicon-clippy:before { content: '\f035'} /*  */ +.octicon-clock:before { content: '\f046'} /*  */ 
+.octicon-cloud-download:before { content: '\f00b'} /*  */ +.octicon-cloud-upload:before { content: '\f00c'} /*  */ +.octicon-code:before { content: '\f05f'} /*  */ +.octicon-color-mode:before { content: '\f065'} /*  */ +.octicon-comment-add:before, +.octicon-comment:before { content: '\f02b'} /*  */ +.octicon-comment-discussion:before { content: '\f04f'} /*  */ +.octicon-credit-card:before { content: '\f045'} /*  */ +.octicon-dash:before { content: '\f0ca'} /*  */ +.octicon-dashboard:before { content: '\f07d'} /*  */ +.octicon-database:before { content: '\f096'} /*  */ +.octicon-device-camera:before { content: '\f056'} /*  */ +.octicon-device-camera-video:before { content: '\f057'} /*  */ +.octicon-device-desktop:before { content: '\f27c'} /*  */ +.octicon-device-mobile:before { content: '\f038'} /*  */ +.octicon-diff:before { content: '\f04d'} /*  */ +.octicon-diff-added:before { content: '\f06b'} /*  */ +.octicon-diff-ignored:before { content: '\f099'} /*  */ +.octicon-diff-modified:before { content: '\f06d'} /*  */ +.octicon-diff-removed:before { content: '\f06c'} /*  */ +.octicon-diff-renamed:before { content: '\f06e'} /*  */ +.octicon-ellipsis:before { content: '\f09a'} /*  */ +.octicon-eye-unwatch:before, +.octicon-eye-watch:before, +.octicon-eye:before { content: '\f04e'} /*  */ +.octicon-file-binary:before { content: '\f094'} /*  */ +.octicon-file-code:before { content: '\f010'} /*  */ +.octicon-file-directory:before { content: '\f016'} /*  */ +.octicon-file-media:before { content: '\f012'} /*  */ +.octicon-file-pdf:before { content: '\f014'} /*  */ +.octicon-file-submodule:before { content: '\f017'} /*  */ +.octicon-file-symlink-directory:before { content: '\f0b1'} /*  */ +.octicon-file-symlink-file:before { content: '\f0b0'} /*  */ +.octicon-file-text:before { content: '\f011'} /*  */ +.octicon-file-zip:before { content: '\f013'} /*  */ +.octicon-flame:before { content: '\f0d2'} /*  */ +.octicon-fold:before { content: '\f0cc'} /*  */ +.octicon-gear:before { content: '\f02f'} /*  */ +.octicon-gift:before { content: '\f042'} /*  */ +.octicon-gist:before { content: '\f00e'} /*  */ +.octicon-gist-secret:before { content: '\f08c'} /*  */ +.octicon-git-branch-create:before, +.octicon-git-branch-delete:before, +.octicon-git-branch:before { content: '\f020'} /*  */ +.octicon-git-commit:before { content: '\f01f'} /*  */ +.octicon-git-compare:before { content: '\f0ac'} /*  */ +.octicon-git-merge:before { content: '\f023'} /*  */ +.octicon-git-pull-request-abandoned:before, +.octicon-git-pull-request:before { content: '\f009'} /*  */ +.octicon-globe:before { content: '\f0b6'} /*  */ +.octicon-graph:before { content: '\f043'} /*  */ +.octicon-heart:before { content: '\2665'} /* ♥ */ +.octicon-history:before { content: '\f07e'} /*  */ +.octicon-home:before { content: '\f08d'} /*  */ +.octicon-horizontal-rule:before { content: '\f070'} /*  */ +.octicon-hourglass:before { content: '\f09e'} /*  */ +.octicon-hubot:before { content: '\f09d'} /*  */ +.octicon-inbox:before { content: '\f0cf'} /*  */ +.octicon-info:before { content: '\f059'} /*  */ +.octicon-issue-closed:before { content: '\f028'} /*  */ +.octicon-issue-opened:before { content: '\f026'} /*  */ +.octicon-issue-reopened:before { content: '\f027'} /*  */ +.octicon-jersey:before { content: '\f019'} /*  */ +.octicon-jump-down:before { content: '\f072'} /*  */ +.octicon-jump-left:before { content: '\f0a5'} /*  */ +.octicon-jump-right:before { content: '\f0a6'} /*  */ 
+.octicon-jump-up:before { content: '\f073'} /*  */ +.octicon-key:before { content: '\f049'} /*  */ +.octicon-keyboard:before { content: '\f00d'} /*  */ +.octicon-law:before { content: '\f0d8'} /* */ +.octicon-light-bulb:before { content: '\f000'} /*  */ +.octicon-link:before { content: '\f05c'} /*  */ +.octicon-link-external:before { content: '\f07f'} /*  */ +.octicon-list-ordered:before { content: '\f062'} /*  */ +.octicon-list-unordered:before { content: '\f061'} /*  */ +.octicon-location:before { content: '\f060'} /*  */ +.octicon-gist-private:before, +.octicon-mirror-private:before, +.octicon-git-fork-private:before, +.octicon-lock:before { content: '\f06a'} /*  */ +.octicon-logo-github:before { content: '\f092'} /*  */ +.octicon-mail:before { content: '\f03b'} /*  */ +.octicon-mail-read:before { content: '\f03c'} /*  */ +.octicon-mail-reply:before { content: '\f051'} /*  */ +.octicon-mark-github:before { content: '\f00a'} /*  */ +.octicon-markdown:before { content: '\f0c9'} /*  */ +.octicon-megaphone:before { content: '\f077'} /*  */ +.octicon-mention:before { content: '\f0be'} /*  */ +.octicon-microscope:before { content: '\f089'} /*  */ +.octicon-milestone:before { content: '\f075'} /*  */ +.octicon-mirror-public:before, +.octicon-mirror:before { content: '\f024'} /*  */ +.octicon-mortar-board:before { content: '\f0d7'} /* */ +.octicon-move-down:before { content: '\f0a8'} /*  */ +.octicon-move-left:before { content: '\f074'} /*  */ +.octicon-move-right:before { content: '\f0a9'} /*  */ +.octicon-move-up:before { content: '\f0a7'} /*  */ +.octicon-mute:before { content: '\f080'} /*  */ +.octicon-no-newline:before { content: '\f09c'} /*  */ +.octicon-octoface:before { content: '\f008'} /*  */ +.octicon-organization:before { content: '\f037'} /*  */ +.octicon-package:before { content: '\f0c4'} /*  */ +.octicon-paintcan:before { content: '\f0d1'} /*  */ +.octicon-pencil:before { content: '\f058'} /*  */ +.octicon-person-add:before, +.octicon-person-follow:before, +.octicon-person:before { content: '\f018'} /*  */ +.octicon-pin:before { content: '\f041'} /*  */ +.octicon-playback-fast-forward:before { content: '\f0bd'} /*  */ +.octicon-playback-pause:before { content: '\f0bb'} /*  */ +.octicon-playback-play:before { content: '\f0bf'} /*  */ +.octicon-playback-rewind:before { content: '\f0bc'} /*  */ +.octicon-plug:before { content: '\f0d4'} /*  */ +.octicon-repo-create:before, +.octicon-gist-new:before, +.octicon-file-directory-create:before, +.octicon-file-add:before, +.octicon-plus:before { content: '\f05d'} /*  */ +.octicon-podium:before { content: '\f0af'} /*  */ +.octicon-primitive-dot:before { content: '\f052'} /*  */ +.octicon-primitive-square:before { content: '\f053'} /*  */ +.octicon-pulse:before { content: '\f085'} /*  */ +.octicon-puzzle:before { content: '\f0c0'} /*  */ +.octicon-question:before { content: '\f02c'} /*  */ +.octicon-quote:before { content: '\f063'} /*  */ +.octicon-radio-tower:before { content: '\f030'} /*  */ +.octicon-repo-delete:before, +.octicon-repo:before { content: '\f001'} /*  */ +.octicon-repo-clone:before { content: '\f04c'} /*  */ +.octicon-repo-force-push:before { content: '\f04a'} /*  */ +.octicon-gist-fork:before, +.octicon-repo-forked:before { content: '\f002'} /*  */ +.octicon-repo-pull:before { content: '\f006'} /*  */ +.octicon-repo-push:before { content: '\f005'} /*  */ +.octicon-rocket:before { content: '\f033'} /*  */ +.octicon-rss:before { content: '\f034'} /*  */ 
+.octicon-ruby:before { content: '\f047'} /*  */ +.octicon-screen-full:before { content: '\f066'} /*  */ +.octicon-screen-normal:before { content: '\f067'} /*  */ +.octicon-search-save:before, +.octicon-search:before { content: '\f02e'} /*  */ +.octicon-server:before { content: '\f097'} /*  */ +.octicon-settings:before { content: '\f07c'} /*  */ +.octicon-log-in:before, +.octicon-sign-in:before { content: '\f036'} /*  */ +.octicon-log-out:before, +.octicon-sign-out:before { content: '\f032'} /*  */ +.octicon-split:before { content: '\f0c6'} /*  */ +.octicon-squirrel:before { content: '\f0b2'} /*  */ +.octicon-star-add:before, +.octicon-star-delete:before, +.octicon-star:before { content: '\f02a'} /*  */ +.octicon-steps:before { content: '\f0c7'} /*  */ +.octicon-stop:before { content: '\f08f'} /*  */ +.octicon-repo-sync:before, +.octicon-sync:before { content: '\f087'} /*  */ +.octicon-tag-remove:before, +.octicon-tag-add:before, +.octicon-tag:before { content: '\f015'} /*  */ +.octicon-telescope:before { content: '\f088'} /*  */ +.octicon-terminal:before { content: '\f0c8'} /*  */ +.octicon-three-bars:before { content: '\f05e'} /*  */ +.octicon-tools:before { content: '\f031'} /*  */ +.octicon-trashcan:before { content: '\f0d0'} /*  */ +.octicon-triangle-down:before { content: '\f05b'} /*  */ +.octicon-triangle-left:before { content: '\f044'} /*  */ +.octicon-triangle-right:before { content: '\f05a'} /*  */ +.octicon-triangle-up:before { content: '\f0aa'} /*  */ +.octicon-unfold:before { content: '\f039'} /*  */ +.octicon-unmute:before { content: '\f0ba'} /*  */ +.octicon-versions:before { content: '\f064'} /*  */ +.octicon-remove-close:before, +.octicon-x:before { content: '\f081'} /*  */ +.octicon-zap:before { content: '\26A1'} /* ⚡ */ diff --git a/spec/public/octicons/octicons.eot b/spec/public/octicons/octicons.eot new file mode 100644 index 000000000000..22881a8b6c43 Binary files /dev/null and b/spec/public/octicons/octicons.eot differ diff --git a/spec/public/octicons/octicons.svg b/spec/public/octicons/octicons.svg new file mode 100644 index 000000000000..ea3e0f161528 --- /dev/null +++ b/spec/public/octicons/octicons.svg @@ -0,0 +1,198 @@ + + + + +(c) 2012-2014 GitHub + +When using the GitHub logos, be sure to follow the GitHub logo guidelines (https://github.com/logos) + +Font License: SIL OFL 1.1 (http://scripts.sil.org/OFL) +Applies to all font files + +Code License: MIT (http://choosealicense.com/licenses/mit/) +Applies to all other files + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spec/public/octicons/octicons.ttf b/spec/public/octicons/octicons.ttf new file mode 100644 index 000000000000..189ca2813d49 Binary files /dev/null and b/spec/public/octicons/octicons.ttf differ diff --git a/spec/public/octicons/octicons.woff b/spec/public/octicons/octicons.woff new file mode 100644 index 000000000000..2b770e429f38 Binary files /dev/null and b/spec/public/octicons/octicons.woff differ diff --git a/spec/public/scripts/navigation.js b/spec/public/scripts/navigation.js new file mode 100644 index 000000000000..c046bf4d5460 --- /dev/null +++ b/spec/public/scripts/navigation.js @@ -0,0 +1,70 @@ +(function($){ $.fn.navigation 
= function() { + + // the TOC already contains H1 so we start at H2 + var headers = $('h2, h3, h4, h5').filter(function() { + // exclude examples + if (this.id.substr(0, 7) == 'example') { + return false; + } + + // get all headers with an id + return this.id; + }); + + var output = $(this); + + var get_level = function(n) { return parseInt(n.nodeName.replace('H', ''), 10); } + + var back_to_top = ''; + + if (headers.length && output.length) { + var level = get_level(headers[0]); + var current_level; + var html = '
    '; + + headers.each(function(_, header) { + current_level = get_level(header); + + if (current_level === level) { + // same level as before + html += '
  1. ' + header.innerHTML + ''; + } else if (current_level <= level) { + // higher level, we go back up and close the intermediary lists + for(i = current_level; i < level; i++) { + html += '
'; + } + html += '
  • ' + header.innerHTML + ''; + } else if (current_level > level) { + // lower level, we open new nested lists + for(i = current_level; i > level; i--) { + html += '
    1. '; + } + html += '' + header.innerHTML + ''; + } + + var header_link = ''; + $(header).prepend(header_link); + + if (!$(header).prev().is('h1')) { + $(header).after(back_to_top); + } + + level = current_level; + }); + + html += '
    '; + + output.html(html); + } + + // back to top links + $(document).on('click', '.to_top', function() { + $(window).scrollTop(0); + window.location.hash = ''; + }); + + // we add one more at the end of the document + $('#content-container').append(back_to_top); + +};})(jQuery); + diff --git a/spec/public/stylesheets/print.css b/spec/public/stylesheets/print.css new file mode 100644 index 000000000000..3fbc5596c055 --- /dev/null +++ b/spec/public/stylesheets/print.css @@ -0,0 +1,15 @@ +/* This removes a few things from screen.css for printing */ + +body { + padding: 0px; + margin: 0.5em; +} + +.anchor, #navigation, .to_top { + display: none; +} + +#content-container { + width: 100%; + float: none; +} diff --git a/spec/public/stylesheets/screen.css b/spec/public/stylesheets/screen.css new file mode 100644 index 000000000000..dbb3ebe1b352 --- /dev/null +++ b/spec/public/stylesheets/screen.css @@ -0,0 +1,428 @@ +/* from https://gist.github.com/andyferra/2554919 */ + +body { + font-family: Helvetica, arial, sans-serif; + font-size: 14px; + line-height: 1.6; + + padding-bottom: 10px; + background-color: white; + padding-left: 30px; +} + +#content-container > *:first-child { + margin-top: 0 !important; +} +#content-container > *:last-child { + margin-bottom: 0 !important; +} + +a { + color: #4183C4; +} +a.absent { + color: #cc0000; +} +a.anchor { + display: block; + margin-left: -35px; + padding-left: 10px; + cursor: pointer; + position: absolute; + top: 0; + left: 0; + bottom: 0; + color: black; + width: 35px; height: 100%; +} + +a.anchor span { + vertical-align: middle; +} + +h1, h2, h3, h4, h5, h6 { + margin: 20px 0 10px; + font-weight: bold; + -webkit-font-smoothing: antialiased; + cursor: text; + position: relative; +} + +h1:hover a.anchor, h2:hover a.anchor, h3:hover a.anchor, h4:hover a.anchor, h5:hover a.anchor, h6:hover a.anchor { + text-decoration: none; +} + +h1:hover a.anchor span, h2:hover a.anchor span, h3:hover a.anchor span, h4:hover a.anchor span, h5:hover a.anchor span, h6:hover a.anchor span { + display: inline-block; +} + +h1 a.anchor span, h2 a.anchor span, h3 a.anchor span, h4 a.anchor span, h5 a.anchor span, h6 a.anchor span { + display: none; +} + +h1 a.anchor:hover span, h2 a.anchor:hover span, h3 a.anchor:hover span, h4 a.anchor:hover span, h5 a.anchor:hover span, h6 a.anchor:hover span { + display: inline-block; +} + +h1 tt, h1 code { + font-size: inherit; +} + +h2 tt, h2 code { + font-size: inherit; +} + +h3 tt, h3 code { + font-size: inherit; +} + +h4 tt, h4 code { + font-size: inherit; +} + +h5 tt, h5 code { + font-size: inherit; +} + +h6 tt, h6 code { + font-size: inherit; +} + +h1 { + font-size: 28px; + color: black; +} + +h2 { + font-size: 24px; + border-bottom: 1px solid #cccccc; + color: black; +} + +h3 { + font-size: 18px; +} + +h4 { + font-size: 16px; +} + +h5 { + font-size: 14px; +} + +h6 { + color: #777777; + font-size: 14px; +} + +p, blockquote, ul, ol, dl, li, table, pre { + margin: 15px 0; + -moz-font-feature-settings: "onum"; + -ms-font-feature-settings: "onum"; + -webkit-font-feature-settings: "onum"; + font-feature-settings: "onum"; +} + +hr { + background: transparent url("../../images/modules/pulls/dirty-shade.png") repeat-x 0 0; + border: 0 none; + color: #cccccc; + height: 4px; + padding: 0; +} + +body > h2:first-child { + margin-top: 0; + padding-top: 0; +} +body > h1:first-child { + margin-top: 0; + padding-top: 0; +} +body > h1:first-child + h2 { + margin-top: 0; + padding-top: 0; +} +body > h3:first-child, body > h4:first-child, body > 
h5:first-child, body > h6:first-child { + margin-top: 0; + padding-top: 0; +} + +a:first-child h1, a:first-child h2, a:first-child h3, a:first-child h4, a:first-child h5, a:first-child h6 { + margin-top: 0; + padding-top: 0; +} + +h1 p, h2 p, h3 p, h4 p, h5 p, h6 p { + margin-top: 0; +} + +li p.first { + display: inline-block; +} + +ul, ol { + padding-left: 30px; +} + +ul :first-child, ol :first-child { + margin-top: 0; +} + +ul :last-child, ol :last-child { + margin-bottom: 0; +} + +dl { + padding: 0; +} +dl dt { + font-size: 14px; + font-weight: bold; + font-style: italic; + padding: 0; + margin: 15px 0 5px; +} +dl dt:first-child { + padding: 0; +} +dl dt > :first-child { + margin-top: 0; +} +dl dt > :last-child { + margin-bottom: 0; +} +dl dd { + margin: 0 0 15px; + padding: 0 15px; +} +dl dd > :first-child { + margin-top: 0; +} +dl dd > :last-child { + margin-bottom: 0; +} + +blockquote { + border-left: 4px solid #dddddd; + padding: 0 15px; + color: #777777; +} +blockquote > :first-child { + margin-top: 0; +} +blockquote > :last-child { + margin-bottom: 0; +} + +table { + padding: 0; +} +table tr { + border-top: 1px solid #cccccc; + background-color: white; + margin: 0; + padding: 0; +} +table tr:nth-child(2n) { + background-color: #f8f8f8; +} +table tr th { + font-weight: bold; + border: 1px solid #cccccc; + text-align: left; + margin: 0; + padding: 6px 13px; +} +table tr td { + border: 1px solid #cccccc; + text-align: left; + margin: 0; + padding: 6px 13px; +} +table tr th :first-child, table tr td :first-child { + margin-top: 0; +} +table tr th :last-child, table tr td :last-child { + margin-bottom: 0; +} + +img { + max-width: 100%; +} + +span.frame { + display: block; + overflow: hidden; +} +span.frame > span { + border: 1px solid #dddddd; + display: block; + float: left; + overflow: hidden; + margin: 13px 0 0; + padding: 7px; + width: auto; +} +span.frame span img { + display: block; + float: left; +} +span.frame span span { + clear: both; + color: #333333; + display: block; + padding: 5px 0 0; +} +span.align-center { + display: block; + overflow: hidden; + clear: both; +} +span.align-center > span { + display: block; + overflow: hidden; + margin: 13px auto 0; + text-align: center; +} +span.align-center span img { + margin: 0 auto; + text-align: center; +} +span.align-right { + display: block; + overflow: hidden; + clear: both; +} +span.align-right > span { + display: block; + overflow: hidden; + margin: 13px 0 0; + text-align: right; +} +span.align-right span img { + margin: 0; + text-align: right; +} +span.float-left { + display: block; + margin-right: 13px; + overflow: hidden; + float: left; +} +span.float-left span { + margin: 13px 0 0; +} +span.float-right { + display: block; + margin-left: 13px; + overflow: hidden; + float: right; +} +span.float-right > span { + display: block; + overflow: hidden; + margin: 13px auto 0; + text-align: right; +} + +.highlight pre { + border: 1px solid #eaeaea; + background-color: #f8f8f8; + border-radius: 3px; + line-height: 19px; + overflow: auto; + padding: 6px 10px; + white-space: nowrap; +} + +code { + background-color: transparent; + border: none; + margin: 0; + padding: 0; + white-space: pre; + font-size: 16px; +} + +#navigation { + margin-right: 10px; + float: right; + width: 26%; + display: inline; + color: #8B8B8B; + font-size: 15px; + font-weight: bold; + background-color: #F3F4F4; +} + +#content-container { + float: left; + width: 70%; + display: inline; +} + +#container { + padding-top: 10px; + width: 100%; +} + +#navigation a { + 
text-decoration: none; + color: #8B8B8B; +} + +#navigation a:hover { + text-decoration: underline; +} + +.active-page { + color: #171717; +} + +.active-page a { + color: #171717 !important; +} + +.to_top { + position: absolute; + margin-top: -35px; + right: 27%; + color: gray; + cursor: pointer; + width: 16px; height: 16px; + display: block; +} + +.to_top:hover { + color: black; +} + +#scala-logo { + float: left; + width: 168px; + height: 48px; + margin-right: 25px; +} + +#header { + padding-top: 16px; + padding-bottom: 10px; + margin-bottom: 10px; + height: 64px; + border-bottom: 1px solid #cccccc; +} + +#header a { + height: 100%; + display: block; + text-decoration: none; +} + +#header h1 { + cursor: pointer; + padding-top: 6px; + margin-bottom: 0px; + font-size: 30px; +} diff --git a/src/actors/scala/actors/Actor.scala b/src/actors/scala/actors/Actor.scala index 75160fa18f72..293335f720b1 100644 --- a/src/actors/scala/actors/Actor.scala +++ b/src/actors/scala/actors/Actor.scala @@ -205,7 +205,7 @@ object Actor extends Combinators { * Actions in `f` have to contain the rest of the computation of `self`, * as this method will never return. * - * A common method of continuting the computation is to send a message + * A common method of continuing the computation is to send a message * to another actor: * {{{ * react { diff --git a/src/actors/scala/actors/LinkedQueue.java b/src/actors/scala/actors/LinkedQueue.java index 796f428cf54c..3f7b93c3862e 100644 --- a/src/actors/scala/actors/LinkedQueue.java +++ b/src/actors/scala/actors/LinkedQueue.java @@ -22,7 +22,7 @@ * and takes when the queue is not empty. * Normally a put and a take can proceed simultaneously. * (Although it does not allow multiple concurrent puts or takes.) - * This class tends to perform more efficently than + * This class tends to perform more efficiently than * other Channel implementations in producer/consumer * applications. *

    [ Introduction to this package. ] diff --git a/src/actors/scala/actors/remote/Proxy.scala b/src/actors/scala/actors/remote/Proxy.scala index 9949b3618192..2cb03544f29c 100644 --- a/src/actors/scala/actors/remote/Proxy.scala +++ b/src/actors/scala/actors/remote/Proxy.scala @@ -84,7 +84,7 @@ private[remote] class Proxy(node: Node, name: Symbol, @transient var kernel: Net } // Proxy is private[remote], but these classes are public and use it in a public -// method signature. That makes the only method they have non-overriddable. +// method signature. That makes the only method they have non-overridable. // So I made them final, which seems appropriate anyway. final class LinkToFun extends Function2[AbstractActor, Proxy, Unit] with Serializable { diff --git a/src/actors/scala/actors/threadpool/AbstractCollection.java b/src/actors/scala/actors/threadpool/AbstractCollection.java index f3dc1e129293..195a0064ab5c 100644 --- a/src/actors/scala/actors/threadpool/AbstractCollection.java +++ b/src/actors/scala/actors/threadpool/AbstractCollection.java @@ -1,6 +1,6 @@ /* * Written by Dawid Kurzyniec, based on public domain code written by Doug Lea - * and publictly available documentation, and released to the public domain, as + * and publicly available documentation, and released to the public domain, as * explained at http://creativecommons.org/licenses/publicdomain */ diff --git a/src/actors/scala/actors/threadpool/ExecutorCompletionService.java b/src/actors/scala/actors/threadpool/ExecutorCompletionService.java index 9a4a4fb71c48..02e9bbe29704 100644 --- a/src/actors/scala/actors/threadpool/ExecutorCompletionService.java +++ b/src/actors/scala/actors/threadpool/ExecutorCompletionService.java @@ -135,7 +135,7 @@ public ExecutorCompletionService(Executor executor) { * @param completionQueue the queue to use as the completion queue * normally one dedicated for use by this service. This queue is * treated as unbounded -- failed attempted Queue.add - * operations for completed taskes cause them not to be + * operations for completed tasks cause them not to be * retrievable. * @throws NullPointerException if executor or completionQueue are null */ diff --git a/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java b/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java index 437af77c7acf..914d242100b0 100644 --- a/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java +++ b/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java @@ -20,13 +20,13 @@ * *

    The order of entry * to the read and write lock is unspecified, subject to reentrancy - * constraints. A nonfair lock that is continously contended may + * constraints. A nonfair lock that is continuously contended may * indefinitely postpone one or more reader or writer threads, but * will normally have higher throughput than a fair lock. *

    * * DEPARTURE FROM java.util.concurrent: this implementation impose - * a writer-preferrence and thus its acquisition order may be different + * a writer-preference and thus its acquisition order may be different * than in java.util.concurrent. * *

  • Reentrancy diff --git a/src/asm/README b/src/asm/README new file mode 100644 index 000000000000..3ceac8809821 --- /dev/null +++ b/src/asm/README @@ -0,0 +1,30 @@ +Version 5.0.2, SVN r1741, tags/ASM_5_0_2 + +Git SVN repo: https://github.com/lrytz/asm + - git svn howto: https://github.com/lrytz/asm/issues/1 + +Upgrading ASM +------------- + +Start by deleting all source files in src/asm/ and copy the ones from the latest ASM release. + +Excluded Files (don't copy): + - package.html files + - org/objectweb/asm/commons + - org/objectweb/asm/optimizer + - org/objectweb/asm/xml + +Re-packaging and cosmetic changes: + - convert line endings (there are some CRLF) + find src/asm/scala/tools/asm -name '*.java' | xargs dos2unix + - change package clauses + find src/asm/scala/tools/asm -name '*.java' | xargs sed -i '' -e 's/package org\.objectweb\.asm/package scala.tools.asm/' + - update imports + find src/asm/scala/tools/asm -name '*.java' | xargs sed -i '' -e 's/import org\.objectweb\.asm/import scala.tools.asm/' + - update @links, @associates + find src/asm/scala/tools/asm -name '*.java' | xargs sed -i '' -e 's/@link org\.objectweb\.asm/@link scala.tools.asm/' + find src/asm/scala/tools/asm -name '*.java' | xargs sed -i '' -e 's/@associates org\.objectweb\.asm/@associates scala.tools.asm/' + - remove trailing whitespace + find src/asm/scala/tools/asm -name '*.java' | xargs sed -i '' -e 's/[ ]*$//' + +Actual changes: check the git log for [asm-cherry-pick] after the previous upgrade. diff --git a/src/asm/scala/tools/asm/AnnotationVisitor.java b/src/asm/scala/tools/asm/AnnotationVisitor.java index c806ca71e8fa..abcaf1d6d151 100644 --- a/src/asm/scala/tools/asm/AnnotationVisitor.java +++ b/src/asm/scala/tools/asm/AnnotationVisitor.java @@ -41,7 +41,7 @@ public abstract class AnnotationVisitor { /** * The ASM API version implemented by this visitor. The value of this field - * must be one of {@link Opcodes#ASM4}. + * must be one of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. */ protected final int api; @@ -56,7 +56,7 @@ public abstract class AnnotationVisitor { * * @param api * the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. + * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. */ public AnnotationVisitor(final int api) { this(api, null); @@ -67,13 +67,13 @@ public AnnotationVisitor(final int api) { * * @param api * the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. + * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. * @param av * the annotation visitor to which this visitor must delegate * method calls. May be null. 
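An aside on the API-version change in this file: the relaxed constructor check just below accepts Opcodes.ASM5 as well as ASM4, so a client visitor built against this upgraded copy can simply pass the new constant. A minimal sketch, assuming the repackaged scala.tools.asm classes are on the classpath; the LoggingAnnotationVisitor name and the println are illustrative, not part of the patch:

    import scala.tools.asm.AnnotationVisitor;
    import scala.tools.asm.Opcodes;

    // Minimal sketch: a pass-through annotation visitor built against the
    // upgraded API. Opcodes.ASM5 is accepted by the relaxed check below;
    // Opcodes.ASM4 continues to work for older visitors.
    class LoggingAnnotationVisitor extends AnnotationVisitor {
        LoggingAnnotationVisitor(AnnotationVisitor next) {
            super(Opcodes.ASM5, next);
        }

        @Override
        public void visit(String name, Object value) {
            System.out.println("annotation value " + name + " = " + value);
            super.visit(name, value); // delegates to 'next' when it is non-null
        }
    }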
*/ public AnnotationVisitor(final int api, final AnnotationVisitor av) { - if (api != Opcodes.ASM4) { + if (api != Opcodes.ASM4 && api != Opcodes.ASM5) { throw new IllegalArgumentException(); } this.api = api; diff --git a/src/asm/scala/tools/asm/AnnotationWriter.java b/src/asm/scala/tools/asm/AnnotationWriter.java index 8eb5b2ef4831..6de74ce04122 100644 --- a/src/asm/scala/tools/asm/AnnotationWriter.java +++ b/src/asm/scala/tools/asm/AnnotationWriter.java @@ -104,7 +104,7 @@ final class AnnotationWriter extends AnnotationVisitor { */ AnnotationWriter(final ClassWriter cw, final boolean named, final ByteVector bv, final ByteVector parent, final int offset) { - super(Opcodes.ASM4); + super(Opcodes.ASM5); this.cw = cw; this.named = named; this.bv = bv; @@ -315,4 +315,57 @@ static void put(final AnnotationWriter[] panns, final int off, } } } + + /** + * Puts the given type reference and type path into the given bytevector. + * LOCAL_VARIABLE and RESOURCE_VARIABLE target types are not supported. + * + * @param typeRef + * a reference to the annotated type. See {@link TypeReference}. + * @param typePath + * the path to the annotated type argument, wildcard bound, array + * element type, or static inner type within 'typeRef'. May be + * null if the annotation targets 'typeRef' as a whole. + * @param out + * where the type reference and type path must be put. + */ + static void putTarget(int typeRef, TypePath typePath, ByteVector out) { + switch (typeRef >>> 24) { + case 0x00: // CLASS_TYPE_PARAMETER + case 0x01: // METHOD_TYPE_PARAMETER + case 0x16: // METHOD_FORMAL_PARAMETER + out.putShort(typeRef >>> 16); + break; + case 0x13: // FIELD + case 0x14: // METHOD_RETURN + case 0x15: // METHOD_RECEIVER + out.putByte(typeRef >>> 24); + break; + case 0x47: // CAST + case 0x48: // CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT + case 0x49: // METHOD_INVOCATION_TYPE_ARGUMENT + case 0x4A: // CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT + case 0x4B: // METHOD_REFERENCE_TYPE_ARGUMENT + out.putInt(typeRef); + break; + // case 0x10: // CLASS_EXTENDS + // case 0x11: // CLASS_TYPE_PARAMETER_BOUND + // case 0x12: // METHOD_TYPE_PARAMETER_BOUND + // case 0x17: // THROWS + // case 0x42: // EXCEPTION_PARAMETER + // case 0x43: // INSTANCEOF + // case 0x44: // NEW + // case 0x45: // CONSTRUCTOR_REFERENCE + // case 0x46: // METHOD_REFERENCE + default: + out.put12(typeRef >>> 24, (typeRef & 0xFFFF00) >> 8); + break; + } + if (typePath == null) { + out.putByte(0); + } else { + int length = typePath.b[typePath.offset] * 2 + 1; + out.putByteArray(typePath.b, typePath.offset, length); + } + } } diff --git a/src/asm/scala/tools/asm/ByteVector.java b/src/asm/scala/tools/asm/ByteVector.java index 2bc63eb38442..3bca7af12a44 100644 --- a/src/asm/scala/tools/asm/ByteVector.java +++ b/src/asm/scala/tools/asm/ByteVector.java @@ -204,11 +204,14 @@ public ByteVector putLong(final long l) { * automatically enlarged if necessary. * * @param s - * a String. + * a String whose UTF8 encoded length must be less than 65536. * @return this byte vector. 
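For the putTarget helper added to AnnotationWriter above: the switch dispatches on the top byte of the packed typeRef, and for the *_TYPE_PARAMETER and METHOD_FORMAL_PARAMETER targets the index sits in the next byte, which is why putShort(typeRef >>> 16) emits exactly target_type followed by the index. A small sketch of that packing; the demo class and the chosen index are illustrative only:

    // Minimal sketch of the typeRef layout implied by the switch in putTarget.
    public class TypeRefPackingDemo {
        public static void main(String[] args) {
            int typeParameterIndex = 1;                              // second type parameter
            int typeRef = (0x01 << 24) | (typeParameterIndex << 16); // 0x01 = METHOD_TYPE_PARAMETER
            System.out.println(Integer.toHexString(typeRef >>> 24)); // "1"   -> selects the putShort branch
            System.out.println(Integer.toHexString(typeRef >>> 16)); // "101" -> target_type, then index
        }
    }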
*/ public ByteVector putUTF8(final String s) { int charLength = s.length(); + if (charLength > 65535) { + throw new IllegalArgumentException(); + } int len = length; if (len + 2 + charLength > data.length) { enlarge(2 + charLength); @@ -227,38 +230,68 @@ public ByteVector putUTF8(final String s) { if (c >= '\001' && c <= '\177') { data[len++] = (byte) c; } else { - int byteLength = i; - for (int j = i; j < charLength; ++j) { - c = s.charAt(j); - if (c >= '\001' && c <= '\177') { - byteLength++; - } else if (c > '\u07FF') { - byteLength += 3; - } else { - byteLength += 2; - } - } - data[length] = (byte) (byteLength >>> 8); - data[length + 1] = (byte) byteLength; - if (length + 2 + byteLength > data.length) { - length = len; - enlarge(2 + byteLength); - data = this.data; - } - for (int j = i; j < charLength; ++j) { - c = s.charAt(j); - if (c >= '\001' && c <= '\177') { - data[len++] = (byte) c; - } else if (c > '\u07FF') { - data[len++] = (byte) (0xE0 | c >> 12 & 0xF); - data[len++] = (byte) (0x80 | c >> 6 & 0x3F); - data[len++] = (byte) (0x80 | c & 0x3F); - } else { - data[len++] = (byte) (0xC0 | c >> 6 & 0x1F); - data[len++] = (byte) (0x80 | c & 0x3F); - } - } - break; + length = len; + return encodeUTF8(s, i, 65535); + } + } + length = len; + return this; + } + + /** + * Puts an UTF8 string into this byte vector. The byte vector is + * automatically enlarged if necessary. The string length is encoded in two + * bytes before the encoded characters, if there is space for that (i.e. if + * this.length - i - 2 >= 0). + * + * @param s + * the String to encode. + * @param i + * the index of the first character to encode. The previous + * characters are supposed to have already been encoded, using + * only one byte per character. + * @param maxByteLength + * the maximum byte length of the encoded string, including the + * already encoded characters. + * @return this byte vector. 
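putUTF8 now rejects strings whose encoded form would overflow the two-byte length prefix of a CONSTANT_Utf8 entry, using the usual 1/2/3-byte modified-UTF-8 rule that encodeUTF8 applies below. A standalone sketch of the same length computation; Utf8LengthDemo is not part of ASM, just an illustration of the rule:

    // Minimal sketch: modified-UTF-8 length as computed by encodeUTF8 below.
    public class Utf8LengthDemo {
        static int modifiedUtf8Length(String s) {
            int byteLength = 0;
            for (int j = 0; j < s.length(); ++j) {
                char c = s.charAt(j);
                if (c >= '\u0001' && c <= '\u007F') {
                    byteLength += 1;      // plain ASCII except NUL
                } else if (c > '\u07FF') {
                    byteLength += 3;      // includes each surrogate half
                } else {
                    byteLength += 2;      // NUL and U+0080..U+07FF
                }
            }
            return byteLength;
        }

        public static void main(String[] args) {
            System.out.println(modifiedUtf8Length("Lscala/Option;")); // 14 (all ASCII)
            System.out.println(modifiedUtf8Length("\u00e9\u20ac"));   // 2 + 3 = 5
        }
    }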
+ */ + ByteVector encodeUTF8(final String s, int i, int maxByteLength) { + int charLength = s.length(); + int byteLength = i; + char c; + for (int j = i; j < charLength; ++j) { + c = s.charAt(j); + if (c >= '\001' && c <= '\177') { + byteLength++; + } else if (c > '\u07FF') { + byteLength += 3; + } else { + byteLength += 2; + } + } + if (byteLength > maxByteLength) { + throw new IllegalArgumentException(); + } + int start = length - i - 2; + if (start >= 0) { + data[start] = (byte) (byteLength >>> 8); + data[start + 1] = (byte) byteLength; + } + if (length + byteLength - i > data.length) { + enlarge(byteLength - i); + } + int len = length; + for (int j = i; j < charLength; ++j) { + c = s.charAt(j); + if (c >= '\001' && c <= '\177') { + data[len++] = (byte) c; + } else if (c > '\u07FF') { + data[len++] = (byte) (0xE0 | c >> 12 & 0xF); + data[len++] = (byte) (0x80 | c >> 6 & 0x3F); + data[len++] = (byte) (0x80 | c & 0x3F); + } else { + data[len++] = (byte) (0xC0 | c >> 6 & 0x1F); + data[len++] = (byte) (0x80 | c & 0x3F); } } length = len; diff --git a/src/asm/scala/tools/asm/ClassReader.java b/src/asm/scala/tools/asm/ClassReader.java index cc655c1b6274..8b0e12cb049b 100644 --- a/src/asm/scala/tools/asm/ClassReader.java +++ b/src/asm/scala/tools/asm/ClassReader.java @@ -166,7 +166,7 @@ public ClassReader(final byte[] b) { public ClassReader(final byte[] b, final int off, final int len) { this.b = b; // checks the class version - if (readShort(off + 6) > Opcodes.V1_7) { + if (readShort(off + 6) > Opcodes.V1_8) { throw new IllegalArgumentException(); } // parses the constant pool @@ -557,6 +557,8 @@ public void accept(final ClassVisitor classVisitor, String enclosingDesc = null; int anns = 0; int ianns = 0; + int tanns = 0; + int itanns = 0; int innerClasses = 0; Attribute attributes = null; @@ -581,6 +583,9 @@ public void accept(final ClassVisitor classVisitor, } else if (ANNOTATIONS && "RuntimeVisibleAnnotations".equals(attrName)) { anns = u + 8; + } else if (ANNOTATIONS + && "RuntimeVisibleTypeAnnotations".equals(attrName)) { + tanns = u + 8; } else if ("Deprecated".equals(attrName)) { access |= Opcodes.ACC_DEPRECATED; } else if ("Synthetic".equals(attrName)) { @@ -592,6 +597,9 @@ public void accept(final ClassVisitor classVisitor, } else if (ANNOTATIONS && "RuntimeInvisibleAnnotations".equals(attrName)) { ianns = u + 8; + } else if (ANNOTATIONS + && "RuntimeInvisibleTypeAnnotations".equals(attrName)) { + itanns = u + 8; } else if ("BootstrapMethods".equals(attrName)) { int[] bootstrapMethods = new int[readUnsignedShort(u + 8)]; for (int j = 0, v = u + 10; j < bootstrapMethods.length; j++) { @@ -626,7 +634,7 @@ public void accept(final ClassVisitor classVisitor, enclosingDesc); } - // visits the class annotations + // visits the class annotations and type annotations if (ANNOTATIONS && anns != 0) { for (int i = readUnsignedShort(anns), v = anns + 2; i > 0; --i) { v = readAnnotationValues(v + 2, c, true, @@ -639,6 +647,22 @@ public void accept(final ClassVisitor classVisitor, classVisitor.visitAnnotation(readUTF8(v, c), false)); } } + if (ANNOTATIONS && tanns != 0) { + for (int i = readUnsignedShort(tanns), v = tanns + 2; i > 0; --i) { + v = readAnnotationTarget(context, v); + v = readAnnotationValues(v + 2, c, true, + classVisitor.visitTypeAnnotation(context.typeRef, + context.typePath, readUTF8(v, c), true)); + } + } + if (ANNOTATIONS && itanns != 0) { + for (int i = readUnsignedShort(itanns), v = itanns + 2; i > 0; --i) { + v = readAnnotationTarget(context, v); + v = readAnnotationValues(v 
+ 2, c, true, + classVisitor.visitTypeAnnotation(context.typeRef, + context.typePath, readUTF8(v, c), false)); + } + } // visits the attributes while (attributes != null) { @@ -697,6 +721,8 @@ private int readField(final ClassVisitor classVisitor, String signature = null; int anns = 0; int ianns = 0; + int tanns = 0; + int itanns = 0; Object value = null; Attribute attributes = null; @@ -717,9 +743,15 @@ private int readField(final ClassVisitor classVisitor, } else if (ANNOTATIONS && "RuntimeVisibleAnnotations".equals(attrName)) { anns = u + 8; + } else if (ANNOTATIONS + && "RuntimeVisibleTypeAnnotations".equals(attrName)) { + tanns = u + 8; } else if (ANNOTATIONS && "RuntimeInvisibleAnnotations".equals(attrName)) { ianns = u + 8; + } else if (ANNOTATIONS + && "RuntimeInvisibleTypeAnnotations".equals(attrName)) { + itanns = u + 8; } else { Attribute attr = readAttribute(context.attrs, attrName, u + 8, readInt(u + 4), c, -1, null); @@ -739,7 +771,7 @@ private int readField(final ClassVisitor classVisitor, return u; } - // visits the field annotations + // visits the field annotations and type annotations if (ANNOTATIONS && anns != 0) { for (int i = readUnsignedShort(anns), v = anns + 2; i > 0; --i) { v = readAnnotationValues(v + 2, c, true, @@ -752,6 +784,22 @@ private int readField(final ClassVisitor classVisitor, fv.visitAnnotation(readUTF8(v, c), false)); } } + if (ANNOTATIONS && tanns != 0) { + for (int i = readUnsignedShort(tanns), v = tanns + 2; i > 0; --i) { + v = readAnnotationTarget(context, v); + v = readAnnotationValues(v + 2, c, true, + fv.visitTypeAnnotation(context.typeRef, + context.typePath, readUTF8(v, c), true)); + } + } + if (ANNOTATIONS && itanns != 0) { + for (int i = readUnsignedShort(itanns), v = itanns + 2; i > 0; --i) { + v = readAnnotationTarget(context, v); + v = readAnnotationValues(v + 2, c, true, + fv.visitTypeAnnotation(context.typeRef, + context.typePath, readUTF8(v, c), false)); + } + } // visits the field attributes while (attributes != null) { @@ -782,9 +830,9 @@ private int readMethod(final ClassVisitor classVisitor, final Context context, int u) { // reads the method declaration char[] c = context.buffer; - int access = readUnsignedShort(u); - String name = readUTF8(u + 2, c); - String desc = readUTF8(u + 4, c); + context.access = readUnsignedShort(u); + context.name = readUTF8(u + 2, c); + context.desc = readUTF8(u + 4, c); u += 6; // reads the method attributes @@ -792,8 +840,11 @@ private int readMethod(final ClassVisitor classVisitor, int exception = 0; String[] exceptions = null; String signature = null; + int methodParameters = 0; int anns = 0; int ianns = 0; + int tanns = 0; + int itanns = 0; int dann = 0; int mpanns = 0; int impanns = 0; @@ -818,24 +869,32 @@ private int readMethod(final ClassVisitor classVisitor, } else if (SIGNATURES && "Signature".equals(attrName)) { signature = readUTF8(u + 8, c); } else if ("Deprecated".equals(attrName)) { - access |= Opcodes.ACC_DEPRECATED; + context.access |= Opcodes.ACC_DEPRECATED; } else if (ANNOTATIONS && "RuntimeVisibleAnnotations".equals(attrName)) { anns = u + 8; + } else if (ANNOTATIONS + && "RuntimeVisibleTypeAnnotations".equals(attrName)) { + tanns = u + 8; } else if (ANNOTATIONS && "AnnotationDefault".equals(attrName)) { dann = u + 8; } else if ("Synthetic".equals(attrName)) { - access |= Opcodes.ACC_SYNTHETIC + context.access |= Opcodes.ACC_SYNTHETIC | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE; } else if (ANNOTATIONS && "RuntimeInvisibleAnnotations".equals(attrName)) { ianns = u + 8; + } else if 
(ANNOTATIONS + && "RuntimeInvisibleTypeAnnotations".equals(attrName)) { + itanns = u + 8; } else if (ANNOTATIONS && "RuntimeVisibleParameterAnnotations".equals(attrName)) { mpanns = u + 8; } else if (ANNOTATIONS && "RuntimeInvisibleParameterAnnotations".equals(attrName)) { impanns = u + 8; + } else if ("MethodParameters".equals(attrName)) { + methodParameters = u + 8; } else { Attribute attr = readAttribute(context.attrs, attrName, u + 8, readInt(u + 4), c, -1, null); @@ -849,8 +908,8 @@ private int readMethod(final ClassVisitor classVisitor, u += 2; // visits the method declaration - MethodVisitor mv = classVisitor.visitMethod(access, name, desc, - signature, exceptions); + MethodVisitor mv = classVisitor.visitMethod(context.access, + context.name, context.desc, signature, exceptions); if (mv == null) { return u; } @@ -894,6 +953,13 @@ private int readMethod(final ClassVisitor classVisitor, } } + // visit the method parameters + if (methodParameters != 0) { + for (int i = b[methodParameters] & 0xFF, v = methodParameters + 1; i > 0; --i, v = v + 4) { + mv.visitParameter(readUTF8(v, c), readUnsignedShort(v + 2)); + } + } + // visits the method annotations if (ANNOTATIONS && dann != 0) { AnnotationVisitor dv = mv.visitAnnotationDefault(); @@ -914,11 +980,27 @@ private int readMethod(final ClassVisitor classVisitor, mv.visitAnnotation(readUTF8(v, c), false)); } } + if (ANNOTATIONS && tanns != 0) { + for (int i = readUnsignedShort(tanns), v = tanns + 2; i > 0; --i) { + v = readAnnotationTarget(context, v); + v = readAnnotationValues(v + 2, c, true, + mv.visitTypeAnnotation(context.typeRef, + context.typePath, readUTF8(v, c), true)); + } + } + if (ANNOTATIONS && itanns != 0) { + for (int i = readUnsignedShort(itanns), v = itanns + 2; i > 0; --i) { + v = readAnnotationTarget(context, v); + v = readAnnotationValues(v + 2, c, true, + mv.visitTypeAnnotation(context.typeRef, + context.typePath, readUTF8(v, c), false)); + } + } if (ANNOTATIONS && mpanns != 0) { - readParameterAnnotations(mpanns, desc, c, true, mv); + readParameterAnnotations(mv, context, mpanns, true); } if (ANNOTATIONS && impanns != 0) { - readParameterAnnotations(impanns, desc, c, false, mv); + readParameterAnnotations(mv, context, impanns, false); } // visits the method attributes @@ -931,9 +1013,6 @@ private int readMethod(final ClassVisitor classVisitor, // visits the method code if (code != 0) { - context.access = access; - context.name = name; - context.desc = desc; mv.visitCode(); readCode(mv, context, code); } @@ -966,7 +1045,7 @@ private void readCode(final MethodVisitor mv, final Context context, int u) { // reads the bytecode to find the labels int codeStart = u; int codeEnd = u + codeLength; - Label[] labels = new Label[codeLength + 2]; + Label[] labels = context.labels = new Label[codeLength + 2]; readLabel(codeLength + 1, labels); while (u < codeEnd) { int offset = u - codeStart; @@ -1049,6 +1128,12 @@ private void readCode(final MethodVisitor mv, final Context context, int u) { u += 2; // reads the code attributes + int[] tanns = null; // start index of each visible type annotation + int[] itanns = null; // start index of each invisible type annotation + int tann = 0; // current index in tanns array + int itann = 0; // current index in itanns array + int ntoff = -1; // next visible type annotation code offset + int nitoff = -1; // next invisible type annotation code offset int varTable = 0; int varTypeTable = 0; boolean zip = true; @@ -1089,6 +1174,16 @@ private void readCode(final MethodVisitor mv, final Context 
context, int u) { v += 4; } } + } else if (ANNOTATIONS + && "RuntimeVisibleTypeAnnotations".equals(attrName)) { + tanns = readTypeAnnotations(mv, context, u + 8, true); + ntoff = tanns.length == 0 || readByte(tanns[0]) < 0x43 ? -1 + : readUnsignedShort(tanns[0] + 1); + } else if (ANNOTATIONS + && "RuntimeInvisibleTypeAnnotations".equals(attrName)) { + itanns = readTypeAnnotations(mv, context, u + 8, false); + nitoff = itanns.length == 0 || readByte(itanns[0]) < 0x43 ? -1 + : readUnsignedShort(itanns[0] + 1); } else if (FRAMES && "StackMapTable".equals(attrName)) { if ((context.flags & SKIP_FRAMES) == 0) { stackMap = u + 10; @@ -1211,7 +1306,7 @@ private void readCode(final MethodVisitor mv, final Context context, int u) { } } if (frameCount > 0) { - stackMap = readFrame(stackMap, zip, unzip, labels, frame); + stackMap = readFrame(stackMap, zip, unzip, frame); --frameCount; } else { frame = null; @@ -1310,6 +1405,7 @@ private void readCode(final MethodVisitor mv, final Context context, int u) { case ClassWriter.FIELDORMETH_INSN: case ClassWriter.ITFMETH_INSN: { int cpIndex = items[readUnsignedShort(u + 1)]; + boolean itf = b[cpIndex - 1] == ClassWriter.IMETH; String iowner = readClass(cpIndex, c); cpIndex = items[readUnsignedShort(cpIndex + 2)]; String iname = readUTF8(cpIndex, c); @@ -1317,7 +1413,7 @@ private void readCode(final MethodVisitor mv, final Context context, int u) { if (opcode < Opcodes.INVOKEVIRTUAL) { mv.visitFieldInsn(opcode, iowner, iname, idesc); } else { - mv.visitMethodInsn(opcode, iowner, iname, idesc); + mv.visitMethodInsn(opcode, iowner, iname, idesc, itf); } if (opcode == Opcodes.INVOKEINTERFACE) { u += 5; @@ -1358,6 +1454,29 @@ private void readCode(final MethodVisitor mv, final Context context, int u) { u += 4; break; } + + // visit the instruction annotations, if any + while (tanns != null && tann < tanns.length && ntoff <= offset) { + if (ntoff == offset) { + int v = readAnnotationTarget(context, tanns[tann]); + readAnnotationValues(v + 2, c, true, + mv.visitInsnAnnotation(context.typeRef, + context.typePath, readUTF8(v, c), true)); + } + ntoff = ++tann >= tanns.length || readByte(tanns[tann]) < 0x43 ? -1 + : readUnsignedShort(tanns[tann] + 1); + } + while (itanns != null && itann < itanns.length && nitoff <= offset) { + if (nitoff == offset) { + int v = readAnnotationTarget(context, itanns[itann]); + readAnnotationValues(v + 2, c, true, + mv.visitInsnAnnotation(context.typeRef, + context.typePath, readUTF8(v, c), false)); + } + nitoff = ++itann >= itanns.length + || readByte(itanns[itann]) < 0x43 ? 
-1 + : readUnsignedShort(itanns[itann] + 1); + } } if (labels[codeLength] != null) { mv.visitLabel(labels[codeLength]); @@ -1397,6 +1516,32 @@ private void readCode(final MethodVisitor mv, final Context context, int u) { } } + // visits the local variables type annotations + if (tanns != null) { + for (int i = 0; i < tanns.length; ++i) { + if ((readByte(tanns[i]) >> 1) == (0x40 >> 1)) { + int v = readAnnotationTarget(context, tanns[i]); + v = readAnnotationValues(v + 2, c, true, + mv.visitLocalVariableAnnotation(context.typeRef, + context.typePath, context.start, + context.end, context.index, readUTF8(v, c), + true)); + } + } + } + if (itanns != null) { + for (int i = 0; i < itanns.length; ++i) { + if ((readByte(itanns[i]) >> 1) == (0x40 >> 1)) { + int v = readAnnotationTarget(context, itanns[i]); + v = readAnnotationValues(v + 2, c, true, + mv.visitLocalVariableAnnotation(context.typeRef, + context.typePath, context.start, + context.end, context.index, readUTF8(v, c), + false)); + } + } + } + // visits the code attributes while (attributes != null) { Attribute attr = attributes.next; @@ -1409,25 +1554,176 @@ private void readCode(final MethodVisitor mv, final Context context, int u) { mv.visitMaxs(maxStack, maxLocals); } + /** + * Parses a type annotation table to find the labels, and to visit the try + * catch block annotations. + * + * @param u + * the start offset of a type annotation table. + * @param mv + * the method visitor to be used to visit the try catch block + * annotations. + * @param context + * information about the class being parsed. + * @param visible + * if the type annotation table to parse contains runtime visible + * annotations. + * @return the start offset of each type annotation in the parsed table. + */ + private int[] readTypeAnnotations(final MethodVisitor mv, + final Context context, int u, boolean visible) { + char[] c = context.buffer; + int[] offsets = new int[readUnsignedShort(u)]; + u += 2; + for (int i = 0; i < offsets.length; ++i) { + offsets[i] = u; + int target = readInt(u); + switch (target >>> 24) { + case 0x00: // CLASS_TYPE_PARAMETER + case 0x01: // METHOD_TYPE_PARAMETER + case 0x16: // METHOD_FORMAL_PARAMETER + u += 2; + break; + case 0x13: // FIELD + case 0x14: // METHOD_RETURN + case 0x15: // METHOD_RECEIVER + u += 1; + break; + case 0x40: // LOCAL_VARIABLE + case 0x41: // RESOURCE_VARIABLE + for (int j = readUnsignedShort(u + 1); j > 0; --j) { + int start = readUnsignedShort(u + 3); + int length = readUnsignedShort(u + 5); + readLabel(start, context.labels); + readLabel(start + length, context.labels); + u += 6; + } + u += 3; + break; + case 0x47: // CAST + case 0x48: // CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT + case 0x49: // METHOD_INVOCATION_TYPE_ARGUMENT + case 0x4A: // CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT + case 0x4B: // METHOD_REFERENCE_TYPE_ARGUMENT + u += 4; + break; + // case 0x10: // CLASS_EXTENDS + // case 0x11: // CLASS_TYPE_PARAMETER_BOUND + // case 0x12: // METHOD_TYPE_PARAMETER_BOUND + // case 0x17: // THROWS + // case 0x42: // EXCEPTION_PARAMETER + // case 0x43: // INSTANCEOF + // case 0x44: // NEW + // case 0x45: // CONSTRUCTOR_REFERENCE + // case 0x46: // METHOD_REFERENCE + default: + u += 3; + break; + } + int pathLength = readByte(u); + if ((target >>> 24) == 0x42) { + TypePath path = pathLength == 0 ? 
null : new TypePath(b, u); + u += 1 + 2 * pathLength; + u = readAnnotationValues(u + 2, c, true, + mv.visitTryCatchAnnotation(target, path, + readUTF8(u, c), visible)); + } else { + u = readAnnotationValues(u + 3 + 2 * pathLength, c, true, null); + } + } + return offsets; + } + + /** + * Parses the header of a type annotation to extract its target_type and + * target_path (the result is stored in the given context), and returns the + * start offset of the rest of the type_annotation structure (i.e. the + * offset to the type_index field, which is followed by + * num_element_value_pairs and then the name,value pairs). + * + * @param context + * information about the class being parsed. This is where the + * extracted target_type and target_path must be stored. + * @param u + * the start offset of a type_annotation structure. + * @return the start offset of the rest of the type_annotation structure. + */ + private int readAnnotationTarget(final Context context, int u) { + int target = readInt(u); + switch (target >>> 24) { + case 0x00: // CLASS_TYPE_PARAMETER + case 0x01: // METHOD_TYPE_PARAMETER + case 0x16: // METHOD_FORMAL_PARAMETER + target &= 0xFFFF0000; + u += 2; + break; + case 0x13: // FIELD + case 0x14: // METHOD_RETURN + case 0x15: // METHOD_RECEIVER + target &= 0xFF000000; + u += 1; + break; + case 0x40: // LOCAL_VARIABLE + case 0x41: { // RESOURCE_VARIABLE + target &= 0xFF000000; + int n = readUnsignedShort(u + 1); + context.start = new Label[n]; + context.end = new Label[n]; + context.index = new int[n]; + u += 3; + for (int i = 0; i < n; ++i) { + int start = readUnsignedShort(u); + int length = readUnsignedShort(u + 2); + context.start[i] = readLabel(start, context.labels); + context.end[i] = readLabel(start + length, context.labels); + context.index[i] = readUnsignedShort(u + 4); + u += 6; + } + break; + } + case 0x47: // CAST + case 0x48: // CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT + case 0x49: // METHOD_INVOCATION_TYPE_ARGUMENT + case 0x4A: // CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT + case 0x4B: // METHOD_REFERENCE_TYPE_ARGUMENT + target &= 0xFF0000FF; + u += 4; + break; + // case 0x10: // CLASS_EXTENDS + // case 0x11: // CLASS_TYPE_PARAMETER_BOUND + // case 0x12: // METHOD_TYPE_PARAMETER_BOUND + // case 0x17: // THROWS + // case 0x42: // EXCEPTION_PARAMETER + // case 0x43: // INSTANCEOF + // case 0x44: // NEW + // case 0x45: // CONSTRUCTOR_REFERENCE + // case 0x46: // METHOD_REFERENCE + default: + target &= (target >>> 24) < 0x43 ? 0xFFFFFF00 : 0xFF000000; + u += 3; + break; + } + int pathLength = readByte(u); + context.typeRef = target; + context.typePath = pathLength == 0 ? null : new TypePath(b, u); + return u + 1 + 2 * pathLength; + } + /** * Reads parameter annotations and makes the given visitor visit them. * + * @param mv + * the visitor that must visit the annotations. + * @param context + * information about the class being parsed. * @param v * start offset in {@link #b b} of the annotations to be read. - * @param desc - * the method descriptor. - * @param buf - * buffer to be used to call {@link #readUTF8 readUTF8}, - * {@link #readClass(int,char[]) readClass} or {@link #readConst - * readConst}. * @param visible * true if the annotations to be read are visible at * runtime. - * @param mv - * the visitor that must visit the annotations. 
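The readParameterAnnotations rework in the hunk that continues below keeps the javac workaround that infers leading synthetic parameters by subtracting the attribute's parameter count from the number of arguments in the method descriptor. A worked sketch of that arithmetic; the descriptor and counts are hypothetical:

    import scala.tools.asm.Type;

    // Minimal sketch of the synthetic-parameter arithmetic used by the
    // workaround: descriptor arguments minus parameters reported by the
    // Runtime(In)VisibleParameterAnnotations attribute.
    public class SyntheticParamDemo {
        public static void main(String[] args) {
            String desc = "(Lp/Outer;I)V"; // hypothetical: outer instance + one int
            int reportedByJavac = 1;       // attribute counts only the declared parameter
            int inDescriptor = Type.getArgumentTypes(desc).length; // 2
            int synthetics = inDescriptor - reportedByJavac;       // 1 leading synthetic
            System.out.println("assumed synthetic parameters: " + synthetics);
        }
    }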
*/ - private void readParameterAnnotations(int v, final String desc, - final char[] buf, final boolean visible, final MethodVisitor mv) { + private void readParameterAnnotations(final MethodVisitor mv, + final Context context, int v, final boolean visible) { int i; int n = b[v++] & 0xFF; // workaround for a bug in javac (javac compiler generates a parameter @@ -1436,7 +1732,7 @@ private void readParameterAnnotations(int v, final String desc, // equal to the number of parameters in the method descriptor - which // includes the synthetic parameters added by the compiler). This work- // around supposes that the synthetic parameters are the first ones. - int synthetics = Type.getArgumentTypes(desc).length - n; + int synthetics = Type.getArgumentTypes(context.desc).length - n; AnnotationVisitor av; for (i = 0; i < synthetics; ++i) { // virtual annotation to detect synthetic parameters in MethodWriter @@ -1445,12 +1741,13 @@ private void readParameterAnnotations(int v, final String desc, av.visitEnd(); } } + char[] c = context.buffer; for (; i < n + synthetics; ++i) { int j = readUnsignedShort(v); v += 2; for (; j > 0; --j) { - av = mv.visitParameterAnnotation(i, readUTF8(v, buf), visible); - v = readAnnotationValues(v + 2, buf, true, av); + av = mv.visitParameterAnnotation(i, readUTF8(v, c), visible); + v = readAnnotationValues(v + 2, c, true, av); } } } @@ -1729,17 +2026,14 @@ private void getImplicitFrame(final Context frame) { * if the stack map frame at stackMap is compressed or not. * @param unzip * if the stack map frame must be uncompressed. - * @param labels - * the labels of the method currently being parsed, indexed by - * their offset. A new label for the parsed stack map frame is - * stored in this array if it does not already exist. * @param frame * where the parsed stack map frame must be stored. * @return the offset of the first byte following the parsed frame. */ private int readFrame(int stackMap, boolean zip, boolean unzip, - Label[] labels, Context frame) { + Context frame) { char[] c = frame.buffer; + Label[] labels = frame.labels; int tag; int delta; if (zip) { diff --git a/src/asm/scala/tools/asm/ClassVisitor.java b/src/asm/scala/tools/asm/ClassVisitor.java index 3fc364d5e5b6..48dc2ca6ae4d 100644 --- a/src/asm/scala/tools/asm/ClassVisitor.java +++ b/src/asm/scala/tools/asm/ClassVisitor.java @@ -33,8 +33,9 @@ * A visitor to visit a Java class. The methods of this class must be called in * the following order: visit [ visitSource ] [ * visitOuterClass ] ( visitAnnotation | - * visitAttribute )* ( visitInnerClass | visitField | - * visitMethod )* visitEnd. + * visitTypeAnnotation | visitAttribute )* ( + * visitInnerClass | visitField | visitMethod )* + * visitEnd. * * @author Eric Bruneton */ @@ -42,7 +43,7 @@ public abstract class ClassVisitor { /** * The ASM API version implemented by this visitor. The value of this field - * must be one of {@link Opcodes#ASM4}. + * must be one of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. */ protected final int api; @@ -57,7 +58,7 @@ public abstract class ClassVisitor { * * @param api * the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. + * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. */ public ClassVisitor(final int api) { this(api, null); @@ -68,13 +69,13 @@ public ClassVisitor(final int api) { * * @param api * the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. + * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. 
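The class-level javadoc above adds visitTypeAnnotation to the required calling order for ClassVisitor. A minimal sketch that drives a ClassWriter in that order to emit a trivial class; the class and file names are illustrative, and the repackaged scala.tools.asm classes are assumed:

    import scala.tools.asm.ClassWriter;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    // Minimal sketch: visit, visitSource, then members, then visitEnd,
    // matching the calling order documented for ClassVisitor.
    public class VisitOrderDemo {
        public static void main(String[] args) {
            ClassWriter cw = new ClassWriter(0);
            cw.visit(Opcodes.V1_6, Opcodes.ACC_PUBLIC, "demo/Empty", null,
                     "java/lang/Object", null);
            cw.visitSource("Empty.java", null);
            MethodVisitor mv = cw.visitMethod(Opcodes.ACC_PUBLIC, "<init>", "()V", null, null);
            mv.visitCode();
            mv.visitVarInsn(Opcodes.ALOAD, 0);
            mv.visitMethodInsn(Opcodes.INVOKESPECIAL, "java/lang/Object", "<init>", "()V", false);
            mv.visitInsn(Opcodes.RETURN);
            mv.visitMaxs(1, 1);
            mv.visitEnd();
            cw.visitEnd();
            System.out.println(cw.toByteArray().length + " bytes");
        }
    }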
* @param cv * the class visitor to which this visitor must delegate method * calls. May be null. */ public ClassVisitor(final int api, final ClassVisitor cv) { - if (api != Opcodes.ASM4) { + if (api != Opcodes.ASM4 && api != Opcodes.ASM5) { throw new IllegalArgumentException(); } this.api = api; @@ -168,6 +169,39 @@ public AnnotationVisitor visitAnnotation(String desc, boolean visible) { return null; } + /** + * Visits an annotation on a type in the class signature. + * + * @param typeRef + * a reference to the annotated type. The sort of this type + * reference must be {@link TypeReference#CLASS_TYPE_PARAMETER + * CLASS_TYPE_PARAMETER}, + * {@link TypeReference#CLASS_TYPE_PARAMETER_BOUND + * CLASS_TYPE_PARAMETER_BOUND} or + * {@link TypeReference#CLASS_EXTENDS CLASS_EXTENDS}. See + * {@link TypeReference}. + * @param typePath + * the path to the annotated type argument, wildcard bound, array + * element type, or static inner type within 'typeRef'. May be + * null if the annotation targets 'typeRef' as a whole. + * @param desc + * the class descriptor of the annotation class. + * @param visible + * true if the annotation is visible at runtime. + * @return a visitor to visit the annotation values, or null if + * this visitor is not interested in visiting this annotation. + */ + public AnnotationVisitor visitTypeAnnotation(int typeRef, + TypePath typePath, String desc, boolean visible) { + if (api < Opcodes.ASM5) { + throw new RuntimeException(); + } + if (cv != null) { + return cv.visitTypeAnnotation(typeRef, typePath, desc, visible); + } + return null; + } + /** * Visits a non standard attribute of the class. * diff --git a/src/asm/scala/tools/asm/ClassWriter.java b/src/asm/scala/tools/asm/ClassWriter.java index 93ed7313c7c5..5c2de3f98228 100644 --- a/src/asm/scala/tools/asm/ClassWriter.java +++ b/src/asm/scala/tools/asm/ClassWriter.java @@ -416,6 +416,16 @@ public class ClassWriter extends ClassVisitor { */ private AnnotationWriter ianns; + /** + * The runtime visible type annotations of this class. + */ + private AnnotationWriter tanns; + + /** + * The runtime invisible type annotations of this class. + */ + private AnnotationWriter itanns; + /** * The non standard attributes of this class. */ @@ -477,12 +487,12 @@ public class ClassWriter extends ClassVisitor { * true if the maximum stack size and number of local variables * must be automatically computed. */ - private final boolean computeMaxs; + private boolean computeMaxs; /** * true if the stack map frames must be recomputed from scratch. */ - private final boolean computeFrames; + private boolean computeFrames; /** * true if the stack map tables of this class are invalid. The @@ -595,7 +605,7 @@ public class ClassWriter extends ClassVisitor { * {@link #COMPUTE_FRAMES}. 
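With the visitTypeAnnotation hook shown above, a chained class visitor can observe type annotations and still forward them. A minimal sketch; TypeAnnotationDumper and its println are illustrative only:

    import scala.tools.asm.AnnotationVisitor;
    import scala.tools.asm.ClassVisitor;
    import scala.tools.asm.Opcodes;
    import scala.tools.asm.TypePath;

    // Minimal sketch: report class-level type annotations, then delegate.
    // The visitor must be constructed with Opcodes.ASM5, otherwise the base
    // class rejects the call, as shown in the hunk above.
    class TypeAnnotationDumper extends ClassVisitor {
        TypeAnnotationDumper(ClassVisitor next) {
            super(Opcodes.ASM5, next);
        }

        @Override
        public AnnotationVisitor visitTypeAnnotation(int typeRef, TypePath typePath,
                                                     String desc, boolean visible) {
            System.out.println("type annotation " + desc + ", target_type 0x"
                    + Integer.toHexString(typeRef >>> 24));
            return super.visitTypeAnnotation(typeRef, typePath, desc, visible);
        }
    }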
*/ public ClassWriter(final int flags) { - super(Opcodes.ASM4); + super(Opcodes.ASM5); index = 1; pool = new ByteVector(); items = new Item[256]; @@ -677,7 +687,8 @@ public final void visitSource(final String file, final String debug) { sourceFile = newUTF8(file); } if (debug != null) { - sourceDebug = new ByteVector().putUTF8(debug); + sourceDebug = new ByteVector().encodeUTF8(debug, 0, + Integer.MAX_VALUE); } } @@ -710,6 +721,29 @@ public final AnnotationVisitor visitAnnotation(final String desc, return aw; } + @Override + public final AnnotationVisitor visitTypeAnnotation(int typeRef, + TypePath typePath, final String desc, final boolean visible) { + if (!ClassReader.ANNOTATIONS) { + return null; + } + ByteVector bv = new ByteVector(); + // write target_type and target_info + AnnotationWriter.putTarget(typeRef, typePath, bv); + // write type, and reserve space for values count + bv.putShort(newUTF8(desc)).putShort(0); + AnnotationWriter aw = new AnnotationWriter(this, true, bv, bv, + bv.length - 2); + if (visible) { + aw.next = tanns; + tanns = aw; + } else { + aw.next = itanns; + itanns = aw; + } + return aw; + } + @Override public final void visitAttribute(final Attribute attr) { attr.next = attrs; @@ -722,11 +756,29 @@ public final void visitInnerClass(final String name, if (innerClasses == null) { innerClasses = new ByteVector(); } - ++innerClassesCount; - innerClasses.putShort(name == null ? 0 : newClass(name)); - innerClasses.putShort(outerName == null ? 0 : newClass(outerName)); - innerClasses.putShort(innerName == null ? 0 : newUTF8(innerName)); - innerClasses.putShort(access); + // Sec. 4.7.6 of the JVMS states "Every CONSTANT_Class_info entry in the + // constant_pool table which represents a class or interface C that is + // not a package member must have exactly one corresponding entry in the + // classes array". To avoid duplicates we keep track in the intVal field + // of the Item of each CONSTANT_Class_info entry C whether an inner + // class entry has already been added for C (this field is unused for + // class entries, and changing its value does not change the hashcode + // and equality tests). If so we store the index of this inner class + // entry (plus one) in intVal. This hack allows duplicate detection in + // O(1) time. + Item nameItem = newClassItem(name); + if (nameItem.intVal == 0) { + ++innerClassesCount; + innerClasses.putShort(nameItem.index); + innerClasses.putShort(outerName == null ? 0 : newClass(outerName)); + innerClasses.putShort(innerName == null ? 0 : newUTF8(innerName)); + innerClasses.putShort(access); + nameItem.intVal = innerClassesCount; + } else { + // Compare the inner classes entry nameItem.intVal - 1 with the + // arguments of this method and throw an exception if there is a + // difference? 
+ } } @Override @@ -795,7 +847,7 @@ public byte[] toByteArray() { } if (sourceDebug != null) { ++attributeCount; - size += sourceDebug.length + 4; + size += sourceDebug.length + 6; newUTF8("SourceDebugExtension"); } if (enclosingMethodOwner != 0) { @@ -831,6 +883,16 @@ public byte[] toByteArray() { size += 8 + ianns.getSize(); newUTF8("RuntimeInvisibleAnnotations"); } + if (ClassReader.ANNOTATIONS && tanns != null) { + ++attributeCount; + size += 8 + tanns.getSize(); + newUTF8("RuntimeVisibleTypeAnnotations"); + } + if (ClassReader.ANNOTATIONS && itanns != null) { + ++attributeCount; + size += 8 + itanns.getSize(); + newUTF8("RuntimeInvisibleTypeAnnotations"); + } if (attrs != null) { attributeCount += attrs.getCount(); size += attrs.getSize(this, null, 0, -1, -1); @@ -874,9 +936,9 @@ public byte[] toByteArray() { out.putShort(newUTF8("SourceFile")).putInt(2).putShort(sourceFile); } if (sourceDebug != null) { - int len = sourceDebug.length - 2; + int len = sourceDebug.length; out.putShort(newUTF8("SourceDebugExtension")).putInt(len); - out.putByteArray(sourceDebug.data, 2, len); + out.putByteArray(sourceDebug.data, 0, len); } if (enclosingMethodOwner != 0) { out.putShort(newUTF8("EnclosingMethod")).putInt(4); @@ -904,13 +966,34 @@ public byte[] toByteArray() { out.putShort(newUTF8("RuntimeInvisibleAnnotations")); ianns.put(out); } + if (ClassReader.ANNOTATIONS && tanns != null) { + out.putShort(newUTF8("RuntimeVisibleTypeAnnotations")); + tanns.put(out); + } + if (ClassReader.ANNOTATIONS && itanns != null) { + out.putShort(newUTF8("RuntimeInvisibleTypeAnnotations")); + itanns.put(out); + } if (attrs != null) { attrs.put(this, null, 0, -1, -1, out); } if (invalidFrames) { - ClassWriter cw = new ClassWriter(COMPUTE_FRAMES); - new ClassReader(out.data).accept(cw, ClassReader.SKIP_FRAMES); - return cw.toByteArray(); + anns = null; + ianns = null; + attrs = null; + innerClassesCount = 0; + innerClasses = null; + bootstrapMethodsCount = 0; + bootstrapMethods = null; + firstField = null; + lastField = null; + firstMethod = null; + lastMethod = null; + computeMaxs = false; + computeFrames = true; + invalidFrames = false; + new ClassReader(out.data).accept(this, ClassReader.SKIP_FRAMES); + return toByteArray(); } return out.data; } @@ -1577,7 +1660,7 @@ int getMergedType(final int type1, final int type2) { /** * Returns the common super type of the two given types. The default - * implementation of this method loads the two given classes and uses + * implementation of this method loads the two given classes and uses * the java.lang.Class methods to find the common super class. It can be * overridden to compute this common super type in other ways, in particular * without actually loading any class, or to take into account the class @@ -1663,6 +1746,15 @@ private void put(final Item i) { items[index] = i; } + /** + * Find item that whose index is `index`. + */ + public Item findItemByIndex(int index) { + int i = 0; + while (i < items.length && (items[i] == null || items[i].index != index)) i++; + return items[i]; + } + /** * Puts one byte and two shorts into the constant pool. 
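A minimal sketch (not part of the patch) of how a client of this bundled ASM fork might call the visitTypeAnnotation entry point added to ClassWriter above. The typeRef encoding is assumed from AnnotationWriter.putTarget (sort in the top byte, supertype index 0xFFFF for the direct super class in a CLASS_EXTENDS target); the class name p/C and the annotation descriptor LNonNull; are made-up placeholders.

import scala.tools.asm.AnnotationVisitor;
import scala.tools.asm.ClassWriter;
import scala.tools.asm.Opcodes;
import scala.tools.asm.TypeReference;

class TypeAnnotationSketch {
    static byte[] classWithAnnotatedSuperType() {
        ClassWriter cw = new ClassWriter(0);
        cw.visit(Opcodes.V1_8, Opcodes.ACC_PUBLIC, "p/C", null,
                "java/lang/Object", null);
        // Annotate the extends clause ("extends @NonNull Object"):
        // sort CLASS_EXTENDS in the top byte, 0xFFFF = the direct super class.
        int typeRef = (TypeReference.CLASS_EXTENDS << 24) | (0xFFFF << 8);
        AnnotationVisitor av =
                cw.visitTypeAnnotation(typeRef, null, "LNonNull;", true);
        if (av != null) {
            av.visitEnd(); // lands in the RuntimeVisibleTypeAnnotations attribute
        }
        cw.visitEnd();
        return cw.toByteArray();
    }
}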
* diff --git a/src/asm/scala/tools/asm/Context.java b/src/asm/scala/tools/asm/Context.java index 7b3a2ad9dd38..24546969e38e 100644 --- a/src/asm/scala/tools/asm/Context.java +++ b/src/asm/scala/tools/asm/Context.java @@ -72,11 +72,46 @@ class Context { */ String desc; + /** + * The label objects, indexed by bytecode offset, of the method currently + * being parsed (only bytecode offsets for which a label is needed have a + * non null associated Label object). + */ + Label[] labels; + + /** + * The target of the type annotation currently being parsed. + */ + int typeRef; + + /** + * The path of the type annotation currently being parsed. + */ + TypePath typePath; + /** * The offset of the latest stack map frame that has been parsed. */ int offset; + /** + * The labels corresponding to the start of the local variable ranges in the + * local variable type annotation currently being parsed. + */ + Label[] start; + + /** + * The labels corresponding to the end of the local variable ranges in the + * local variable type annotation currently being parsed. + */ + Label[] end; + + /** + * The local variable indices for each local variable range in the local + * variable type annotation currently being parsed. + */ + int[] index; + /** * The encoding of the latest stack map frame that has been parsed. */ diff --git a/src/asm/scala/tools/asm/CustomAttr.java b/src/asm/scala/tools/asm/CustomAttr.java index 22b5d287b7c9..5ecfd283d058 100644 --- a/src/asm/scala/tools/asm/CustomAttr.java +++ b/src/asm/scala/tools/asm/CustomAttr.java @@ -1,5 +1,5 @@ /* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL + * Copyright 2005-2012 LAMP/EPFL */ package scala.tools.asm; diff --git a/src/asm/scala/tools/asm/FieldVisitor.java b/src/asm/scala/tools/asm/FieldVisitor.java index 9171f331e5ad..708c1d322e34 100644 --- a/src/asm/scala/tools/asm/FieldVisitor.java +++ b/src/asm/scala/tools/asm/FieldVisitor.java @@ -31,8 +31,8 @@ /** * A visitor to visit a Java field. The methods of this class must be called in - * the following order: ( visitAnnotation | visitAttribute )* - * visitEnd. + * the following order: ( visitAnnotation | + * visitTypeAnnotation | visitAttribute )* visitEnd. * * @author Eric Bruneton */ @@ -40,7 +40,7 @@ public abstract class FieldVisitor { /** * The ASM API version implemented by this visitor. The value of this field - * must be one of {@link Opcodes#ASM4}. + * must be one of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. */ protected final int api; @@ -55,7 +55,7 @@ public abstract class FieldVisitor { * * @param api * the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. + * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. */ public FieldVisitor(final int api) { this(api, null); @@ -66,13 +66,13 @@ public FieldVisitor(final int api) { * * @param api * the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. + * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. * @param fv * the field visitor to which this visitor must delegate method * calls. May be null. */ public FieldVisitor(final int api, final FieldVisitor fv) { - if (api != Opcodes.ASM4) { + if (api != Opcodes.ASM4 && api != Opcodes.ASM5) { throw new IllegalArgumentException(); } this.api = api; @@ -96,6 +96,35 @@ public AnnotationVisitor visitAnnotation(String desc, boolean visible) { return null; } + /** + * Visits an annotation on the type of the field. + * + * @param typeRef + * a reference to the annotated type. 
The sort of this type + * reference must be {@link TypeReference#FIELD FIELD}. See + * {@link TypeReference}. + * @param typePath + * the path to the annotated type argument, wildcard bound, array + * element type, or static inner type within 'typeRef'. May be + * null if the annotation targets 'typeRef' as a whole. + * @param desc + * the class descriptor of the annotation class. + * @param visible + * true if the annotation is visible at runtime. + * @return a visitor to visit the annotation values, or null if + * this visitor is not interested in visiting this annotation. + */ + public AnnotationVisitor visitTypeAnnotation(int typeRef, + TypePath typePath, String desc, boolean visible) { + if (api < Opcodes.ASM5) { + throw new RuntimeException(); + } + if (fv != null) { + return fv.visitTypeAnnotation(typeRef, typePath, desc, visible); + } + return null; + } + /** * Visits a non standard attribute of the field. * diff --git a/src/asm/scala/tools/asm/FieldWriter.java b/src/asm/scala/tools/asm/FieldWriter.java index 02c6059b9149..e640a8d4060b 100644 --- a/src/asm/scala/tools/asm/FieldWriter.java +++ b/src/asm/scala/tools/asm/FieldWriter.java @@ -80,6 +80,17 @@ final class FieldWriter extends FieldVisitor { */ private AnnotationWriter ianns; + /** + * The runtime visible type annotations of this field. May be null. + */ + private AnnotationWriter tanns; + + /** + * The runtime invisible type annotations of this field. May be + * null. + */ + private AnnotationWriter itanns; + /** * The non standard attributes of this field. May be null. */ @@ -107,7 +118,7 @@ final class FieldWriter extends FieldVisitor { */ FieldWriter(final ClassWriter cw, final int access, final String name, final String desc, final String signature, final Object value) { - super(Opcodes.ASM4); + super(Opcodes.ASM5); if (cw.firstField == null) { cw.firstField = this; } else { @@ -150,6 +161,29 @@ public AnnotationVisitor visitAnnotation(final String desc, return aw; } + @Override + public AnnotationVisitor visitTypeAnnotation(final int typeRef, + final TypePath typePath, final String desc, final boolean visible) { + if (!ClassReader.ANNOTATIONS) { + return null; + } + ByteVector bv = new ByteVector(); + // write target_type and target_info + AnnotationWriter.putTarget(typeRef, typePath, bv); + // write type, and reserve space for values count + bv.putShort(cw.newUTF8(desc)).putShort(0); + AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv, + bv.length - 2); + if (visible) { + aw.next = tanns; + tanns = aw; + } else { + aw.next = itanns; + itanns = aw; + } + return aw; + } + @Override public void visitAttribute(final Attribute attr) { attr.next = attrs; @@ -198,6 +232,14 @@ int getSize() { cw.newUTF8("RuntimeInvisibleAnnotations"); size += 8 + ianns.getSize(); } + if (ClassReader.ANNOTATIONS && tanns != null) { + cw.newUTF8("RuntimeVisibleTypeAnnotations"); + size += 8 + tanns.getSize(); + } + if (ClassReader.ANNOTATIONS && itanns != null) { + cw.newUTF8("RuntimeInvisibleTypeAnnotations"); + size += 8 + itanns.getSize(); + } if (attrs != null) { size += attrs.getSize(cw, null, 0, -1, -1); } @@ -237,6 +279,12 @@ void put(final ByteVector out) { if (ClassReader.ANNOTATIONS && ianns != null) { ++attributeCount; } + if (ClassReader.ANNOTATIONS && tanns != null) { + ++attributeCount; + } + if (ClassReader.ANNOTATIONS && itanns != null) { + ++attributeCount; + } if (attrs != null) { attributeCount += attrs.getCount(); } @@ -266,6 +314,14 @@ void put(final ByteVector out) { 
out.putShort(cw.newUTF8("RuntimeInvisibleAnnotations")); ianns.put(out); } + if (ClassReader.ANNOTATIONS && tanns != null) { + out.putShort(cw.newUTF8("RuntimeVisibleTypeAnnotations")); + tanns.put(out); + } + if (ClassReader.ANNOTATIONS && itanns != null) { + out.putShort(cw.newUTF8("RuntimeInvisibleTypeAnnotations")); + itanns.put(out); + } if (attrs != null) { attrs.put(cw, null, 0, -1, -1, out); } diff --git a/src/asm/scala/tools/asm/Frame.java b/src/asm/scala/tools/asm/Frame.java index bcc3e8450b66..85ad3269ab21 100644 --- a/src/asm/scala/tools/asm/Frame.java +++ b/src/asm/scala/tools/asm/Frame.java @@ -70,8 +70,8 @@ final class Frame { * stack types. VALUE depends on KIND. For LOCAL types, it is an index in * the input local variable types. For STACK types, it is a position * relatively to the top of input frame stack. For BASE types, it is either - * one of the constants defined in FrameVisitor, or for OBJECT and - * UNINITIALIZED types, a tag and an index in the type table. + * one of the constants defined below, or for OBJECT and UNINITIALIZED + * types, a tag and an index in the type table. * * Output frames can contain types of any kind and with a positive or * negative dimension (and even unassigned types, represented by 0 - which @@ -1417,6 +1417,7 @@ private static boolean merge(final ClassWriter cw, int t, // if t is the NULL type, merge(u,t)=u, so there is no change return false; } else if ((t & (DIM | BASE_KIND)) == (u & (DIM | BASE_KIND))) { + // if t and u have the same dimension and same base kind if ((u & BASE_KIND) == OBJECT) { // if t is also a reference type, and if u and t have the // same dimension merge(u,t) = dim(t) | common parent of the @@ -1425,13 +1426,21 @@ private static boolean merge(final ClassWriter cw, int t, | cw.getMergedType(t & BASE_VALUE, u & BASE_VALUE); } else { // if u and t are array types, but not with the same element - // type, merge(u,t)=java/lang/Object - v = OBJECT | cw.addType("java/lang/Object"); + // type, merge(u,t) = dim(u) - 1 | java/lang/Object + int vdim = ELEMENT_OF + (u & DIM); + v = vdim | OBJECT | cw.addType("java/lang/Object"); } } else if ((t & BASE_KIND) == OBJECT || (t & DIM) != 0) { - // if t is any other reference or array type, - // merge(u,t)=java/lang/Object - v = OBJECT | cw.addType("java/lang/Object"); + // if t is any other reference or array type, the merged type + // is min(udim, tdim) | java/lang/Object, where udim is the + // array dimension of u, minus 1 if u is an array type with a + // primitive element type (and similarly for tdim). + int tdim = (((t & DIM) == 0 || (t & BASE_KIND) == OBJECT) ? 0 + : ELEMENT_OF) + (t & DIM); + int udim = (((u & DIM) == 0 || (u & BASE_KIND) == OBJECT) ? 0 + : ELEMENT_OF) + (u & DIM); + v = Math.min(tdim, udim) | OBJECT + | cw.addType("java/lang/Object"); } else { // if t is any other type, merge(u,t)=TOP v = TOP; diff --git a/src/asm/scala/tools/asm/Handle.java b/src/asm/scala/tools/asm/Handle.java index 5dd06a54b953..cf12bb761332 100644 --- a/src/asm/scala/tools/asm/Handle.java +++ b/src/asm/scala/tools/asm/Handle.java @@ -49,7 +49,8 @@ public final class Handle { final int tag; /** - * The internal name of the field or method designed by this handle. + * The internal name of the class that owns the field or method designated + * by this handle. */ final String owner; @@ -76,8 +77,8 @@ public final class Handle { * {@link Opcodes#H_NEWINVOKESPECIAL} or * {@link Opcodes#H_INVOKEINTERFACE}. 
* @param owner - * the internal name of the field or method designed by this - * handle. + * the internal name of the class that owns the field or method + * designated by this handle. * @param name * the name of the field or method designated by this handle. * @param desc @@ -106,9 +107,11 @@ public int getTag() { } /** - * Returns the internal name of the field or method designed by this handle. + * Returns the internal name of the class that owns the field or method + * designated by this handle. * - * @return the internal name of the field or method designed by this handle. + * @return the internal name of the class that owns the field or method + * designated by this handle. */ public String getOwner() { return owner; diff --git a/src/asm/scala/tools/asm/Item.java b/src/asm/scala/tools/asm/Item.java index 94195a1082ee..4693f5ae9927 100644 --- a/src/asm/scala/tools/asm/Item.java +++ b/src/asm/scala/tools/asm/Item.java @@ -208,9 +208,10 @@ void set(final int type, final String strVal1, final String strVal2, this.strVal2 = strVal2; this.strVal3 = strVal3; switch (type) { + case ClassWriter.CLASS: + this.intVal = 0; // intVal of a class must be zero, see visitInnerClass case ClassWriter.UTF8: case ClassWriter.STR: - case ClassWriter.CLASS: case ClassWriter.MTYPE: case ClassWriter.TYPE_NORMAL: hashCode = 0x7FFFFFFF & (type + strVal1.hashCode()); diff --git a/src/asm/scala/tools/asm/Label.java b/src/asm/scala/tools/asm/Label.java index 5d5529ce743f..c094eba408f0 100644 --- a/src/asm/scala/tools/asm/Label.java +++ b/src/asm/scala/tools/asm/Label.java @@ -545,7 +545,7 @@ void visitSubroutine(final Label JSR, final long id, final int nbSubroutines) { } // ------------------------------------------------------------------------ - // Overriden Object methods + // Overridden Object methods // ------------------------------------------------------------------------ /** diff --git a/src/asm/scala/tools/asm/MethodVisitor.java b/src/asm/scala/tools/asm/MethodVisitor.java index e43ca9782351..bddc325020dc 100644 --- a/src/asm/scala/tools/asm/MethodVisitor.java +++ b/src/asm/scala/tools/asm/MethodVisitor.java @@ -31,18 +31,24 @@ /** * A visitor to visit a Java method. The methods of this class must be called in - * the following order: [ visitAnnotationDefault ] ( - * visitAnnotation | visitParameterAnnotation | - * visitAttribute )* [ visitCode ( visitFrame | - * visitXInsn | visitLabel | - * visitTryCatchBlock | visitLocalVariable | + * the following order: ( visitParameter )* [ + * visitAnnotationDefault ] ( visitAnnotation | + * visitTypeAnnotation | visitAttribute )* [ + * visitCode ( visitFrame | visitXInsn | + * visitLabel | visitInsnAnnotation | + * visitTryCatchBlock | visitTryCatchBlockAnnotation | + * visitLocalVariable | visitLocalVariableAnnotation | * visitLineNumber )* visitMaxs ] visitEnd. In - * addition, the visitXInsn and visitLabel methods - * must be called in the sequential order of the bytecode instructions of the - * visited code, visitTryCatchBlock must be called before the - * labels passed as arguments have been visited, and the - * visitLocalVariable and visitLineNumber methods must be - * called after the labels passed as arguments have been visited. 
+ * addition, the visitXInsn and visitLabel methods must + * be called in the sequential order of the bytecode instructions of the visited + * code, visitInsnAnnotation must be called after the annotated + * instruction, visitTryCatchBlock must be called before the + * labels passed as arguments have been visited, + * visitTryCatchBlockAnnotation must be called after the + * corresponding try catch block has been visited, and the + * visitLocalVariable, visitLocalVariableAnnotation and + * visitLineNumber methods must be called after the labels + * passed as arguments have been visited. * * @author Eric Bruneton */ @@ -50,7 +56,7 @@ public abstract class MethodVisitor { /** * The ASM API version implemented by this visitor. The value of this field - * must be one of {@link Opcodes#ASM4}. + * must be one of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. */ protected final int api; @@ -65,7 +71,7 @@ public abstract class MethodVisitor { * * @param api * the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. + * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. */ public MethodVisitor(final int api) { this(api, null); @@ -76,13 +82,13 @@ public MethodVisitor(final int api) { * * @param api * the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. + * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. * @param mv * the method visitor to which this visitor must delegate method * calls. May be null. */ public MethodVisitor(final int api, final MethodVisitor mv) { - if (api != Opcodes.ASM4) { + if (api != Opcodes.ASM4 && api != Opcodes.ASM5) { throw new IllegalArgumentException(); } this.api = api; @@ -90,9 +96,28 @@ public MethodVisitor(final int api, final MethodVisitor mv) { } // ------------------------------------------------------------------------- - // Annotations and non standard attributes + // Parameters, annotations and non standard attributes // ------------------------------------------------------------------------- + /** + * Visits a parameter of this method. + * + * @param name + * parameter name or null if none is provided. + * @param access + * the parameter's access flags, only ACC_FINAL, + * ACC_SYNTHETIC or/and ACC_MANDATED are + * allowed (see {@link Opcodes}). + */ + public void visitParameter(String name, int access) { + if (api < Opcodes.ASM5) { + throw new RuntimeException(); + } + if (mv != null) { + mv.visitParameter(name, access); + } + } + /** * Visits the default value of this annotation interface method. * @@ -127,6 +152,42 @@ public AnnotationVisitor visitAnnotation(String desc, boolean visible) { return null; } + /** + * Visits an annotation on a type in the method signature. + * + * @param typeRef + * a reference to the annotated type. The sort of this type + * reference must be {@link TypeReference#METHOD_TYPE_PARAMETER + * METHOD_TYPE_PARAMETER}, + * {@link TypeReference#METHOD_TYPE_PARAMETER_BOUND + * METHOD_TYPE_PARAMETER_BOUND}, + * {@link TypeReference#METHOD_RETURN METHOD_RETURN}, + * {@link TypeReference#METHOD_RECEIVER METHOD_RECEIVER}, + * {@link TypeReference#METHOD_FORMAL_PARAMETER + * METHOD_FORMAL_PARAMETER} or {@link TypeReference#THROWS + * THROWS}. See {@link TypeReference}. + * @param typePath + * the path to the annotated type argument, wildcard bound, array + * element type, or static inner type within 'typeRef'. May be + * null if the annotation targets 'typeRef' as a whole. + * @param desc + * the class descriptor of the annotation class. 
+ * @param visible + * true if the annotation is visible at runtime. + * @return a visitor to visit the annotation values, or null if + * this visitor is not interested in visiting this annotation. + */ + public AnnotationVisitor visitTypeAnnotation(int typeRef, + TypePath typePath, String desc, boolean visible) { + if (api < Opcodes.ASM5) { + throw new RuntimeException(); + } + if (mv != null) { + return mv.visitTypeAnnotation(typeRef, typePath, desc, visible); + } + return null; + } + /** * Visits an annotation of a parameter this method. * @@ -201,9 +262,11 @@ public void visitCode() { *
• {@link Opcodes#F_CHOP} representing frame with current locals are the same as the locals in the previous frame, except that the last 1-3 locals are absent and with the empty stack (nLocals is 1, 2 or 3).
• {@link Opcodes#F_FULL} representing complete frame data.
    * In both cases the first frame, corresponding to the method's parameters * and access flags, is implicit and must not be visited. Also, it is * illegal to visit two or more frames for the same code location (i.e., at @@ -376,13 +439,52 @@ public void visitFieldInsn(int opcode, String owner, String name, * @param desc * the method's descriptor (see {@link Type Type}). */ + @Deprecated public void visitMethodInsn(int opcode, String owner, String name, String desc) { + if (api >= Opcodes.ASM5) { + boolean itf = opcode == Opcodes.INVOKEINTERFACE; + visitMethodInsn(opcode, owner, name, desc, itf); + return; + } if (mv != null) { mv.visitMethodInsn(opcode, owner, name, desc); } } + /** + * Visits a method instruction. A method instruction is an instruction that + * invokes a method. + * + * @param opcode + * the opcode of the type instruction to be visited. This opcode + * is either INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or + * INVOKEINTERFACE. + * @param owner + * the internal name of the method's owner class (see + * {@link Type#getInternalName() getInternalName}). + * @param name + * the method's name. + * @param desc + * the method's descriptor (see {@link Type Type}). + * @param itf + * if the method's owner class is an interface. + */ + public void visitMethodInsn(int opcode, String owner, String name, + String desc, boolean itf) { + if (api < Opcodes.ASM5) { + if (itf != (opcode == Opcodes.INVOKEINTERFACE)) { + throw new IllegalArgumentException( + "INVOKESPECIAL/STATIC on interfaces require ASM 5"); + } + visitMethodInsn(opcode, owner, name, desc); + return; + } + if (mv != null) { + mv.visitMethodInsn(opcode, owner, name, desc, itf); + } + } + /** * Visits an invokedynamic instruction. * @@ -558,6 +660,48 @@ public void visitMultiANewArrayInsn(String desc, int dims) { } } + /** + * Visits an annotation on an instruction. This method must be called just + * after the annotated instruction. It can be called several times + * for the same instruction. + * + * @param typeRef + * a reference to the annotated type. The sort of this type + * reference must be {@link TypeReference#INSTANCEOF INSTANCEOF}, + * {@link TypeReference#NEW NEW}, + * {@link TypeReference#CONSTRUCTOR_REFERENCE + * CONSTRUCTOR_REFERENCE}, {@link TypeReference#METHOD_REFERENCE + * METHOD_REFERENCE}, {@link TypeReference#CAST CAST}, + * {@link TypeReference#CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT + * CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT}, + * {@link TypeReference#METHOD_INVOCATION_TYPE_ARGUMENT + * METHOD_INVOCATION_TYPE_ARGUMENT}, + * {@link TypeReference#CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT + * CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT}, or + * {@link TypeReference#METHOD_REFERENCE_TYPE_ARGUMENT + * METHOD_REFERENCE_TYPE_ARGUMENT}. See {@link TypeReference}. + * @param typePath + * the path to the annotated type argument, wildcard bound, array + * element type, or static inner type within 'typeRef'. May be + * null if the annotation targets 'typeRef' as a whole. + * @param desc + * the class descriptor of the annotation class. + * @param visible + * true if the annotation is visible at runtime. + * @return a visitor to visit the annotation values, or null if + * this visitor is not interested in visiting this annotation. 
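A brief sketch (not part of the patch) of what the five-argument visitMethodInsn above enables for an ASM5 caller: emitting a call to a static method declared on an interface (legal from class file version V1_8), which the deprecated four-argument form could not express because it inferred itf solely from INVOKEINTERFACE. The owner p/Factory, the method create and its descriptor are placeholders.

import scala.tools.asm.MethodVisitor;
import scala.tools.asm.Opcodes;

class InterfaceStaticCallSketch {
    // Emits the body "return Factory.create();" where Factory is an interface.
    static void emitBody(MethodVisitor mv) {
        mv.visitCode();
        // itf = true: the owner of this INVOKESTATIC is an interface (ASM5 only).
        mv.visitMethodInsn(Opcodes.INVOKESTATIC, "p/Factory", "create",
                "()Ljava/lang/Object;", true);
        mv.visitInsn(Opcodes.ARETURN);
        mv.visitMaxs(1, 1);
        mv.visitEnd();
    }
}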
+ */ + public AnnotationVisitor visitInsnAnnotation(int typeRef, + TypePath typePath, String desc, boolean visible) { + if (api < Opcodes.ASM5) { + throw new RuntimeException(); + } + if (mv != null) { + return mv.visitInsnAnnotation(typeRef, typePath, desc, visible); + } + return null; + } + // ------------------------------------------------------------------------- // Exceptions table entries, debug information, max stack and max locals // ------------------------------------------------------------------------- @@ -586,6 +730,38 @@ public void visitTryCatchBlock(Label start, Label end, Label handler, } } + /** + * Visits an annotation on an exception handler type. This method must be + * called after the {@link #visitTryCatchBlock} for the annotated + * exception handler. It can be called several times for the same exception + * handler. + * + * @param typeRef + * a reference to the annotated type. The sort of this type + * reference must be {@link TypeReference#EXCEPTION_PARAMETER + * EXCEPTION_PARAMETER}. See {@link TypeReference}. + * @param typePath + * the path to the annotated type argument, wildcard bound, array + * element type, or static inner type within 'typeRef'. May be + * null if the annotation targets 'typeRef' as a whole. + * @param desc + * the class descriptor of the annotation class. + * @param visible + * true if the annotation is visible at runtime. + * @return a visitor to visit the annotation values, or null if + * this visitor is not interested in visiting this annotation. + */ + public AnnotationVisitor visitTryCatchAnnotation(int typeRef, + TypePath typePath, String desc, boolean visible) { + if (api < Opcodes.ASM5) { + throw new RuntimeException(); + } + if (mv != null) { + return mv.visitTryCatchAnnotation(typeRef, typePath, desc, visible); + } + return null; + } + /** * Visits a local variable declaration. * @@ -616,6 +792,48 @@ public void visitLocalVariable(String name, String desc, String signature, } } + /** + * Visits an annotation on a local variable type. + * + * @param typeRef + * a reference to the annotated type. The sort of this type + * reference must be {@link TypeReference#LOCAL_VARIABLE + * LOCAL_VARIABLE} or {@link TypeReference#RESOURCE_VARIABLE + * RESOURCE_VARIABLE}. See {@link TypeReference}. + * @param typePath + * the path to the annotated type argument, wildcard bound, array + * element type, or static inner type within 'typeRef'. May be + * null if the annotation targets 'typeRef' as a whole. + * @param start + * the fist instructions corresponding to the continuous ranges + * that make the scope of this local variable (inclusive). + * @param end + * the last instructions corresponding to the continuous ranges + * that make the scope of this local variable (exclusive). This + * array must have the same size as the 'start' array. + * @param index + * the local variable's index in each range. This array must have + * the same size as the 'start' array. + * @param desc + * the class descriptor of the annotation class. + * @param visible + * true if the annotation is visible at runtime. + * @return a visitor to visit the annotation values, or null if + * this visitor is not interested in visiting this annotation. 
+ */ + public AnnotationVisitor visitLocalVariableAnnotation(int typeRef, + TypePath typePath, Label[] start, Label[] end, int[] index, + String desc, boolean visible) { + if (api < Opcodes.ASM5) { + throw new RuntimeException(); + } + if (mv != null) { + return mv.visitLocalVariableAnnotation(typeRef, typePath, start, + end, index, desc, visible); + } + return null; + } + /** * Visits a line number declaration. * diff --git a/src/asm/scala/tools/asm/MethodWriter.java b/src/asm/scala/tools/asm/MethodWriter.java index 87acab17c994..d30e04c62554 100644 --- a/src/asm/scala/tools/asm/MethodWriter.java +++ b/src/asm/scala/tools/asm/MethodWriter.java @@ -37,7 +37,7 @@ * @author Eric Bruneton * @author Eugene Kuleshov */ -class MethodWriter extends MethodVisitor { +public class MethodWriter extends MethodVisitor { /** * Pseudo access flag used to denote constructors. @@ -191,6 +191,18 @@ class MethodWriter extends MethodVisitor { */ private AnnotationWriter ianns; + /** + * The runtime visible type annotations of this method. May be null + * . + */ + private AnnotationWriter tanns; + + /** + * The runtime invisible type annotations of this method. May be + * null. + */ + private AnnotationWriter itanns; + /** * The runtime visible parameter annotations of this method. May be * null. @@ -223,11 +235,19 @@ class MethodWriter extends MethodVisitor { */ private int maxStack; + public int getMaxStack() { + return maxStack; + } + /** * Maximum number of local variables for this method. */ private int maxLocals; + public int getMaxLocals() { + return maxLocals; + } + /** * Number of local variables in the current stack map frame. */ @@ -282,6 +302,16 @@ class MethodWriter extends MethodVisitor { */ private Handler lastHandler; + /** + * Number of entries in the MethodParameters attribute. + */ + private int methodParametersCount; + + /** + * The MethodParameters attribute. + */ + private ByteVector methodParameters; + /** * Number of entries in the LocalVariableTable attribute. */ @@ -312,6 +342,21 @@ class MethodWriter extends MethodVisitor { */ private ByteVector lineNumber; + /** + * The start offset of the last visited instruction. + */ + private int lastCodeOffset; + + /** + * The runtime visible type annotations of the code. May be null. + */ + private AnnotationWriter ctanns; + + /** + * The runtime invisible type annotations of the code. May be null. + */ + private AnnotationWriter ictanns; + /** * The non standard attributes of the method's code. */ @@ -416,7 +461,7 @@ class MethodWriter extends MethodVisitor { final String desc, final String signature, final String[] exceptions, final boolean computeMaxs, final boolean computeFrames) { - super(Opcodes.ASM4); + super(Opcodes.ASM5); if (cw.firstMethod == null) { cw.firstMethod = this; } else { @@ -461,6 +506,16 @@ class MethodWriter extends MethodVisitor { // Implementation of the MethodVisitor abstract class // ------------------------------------------------------------------------ + @Override + public void visitParameter(String name, int access) { + if (methodParameters == null) { + methodParameters = new ByteVector(); + } + ++methodParametersCount; + methodParameters.putShort((name == null) ? 
0 : cw.newUTF8(name)) + .putShort(access); + } + @Override public AnnotationVisitor visitAnnotationDefault() { if (!ClassReader.ANNOTATIONS) { @@ -490,6 +545,29 @@ public AnnotationVisitor visitAnnotation(final String desc, return aw; } + @Override + public AnnotationVisitor visitTypeAnnotation(final int typeRef, + final TypePath typePath, final String desc, final boolean visible) { + if (!ClassReader.ANNOTATIONS) { + return null; + } + ByteVector bv = new ByteVector(); + // write target_type and target_info + AnnotationWriter.putTarget(typeRef, typePath, bv); + // write type, and reserve space for values count + bv.putShort(cw.newUTF8(desc)).putShort(0); + AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv, + bv.length - 2); + if (visible) { + aw.next = tanns; + tanns = aw; + } else { + aw.next = itanns; + itanns = aw; + } + return aw; + } + @Override public AnnotationVisitor visitParameterAnnotation(final int parameter, final String desc, final boolean visible) { @@ -642,6 +720,7 @@ public void visitFrame(final int type, final int nLocal, @Override public void visitInsn(final int opcode) { + lastCodeOffset = code.length; // adds the instruction to the bytecode of the method code.putByte(opcode); // update currentBlock @@ -667,6 +746,7 @@ public void visitInsn(final int opcode) { @Override public void visitIntInsn(final int opcode, final int operand) { + lastCodeOffset = code.length; // Label currentBlock = this.currentBlock; if (currentBlock != null) { if (compute == FRAMES) { @@ -691,6 +771,7 @@ public void visitIntInsn(final int opcode, final int operand) { @Override public void visitVarInsn(final int opcode, final int var) { + lastCodeOffset = code.length; // Label currentBlock = this.currentBlock; if (currentBlock != null) { if (compute == FRAMES) { @@ -749,6 +830,7 @@ public void visitVarInsn(final int opcode, final int var) { @Override public void visitTypeInsn(final int opcode, final String type) { + lastCodeOffset = code.length; Item i = cw.newClassItem(type); // Label currentBlock = this.currentBlock; if (currentBlock != null) { @@ -771,6 +853,7 @@ public void visitTypeInsn(final int opcode, final String type) { @Override public void visitFieldInsn(final int opcode, final String owner, final String name, final String desc) { + lastCodeOffset = code.length; Item i = cw.newFieldItem(owner, name, desc); // Label currentBlock = this.currentBlock; if (currentBlock != null) { @@ -808,8 +891,8 @@ public void visitFieldInsn(final int opcode, final String owner, @Override public void visitMethodInsn(final int opcode, final String owner, - final String name, final String desc) { - boolean itf = opcode == Opcodes.INVOKEINTERFACE; + final String name, final String desc, final boolean itf) { + lastCodeOffset = code.length; Item i = cw.newMethodItem(owner, name, desc, itf); int argSize = i.intVal; // Label currentBlock = this.currentBlock; @@ -847,7 +930,7 @@ public void visitMethodInsn(final int opcode, final String owner, } } // adds the instruction to the bytecode of the method - if (itf) { + if (opcode == Opcodes.INVOKEINTERFACE) { if (argSize == 0) { argSize = Type.getArgumentsAndReturnSizes(desc); i.intVal = argSize; @@ -861,6 +944,7 @@ public void visitMethodInsn(final int opcode, final String owner, @Override public void visitInvokeDynamicInsn(final String name, final String desc, final Handle bsm, final Object... 
bsmArgs) { + lastCodeOffset = code.length; Item i = cw.newInvokeDynamicItem(name, desc, bsm, bsmArgs); int argSize = i.intVal; // Label currentBlock = this.currentBlock; @@ -900,6 +984,7 @@ public void visitInvokeDynamicInsn(final String name, final String desc, @Override public void visitJumpInsn(final int opcode, final Label label) { + lastCodeOffset = code.length; Label nextInsn = null; // Label currentBlock = this.currentBlock; if (currentBlock != null) { @@ -1045,6 +1130,7 @@ public void visitLabel(final Label label) { @Override public void visitLdcInsn(final Object cst) { + lastCodeOffset = code.length; Item i = cw.newConstItem(cst); // Label currentBlock = this.currentBlock; if (currentBlock != null) { @@ -1078,6 +1164,7 @@ public void visitLdcInsn(final Object cst) { @Override public void visitIincInsn(final int var, final int increment) { + lastCodeOffset = code.length; if (currentBlock != null) { if (compute == FRAMES) { currentBlock.frame.execute(Opcodes.IINC, var, null, null); @@ -1102,6 +1189,7 @@ public void visitIincInsn(final int var, final int increment) { @Override public void visitTableSwitchInsn(final int min, final int max, final Label dflt, final Label... labels) { + lastCodeOffset = code.length; // adds the instruction to the bytecode of the method int source = code.length; code.putByte(Opcodes.TABLESWITCH); @@ -1118,6 +1206,7 @@ public void visitTableSwitchInsn(final int min, final int max, @Override public void visitLookupSwitchInsn(final Label dflt, final int[] keys, final Label[] labels) { + lastCodeOffset = code.length; // adds the instruction to the bytecode of the method int source = code.length; code.putByte(Opcodes.LOOKUPSWITCH); @@ -1160,6 +1249,7 @@ private void visitSwitchInsn(final Label dflt, final Label[] labels) { @Override public void visitMultiANewArrayInsn(final String desc, final int dims) { + lastCodeOffset = code.length; Item i = cw.newClassItem(desc); // Label currentBlock = this.currentBlock; if (currentBlock != null) { @@ -1175,6 +1265,30 @@ public void visitMultiANewArrayInsn(final String desc, final int dims) { code.put12(Opcodes.MULTIANEWARRAY, i.index).putByte(dims); } + @Override + public AnnotationVisitor visitInsnAnnotation(int typeRef, + TypePath typePath, String desc, boolean visible) { + if (!ClassReader.ANNOTATIONS) { + return null; + } + ByteVector bv = new ByteVector(); + // write target_type and target_info + typeRef = (typeRef & 0xFF0000FF) | (lastCodeOffset << 8); + AnnotationWriter.putTarget(typeRef, typePath, bv); + // write type, and reserve space for values count + bv.putShort(cw.newUTF8(desc)).putShort(0); + AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv, + bv.length - 2); + if (visible) { + aw.next = ctanns; + ctanns = aw; + } else { + aw.next = ictanns; + ictanns = aw; + } + return aw; + } + @Override public void visitTryCatchBlock(final Label start, final Label end, final Label handler, final String type) { @@ -1193,6 +1307,29 @@ public void visitTryCatchBlock(final Label start, final Label end, lastHandler = h; } + @Override + public AnnotationVisitor visitTryCatchAnnotation(int typeRef, + TypePath typePath, String desc, boolean visible) { + if (!ClassReader.ANNOTATIONS) { + return null; + } + ByteVector bv = new ByteVector(); + // write target_type and target_info + AnnotationWriter.putTarget(typeRef, typePath, bv); + // write type, and reserve space for values count + bv.putShort(cw.newUTF8(desc)).putShort(0); + AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv, + bv.length - 2); + if 
(visible) { + aw.next = ctanns; + ctanns = aw; + } else { + aw.next = ictanns; + ictanns = aw; + } + return aw; + } + @Override public void visitLocalVariable(final String name, final String desc, final String signature, final Label start, final Label end, @@ -1225,6 +1362,41 @@ public void visitLocalVariable(final String name, final String desc, } } + @Override + public AnnotationVisitor visitLocalVariableAnnotation(int typeRef, + TypePath typePath, Label[] start, Label[] end, int[] index, + String desc, boolean visible) { + if (!ClassReader.ANNOTATIONS) { + return null; + } + ByteVector bv = new ByteVector(); + // write target_type and target_info + bv.putByte(typeRef >>> 24).putShort(start.length); + for (int i = 0; i < start.length; ++i) { + bv.putShort(start[i].position) + .putShort(end[i].position - start[i].position) + .putShort(index[i]); + } + if (typePath == null) { + bv.putByte(0); + } else { + int length = typePath.b[typePath.offset] * 2 + 1; + bv.putByteArray(typePath.b, typePath.offset, length); + } + // write type, and reserve space for values count + bv.putShort(cw.newUTF8(desc)).putShort(0); + AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv, + bv.length - 2); + if (visible) { + aw.next = ctanns; + ctanns = aw; + } else { + aw.next = ictanns; + ictanns = aw; + } + return aw; + } + @Override public void visitLineNumber(final int line, final Label start) { if (lineNumber == null) { @@ -1237,6 +1409,14 @@ public void visitLineNumber(final int line, final Label start) { @Override public void visitMaxs(final int maxStack, final int maxLocals) { + if (resize) { + // replaces the temporary jump opcodes introduced by Label.resolve. + if (ClassReader.RESIZE) { + resizeInstructions(); + } else { + throw new RuntimeException("Method code too large!"); + } + } if (ClassReader.FRAMES && compute == FRAMES) { // completes the control flow graph with exception handler blocks Handler handler = firstHandler; @@ -1858,22 +2038,12 @@ final int getSize() { if (classReaderOffset != 0) { return 6 + classReaderLength; } - if (resize) { - // replaces the temporary jump opcodes introduced by Label.resolve. - if (ClassReader.RESIZE) { - resizeInstructions(); - } else { - throw new RuntimeException("Method code too large!"); - } - } int size = 8; if (code.length > 0) { if (code.length > 65536) { String nameString = ""; - int i = 0; - // find item that corresponds to the index of our name - while (i < cw.items.length && (cw.items[i] == null || cw.items[i].index != name)) i++; - if (cw.items[i] != null) nameString = cw.items[i].strVal1 +"'s "; + Item nameItem = cw.findItemByIndex(name); + if (nameItem != null) nameString = nameItem.strVal1 +"'s "; throw new RuntimeException("Method "+ nameString +"code too large!"); } cw.newUTF8("Code"); @@ -1895,6 +2065,14 @@ final int getSize() { cw.newUTF8(zip ? 
"StackMapTable" : "StackMap"); size += 8 + stackMap.length; } + if (ClassReader.ANNOTATIONS && ctanns != null) { + cw.newUTF8("RuntimeVisibleTypeAnnotations"); + size += 8 + ctanns.getSize(); + } + if (ClassReader.ANNOTATIONS && ictanns != null) { + cw.newUTF8("RuntimeInvisibleTypeAnnotations"); + size += 8 + ictanns.getSize(); + } if (cattrs != null) { size += cattrs.getSize(cw, code.data, code.length, maxStack, maxLocals); @@ -1920,6 +2098,10 @@ final int getSize() { cw.newUTF8(signature); size += 8; } + if (methodParameters != null) { + cw.newUTF8("MethodParameters"); + size += 7 + methodParameters.length; + } if (ClassReader.ANNOTATIONS && annd != null) { cw.newUTF8("AnnotationDefault"); size += 6 + annd.length; @@ -1932,6 +2114,14 @@ final int getSize() { cw.newUTF8("RuntimeInvisibleAnnotations"); size += 8 + ianns.getSize(); } + if (ClassReader.ANNOTATIONS && tanns != null) { + cw.newUTF8("RuntimeVisibleTypeAnnotations"); + size += 8 + tanns.getSize(); + } + if (ClassReader.ANNOTATIONS && itanns != null) { + cw.newUTF8("RuntimeInvisibleTypeAnnotations"); + size += 8 + itanns.getSize(); + } if (ClassReader.ANNOTATIONS && panns != null) { cw.newUTF8("RuntimeVisibleParameterAnnotations"); size += 7 + 2 * (panns.length - synthetics); @@ -1988,6 +2178,9 @@ final void put(final ByteVector out) { if (ClassReader.SIGNATURES && signature != null) { ++attributeCount; } + if (methodParameters != null) { + ++attributeCount; + } if (ClassReader.ANNOTATIONS && annd != null) { ++attributeCount; } @@ -1997,6 +2190,12 @@ final void put(final ByteVector out) { if (ClassReader.ANNOTATIONS && ianns != null) { ++attributeCount; } + if (ClassReader.ANNOTATIONS && tanns != null) { + ++attributeCount; + } + if (ClassReader.ANNOTATIONS && itanns != null) { + ++attributeCount; + } if (ClassReader.ANNOTATIONS && panns != null) { ++attributeCount; } @@ -2021,6 +2220,12 @@ final void put(final ByteVector out) { if (stackMap != null) { size += 8 + stackMap.length; } + if (ClassReader.ANNOTATIONS && ctanns != null) { + size += 8 + ctanns.getSize(); + } + if (ClassReader.ANNOTATIONS && ictanns != null) { + size += 8 + ictanns.getSize(); + } if (cattrs != null) { size += cattrs.getSize(cw, code.data, code.length, maxStack, maxLocals); @@ -2050,6 +2255,12 @@ final void put(final ByteVector out) { if (stackMap != null) { ++attributeCount; } + if (ClassReader.ANNOTATIONS && ctanns != null) { + ++attributeCount; + } + if (ClassReader.ANNOTATIONS && ictanns != null) { + ++attributeCount; + } if (cattrs != null) { attributeCount += cattrs.getCount(); } @@ -2075,6 +2286,14 @@ final void put(final ByteVector out) { out.putInt(stackMap.length + 2).putShort(frameCount); out.putByteArray(stackMap.data, 0, stackMap.length); } + if (ClassReader.ANNOTATIONS && ctanns != null) { + out.putShort(cw.newUTF8("RuntimeVisibleTypeAnnotations")); + ctanns.put(out); + } + if (ClassReader.ANNOTATIONS && ictanns != null) { + out.putShort(cw.newUTF8("RuntimeInvisibleTypeAnnotations")); + ictanns.put(out); + } if (cattrs != null) { cattrs.put(cw, code.data, code.length, maxLocals, maxStack, out); } @@ -2100,6 +2319,12 @@ final void put(final ByteVector out) { out.putShort(cw.newUTF8("Signature")).putInt(2) .putShort(cw.newUTF8(signature)); } + if (methodParameters != null) { + out.putShort(cw.newUTF8("MethodParameters")); + out.putInt(methodParameters.length + 1).putByte( + methodParametersCount); + out.putByteArray(methodParameters.data, 0, methodParameters.length); + } if (ClassReader.ANNOTATIONS && annd != null) { 
out.putShort(cw.newUTF8("AnnotationDefault")); out.putInt(annd.length); @@ -2113,6 +2338,14 @@ final void put(final ByteVector out) { out.putShort(cw.newUTF8("RuntimeInvisibleAnnotations")); ianns.put(out); } + if (ClassReader.ANNOTATIONS && tanns != null) { + out.putShort(cw.newUTF8("RuntimeVisibleTypeAnnotations")); + tanns.put(out); + } + if (ClassReader.ANNOTATIONS && itanns != null) { + out.putShort(cw.newUTF8("RuntimeInvisibleTypeAnnotations")); + itanns.put(out); + } if (ClassReader.ANNOTATIONS && panns != null) { out.putShort(cw.newUTF8("RuntimeVisibleParameterAnnotations")); AnnotationWriter.put(panns, synthetics, out); @@ -2464,49 +2697,50 @@ private void resizeInstructions() { } } - // recomputes the stack map frames - if (frameCount > 0) { - if (compute == FRAMES) { - frameCount = 0; - stackMap = null; - previousFrame = null; - frame = null; - Frame f = new Frame(); - f.owner = labels; - Type[] args = Type.getArgumentTypes(descriptor); - f.initInputFrame(cw, access, args, maxLocals); - visitFrame(f); - Label l = labels; - while (l != null) { - /* - * here we need the original label position. getNewOffset - * must therefore never have been called for this label. - */ - u = l.position - 3; - if ((l.status & Label.STORE) != 0 || (u >= 0 && resize[u])) { - getNewOffset(allIndexes, allSizes, l); - // TODO update offsets in UNINITIALIZED values - visitFrame(l.frame); - } - l = l.successor; - } - } else { + // updates the stack map frame labels + if (compute == FRAMES) { + Label l = labels; + while (l != null) { /* - * Resizing an existing stack map frame table is really hard. - * Not only the table must be parsed to update the offets, but - * new frames may be needed for jump instructions that were - * inserted by this method. And updating the offsets or - * inserting frames can change the format of the following - * frames, in case of packed frames. In practice the whole table - * must be recomputed. For this the frames are marked as - * potentially invalid. This will cause the whole class to be - * reread and rewritten with the COMPUTE_FRAMES option (see the - * ClassWriter.toByteArray method). This is not very efficient - * but is much easier and requires much less code than any other - * method I can think of. + * Detects the labels that are just after an IF instruction that + * has been resized with the IFNOT GOTO_W pattern. These labels + * are now the target of a jump instruction (the IFNOT + * instruction). Note that we need the original label position + * here. getNewOffset must therefore never have been called for + * this label. */ - cw.invalidFrames = true; + u = l.position - 3; + if (u >= 0 && resize[u]) { + l.status |= Label.TARGET; + } + getNewOffset(allIndexes, allSizes, l); + l = l.successor; } + // Update the offsets in the uninitialized types + for (i = 0; i < cw.typeTable.length; ++i) { + Item item = cw.typeTable[i]; + if (item != null && item.type == ClassWriter.TYPE_UNINIT) { + item.intVal = getNewOffset(allIndexes, allSizes, 0, + item.intVal); + } + } + // The stack map frames are not serialized yet, so we don't need + // to update them. They will be serialized in visitMaxs. + } else if (frameCount > 0) { + /* + * Resizing an existing stack map frame table is really hard. Not + * only the table must be parsed to update the offets, but new + * frames may be needed for jump instructions that were inserted by + * this method. And updating the offsets or inserting frames can + * change the format of the following frames, in case of packed + * frames. 
In practice the whole table must be recomputed. For this + * the frames are marked as potentially invalid. This will cause the + * whole class to be reread and rewritten with the COMPUTE_FRAMES + * option (see the ClassWriter.toByteArray method). This is not very + * efficient but is much easier and requires much less code than any + * other method I can think of. + */ + cw.invalidFrames = true; } // updates the exception handler block labels Handler h = firstHandler; diff --git a/src/asm/scala/tools/asm/Opcodes.java b/src/asm/scala/tools/asm/Opcodes.java index 809e5ae5908e..24eaffa71794 100644 --- a/src/asm/scala/tools/asm/Opcodes.java +++ b/src/asm/scala/tools/asm/Opcodes.java @@ -46,6 +46,7 @@ public interface Opcodes { // ASM API versions int ASM4 = 4 << 16 | 0 << 8 | 0; + int ASM5 = 5 << 16 | 0 << 8 | 0; // versions @@ -56,6 +57,7 @@ public interface Opcodes { int V1_5 = 0 << 16 | 49; int V1_6 = 0 << 16 | 50; int V1_7 = 0 << 16 | 51; + int V1_8 = 0 << 16 | 52; // access flags @@ -63,7 +65,7 @@ public interface Opcodes { int ACC_PRIVATE = 0x0002; // class, field, method int ACC_PROTECTED = 0x0004; // class, field, method int ACC_STATIC = 0x0008; // field, method - int ACC_FINAL = 0x0010; // class, field, method + int ACC_FINAL = 0x0010; // class, field, method, parameter int ACC_SUPER = 0x0020; // class int ACC_SYNCHRONIZED = 0x0020; // method int ACC_VOLATILE = 0x0040; // field @@ -74,9 +76,10 @@ public interface Opcodes { int ACC_INTERFACE = 0x0200; // class int ACC_ABSTRACT = 0x0400; // class, method int ACC_STRICT = 0x0800; // method - int ACC_SYNTHETIC = 0x1000; // class, field, method + int ACC_SYNTHETIC = 0x1000; // class, field, method, parameter int ACC_ANNOTATION = 0x2000; // class int ACC_ENUM = 0x4000; // class(?) field inner + int ACC_MANDATED = 0x8000; // parameter // ASM specific pseudo access flags diff --git a/src/asm/scala/tools/asm/Type.java b/src/asm/scala/tools/asm/Type.java index 7821a492e661..7887080dee89 100644 --- a/src/asm/scala/tools/asm/Type.java +++ b/src/asm/scala/tools/asm/Type.java @@ -401,8 +401,8 @@ public static Type getReturnType(final Method method) { * @return the size of the arguments of the method (plus one for the * implicit this argument), argSize, and the size of its return * value, retSize, packed into a single int i = - * (argSize << 2) | retSize (argSize is therefore equal to - * i >> 2, and retSize to i & 0x03). + * (argSize << 2) | retSize (argSize is therefore equal to + * i >> 2, and retSize to i & 0x03). */ public static int getArgumentsAndReturnSizes(final String desc) { int n = 1; @@ -606,9 +606,10 @@ public Type getReturnType() { * * @return the size of the arguments (plus one for the implicit this * argument), argSize, and the size of the return value, retSize, - * packed into a single int i = (argSize << 2) | retSize - * (argSize is therefore equal to i >> 2, and retSize to - * i & 0x03). + * packed into a single + * int i = (argSize << 2) | retSize + * (argSize is therefore equal to i >> 2, + * and retSize to i & 0x03). */ public int getArgumentsAndReturnSizes() { return getArgumentsAndReturnSizes(getDescriptor()); diff --git a/src/asm/scala/tools/asm/TypePath.java b/src/asm/scala/tools/asm/TypePath.java new file mode 100644 index 000000000000..d4c6f0d8570c --- /dev/null +++ b/src/asm/scala/tools/asm/TypePath.java @@ -0,0 +1,193 @@ +/*** + * ASM: a very small and fast Java bytecode manipulation framework + * Copyright (c) 2000-2013 INRIA, France Telecom + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. Neither the name of the copyright holders nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF + * THE POSSIBILITY OF SUCH DAMAGE. + */ + +package scala.tools.asm; + +/** + * The path to a type argument, wildcard bound, array element type, or static + * inner type within an enclosing type. + * + * @author Eric Bruneton + */ +public class TypePath { + + /** + * A type path step that steps into the element type of an array type. See + * {@link #getStep getStep}. + */ + public final static int ARRAY_ELEMENT = 0; + + /** + * A type path step that steps into the nested type of a class type. See + * {@link #getStep getStep}. + */ + public final static int INNER_TYPE = 1; + + /** + * A type path step that steps into the bound of a wildcard type. See + * {@link #getStep getStep}. + */ + public final static int WILDCARD_BOUND = 2; + + /** + * A type path step that steps into a type argument of a generic type. See + * {@link #getStep getStep}. + */ + public final static int TYPE_ARGUMENT = 3; + + /** + * The byte array where the path is stored, in Java class file format. + */ + byte[] b; + + /** + * The offset of the first byte of the type path in 'b'. + */ + int offset; + + /** + * Creates a new type path. + * + * @param b + * the byte array containing the type path in Java class file + * format. + * @param offset + * the offset of the first byte of the type path in 'b'. + */ + TypePath(byte[] b, int offset) { + this.b = b; + this.offset = offset; + } + + /** + * Returns the length of this path. + * + * @return the length of this path. + */ + public int getLength() { + return b[offset]; + } + + /** + * Returns the value of the given step of this path. + * + * @param index + * an index between 0 and {@link #getLength()}, exclusive. + * @return {@link #ARRAY_ELEMENT ARRAY_ELEMENT}, {@link #INNER_TYPE + * INNER_TYPE}, {@link #WILDCARD_BOUND WILDCARD_BOUND}, or + * {@link #TYPE_ARGUMENT TYPE_ARGUMENT}. + */ + public int getStep(int index) { + return b[offset + 2 * index + 1]; + } + + /** + * Returns the index of the type argument that the given step is stepping + * into. 
This method should only be used for steps whose value is + * {@link #TYPE_ARGUMENT TYPE_ARGUMENT}. + * + * @param index + * an index between 0 and {@link #getLength()}, exclusive. + * @return the index of the type argument that the given step is stepping + * into. + */ + public int getStepArgument(int index) { + return b[offset + 2 * index + 2]; + } + + /** + * Converts a type path in string form, in the format used by + * {@link #toString()}, into a TypePath object. + * + * @param typePath + * a type path in string form, in the format used by + * {@link #toString()}. May be null or empty. + * @return the corresponding TypePath object, or null if the path is empty. + */ + public static TypePath fromString(final String typePath) { + if (typePath == null || typePath.length() == 0) { + return null; + } + int n = typePath.length(); + ByteVector out = new ByteVector(n); + out.putByte(0); + for (int i = 0; i < n;) { + char c = typePath.charAt(i++); + if (c == '[') { + out.put11(ARRAY_ELEMENT, 0); + } else if (c == '.') { + out.put11(INNER_TYPE, 0); + } else if (c == '*') { + out.put11(WILDCARD_BOUND, 0); + } else if (c >= '0' && c <= '9') { + int typeArg = c - '0'; + while (i < n && (c = typePath.charAt(i)) >= '0' && c <= '9') { + typeArg = typeArg * 10 + c - '0'; + i += 1; + } + out.put11(TYPE_ARGUMENT, typeArg); + } + } + out.data[0] = (byte) (out.length / 2); + return new TypePath(out.data, 0); + } + + /** + * Returns a string representation of this type path. {@link #ARRAY_ELEMENT + * ARRAY_ELEMENT} steps are represented with '[', {@link #INNER_TYPE + * INNER_TYPE} steps with '.', {@link #WILDCARD_BOUND WILDCARD_BOUND} steps + * with '*' and {@link #TYPE_ARGUMENT TYPE_ARGUMENT} steps with their type + * argument index in decimal form. + */ + @Override + public String toString() { + int length = getLength(); + StringBuilder result = new StringBuilder(length * 2); + for (int i = 0; i < length; ++i) { + switch (getStep(i)) { + case ARRAY_ELEMENT: + result.append('['); + break; + case INNER_TYPE: + result.append('.'); + break; + case WILDCARD_BOUND: + result.append('*'); + break; + case TYPE_ARGUMENT: + result.append(getStepArgument(i)); + break; + default: + result.append('_'); + } + } + return result.toString(); + } +} diff --git a/src/asm/scala/tools/asm/TypeReference.java b/src/asm/scala/tools/asm/TypeReference.java new file mode 100644 index 000000000000..118b0f6529f9 --- /dev/null +++ b/src/asm/scala/tools/asm/TypeReference.java @@ -0,0 +1,452 @@ +/*** + * ASM: a very small and fast Java bytecode manipulation framework + * Copyright (c) 2000-2013 INRIA, France Telecom + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. Neither the name of the copyright holders nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. 
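
TypePath is a thin wrapper over the class-file encoding: one length byte followed by a (step kind, step argument) byte pair per step, with fromString and toString converting to and from the compact string form parsed above. A small sketch of reading a path back out, assuming the vendored classes are on the classpath (the path string "[2" is just an example):

    import scala.tools.asm.TypePath;

    public class TypePathDemo {
        public static void main(String[] args) {
            // '[' steps into the array element type, a digit steps into that type argument.
            TypePath path = TypePath.fromString("[2");
            System.out.println(path.getLength());                          // 2
            System.out.println(path.getStep(0) == TypePath.ARRAY_ELEMENT); // true
            System.out.println(path.getStep(1) == TypePath.TYPE_ARGUMENT); // true
            System.out.println(path.getStepArgument(1));                   // 2
        }
    }
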
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF + * THE POSSIBILITY OF SUCH DAMAGE. + */ + +package scala.tools.asm; + +/** + * A reference to a type appearing in a class, field or method declaration, or + * on an instruction. Such a reference designates the part of the class where + * the referenced type is appearing (e.g. an 'extends', 'implements' or 'throws' + * clause, a 'new' instruction, a 'catch' clause, a type cast, a local variable + * declaration, etc). + * + * @author Eric Bruneton + */ +public class TypeReference { + + /** + * The sort of type references that target a type parameter of a generic + * class. See {@link #getSort getSort}. + */ + public final static int CLASS_TYPE_PARAMETER = 0x00; + + /** + * The sort of type references that target a type parameter of a generic + * method. See {@link #getSort getSort}. + */ + public final static int METHOD_TYPE_PARAMETER = 0x01; + + /** + * The sort of type references that target the super class of a class or one + * of the interfaces it implements. See {@link #getSort getSort}. + */ + public final static int CLASS_EXTENDS = 0x10; + + /** + * The sort of type references that target a bound of a type parameter of a + * generic class. See {@link #getSort getSort}. + */ + public final static int CLASS_TYPE_PARAMETER_BOUND = 0x11; + + /** + * The sort of type references that target a bound of a type parameter of a + * generic method. See {@link #getSort getSort}. + */ + public final static int METHOD_TYPE_PARAMETER_BOUND = 0x12; + + /** + * The sort of type references that target the type of a field. See + * {@link #getSort getSort}. + */ + public final static int FIELD = 0x13; + + /** + * The sort of type references that target the return type of a method. See + * {@link #getSort getSort}. + */ + public final static int METHOD_RETURN = 0x14; + + /** + * The sort of type references that target the receiver type of a method. + * See {@link #getSort getSort}. + */ + public final static int METHOD_RECEIVER = 0x15; + + /** + * The sort of type references that target the type of a formal parameter of + * a method. See {@link #getSort getSort}. + */ + public final static int METHOD_FORMAL_PARAMETER = 0x16; + + /** + * The sort of type references that target the type of an exception declared + * in the throws clause of a method. See {@link #getSort getSort}. + */ + public final static int THROWS = 0x17; + + /** + * The sort of type references that target the type of a local variable in a + * method. See {@link #getSort getSort}. + */ + public final static int LOCAL_VARIABLE = 0x40; + + /** + * The sort of type references that target the type of a resource variable + * in a method. See {@link #getSort getSort}. 
+ */ + public final static int RESOURCE_VARIABLE = 0x41; + + /** + * The sort of type references that target the type of the exception of a + * 'catch' clause in a method. See {@link #getSort getSort}. + */ + public final static int EXCEPTION_PARAMETER = 0x42; + + /** + * The sort of type references that target the type declared in an + * 'instanceof' instruction. See {@link #getSort getSort}. + */ + public final static int INSTANCEOF = 0x43; + + /** + * The sort of type references that target the type of the object created by + * a 'new' instruction. See {@link #getSort getSort}. + */ + public final static int NEW = 0x44; + + /** + * The sort of type references that target the receiver type of a + * constructor reference. See {@link #getSort getSort}. + */ + public final static int CONSTRUCTOR_REFERENCE = 0x45; + + /** + * The sort of type references that target the receiver type of a method + * reference. See {@link #getSort getSort}. + */ + public final static int METHOD_REFERENCE = 0x46; + + /** + * The sort of type references that target the type declared in an explicit + * or implicit cast instruction. See {@link #getSort getSort}. + */ + public final static int CAST = 0x47; + + /** + * The sort of type references that target a type parameter of a generic + * constructor in a constructor call. See {@link #getSort getSort}. + */ + public final static int CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT = 0x48; + + /** + * The sort of type references that target a type parameter of a generic + * method in a method call. See {@link #getSort getSort}. + */ + public final static int METHOD_INVOCATION_TYPE_ARGUMENT = 0x49; + + /** + * The sort of type references that target a type parameter of a generic + * constructor in a constructor reference. See {@link #getSort getSort}. + */ + public final static int CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT = 0x4A; + + /** + * The sort of type references that target a type parameter of a generic + * method in a method reference. See {@link #getSort getSort}. + */ + public final static int METHOD_REFERENCE_TYPE_ARGUMENT = 0x4B; + + /** + * The type reference value in Java class file format. + */ + private int value; + + /** + * Creates a new TypeReference. + * + * @param typeRef + * the int encoded value of the type reference, as received in a + * visit method related to type annotations, like + * visitTypeAnnotation. + */ + public TypeReference(int typeRef) { + this.value = typeRef; + } + + /** + * Returns a type reference of the given sort. + * + * @param sort + * {@link #FIELD FIELD}, {@link #METHOD_RETURN METHOD_RETURN}, + * {@link #METHOD_RECEIVER METHOD_RECEIVER}, + * {@link #LOCAL_VARIABLE LOCAL_VARIABLE}, + * {@link #RESOURCE_VARIABLE RESOURCE_VARIABLE}, + * {@link #INSTANCEOF INSTANCEOF}, {@link #NEW NEW}, + * {@link #CONSTRUCTOR_REFERENCE CONSTRUCTOR_REFERENCE}, or + * {@link #METHOD_REFERENCE METHOD_REFERENCE}. + * @return a type reference of the given sort. + */ + public static TypeReference newTypeReference(int sort) { + return new TypeReference(sort << 24); + } + + /** + * Returns a reference to a type parameter of a generic class or method. + * + * @param sort + * {@link #CLASS_TYPE_PARAMETER CLASS_TYPE_PARAMETER} or + * {@link #METHOD_TYPE_PARAMETER METHOD_TYPE_PARAMETER}. + * @param paramIndex + * the type parameter index. + * @return a reference to the given generic class or method type parameter. 
+ */ + public static TypeReference newTypeParameterReference(int sort, + int paramIndex) { + return new TypeReference((sort << 24) | (paramIndex << 16)); + } + + /** + * Returns a reference to a type parameter bound of a generic class or + * method. + * + * @param sort + * {@link #CLASS_TYPE_PARAMETER CLASS_TYPE_PARAMETER} or + * {@link #METHOD_TYPE_PARAMETER METHOD_TYPE_PARAMETER}. + * @param paramIndex + * the type parameter index. + * @param boundIndex + * the type bound index within the above type parameters. + * @return a reference to the given generic class or method type parameter + * bound. + */ + public static TypeReference newTypeParameterBoundReference(int sort, + int paramIndex, int boundIndex) { + return new TypeReference((sort << 24) | (paramIndex << 16) + | (boundIndex << 8)); + } + + /** + * Returns a reference to the super class or to an interface of the + * 'implements' clause of a class. + * + * @param itfIndex + * the index of an interface in the 'implements' clause of a + * class, or -1 to reference the super class of the class. + * @return a reference to the given super type of a class. + */ + public static TypeReference newSuperTypeReference(int itfIndex) { + itfIndex &= 0xFFFF; + return new TypeReference((CLASS_EXTENDS << 24) | (itfIndex << 8)); + } + + /** + * Returns a reference to the type of a formal parameter of a method. + * + * @param paramIndex + * the formal parameter index. + * + * @return a reference to the type of the given method formal parameter. + */ + public static TypeReference newFormalParameterReference(int paramIndex) { + return new TypeReference((METHOD_FORMAL_PARAMETER << 24) + | (paramIndex << 16)); + } + + /** + * Returns a reference to the type of an exception, in a 'throws' clause of + * a method. + * + * @param exceptionIndex + * the index of an exception in a 'throws' clause of a method. + * + * @return a reference to the type of the given exception. + */ + public static TypeReference newExceptionReference(int exceptionIndex) { + return new TypeReference((THROWS << 24) | (exceptionIndex << 8)); + } + + /** + * Returns a reference to the type of the exception declared in a 'catch' + * clause of a method. + * + * @param tryCatchBlockIndex + * the index of a try catch block (using the order in which they + * are visited with visitTryCatchBlock). + * + * @return a reference to the type of the given exception. + */ + public static TypeReference newTryCatchReference(int tryCatchBlockIndex) { + return new TypeReference((EXCEPTION_PARAMETER << 24) + | (tryCatchBlockIndex << 8)); + } + + /** + * Returns a reference to the type of a type argument in a constructor or + * method call or reference. + * + * @param sort + * {@link #CAST CAST}, + * {@link #CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT + * CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT}, + * {@link #METHOD_INVOCATION_TYPE_ARGUMENT + * METHOD_INVOCATION_TYPE_ARGUMENT}, + * {@link #CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT + * CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT}, or + * {@link #METHOD_REFERENCE_TYPE_ARGUMENT + * METHOD_REFERENCE_TYPE_ARGUMENT}. + * @param argIndex + * the type argument index. + * + * @return a reference to the type of the given type argument. + */ + public static TypeReference newTypeArgumentReference(int sort, int argIndex) { + return new TypeReference((sort << 24) | argIndex); + } + + /** + * Returns the sort of this type reference. 
+ * + * @return {@link #CLASS_TYPE_PARAMETER CLASS_TYPE_PARAMETER}, + * {@link #METHOD_TYPE_PARAMETER METHOD_TYPE_PARAMETER}, + * {@link #CLASS_EXTENDS CLASS_EXTENDS}, + * {@link #CLASS_TYPE_PARAMETER_BOUND CLASS_TYPE_PARAMETER_BOUND}, + * {@link #METHOD_TYPE_PARAMETER_BOUND METHOD_TYPE_PARAMETER_BOUND}, + * {@link #FIELD FIELD}, {@link #METHOD_RETURN METHOD_RETURN}, + * {@link #METHOD_RECEIVER METHOD_RECEIVER}, + * {@link #METHOD_FORMAL_PARAMETER METHOD_FORMAL_PARAMETER}, + * {@link #THROWS THROWS}, {@link #LOCAL_VARIABLE LOCAL_VARIABLE}, + * {@link #RESOURCE_VARIABLE RESOURCE_VARIABLE}, + * {@link #EXCEPTION_PARAMETER EXCEPTION_PARAMETER}, + * {@link #INSTANCEOF INSTANCEOF}, {@link #NEW NEW}, + * {@link #CONSTRUCTOR_REFERENCE CONSTRUCTOR_REFERENCE}, + * {@link #METHOD_REFERENCE METHOD_REFERENCE}, {@link #CAST CAST}, + * {@link #CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT + * CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT}, + * {@link #METHOD_INVOCATION_TYPE_ARGUMENT + * METHOD_INVOCATION_TYPE_ARGUMENT}, + * {@link #CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT + * CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT}, or + * {@link #METHOD_REFERENCE_TYPE_ARGUMENT + * METHOD_REFERENCE_TYPE_ARGUMENT}. + */ + public int getSort() { + return value >>> 24; + } + + /** + * Returns the index of the type parameter referenced by this type + * reference. This method must only be used for type references whose sort + * is {@link #CLASS_TYPE_PARAMETER CLASS_TYPE_PARAMETER}, + * {@link #METHOD_TYPE_PARAMETER METHOD_TYPE_PARAMETER}, + * {@link #CLASS_TYPE_PARAMETER_BOUND CLASS_TYPE_PARAMETER_BOUND} or + * {@link #METHOD_TYPE_PARAMETER_BOUND METHOD_TYPE_PARAMETER_BOUND}. + * + * @return a type parameter index. + */ + public int getTypeParameterIndex() { + return (value & 0x00FF0000) >> 16; + } + + /** + * Returns the index of the type parameter bound, within the type parameter + * {@link #getTypeParameterIndex}, referenced by this type reference. This + * method must only be used for type references whose sort is + * {@link #CLASS_TYPE_PARAMETER_BOUND CLASS_TYPE_PARAMETER_BOUND} or + * {@link #METHOD_TYPE_PARAMETER_BOUND METHOD_TYPE_PARAMETER_BOUND}. + * + * @return a type parameter bound index. + */ + public int getTypeParameterBoundIndex() { + return (value & 0x0000FF00) >> 8; + } + + /** + * Returns the index of the "super type" of a class that is referenced by + * this type reference. This method must only be used for type references + * whose sort is {@link #CLASS_EXTENDS CLASS_EXTENDS}. + * + * @return the index of an interface in the 'implements' clause of a class, + * or -1 if this type reference references the type of the super + * class. + */ + public int getSuperTypeIndex() { + return (short) ((value & 0x00FFFF00) >> 8); + } + + /** + * Returns the index of the formal parameter whose type is referenced by + * this type reference. This method must only be used for type references + * whose sort is {@link #METHOD_FORMAL_PARAMETER METHOD_FORMAL_PARAMETER}. + * + * @return a formal parameter index. + */ + public int getFormalParameterIndex() { + return (value & 0x00FF0000) >> 16; + } + + /** + * Returns the index of the exception, in a 'throws' clause of a method, + * whose type is referenced by this type reference. This method must only be + * used for type references whose sort is {@link #THROWS THROWS}. + * + * @return the index of an exception in the 'throws' clause of a method. 
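
The factory methods and accessors of TypeReference are two views of the same packed int: the sort sits in the top byte and the indices in the bytes below it, exactly as the shift-and-mask code shows. A short round-trip sketch (the demo class name is only illustrative):

    import scala.tools.asm.TypeReference;

    public class TypeReferenceDemo {
        public static void main(String[] args) {
            // Reference the second bound of the first type parameter of a generic method.
            TypeReference ref = TypeReference.newTypeParameterBoundReference(
                    TypeReference.METHOD_TYPE_PARAMETER_BOUND, 0, 1);

            System.out.println(ref.getSort() == TypeReference.METHOD_TYPE_PARAMETER_BOUND); // true
            System.out.println(ref.getTypeParameterIndex());                                // 0
            System.out.println(ref.getTypeParameterBoundIndex());                           // 1

            // The raw value is what the new visit*Annotation methods take as 'typeRef'.
            System.out.println(Integer.toHexString(ref.getValue()));                        // 12000100
        }
    }
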
+ */ + public int getExceptionIndex() { + return (value & 0x00FFFF00) >> 8; + } + + /** + * Returns the index of the try catch block (using the order in which they + * are visited with visitTryCatchBlock), whose 'catch' type is referenced by + * this type reference. This method must only be used for type references + * whose sort is {@link #EXCEPTION_PARAMETER EXCEPTION_PARAMETER} . + * + * @return the index of an exception in the 'throws' clause of a method. + */ + public int getTryCatchBlockIndex() { + return (value & 0x00FFFF00) >> 8; + } + + /** + * Returns the index of the type argument referenced by this type reference. + * This method must only be used for type references whose sort is + * {@link #CAST CAST}, {@link #CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT + * CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT}, + * {@link #METHOD_INVOCATION_TYPE_ARGUMENT METHOD_INVOCATION_TYPE_ARGUMENT}, + * {@link #CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT + * CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT}, or + * {@link #METHOD_REFERENCE_TYPE_ARGUMENT METHOD_REFERENCE_TYPE_ARGUMENT}. + * + * @return a type parameter index. + */ + public int getTypeArgumentIndex() { + return value & 0xFF; + } + + /** + * Returns the int encoded value of this type reference, suitable for use in + * visit methods related to type annotations, like visitTypeAnnotation. + * + * @return the int encoded value of this type reference. + */ + public int getValue() { + return value; + } +} diff --git a/src/asm/scala/tools/asm/signature/SignatureVisitor.java b/src/asm/scala/tools/asm/signature/SignatureVisitor.java index f38f81f53b8b..1e16bd3f7c7b 100644 --- a/src/asm/scala/tools/asm/signature/SignatureVisitor.java +++ b/src/asm/scala/tools/asm/signature/SignatureVisitor.java @@ -73,7 +73,7 @@ public abstract class SignatureVisitor { /** * The ASM API version implemented by this visitor. The value of this field - * must be one of {@link Opcodes#ASM4}. + * must be one of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. */ protected final int api; @@ -82,9 +82,12 @@ public abstract class SignatureVisitor { * * @param api * the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. + * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. */ public SignatureVisitor(final int api) { + if (api != Opcodes.ASM4 && api != Opcodes.ASM5) { + throw new IllegalArgumentException(); + } this.api = api; } diff --git a/src/asm/scala/tools/asm/signature/SignatureWriter.java b/src/asm/scala/tools/asm/signature/SignatureWriter.java index ebf4fe07b4ec..65756eee510b 100644 --- a/src/asm/scala/tools/asm/signature/SignatureWriter.java +++ b/src/asm/scala/tools/asm/signature/SignatureWriter.java @@ -66,7 +66,7 @@ public class SignatureWriter extends SignatureVisitor { * Constructs a new {@link SignatureWriter} object. */ public SignatureWriter() { - super(Opcodes.ASM4); + super(Opcodes.ASM5); } // ------------------------------------------------------------------------ diff --git a/src/asm/scala/tools/asm/tree/AbstractInsnNode.java b/src/asm/scala/tools/asm/tree/AbstractInsnNode.java index 411eead3c794..2ce0c8b6ee2c 100644 --- a/src/asm/scala/tools/asm/tree/AbstractInsnNode.java +++ b/src/asm/scala/tools/asm/tree/AbstractInsnNode.java @@ -29,6 +29,7 @@ */ package scala.tools.asm.tree; +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -127,6 +128,28 @@ public abstract class AbstractInsnNode { */ protected int opcode; + /** + * The runtime visible type annotations of this instruction. 
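
Since SignatureWriter now passes ASM5 to its super constructor and SignatureVisitor validates the api argument, an unsupported version fails fast at construction time instead of misbehaving later. A small sketch, relying on SignatureWriter's existing visitClassType/visitEnd/toString methods (the 12345 value is deliberately bogus):

    import scala.tools.asm.signature.SignatureVisitor;
    import scala.tools.asm.signature.SignatureWriter;

    public class SignatureApiDemo {
        public static void main(String[] args) {
            // The writer is an ASM5 visitor now, but its output is unchanged.
            SignatureWriter sw = new SignatureWriter();
            sw.visitClassType("java/lang/Object");
            sw.visitEnd();
            System.out.println(sw.toString());   // Ljava/lang/Object;

            // Anything other than ASM4 or ASM5 is rejected by the new guard.
            try {
                new SignatureVisitor(12345) { };
            } catch (IllegalArgumentException expected) {
                System.out.println("unsupported api value rejected");
            }
        }
    }
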
This field is + * only used for real instructions (i.e. not for labels, frames, or line + * number nodes). This list is a list of {@link TypeAnnotationNode} objects. + * May be null. + * + * @associates scala.tools.asm.tree.TypeAnnotationNode + * @label visible + */ + public List visibleTypeAnnotations; + + /** + * The runtime invisible type annotations of this instruction. This field is + * only used for real instructions (i.e. not for labels, frames, or line + * number nodes). This list is a list of {@link TypeAnnotationNode} objects. + * May be null. + * + * @associates scala.tools.asm.tree.TypeAnnotationNode + * @label invisible + */ + public List invisibleTypeAnnotations; + /** * Previous instruction in the list to which this instruction belongs. */ @@ -203,6 +226,29 @@ public AbstractInsnNode getNext() { */ public abstract void accept(final MethodVisitor cv); + /** + * Makes the given visitor visit the annotations of this instruction. + * + * @param mv + * a method visitor. + */ + protected final void acceptAnnotations(final MethodVisitor mv) { + int n = visibleTypeAnnotations == null ? 0 : visibleTypeAnnotations + .size(); + for (int i = 0; i < n; ++i) { + TypeAnnotationNode an = visibleTypeAnnotations.get(i); + an.accept(mv.visitInsnAnnotation(an.typeRef, an.typePath, an.desc, + true)); + } + n = invisibleTypeAnnotations == null ? 0 : invisibleTypeAnnotations + .size(); + for (int i = 0; i < n; ++i) { + TypeAnnotationNode an = invisibleTypeAnnotations.get(i); + an.accept(mv.visitInsnAnnotation(an.typeRef, an.typePath, an.desc, + false)); + } + } + /** * Returns a copy of this instruction. * @@ -245,4 +291,36 @@ static LabelNode[] clone(final List labels, } return clones; } + + /** + * Clones the annotations of the given instruction into this instruction. + * + * @param insn + * the source instruction. + * @return this instruction. + */ + protected final AbstractInsnNode cloneAnnotations( + final AbstractInsnNode insn) { + if (insn.visibleTypeAnnotations != null) { + this.visibleTypeAnnotations = new ArrayList(); + for (int i = 0; i < insn.visibleTypeAnnotations.size(); ++i) { + TypeAnnotationNode src = insn.visibleTypeAnnotations.get(i); + TypeAnnotationNode ann = new TypeAnnotationNode(src.typeRef, + src.typePath, src.desc); + src.accept(ann); + this.visibleTypeAnnotations.add(ann); + } + } + if (insn.invisibleTypeAnnotations != null) { + this.invisibleTypeAnnotations = new ArrayList(); + for (int i = 0; i < insn.invisibleTypeAnnotations.size(); ++i) { + TypeAnnotationNode src = insn.invisibleTypeAnnotations.get(i); + TypeAnnotationNode ann = new TypeAnnotationNode(src.typeRef, + src.typePath, src.desc); + src.accept(ann); + this.invisibleTypeAnnotations.add(ann); + } + } + return this; + } } diff --git a/src/asm/scala/tools/asm/tree/AnnotationNode.java b/src/asm/scala/tools/asm/tree/AnnotationNode.java index 1f4beef9f754..b8d598806678 100644 --- a/src/asm/scala/tools/asm/tree/AnnotationNode.java +++ b/src/asm/scala/tools/asm/tree/AnnotationNode.java @@ -67,9 +67,14 @@ public class AnnotationNode extends AnnotationVisitor { * * @param desc * the class descriptor of the annotation class. + * @throws IllegalStateException + * If a subclass calls this constructor. */ public AnnotationNode(final String desc) { - this(Opcodes.ASM4, desc); + this(Opcodes.ASM5, desc); + if (getClass() != AnnotationNode.class) { + throw new IllegalStateException(); + } } /** @@ -77,7 +82,7 @@ public AnnotationNode(final String desc) { * * @param api * the ASM API version implemented by this visitor. 
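
The constructor pattern above recurs across the tree package in this patch: the convenience constructor defaults to ASM5 but refuses to run for subclasses, so a subclass has to state explicitly which API level it implements. A small sketch of the intended usage (the subclass name is made up for illustration):

    import scala.tools.asm.Opcodes;
    import scala.tools.asm.tree.AnnotationNode;

    public class AnnotationNodeDemo {
        // A subclass must call the (api, desc) constructor with an explicit version.
        static class MyAnnotationNode extends AnnotationNode {
            MyAnnotationNode(String desc) {
                super(Opcodes.ASM5, desc);
            }
        }

        public static void main(String[] args) {
            // Direct use of the one-argument constructor is still fine...
            System.out.println(new AnnotationNode("Ljava/lang/Deprecated;").desc);
            // ...and the subclass works because it names its API level; had it called
            // super(desc) instead, the new guard would throw IllegalStateException.
            System.out.println(new MyAnnotationNode("Ljava/lang/Deprecated;").desc);
        }
    }
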
Must be one - * of {@link Opcodes#ASM4}. + * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. * @param desc * the class descriptor of the annotation class. */ @@ -93,7 +98,7 @@ public AnnotationNode(final int api, final String desc) { * where the visited values must be stored. */ AnnotationNode(final List values) { - super(Opcodes.ASM4); + super(Opcodes.ASM5); this.values = values; } @@ -166,7 +171,8 @@ public void visitEnd() { * versions of the ASM API than the given version. * * @param api - * an ASM API version. Must be one of {@link Opcodes#ASM4}. + * an ASM API version. Must be one of {@link Opcodes#ASM4} or + * {@link Opcodes#ASM5}. */ public void check(final int api) { // nothing to do diff --git a/src/asm/scala/tools/asm/tree/ClassNode.java b/src/asm/scala/tools/asm/tree/ClassNode.java index c3d999985a25..304b4ec9f5a0 100644 --- a/src/asm/scala/tools/asm/tree/ClassNode.java +++ b/src/asm/scala/tools/asm/tree/ClassNode.java @@ -39,6 +39,7 @@ import scala.tools.asm.FieldVisitor; import scala.tools.asm.MethodVisitor; import scala.tools.asm.Opcodes; +import scala.tools.asm.TypePath; /** * A node that represents a class. @@ -132,6 +133,24 @@ public class ClassNode extends ClassVisitor { */ public List invisibleAnnotations; + /** + * The runtime visible type annotations of this class. This list is a list + * of {@link TypeAnnotationNode} objects. May be null. + * + * @associates scala.tools.asm.tree.TypeAnnotationNode + * @label visible + */ + public List visibleTypeAnnotations; + + /** + * The runtime invisible type annotations of this class. This list is a list + * of {@link TypeAnnotationNode} objects. May be null. + * + * @associates scala.tools.asm.tree.TypeAnnotationNode + * @label invisible + */ + public List invisibleTypeAnnotations; + /** * The non standard attributes of this class. This list is a list of * {@link Attribute} objects. May be null. @@ -168,9 +187,15 @@ public class ClassNode extends ClassVisitor { * Constructs a new {@link ClassNode}. Subclasses must not use this * constructor. Instead, they must use the {@link #ClassNode(int)} * version. + * + * @throws IllegalStateException + * If a subclass calls this constructor. */ public ClassNode() { - this(Opcodes.ASM4); + this(Opcodes.ASM5); + if (getClass() != ClassNode.class) { + throw new IllegalStateException(); + } } /** @@ -178,7 +203,7 @@ public ClassNode() { * * @param api * the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. + * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. */ public ClassNode(final int api) { super(api); @@ -238,6 +263,24 @@ public AnnotationVisitor visitAnnotation(final String desc, return an; } + @Override + public AnnotationVisitor visitTypeAnnotation(int typeRef, + TypePath typePath, String desc, boolean visible) { + TypeAnnotationNode an = new TypeAnnotationNode(typeRef, typePath, desc); + if (visible) { + if (visibleTypeAnnotations == null) { + visibleTypeAnnotations = new ArrayList(1); + } + visibleTypeAnnotations.add(an); + } else { + if (invisibleTypeAnnotations == null) { + invisibleTypeAnnotations = new ArrayList(1); + } + invisibleTypeAnnotations.add(an); + } + return an; + } + @Override public void visitAttribute(final Attribute attr) { if (attrs == null) { @@ -286,10 +329,26 @@ public void visitEnd() { * API than the given version. * * @param api - * an ASM API version. Must be one of {@link Opcodes#ASM4}. + * an ASM API version. Must be one of {@link Opcodes#ASM4} or + * {@link Opcodes#ASM5}. 
*/ public void check(final int api) { - // nothing to do + if (api == Opcodes.ASM4) { + if (visibleTypeAnnotations != null + && visibleTypeAnnotations.size() > 0) { + throw new RuntimeException(); + } + if (invisibleTypeAnnotations != null + && invisibleTypeAnnotations.size() > 0) { + throw new RuntimeException(); + } + for (FieldNode f : fields) { + f.check(api); + } + for (MethodNode m : methods) { + m.check(api); + } + } } /** @@ -323,6 +382,19 @@ public void accept(final ClassVisitor cv) { AnnotationNode an = invisibleAnnotations.get(i); an.accept(cv.visitAnnotation(an.desc, false)); } + n = visibleTypeAnnotations == null ? 0 : visibleTypeAnnotations.size(); + for (i = 0; i < n; ++i) { + TypeAnnotationNode an = visibleTypeAnnotations.get(i); + an.accept(cv.visitTypeAnnotation(an.typeRef, an.typePath, an.desc, + true)); + } + n = invisibleTypeAnnotations == null ? 0 : invisibleTypeAnnotations + .size(); + for (i = 0; i < n; ++i) { + TypeAnnotationNode an = invisibleTypeAnnotations.get(i); + an.accept(cv.visitTypeAnnotation(an.typeRef, an.typePath, an.desc, + false)); + } n = attrs == null ? 0 : attrs.size(); for (i = 0; i < n; ++i) { cv.visitAttribute(attrs.get(i)); diff --git a/src/asm/scala/tools/asm/tree/FieldInsnNode.java b/src/asm/scala/tools/asm/tree/FieldInsnNode.java index 0c94f18adff3..c027de109bbe 100644 --- a/src/asm/scala/tools/asm/tree/FieldInsnNode.java +++ b/src/asm/scala/tools/asm/tree/FieldInsnNode.java @@ -97,12 +97,14 @@ public int getType() { } @Override - public void accept(final MethodVisitor cv) { - cv.visitFieldInsn(opcode, owner, name, desc); + public void accept(final MethodVisitor mv) { + mv.visitFieldInsn(opcode, owner, name, desc); + acceptAnnotations(mv); } @Override public AbstractInsnNode clone(final Map labels) { - return new FieldInsnNode(opcode, owner, name, desc); + return new FieldInsnNode(opcode, owner, name, desc) + .cloneAnnotations(this); } } diff --git a/src/asm/scala/tools/asm/tree/FieldNode.java b/src/asm/scala/tools/asm/tree/FieldNode.java index 61b614ec597d..3fb14dac4f16 100644 --- a/src/asm/scala/tools/asm/tree/FieldNode.java +++ b/src/asm/scala/tools/asm/tree/FieldNode.java @@ -37,6 +37,7 @@ import scala.tools.asm.ClassVisitor; import scala.tools.asm.FieldVisitor; import scala.tools.asm.Opcodes; +import scala.tools.asm.TypePath; /** * A node that represents a field. @@ -91,6 +92,24 @@ public class FieldNode extends FieldVisitor { */ public List invisibleAnnotations; + /** + * The runtime visible type annotations of this field. This list is a list + * of {@link TypeAnnotationNode} objects. May be null. + * + * @associates scala.tools.asm.tree.TypeAnnotationNode + * @label visible + */ + public List visibleTypeAnnotations; + + /** + * The runtime invisible type annotations of this field. This list is a list + * of {@link TypeAnnotationNode} objects. May be null. + * + * @associates scala.tools.asm.tree.TypeAnnotationNode + * @label invisible + */ + public List invisibleTypeAnnotations; + /** * The non standard attributes of this field. This list is a list of * {@link Attribute} objects. May be null. @@ -120,20 +139,24 @@ public class FieldNode extends FieldVisitor { * null if the field does not have an initial value, * must be an {@link Integer}, a {@link Float}, a {@link Long}, a * {@link Double} or a {@link String}. + * @throws IllegalStateException + * If a subclass calls this constructor. 
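
ClassNode.check is no longer a no-op: when asked to validate against ASM4 it rejects any ASM5-only state, such as the type annotation lists introduced above. A minimal sketch of what that looks like, assuming the vendored classes are on the classpath (the class name "Demo" and the annotation descriptor are hypothetical):

    import scala.tools.asm.Opcodes;
    import scala.tools.asm.TypeReference;
    import scala.tools.asm.tree.ClassNode;

    public class ClassNodeCheckDemo {
        public static void main(String[] args) {
            ClassNode cn = new ClassNode(Opcodes.ASM5);
            cn.visit(Opcodes.V1_8, Opcodes.ACC_PUBLIC, "Demo", null, "java/lang/Object", null);

            // Attach a type annotation to the super type of the class.
            int typeRef = TypeReference.newSuperTypeReference(-1).getValue();
            cn.visitTypeAnnotation(typeRef, null, "Lcom/example/MyTypeAnn;", true);

            cn.check(Opcodes.ASM5);      // fine: ASM5 understands type annotations
            try {
                cn.check(Opcodes.ASM4);  // rejected: an ASM4 consumer would lose them
            } catch (RuntimeException expected) {
                System.out.println("node carries ASM5-only data");
            }
        }
    }
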
*/ public FieldNode(final int access, final String name, final String desc, final String signature, final Object value) { - this(Opcodes.ASM4, access, name, desc, signature, value); + this(Opcodes.ASM5, access, name, desc, signature, value); + if (getClass() != FieldNode.class) { + throw new IllegalStateException(); + } } /** * Constructs a new {@link FieldNode}. Subclasses must not use this - * constructor. Instead, they must use the - * {@link #FieldNode(int, int, String, String, String, Object)} version. + * constructor. * * @param api * the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. + * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. * @param access * the field's access flags (see * {@link scala.tools.asm.Opcodes}). This parameter also @@ -183,6 +206,24 @@ public AnnotationVisitor visitAnnotation(final String desc, return an; } + @Override + public AnnotationVisitor visitTypeAnnotation(int typeRef, + TypePath typePath, String desc, boolean visible) { + TypeAnnotationNode an = new TypeAnnotationNode(typeRef, typePath, desc); + if (visible) { + if (visibleTypeAnnotations == null) { + visibleTypeAnnotations = new ArrayList(1); + } + visibleTypeAnnotations.add(an); + } else { + if (invisibleTypeAnnotations == null) { + invisibleTypeAnnotations = new ArrayList(1); + } + invisibleTypeAnnotations.add(an); + } + return an; + } + @Override public void visitAttribute(final Attribute attr) { if (attrs == null) { @@ -206,10 +247,20 @@ public void visitEnd() { * API than the given version. * * @param api - * an ASM API version. Must be one of {@link Opcodes#ASM4}. + * an ASM API version. Must be one of {@link Opcodes#ASM4} or + * {@link Opcodes#ASM5}. */ public void check(final int api) { - // nothing to do + if (api == Opcodes.ASM4) { + if (visibleTypeAnnotations != null + && visibleTypeAnnotations.size() > 0) { + throw new RuntimeException(); + } + if (invisibleTypeAnnotations != null + && invisibleTypeAnnotations.size() > 0) { + throw new RuntimeException(); + } + } } /** @@ -234,6 +285,19 @@ public void accept(final ClassVisitor cv) { AnnotationNode an = invisibleAnnotations.get(i); an.accept(fv.visitAnnotation(an.desc, false)); } + n = visibleTypeAnnotations == null ? 0 : visibleTypeAnnotations.size(); + for (i = 0; i < n; ++i) { + TypeAnnotationNode an = visibleTypeAnnotations.get(i); + an.accept(fv.visitTypeAnnotation(an.typeRef, an.typePath, an.desc, + true)); + } + n = invisibleTypeAnnotations == null ? 0 : invisibleTypeAnnotations + .size(); + for (i = 0; i < n; ++i) { + TypeAnnotationNode an = invisibleTypeAnnotations.get(i); + an.accept(fv.visitTypeAnnotation(an.typeRef, an.typePath, an.desc, + false)); + } n = attrs == null ? 
0 : attrs.size(); for (i = 0; i < n; ++i) { fv.visitAttribute(attrs.get(i)); diff --git a/src/asm/scala/tools/asm/tree/IincInsnNode.java b/src/asm/scala/tools/asm/tree/IincInsnNode.java index f9adf2e38c1d..c37ac91c2741 100644 --- a/src/asm/scala/tools/asm/tree/IincInsnNode.java +++ b/src/asm/scala/tools/asm/tree/IincInsnNode.java @@ -73,10 +73,11 @@ public int getType() { @Override public void accept(final MethodVisitor mv) { mv.visitIincInsn(var, incr); + acceptAnnotations(mv); } @Override public AbstractInsnNode clone(final Map labels) { - return new IincInsnNode(var, incr); + return new IincInsnNode(var, incr).cloneAnnotations(this); } } diff --git a/src/asm/scala/tools/asm/tree/InsnList.java b/src/asm/scala/tools/asm/tree/InsnList.java index b1e2d97c6f81..e808712e7835 100644 --- a/src/asm/scala/tools/asm/tree/InsnList.java +++ b/src/asm/scala/tools/asm/tree/InsnList.java @@ -100,7 +100,7 @@ public AbstractInsnNode getLast() { * the index of the instruction that must be returned. * @return the instruction whose index is given. * @throws IndexOutOfBoundsException - * if (index < 0 || index >= size()). + * if (index < 0 || index >= size()). */ public AbstractInsnNode get(final int index) { if (index < 0 || index >= size) { @@ -535,6 +535,8 @@ private final class InsnListIterator implements ListIterator { AbstractInsnNode prev; + AbstractInsnNode remove; + InsnListIterator(int index) { if (index == size()) { next = null; @@ -556,12 +558,22 @@ public Object next() { AbstractInsnNode result = next; prev = result; next = result.next; + remove = result; return result; } public void remove() { - InsnList.this.remove(prev); - prev = prev.prev; + if (remove != null) { + if (remove == next) { + next = next.next; + } else { + prev = prev.prev; + } + InsnList.this.remove(remove); + remove = null; + } else { + throw new IllegalStateException(); + } } public boolean hasPrevious() { @@ -572,6 +584,7 @@ public Object previous() { AbstractInsnNode result = prev; next = result; prev = result.prev; + remove = result; return result; } @@ -598,6 +611,7 @@ public int previousIndex() { public void add(Object o) { InsnList.this.insertBefore(next, (AbstractInsnNode) o); prev = (AbstractInsnNode) o; + remove = null; } public void set(Object o) { diff --git a/src/asm/scala/tools/asm/tree/InsnNode.java b/src/asm/scala/tools/asm/tree/InsnNode.java index 4d5288cafa6d..f5313929ee06 100644 --- a/src/asm/scala/tools/asm/tree/InsnNode.java +++ b/src/asm/scala/tools/asm/tree/InsnNode.java @@ -78,10 +78,11 @@ public int getType() { @Override public void accept(final MethodVisitor mv) { mv.visitInsn(opcode); + acceptAnnotations(mv); } @Override public AbstractInsnNode clone(final Map labels) { - return new InsnNode(opcode); + return new InsnNode(opcode).cloneAnnotations(this); } } diff --git a/src/asm/scala/tools/asm/tree/IntInsnNode.java b/src/asm/scala/tools/asm/tree/IntInsnNode.java index e0aeed4bc83b..6bbe8d845cfa 100644 --- a/src/asm/scala/tools/asm/tree/IntInsnNode.java +++ b/src/asm/scala/tools/asm/tree/IntInsnNode.java @@ -78,10 +78,11 @@ public int getType() { @Override public void accept(final MethodVisitor mv) { mv.visitIntInsn(opcode, operand); + acceptAnnotations(mv); } @Override public AbstractInsnNode clone(final Map labels) { - return new IntInsnNode(opcode, operand); + return new IntInsnNode(opcode, operand).cloneAnnotations(this); } } diff --git a/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java b/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java index 7ee84b875b08..0f85e6029152 100644 --- 
a/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java +++ b/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java @@ -91,10 +91,12 @@ public int getType() { @Override public void accept(final MethodVisitor mv) { mv.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs); + acceptAnnotations(mv); } @Override public AbstractInsnNode clone(final Map labels) { - return new InvokeDynamicInsnNode(name, desc, bsm, bsmArgs); + return new InvokeDynamicInsnNode(name, desc, bsm, bsmArgs) + .cloneAnnotations(this); } } diff --git a/src/asm/scala/tools/asm/tree/JumpInsnNode.java b/src/asm/scala/tools/asm/tree/JumpInsnNode.java index 81e1e09deb7f..8b8a769204a6 100644 --- a/src/asm/scala/tools/asm/tree/JumpInsnNode.java +++ b/src/asm/scala/tools/asm/tree/JumpInsnNode.java @@ -86,10 +86,12 @@ public int getType() { @Override public void accept(final MethodVisitor mv) { mv.visitJumpInsn(opcode, label.getLabel()); + acceptAnnotations(mv); } @Override public AbstractInsnNode clone(final Map labels) { - return new JumpInsnNode(opcode, clone(label, labels)); + return new JumpInsnNode(opcode, clone(label, labels)) + .cloneAnnotations(this); } } diff --git a/src/asm/scala/tools/asm/tree/LdcInsnNode.java b/src/asm/scala/tools/asm/tree/LdcInsnNode.java index 4e328f9b3934..1cc850bb31c5 100644 --- a/src/asm/scala/tools/asm/tree/LdcInsnNode.java +++ b/src/asm/scala/tools/asm/tree/LdcInsnNode.java @@ -69,10 +69,11 @@ public int getType() { @Override public void accept(final MethodVisitor mv) { mv.visitLdcInsn(cst); + acceptAnnotations(mv); } @Override public AbstractInsnNode clone(final Map labels) { - return new LdcInsnNode(cst); + return new LdcInsnNode(cst).cloneAnnotations(this); } } diff --git a/src/asm/scala/tools/asm/tree/LocalVariableAnnotationNode.java b/src/asm/scala/tools/asm/tree/LocalVariableAnnotationNode.java new file mode 100644 index 000000000000..d05b8081710d --- /dev/null +++ b/src/asm/scala/tools/asm/tree/LocalVariableAnnotationNode.java @@ -0,0 +1,157 @@ +/*** + * ASM: a very small and fast Java bytecode manipulation framework + * Copyright (c) 2000-2011 INRIA, France Telecom + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. Neither the name of the copyright holders nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
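
The InsnList iterator fix a few hunks above makes remove() follow the java.util.ListIterator contract: it deletes whichever element the last next() or previous() call returned, and a second remove() without an intervening move now throws IllegalStateException. A short sketch of the now-safe pattern (opcodes chosen arbitrarily):

    import java.util.ListIterator;

    import scala.tools.asm.Opcodes;
    import scala.tools.asm.tree.AbstractInsnNode;
    import scala.tools.asm.tree.InsnList;
    import scala.tools.asm.tree.InsnNode;

    public class InsnListIteratorDemo {
        public static void main(String[] args) {
            InsnList list = new InsnList();
            list.add(new InsnNode(Opcodes.NOP));
            list.add(new InsnNode(Opcodes.ICONST_0));
            list.add(new InsnNode(Opcodes.POP));

            // Strip the NOPs while iterating; remove() targets the node just returned.
            ListIterator it = list.iterator();
            while (it.hasNext()) {
                AbstractInsnNode insn = (AbstractInsnNode) it.next();
                if (insn.getOpcode() == Opcodes.NOP) {
                    it.remove();
                }
            }
            System.out.println(list.size());   // 2
        }
    }
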
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF + * THE POSSIBILITY OF SUCH DAMAGE. + */ + +package scala.tools.asm.tree; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import scala.tools.asm.Label; +import scala.tools.asm.MethodVisitor; +import scala.tools.asm.Opcodes; +import scala.tools.asm.TypePath; +import scala.tools.asm.TypeReference; + +/** + * A node that represents a type annotation on a local or resource variable. + * + * @author Eric Bruneton + */ +public class LocalVariableAnnotationNode extends TypeAnnotationNode { + + /** + * The fist instructions corresponding to the continuous ranges that make + * the scope of this local variable (inclusive). Must not be null. + */ + public List start; + + /** + * The last instructions corresponding to the continuous ranges that make + * the scope of this local variable (exclusive). This list must have the + * same size as the 'start' list. Must not be null. + */ + public List end; + + /** + * The local variable's index in each range. This list must have the same + * size as the 'start' list. Must not be null. + */ + public List index; + + /** + * Constructs a new {@link LocalVariableAnnotationNode}. Subclasses must + * not use this constructor. Instead, they must use the + * {@link #LocalVariableAnnotationNode(int, TypePath, LabelNode[], LabelNode[], int[], String)} + * version. + * + * @param typeRef + * a reference to the annotated type. See {@link TypeReference}. + * @param typePath + * the path to the annotated type argument, wildcard bound, array + * element type, or static inner type within 'typeRef'. May be + * null if the annotation targets 'typeRef' as a whole. + * @param start + * the fist instructions corresponding to the continuous ranges + * that make the scope of this local variable (inclusive). + * @param end + * the last instructions corresponding to the continuous ranges + * that make the scope of this local variable (exclusive). This + * array must have the same size as the 'start' array. + * @param index + * the local variable's index in each range. This array must have + * the same size as the 'start' array. + * @param desc + * the class descriptor of the annotation class. + */ + public LocalVariableAnnotationNode(int typeRef, TypePath typePath, + LabelNode[] start, LabelNode[] end, int[] index, String desc) { + this(Opcodes.ASM5, typeRef, typePath, start, end, index, desc); + } + + /** + * Constructs a new {@link LocalVariableAnnotationNode}. + * + * @param api + * the ASM API version implemented by this visitor. Must be one + * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. + * @param typeRef + * a reference to the annotated type. See {@link TypeReference}. + * @param start + * the fist instructions corresponding to the continuous ranges + * that make the scope of this local variable (inclusive). + * @param end + * the last instructions corresponding to the continuous ranges + * that make the scope of this local variable (exclusive). This + * array must have the same size as the 'start' array. 
+ * @param index + * the local variable's index in each range. This array must have + * the same size as the 'start' array. + * @param typePath + * the path to the annotated type argument, wildcard bound, array + * element type, or static inner type within 'typeRef'. May be + * null if the annotation targets 'typeRef' as a whole. + * @param desc + * the class descriptor of the annotation class. + */ + public LocalVariableAnnotationNode(int api, int typeRef, TypePath typePath, + LabelNode[] start, LabelNode[] end, int[] index, String desc) { + super(api, typeRef, typePath, desc); + this.start = new ArrayList(start.length); + this.start.addAll(Arrays.asList(start)); + this.end = new ArrayList(end.length); + this.end.addAll(Arrays.asList(end)); + this.index = new ArrayList(index.length); + for (int i : index) { + this.index.add(i); + } + } + + /** + * Makes the given visitor visit this type annotation. + * + * @param mv + * the visitor that must visit this annotation. + * @param visible + * true if the annotation is visible at runtime. + */ + public void accept(final MethodVisitor mv, boolean visible) { + Label[] start = new Label[this.start.size()]; + Label[] end = new Label[this.end.size()]; + int[] index = new int[this.index.size()]; + for (int i = 0; i < start.length; ++i) { + start[i] = this.start.get(i).getLabel(); + end[i] = this.end.get(i).getLabel(); + index[i] = this.index.get(i); + } + accept(mv.visitLocalVariableAnnotation(typeRef, typePath, start, end, + index, desc, true)); + } +} diff --git a/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java b/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java index d2479b481422..7db2f53ff43e 100644 --- a/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java +++ b/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java @@ -105,6 +105,7 @@ public void accept(final MethodVisitor mv) { labels[i] = this.labels.get(i).getLabel(); } mv.visitLookupSwitchInsn(dflt.getLabel(), keys, labels); + acceptAnnotations(mv); } @Override @@ -112,6 +113,6 @@ public AbstractInsnNode clone(final Map labels) { LookupSwitchInsnNode clone = new LookupSwitchInsnNode(clone(dflt, labels), null, clone(this.labels, labels)); clone.keys.addAll(keys); - return clone; + return clone.cloneAnnotations(this); } } diff --git a/src/asm/scala/tools/asm/tree/MethodInsnNode.java b/src/asm/scala/tools/asm/tree/MethodInsnNode.java index bf09f556d8f6..1ec46d473d64 100644 --- a/src/asm/scala/tools/asm/tree/MethodInsnNode.java +++ b/src/asm/scala/tools/asm/tree/MethodInsnNode.java @@ -32,6 +32,7 @@ import java.util.Map; import scala.tools.asm.MethodVisitor; +import scala.tools.asm.Opcodes; /** * A node that represents a method instruction. A method instruction is an @@ -57,6 +58,11 @@ public class MethodInsnNode extends AbstractInsnNode { */ public String desc; + /** + * If the method's owner class if an interface. + */ + public boolean itf; + /** * Constructs a new {@link MethodInsnNode}. * @@ -73,12 +79,37 @@ public class MethodInsnNode extends AbstractInsnNode { * @param desc * the method's descriptor (see {@link scala.tools.asm.Type}). */ + @Deprecated public MethodInsnNode(final int opcode, final String owner, final String name, final String desc) { + this(opcode, owner, name, desc, opcode == Opcodes.INVOKEINTERFACE); + } + + /** + * Constructs a new {@link MethodInsnNode}. + * + * @param opcode + * the opcode of the type instruction to be constructed. This + * opcode must be INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or + * INVOKEINTERFACE. 
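
LocalVariableAnnotationNode ties a type annotation to one or more live ranges of a local variable slot, mirroring the class-file encoding used for LOCAL_VARIABLE and RESOURCE_VARIABLE targets. A small construction sketch; the labels are not wired into any instruction list here, and the annotation descriptor is made up:

    import java.util.ArrayList;

    import scala.tools.asm.Opcodes;
    import scala.tools.asm.TypeReference;
    import scala.tools.asm.tree.LabelNode;
    import scala.tools.asm.tree.LocalVariableAnnotationNode;
    import scala.tools.asm.tree.MethodNode;

    public class LocalVarAnnotationDemo {
        public static void main(String[] args) {
            LabelNode start = new LabelNode();
            LabelNode end = new LabelNode();

            // Annotate local variable slot 1 over a single range [start, end).
            LocalVariableAnnotationNode an = new LocalVariableAnnotationNode(
                    TypeReference.newTypeReference(TypeReference.LOCAL_VARIABLE).getValue(),
                    null,                              // annotation applies to the whole type
                    new LabelNode[] { start },
                    new LabelNode[] { end },
                    new int[] { 1 },
                    "Lcom/example/NotNull;");

            MethodNode mn = new MethodNode(Opcodes.ASM5, Opcodes.ACC_PUBLIC, "m", "()V", null, null);
            mn.visibleLocalVariableAnnotations = new ArrayList();
            mn.visibleLocalVariableAnnotations.add(an);
            System.out.println(mn.visibleLocalVariableAnnotations.size());   // 1
        }
    }
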
+ * @param owner + * the internal name of the method's owner class (see + * {@link scala.tools.asm.Type#getInternalName() + * getInternalName}). + * @param name + * the method's name. + * @param desc + * the method's descriptor (see {@link scala.tools.asm.Type}). + * @param itf + * if the method's owner class is an interface. + */ + public MethodInsnNode(final int opcode, final String owner, + final String name, final String desc, final boolean itf) { super(opcode); this.owner = owner; this.name = name; this.desc = desc; + this.itf = itf; } /** @@ -99,11 +130,11 @@ public int getType() { @Override public void accept(final MethodVisitor mv) { - mv.visitMethodInsn(opcode, owner, name, desc); + mv.visitMethodInsn(opcode, owner, name, desc, itf); } @Override public AbstractInsnNode clone(final Map labels) { - return new MethodInsnNode(opcode, owner, name, desc); + return new MethodInsnNode(opcode, owner, name, desc, itf); } } diff --git a/src/asm/scala/tools/asm/tree/MethodNode.java b/src/asm/scala/tools/asm/tree/MethodNode.java index a161600edbc6..3dec50e02cab 100644 --- a/src/asm/scala/tools/asm/tree/MethodNode.java +++ b/src/asm/scala/tools/asm/tree/MethodNode.java @@ -41,6 +41,7 @@ import scala.tools.asm.MethodVisitor; import scala.tools.asm.Opcodes; import scala.tools.asm.Type; +import scala.tools.asm.TypePath; /** * A node that represents a method. @@ -77,6 +78,11 @@ public class MethodNode extends MethodVisitor { */ public List exceptions; + /** + * The method parameter info (access flags and name) + */ + public List parameters; + /** * The runtime visible annotations of this method. This list is a list of * {@link AnnotationNode} objects. May be null. @@ -95,6 +101,24 @@ public class MethodNode extends MethodVisitor { */ public List invisibleAnnotations; + /** + * The runtime visible type annotations of this method. This list is a list + * of {@link TypeAnnotationNode} objects. May be null. + * + * @associates scala.tools.asm.tree.TypeAnnotationNode + * @label visible + */ + public List visibleTypeAnnotations; + + /** + * The runtime invisible type annotations of this method. This list is a + * list of {@link TypeAnnotationNode} objects. May be null. + * + * @associates scala.tools.asm.tree.TypeAnnotationNode + * @label invisible + */ + public List invisibleTypeAnnotations; + /** * The non standard attributes of this method. This list is a list of * {@link Attribute} objects. May be null. @@ -166,6 +190,22 @@ public class MethodNode extends MethodVisitor { */ public List localVariables; + /** + * The visible local variable annotations of this method. This list is a + * list of {@link LocalVariableAnnotationNode} objects. May be null + * + * @associates scala.tools.asm.tree.LocalVariableAnnotationNode + */ + public List visibleLocalVariableAnnotations; + + /** + * The invisible local variable annotations of this method. This list is a + * list of {@link LocalVariableAnnotationNode} objects. May be null + * + * @associates scala.tools.asm.tree.LocalVariableAnnotationNode + */ + public List invisibleLocalVariableAnnotations; + /** * If the accept method has been called on this object. */ @@ -175,9 +215,15 @@ public class MethodNode extends MethodVisitor { * Constructs an uninitialized {@link MethodNode}. Subclasses must not * use this constructor. Instead, they must use the * {@link #MethodNode(int)} version. + * + * @throws IllegalStateException + * If a subclass calls this constructor. 
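
The new five-argument MethodInsnNode constructor records whether the owner of the call is an interface, information the Java 8 class file format needs for static and default interface methods; the old four-argument form is deprecated and guesses the flag from the opcode. A short sketch (the particular calls are arbitrary examples):

    import scala.tools.asm.Opcodes;
    import scala.tools.asm.tree.MethodInsnNode;

    public class MethodInsnItfDemo {
        public static void main(String[] args) {
            MethodInsnNode virtualCall = new MethodInsnNode(Opcodes.INVOKEVIRTUAL,
                    "java/lang/Object", "toString", "()Ljava/lang/String;", false);
            MethodInsnNode interfaceCall = new MethodInsnNode(Opcodes.INVOKEINTERFACE,
                    "java/lang/Runnable", "run", "()V", true);
            System.out.println(virtualCall.itf + " " + interfaceCall.itf);   // false true

            // Deprecated constructor: itf is inferred as (opcode == INVOKEINTERFACE).
            MethodInsnNode legacy = new MethodInsnNode(Opcodes.INVOKESTATIC,
                    "java/util/Objects", "hash", "([Ljava/lang/Object;)I");
            System.out.println(legacy.itf);                                  // false
        }
    }
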
*/ public MethodNode() { - this(Opcodes.ASM4); + this(Opcodes.ASM5); + if (getClass() != MethodNode.class) { + throw new IllegalStateException(); + } } /** @@ -185,7 +231,7 @@ public MethodNode() { * * @param api * the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. + * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. */ public MethodNode(final int api) { super(api); @@ -211,10 +257,15 @@ public MethodNode(final int api) { * the internal names of the method's exception classes (see * {@link Type#getInternalName() getInternalName}). May be * null. + * @throws IllegalStateException + * If a subclass calls this constructor. */ public MethodNode(final int access, final String name, final String desc, final String signature, final String[] exceptions) { - this(Opcodes.ASM4, access, name, desc, signature, exceptions); + this(Opcodes.ASM5, access, name, desc, signature, exceptions); + if (getClass() != MethodNode.class) { + throw new IllegalStateException(); + } } /** @@ -222,7 +273,7 @@ public MethodNode(final int access, final String name, final String desc, * * @param api * the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. + * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. * @param access * the method's access flags (see {@link Opcodes}). This * parameter also indicates if the method is synthetic and/or @@ -263,6 +314,15 @@ public MethodNode(final int api, final int access, final String name, // ------------------------------------------------------------------------ @Override + public void visitParameter(String name, int access) { + if (parameters == null) { + parameters = new ArrayList(5); + } + parameters.add(new ParameterNode(name, access)); + } + + @Override + @SuppressWarnings("serial") public AnnotationVisitor visitAnnotationDefault() { return new AnnotationNode(new ArrayList(0) { @Override @@ -291,6 +351,24 @@ public AnnotationVisitor visitAnnotation(final String desc, return an; } + @Override + public AnnotationVisitor visitTypeAnnotation(int typeRef, + TypePath typePath, String desc, boolean visible) { + TypeAnnotationNode an = new TypeAnnotationNode(typeRef, typePath, desc); + if (visible) { + if (visibleTypeAnnotations == null) { + visibleTypeAnnotations = new ArrayList(1); + } + visibleTypeAnnotations.add(an); + } else { + if (invisibleTypeAnnotations == null) { + invisibleTypeAnnotations = new ArrayList(1); + } + invisibleTypeAnnotations.add(an); + } + return an; + } + @Override public AnnotationVisitor visitParameterAnnotation(final int parameter, final String desc, final boolean visible) { @@ -365,12 +443,27 @@ public void visitFieldInsn(final int opcode, final String owner, instructions.add(new FieldInsnNode(opcode, owner, name, desc)); } + @Deprecated @Override - public void visitMethodInsn(final int opcode, final String owner, - final String name, final String desc) { + public void visitMethodInsn(int opcode, String owner, String name, + String desc) { + if (api >= Opcodes.ASM5) { + super.visitMethodInsn(opcode, owner, name, desc); + return; + } instructions.add(new MethodInsnNode(opcode, owner, name, desc)); } + @Override + public void visitMethodInsn(int opcode, String owner, String name, + String desc, boolean itf) { + if (api < Opcodes.ASM5) { + super.visitMethodInsn(opcode, owner, name, desc, itf); + return; + } + instructions.add(new MethodInsnNode(opcode, owner, name, desc, itf)); + } + @Override public void visitInvokeDynamicInsn(String name, String desc, Handle bsm, Object... 
bsmArgs) { @@ -416,6 +509,33 @@ public void visitMultiANewArrayInsn(final String desc, final int dims) { instructions.add(new MultiANewArrayInsnNode(desc, dims)); } + @Override + public AnnotationVisitor visitInsnAnnotation(int typeRef, + TypePath typePath, String desc, boolean visible) { + // Finds the last real instruction, i.e. the instruction targeted by + // this annotation. + AbstractInsnNode insn = instructions.getLast(); + while (insn.getOpcode() == -1) { + insn = insn.getPrevious(); + } + // Adds the annotation to this instruction. + TypeAnnotationNode an = new TypeAnnotationNode(typeRef, typePath, desc); + if (visible) { + if (insn.visibleTypeAnnotations == null) { + insn.visibleTypeAnnotations = new ArrayList( + 1); + } + insn.visibleTypeAnnotations.add(an); + } else { + if (insn.invisibleTypeAnnotations == null) { + insn.invisibleTypeAnnotations = new ArrayList( + 1); + } + insn.invisibleTypeAnnotations.add(an); + } + return an; + } + @Override public void visitTryCatchBlock(final Label start, final Label end, final Label handler, final String type) { @@ -423,6 +543,27 @@ public void visitTryCatchBlock(final Label start, final Label end, getLabelNode(end), getLabelNode(handler), type)); } + @Override + public AnnotationVisitor visitTryCatchAnnotation(int typeRef, + TypePath typePath, String desc, boolean visible) { + TryCatchBlockNode tcb = tryCatchBlocks.get((typeRef & 0x00FFFF00) >> 8); + TypeAnnotationNode an = new TypeAnnotationNode(typeRef, typePath, desc); + if (visible) { + if (tcb.visibleTypeAnnotations == null) { + tcb.visibleTypeAnnotations = new ArrayList( + 1); + } + tcb.visibleTypeAnnotations.add(an); + } else { + if (tcb.invisibleTypeAnnotations == null) { + tcb.invisibleTypeAnnotations = new ArrayList( + 1); + } + tcb.invisibleTypeAnnotations.add(an); + } + return an; + } + @Override public void visitLocalVariable(final String name, final String desc, final String signature, final Label start, final Label end, @@ -431,6 +572,29 @@ public void visitLocalVariable(final String name, final String desc, getLabelNode(start), getLabelNode(end), index)); } + @Override + public AnnotationVisitor visitLocalVariableAnnotation(int typeRef, + TypePath typePath, Label[] start, Label[] end, int[] index, + String desc, boolean visible) { + LocalVariableAnnotationNode an = new LocalVariableAnnotationNode( + typeRef, typePath, getLabelNodes(start), getLabelNodes(end), + index, desc); + if (visible) { + if (visibleLocalVariableAnnotations == null) { + visibleLocalVariableAnnotations = new ArrayList( + 1); + } + visibleLocalVariableAnnotations.add(an); + } else { + if (invisibleLocalVariableAnnotations == null) { + invisibleLocalVariableAnnotations = new ArrayList( + 1); + } + invisibleLocalVariableAnnotations.add(an); + } + return an; + } + @Override public void visitLineNumber(final int line, final Label start) { instructions.add(new LineNumberNode(line, getLabelNode(start))); @@ -494,10 +658,57 @@ private Object[] getLabelNodes(final Object[] objs) { * versions of the ASM API than the given version. * * @param api - * an ASM API version. Must be one of {@link Opcodes#ASM4}. + * an ASM API version. Must be one of {@link Opcodes#ASM4} or + * {@link Opcodes#ASM5}. 
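
visitTryCatchAnnotation above recovers the target try-catch block from the typeRef itself, via (typeRef & 0x00FFFF00) >> 8, which is exactly the index that TypeReference.newTryCatchReference packs in. A small sketch of that routing, assuming TryCatchBlockNode gains the same public visibleTypeAnnotations list that the MethodNode code relies on (the annotation descriptor is made up):

    import scala.tools.asm.Label;
    import scala.tools.asm.Opcodes;
    import scala.tools.asm.TypeReference;
    import scala.tools.asm.tree.MethodNode;
    import scala.tools.asm.tree.TryCatchBlockNode;

    public class TryCatchAnnotationDemo {
        public static void main(String[] args) {
            MethodNode mn = new MethodNode(Opcodes.ASM5, Opcodes.ACC_PUBLIC, "m", "()V", null, null);
            Label start = new Label(), end = new Label(), handler = new Label();
            mn.visitTryCatchBlock(start, end, handler, "java/io/IOException");

            // The typeRef carries the EXCEPTION_PARAMETER sort plus the block index.
            int typeRef = TypeReference.newTryCatchReference(0).getValue();
            mn.visitTryCatchAnnotation(typeRef, null, "Lcom/example/Critical;", true);

            TryCatchBlockNode tcb = (TryCatchBlockNode) mn.tryCatchBlocks.get(0);
            System.out.println(tcb.visibleTypeAnnotations.size());   // 1
        }
    }
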
*/ public void check(final int api) { - // nothing to do + if (api == Opcodes.ASM4) { + if (visibleTypeAnnotations != null + && visibleTypeAnnotations.size() > 0) { + throw new RuntimeException(); + } + if (invisibleTypeAnnotations != null + && invisibleTypeAnnotations.size() > 0) { + throw new RuntimeException(); + } + int n = tryCatchBlocks == null ? 0 : tryCatchBlocks.size(); + for (int i = 0; i < n; ++i) { + TryCatchBlockNode tcb = tryCatchBlocks.get(i); + if (tcb.visibleTypeAnnotations != null + && tcb.visibleTypeAnnotations.size() > 0) { + throw new RuntimeException(); + } + if (tcb.invisibleTypeAnnotations != null + && tcb.invisibleTypeAnnotations.size() > 0) { + throw new RuntimeException(); + } + } + for (int i = 0; i < instructions.size(); ++i) { + AbstractInsnNode insn = instructions.get(i); + if (insn.visibleTypeAnnotations != null + && insn.visibleTypeAnnotations.size() > 0) { + throw new RuntimeException(); + } + if (insn.invisibleTypeAnnotations != null + && insn.invisibleTypeAnnotations.size() > 0) { + throw new RuntimeException(); + } + if (insn instanceof MethodInsnNode) { + boolean itf = ((MethodInsnNode) insn).itf; + if (itf != (insn.opcode == Opcodes.INVOKEINTERFACE)) { + throw new RuntimeException(); + } + } + } + if (visibleLocalVariableAnnotations != null + && visibleLocalVariableAnnotations.size() > 0) { + throw new RuntimeException(); + } + if (invisibleLocalVariableAnnotations != null + && invisibleLocalVariableAnnotations.size() > 0) { + throw new RuntimeException(); + } + } } /** @@ -523,8 +734,14 @@ public void accept(final ClassVisitor cv) { * a method visitor. */ public void accept(final MethodVisitor mv) { - // visits the method attributes + // visits the method parameters int i, j, n; + n = parameters == null ? 0 : parameters.size(); + for (i = 0; i < n; i++) { + ParameterNode parameter = parameters.get(i); + mv.visitParameter(parameter.name, parameter.access); + } + // visits the method attributes if (annotationDefault != null) { AnnotationVisitor av = mv.visitAnnotationDefault(); AnnotationNode.accept(av, null, annotationDefault); @@ -542,6 +759,19 @@ public void accept(final MethodVisitor mv) { AnnotationNode an = invisibleAnnotations.get(i); an.accept(mv.visitAnnotation(an.desc, false)); } + n = visibleTypeAnnotations == null ? 0 : visibleTypeAnnotations.size(); + for (i = 0; i < n; ++i) { + TypeAnnotationNode an = visibleTypeAnnotations.get(i); + an.accept(mv.visitTypeAnnotation(an.typeRef, an.typePath, an.desc, + true)); + } + n = invisibleTypeAnnotations == null ? 0 : invisibleTypeAnnotations + .size(); + for (i = 0; i < n; ++i) { + TypeAnnotationNode an = invisibleTypeAnnotations.get(i); + an.accept(mv.visitTypeAnnotation(an.typeRef, an.typePath, an.desc, + false)); + } n = visibleParameterAnnotations == null ? 0 : visibleParameterAnnotations.length; for (i = 0; i < n; ++i) { @@ -579,6 +809,7 @@ public void accept(final MethodVisitor mv) { // visits try catch blocks n = tryCatchBlocks == null ? 0 : tryCatchBlocks.size(); for (i = 0; i < n; ++i) { + tryCatchBlocks.get(i).updateIndex(i); tryCatchBlocks.get(i).accept(mv); } // visits instructions @@ -588,6 +819,17 @@ public void accept(final MethodVisitor mv) { for (i = 0; i < n; ++i) { localVariables.get(i).accept(mv); } + // visits local variable annotations + n = visibleLocalVariableAnnotations == null ? 0 + : visibleLocalVariableAnnotations.size(); + for (i = 0; i < n; ++i) { + visibleLocalVariableAnnotations.get(i).accept(mv, true); + } + n = invisibleLocalVariableAnnotations == null ? 
0 + : invisibleLocalVariableAnnotations.size(); + for (i = 0; i < n; ++i) { + invisibleLocalVariableAnnotations.get(i).accept(mv, false); + } // visits maxs mv.visitMaxs(maxStack, maxLocals); visited = true; diff --git a/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java b/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java index fe5e8832b300..a8339a20b50d 100644 --- a/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java +++ b/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java @@ -73,11 +73,12 @@ public int getType() { @Override public void accept(final MethodVisitor mv) { mv.visitMultiANewArrayInsn(desc, dims); + acceptAnnotations(mv); } @Override public AbstractInsnNode clone(final Map labels) { - return new MultiANewArrayInsnNode(desc, dims); + return new MultiANewArrayInsnNode(desc, dims).cloneAnnotations(this); } } diff --git a/src/asm/scala/tools/asm/tree/ParameterNode.java b/src/asm/scala/tools/asm/tree/ParameterNode.java new file mode 100644 index 000000000000..a3e55d562953 --- /dev/null +++ b/src/asm/scala/tools/asm/tree/ParameterNode.java @@ -0,0 +1,76 @@ +/*** + * ASM: a very small and fast Java bytecode manipulation framework + * Copyright (c) 2000-2011 INRIA, France Telecom + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. Neither the name of the copyright holders nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF + * THE POSSIBILITY OF SUCH DAMAGE. + */ +package scala.tools.asm.tree; + +import scala.tools.asm.MethodVisitor; + +/** + * A node that represents a parameter access and name. + * + * @author Remi Forax + */ +public class ParameterNode { + /** + * The parameter's name. + */ + public String name; + + /** + * The parameter's access flags (see {@link scala.tools.asm.Opcodes}). + * Valid values are ACC_FINAL, ACC_SYNTHETIC and + * ACC_MANDATED. + */ + public int access; + + /** + * Constructs a new {@link ParameterNode}. + * + * @param access + * The parameter's access flags. Valid values are + * ACC_FINAL, ACC_SYNTHETIC or/and + * ACC_MANDATED (see {@link scala.tools.asm.Opcodes}). + * @param name + * the parameter's name. 
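ParameterNode pairs with the new MethodNode.visitParameter support above. A small illustrative fragment, with a made-up method name and descriptor:

    import scala.tools.asm.Opcodes;
    import scala.tools.asm.tree.MethodNode;

    // sketch: recording a MethodParameters entry for a hypothetical greet(String) method
    MethodNode mn = new MethodNode(Opcodes.ASM5, Opcodes.ACC_PUBLIC, "greet",
            "(Ljava/lang/String;)V", null, null);
    mn.visitParameter("name", Opcodes.ACC_FINAL);
    // mn.parameters now holds one ParameterNode("name", ACC_FINAL); mn.accept(mv)
    // replays it as mv.visitParameter("name", ACC_FINAL) before the method annotations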
+ */ + public ParameterNode(final String name, final int access) { + this.name = name; + this.access = access; + } + + /** + * Makes the given visitor visit this parameter declaration. + * + * @param mv + * a method visitor. + */ + public void accept(final MethodVisitor mv) { + mv.visitParameter(name, access); + } +} diff --git a/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java b/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java index 9b3c2a3437cc..fb17b9e2e9a4 100644 --- a/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java +++ b/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java @@ -103,11 +103,12 @@ public void accept(final MethodVisitor mv) { labels[i] = this.labels.get(i).getLabel(); } mv.visitTableSwitchInsn(min, max, dflt.getLabel(), labels); + acceptAnnotations(mv); } @Override public AbstractInsnNode clone(final Map labels) { return new TableSwitchInsnNode(min, max, clone(dflt, labels), clone( - this.labels, labels)); + this.labels, labels)).cloneAnnotations(this); } } diff --git a/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java b/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java index ab4fa97c3489..c639b9aa8b5c 100644 --- a/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java +++ b/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java @@ -29,6 +29,8 @@ */ package scala.tools.asm.tree; +import java.util.List; + import scala.tools.asm.MethodVisitor; /** @@ -59,6 +61,26 @@ public class TryCatchBlockNode { */ public String type; + /** + * The runtime visible type annotations on the exception handler type. This + * list is a list of {@link TypeAnnotationNode} objects. May be + * null. + * + * @associates scala.tools.asm.tree.TypeAnnotationNode + * @label visible + */ + public List visibleTypeAnnotations; + + /** + * The runtime invisible type annotations on the exception handler type. + * This list is a list of {@link TypeAnnotationNode} objects. May be + * null. + * + * @associates scala.tools.asm.tree.TypeAnnotationNode + * @label invisible + */ + public List invisibleTypeAnnotations; + /** * Constructs a new {@link TryCatchBlockNode}. * @@ -81,6 +103,29 @@ public TryCatchBlockNode(final LabelNode start, final LabelNode end, this.type = type; } + /** + * Updates the index of this try catch block in the method's list of try + * catch block nodes. This index maybe stored in the 'target' field of the + * type annotations of this block. + * + * @param index + * the new index of this try catch block in the method's list of + * try catch block nodes. + */ + public void updateIndex(final int index) { + int newTypeRef = 0x42000000 | (index << 8); + if (visibleTypeAnnotations != null) { + for (TypeAnnotationNode tan : visibleTypeAnnotations) { + tan.typeRef = newTypeRef; + } + } + if (invisibleTypeAnnotations != null) { + for (TypeAnnotationNode tan : invisibleTypeAnnotations) { + tan.typeRef = newTypeRef; + } + } + } + /** * Makes the given visitor visit this try catch block. * @@ -90,5 +135,19 @@ public TryCatchBlockNode(final LabelNode start, final LabelNode end, public void accept(final MethodVisitor mv) { mv.visitTryCatchBlock(start.getLabel(), end.getLabel(), handler == null ? null : handler.getLabel(), type); + int n = visibleTypeAnnotations == null ? 0 : visibleTypeAnnotations + .size(); + for (int i = 0; i < n; ++i) { + TypeAnnotationNode an = visibleTypeAnnotations.get(i); + an.accept(mv.visitTryCatchAnnotation(an.typeRef, an.typePath, + an.desc, true)); + } + n = invisibleTypeAnnotations == null ? 
0 : invisibleTypeAnnotations + .size(); + for (int i = 0; i < n; ++i) { + TypeAnnotationNode an = invisibleTypeAnnotations.get(i); + an.accept(mv.visitTryCatchAnnotation(an.typeRef, an.typePath, + an.desc, false)); + } } } diff --git a/src/asm/scala/tools/asm/tree/TypeAnnotationNode.java b/src/asm/scala/tools/asm/tree/TypeAnnotationNode.java new file mode 100644 index 000000000000..73b29624f7b8 --- /dev/null +++ b/src/asm/scala/tools/asm/tree/TypeAnnotationNode.java @@ -0,0 +1,100 @@ +/*** + * ASM: a very small and fast Java bytecode manipulation framework + * Copyright (c) 2000-2011 INRIA, France Telecom + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. Neither the name of the copyright holders nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF + * THE POSSIBILITY OF SUCH DAMAGE. + */ +package scala.tools.asm.tree; + +import scala.tools.asm.Opcodes; +import scala.tools.asm.TypePath; +import scala.tools.asm.TypeReference; + +/** + * A node that represents a type annotationn. + * + * @author Eric Bruneton + */ +public class TypeAnnotationNode extends AnnotationNode { + + /** + * A reference to the annotated type. See {@link TypeReference}. + */ + public int typeRef; + + /** + * The path to the annotated type argument, wildcard bound, array element + * type, or static outer type within the referenced type. May be + * null if the annotation targets 'typeRef' as a whole. + */ + public TypePath typePath; + + /** + * Constructs a new {@link AnnotationNode}. Subclasses must not use this + * constructor. Instead, they must use the + * {@link #TypeAnnotationNode(int, int, TypePath, String)} version. + * + * @param typeRef + * a reference to the annotated type. See {@link TypeReference}. + * @param typePath + * the path to the annotated type argument, wildcard bound, array + * element type, or static inner type within 'typeRef'. May be + * null if the annotation targets 'typeRef' as a whole. + * @param desc + * the class descriptor of the annotation class. + * @throws IllegalStateException + * If a subclass calls this constructor. 
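For illustration, this is how a TypeAnnotationNode is typically built with the convenience constructor that follows; the annotation descriptor Lexample/NotNull; and the String[] field shape are assumptions for the example:

    import scala.tools.asm.TypePath;
    import scala.tools.asm.TypeReference;
    import scala.tools.asm.tree.TypeAnnotationNode;

    // a type annotation on a field of type String[]; the path step '[' targets
    // the array element type rather than the array type as a whole
    int typeRef = TypeReference.newTypeReference(TypeReference.FIELD).getValue();
    TypePath typePath = TypePath.fromString("[");
    TypeAnnotationNode an = new TypeAnnotationNode(typeRef, typePath, "Lexample/NotNull;");
    // an.typeRef and an.typePath are replayed verbatim when the enclosing node is accepted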
+ */ + public TypeAnnotationNode(final int typeRef, final TypePath typePath, + final String desc) { + this(Opcodes.ASM5, typeRef, typePath, desc); + if (getClass() != TypeAnnotationNode.class) { + throw new IllegalStateException(); + } + } + + /** + * Constructs a new {@link AnnotationNode}. + * + * @param api + * the ASM API version implemented by this visitor. Must be one + * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. + * @param typeRef + * a reference to the annotated type. See {@link TypeReference}. + * @param typePath + * the path to the annotated type argument, wildcard bound, array + * element type, or static inner type within 'typeRef'. May be + * null if the annotation targets 'typeRef' as a whole. + * @param desc + * the class descriptor of the annotation class. + */ + public TypeAnnotationNode(final int api, final int typeRef, + final TypePath typePath, final String desc) { + super(api, desc); + this.typeRef = typeRef; + this.typePath = typePath; + } +} diff --git a/src/asm/scala/tools/asm/tree/TypeInsnNode.java b/src/asm/scala/tools/asm/tree/TypeInsnNode.java index 3210dd60e624..401400c3cbad 100644 --- a/src/asm/scala/tools/asm/tree/TypeInsnNode.java +++ b/src/asm/scala/tools/asm/tree/TypeInsnNode.java @@ -81,10 +81,11 @@ public int getType() { @Override public void accept(final MethodVisitor mv) { mv.visitTypeInsn(opcode, desc); + acceptAnnotations(mv); } @Override public AbstractInsnNode clone(final Map labels) { - return new TypeInsnNode(opcode, desc); + return new TypeInsnNode(opcode, desc).cloneAnnotations(this); } } diff --git a/src/asm/scala/tools/asm/tree/VarInsnNode.java b/src/asm/scala/tools/asm/tree/VarInsnNode.java index 5dd9ef67264a..685e4fce2cfc 100644 --- a/src/asm/scala/tools/asm/tree/VarInsnNode.java +++ b/src/asm/scala/tools/asm/tree/VarInsnNode.java @@ -84,10 +84,11 @@ public int getType() { @Override public void accept(final MethodVisitor mv) { mv.visitVarInsn(opcode, var); + acceptAnnotations(mv); } @Override public AbstractInsnNode clone(final Map labels) { - return new VarInsnNode(opcode, var); + return new VarInsnNode(opcode, var).cloneAnnotations(this); } } diff --git a/src/asm/scala/tools/asm/tree/analysis/Analyzer.java b/src/asm/scala/tools/asm/tree/analysis/Analyzer.java index 0134555f10c5..ff840aabde14 100644 --- a/src/asm/scala/tools/asm/tree/analysis/Analyzer.java +++ b/src/asm/scala/tools/asm/tree/analysis/Analyzer.java @@ -375,7 +375,7 @@ private void findSubroutine(int insn, final Subroutine sub, * instruction of the method. The size of the returned array is * equal to the number of instructions (and labels) of the method. A * given frame is null if the corresponding instruction - * cannot be reached, or if an error occured during the analysis of + * cannot be reached, or if an error occurred during the analysis of * the method. */ public Frame[] getFrames() { @@ -435,7 +435,7 @@ protected Frame newFrame(final Frame src) { /** * Creates a control flow graph edge. The default implementation of this - * method does nothing. It can be overriden in order to construct the + * method does nothing. It can be overridden in order to construct the * control flow graph of a method (this method is called by the * {@link #analyze analyze} method during its visit of the method's code). 
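As a sketch of the newControlFlowEdge hook mentioned here, assuming the analysis classes keep their upstream generic signatures:

    import scala.tools.asm.tree.MethodNode;
    import scala.tools.asm.tree.analysis.Analyzer;
    import scala.tools.asm.tree.analysis.BasicInterpreter;
    import scala.tools.asm.tree.analysis.BasicValue;

    // prints one line per control flow edge while analyzing the given MethodNode
    static void printEdges(String owner, MethodNode mn) throws Exception {
        Analyzer<BasicValue> analyzer = new Analyzer<BasicValue>(new BasicInterpreter()) {
            @Override
            protected void newControlFlowEdge(int from, int to) {
                System.out.println(from + " -> " + to);
            }
        };
        analyzer.analyze(owner, mn);
    }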
* diff --git a/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java b/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java index 5e3f51f21ad4..52b2a11d6f39 100644 --- a/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java +++ b/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java @@ -37,6 +37,7 @@ * @author Bing Ran * @author Eric Bruneton */ +@SuppressWarnings("serial") public class AnalyzerException extends Exception { public final AbstractInsnNode node; diff --git a/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java b/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java index 8d6653c1c545..7d0b7b0694bd 100644 --- a/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java +++ b/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java @@ -53,7 +53,7 @@ public class BasicInterpreter extends Interpreter implements Opcodes { public BasicInterpreter() { - super(ASM4); + super(ASM5); } protected BasicInterpreter(final int api) { diff --git a/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java b/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java index 71666edb7498..b852f20acf3b 100644 --- a/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java +++ b/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java @@ -47,7 +47,7 @@ public class BasicVerifier extends BasicInterpreter { public BasicVerifier() { - super(ASM4); + super(ASM5); } protected BasicVerifier(final int api) { diff --git a/src/asm/scala/tools/asm/tree/analysis/Frame.java b/src/asm/scala/tools/asm/tree/analysis/Frame.java index 0d92edc4d671..44a07ee27c1f 100644 --- a/src/asm/scala/tools/asm/tree/analysis/Frame.java +++ b/src/asm/scala/tools/asm/tree/analysis/Frame.java @@ -133,6 +133,15 @@ public int getLocals() { return locals; } + /** + * Returns the maximum stack size of this frame. + * + * @return the maximum stack size of this frame. + */ + public int getMaxStackSize() { + return values.length - locals; + } + /** * Returns the value of the given local variable. * diff --git a/src/asm/scala/tools/asm/tree/analysis/Interpreter.java b/src/asm/scala/tools/asm/tree/analysis/Interpreter.java index 56f4bedc0009..00fe6c8bffae 100644 --- a/src/asm/scala/tools/asm/tree/analysis/Interpreter.java +++ b/src/asm/scala/tools/asm/tree/analysis/Interpreter.java @@ -82,7 +82,7 @@ protected Interpreter(final int api) { * the bytecode instruction to be interpreted. * @return the result of the interpretation of the given instruction. * @throws AnalyzerException - * if an error occured during the interpretation. + * if an error occurred during the interpretation. */ public abstract V newOperation(AbstractInsnNode insn) throws AnalyzerException; @@ -101,7 +101,7 @@ public abstract V newOperation(AbstractInsnNode insn) * @return the result of the interpretation of the given instruction. The * returned value must be equal to the given value. * @throws AnalyzerException - * if an error occured during the interpretation. + * if an error occurred during the interpretation. */ public abstract V copyOperation(AbstractInsnNode insn, V value) throws AnalyzerException; @@ -122,7 +122,7 @@ public abstract V copyOperation(AbstractInsnNode insn, V value) * the argument of the instruction to be interpreted. * @return the result of the interpretation of the given instruction. * @throws AnalyzerException - * if an error occured during the interpretation. + * if an error occurred during the interpretation. 
*/ public abstract V unaryOperation(AbstractInsnNode insn, V value) throws AnalyzerException; @@ -146,7 +146,7 @@ public abstract V unaryOperation(AbstractInsnNode insn, V value) * the second argument of the instruction to be interpreted. * @return the result of the interpretation of the given instruction. * @throws AnalyzerException - * if an error occured during the interpretation. + * if an error occurred during the interpretation. */ public abstract V binaryOperation(AbstractInsnNode insn, V value1, V value2) throws AnalyzerException; @@ -167,7 +167,7 @@ public abstract V binaryOperation(AbstractInsnNode insn, V value1, V value2) * the third argument of the instruction to be interpreted. * @return the result of the interpretation of the given instruction. * @throws AnalyzerException - * if an error occured during the interpretation. + * if an error occurred during the interpretation. */ public abstract V ternaryOperation(AbstractInsnNode insn, V value1, V value2, V value3) throws AnalyzerException; @@ -185,7 +185,7 @@ public abstract V ternaryOperation(AbstractInsnNode insn, V value1, * the arguments of the instruction to be interpreted. * @return the result of the interpretation of the given instruction. * @throws AnalyzerException - * if an error occured during the interpretation. + * if an error occurred during the interpretation. */ public abstract V naryOperation(AbstractInsnNode insn, List values) throws AnalyzerException; @@ -203,7 +203,7 @@ public abstract V naryOperation(AbstractInsnNode insn, * @param expected * the expected return type of the analyzed method. * @throws AnalyzerException - * if an error occured during the interpretation. + * if an error occurred during the interpretation. */ public abstract void returnOperation(AbstractInsnNode insn, V value, V expected) throws AnalyzerException; diff --git a/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java b/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java index eaecd057eafb..a345981f367b 100644 --- a/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java +++ b/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java @@ -107,7 +107,7 @@ public SimpleVerifier(final Type currentClass, public SimpleVerifier(final Type currentClass, final Type currentSuperClass, final List currentClassInterfaces, final boolean isInterface) { - this(ASM4, currentClass, currentSuperClass, currentClassInterfaces, + this(ASM5, currentClass, currentSuperClass, currentClassInterfaces, isInterface); } diff --git a/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java b/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java index a68086c07303..7d739d3df9f4 100644 --- a/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java +++ b/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java @@ -50,7 +50,7 @@ public class SourceInterpreter extends Interpreter implements Opcodes { public SourceInterpreter() { - super(ASM4); + super(ASM5); } protected SourceInterpreter(final int api) { diff --git a/src/asm/scala/tools/asm/util/ASMifier.java b/src/asm/scala/tools/asm/util/ASMifier.java index 7e6b223853c9..521e07541b3c 100644 --- a/src/asm/scala/tools/asm/util/ASMifier.java +++ b/src/asm/scala/tools/asm/util/ASMifier.java @@ -40,6 +40,7 @@ import scala.tools.asm.Label; import scala.tools.asm.Opcodes; import scala.tools.asm.Type; +import scala.tools.asm.TypePath; /** * A {@link Printer} that prints the ASM code to generate the classes if visits. 
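A minimal usage sketch for the updated ASMifier, assuming the repackaged TraceClassVisitor from asm-util is available as upstream; the generated code now uses the ASM5 visitMethodInsn form with the itf flag:

    import java.io.PrintWriter;
    import scala.tools.asm.ClassReader;
    import scala.tools.asm.util.ASMifier;
    import scala.tools.asm.util.TraceClassVisitor;

    public class Dump {
        public static void main(String[] args) throws Exception {
            // prints Java source that regenerates java.util.ArrayList through ASM5 calls
            ClassReader cr = new ClassReader("java.util.ArrayList");
            cr.accept(new TraceClassVisitor(null, new ASMifier(),
                    new PrintWriter(System.out)), 0);
        }
    }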
@@ -83,9 +84,15 @@ public class ASMifier extends Printer { * Constructs a new {@link ASMifier}. Subclasses must not use this * constructor. Instead, they must use the * {@link #ASMifier(int, String, int)} version. + * + * @throws IllegalStateException + * If a subclass calls this constructor. */ public ASMifier() { - this(Opcodes.ASM4, "cw", 0); + this(Opcodes.ASM5, "cw", 0); + if (getClass() != ASMifier.class) { + throw new IllegalStateException(); + } } /** @@ -93,7 +100,7 @@ public ASMifier() { * * @param api * the ASM API version implemented by this class. Must be one of - * {@link Opcodes#ASM4}. + * {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. * @param name * the name of the visitor variable in the produced code. * @param id @@ -170,7 +177,6 @@ public void visit(final int version, final int access, final String name, } text.add("import java.util.*;\n"); text.add("import scala.tools.asm.*;\n"); - text.add("import scala.tools.asm.attrs.*;\n"); text.add("public class " + simpleName + "Dump implements Opcodes {\n\n"); text.add("public static byte[] dump () throws Exception {\n\n"); text.add("ClassWriter cw = new ClassWriter(0);\n"); @@ -260,6 +266,12 @@ public ASMifier visitClassAnnotation(final String desc, return visitAnnotation(desc, visible); } + @Override + public ASMifier visitClassTypeAnnotation(final int typeRef, + final TypePath typePath, final String desc, final boolean visible) { + return visitTypeAnnotation(typeRef, typePath, desc, visible); + } + @Override public void visitClassAttribute(final Attribute attr) { visitAttribute(attr); @@ -422,6 +434,12 @@ public ASMifier visitFieldAnnotation(final String desc, return visitAnnotation(desc, visible); } + @Override + public ASMifier visitFieldTypeAnnotation(final int typeRef, + final TypePath typePath, final String desc, final boolean visible) { + return visitTypeAnnotation(typeRef, typePath, desc, visible); + } + @Override public void visitFieldAttribute(final Attribute attr) { visitAttribute(attr); @@ -438,6 +456,16 @@ public void visitFieldEnd() { // Methods // ------------------------------------------------------------------------ + @Override + public void visitParameter(String parameterName, int access) { + buf.setLength(0); + buf.append(name).append(".visitParameter("); + appendString(buf, parameterName); + buf.append(", "); + appendAccess(access); + text.add(buf.append(");\n").toString()); + } + @Override public ASMifier visitAnnotationDefault() { buf.setLength(0); @@ -456,6 +484,12 @@ public ASMifier visitMethodAnnotation(final String desc, return visitAnnotation(desc, visible); } + @Override + public ASMifier visitMethodTypeAnnotation(final int typeRef, + final TypePath typePath, final String desc, final boolean visible) { + return visitTypeAnnotation(typeRef, typePath, desc, visible); + } + @Override public ASMifier visitParameterAnnotation(final int parameter, final String desc, final boolean visible) { @@ -582,9 +616,30 @@ public void visitFieldInsn(final int opcode, final String owner, text.add(buf.toString()); } + @Deprecated @Override public void visitMethodInsn(final int opcode, final String owner, final String name, final String desc) { + if (api >= Opcodes.ASM5) { + super.visitMethodInsn(opcode, owner, name, desc); + return; + } + doVisitMethodInsn(opcode, owner, name, desc, + opcode == Opcodes.INVOKEINTERFACE); + } + + @Override + public void visitMethodInsn(final int opcode, final String owner, + final String name, final String desc, final boolean itf) { + if (api < Opcodes.ASM5) { + 
super.visitMethodInsn(opcode, owner, name, desc, itf); + return; + } + doVisitMethodInsn(opcode, owner, name, desc, itf); + } + + private void doVisitMethodInsn(final int opcode, final String owner, + final String name, final String desc, final boolean itf) { buf.setLength(0); buf.append(this.name).append(".visitMethodInsn(") .append(OPCODES[opcode]).append(", "); @@ -593,6 +648,8 @@ public void visitMethodInsn(final int opcode, final String owner, appendConstant(name); buf.append(", "); appendConstant(desc); + buf.append(", "); + buf.append(itf ? "true" : "false"); buf.append(");\n"); text.add(buf.toString()); } @@ -710,6 +767,13 @@ public void visitMultiANewArrayInsn(final String desc, final int dims) { text.add(buf.toString()); } + @Override + public ASMifier visitInsnAnnotation(final int typeRef, + final TypePath typePath, final String desc, final boolean visible) { + return visitTypeAnnotation("visitInsnAnnotation", typeRef, typePath, + desc, visible); + } + @Override public void visitTryCatchBlock(final Label start, final Label end, final Label handler, final String type) { @@ -729,6 +793,13 @@ public void visitTryCatchBlock(final Label start, final Label end, text.add(buf.toString()); } + @Override + public ASMifier visitTryCatchAnnotation(final int typeRef, + final TypePath typePath, final String desc, final boolean visible) { + return visitTypeAnnotation("visitTryCatchAnnotation", typeRef, + typePath, desc, visible); + } + @Override public void visitLocalVariable(final String name, final String desc, final String signature, final Label start, final Label end, @@ -748,6 +819,39 @@ public void visitLocalVariable(final String name, final String desc, text.add(buf.toString()); } + @Override + public Printer visitLocalVariableAnnotation(int typeRef, TypePath typePath, + Label[] start, Label[] end, int[] index, String desc, + boolean visible) { + buf.setLength(0); + buf.append("{\n").append("av0 = ").append(name) + .append(".visitLocalVariableAnnotation("); + buf.append(typeRef); + buf.append(", TypePath.fromString(\"").append(typePath).append("\"), "); + buf.append("new Label[] {"); + for (int i = 0; i < start.length; ++i) { + buf.append(i == 0 ? " " : ", "); + appendLabel(start[i]); + } + buf.append(" }, new Label[] {"); + for (int i = 0; i < end.length; ++i) { + buf.append(i == 0 ? " " : ", "); + appendLabel(end[i]); + } + buf.append(" }, new int[] {"); + for (int i = 0; i < index.length; ++i) { + buf.append(i == 0 ? 
" " : ", ").append(index[i]); + } + buf.append(" }, "); + appendConstant(desc); + buf.append(", ").append(visible).append(");\n"); + text.add(buf.toString()); + ASMifier a = createASMifier("av", 0); + text.add(a.getText()); + text.add("}\n"); + return a; + } + @Override public void visitLineNumber(final int line, final Label start) { buf.setLength(0); @@ -789,6 +893,28 @@ public ASMifier visitAnnotation(final String desc, final boolean visible) { return a; } + public ASMifier visitTypeAnnotation(final int typeRef, + final TypePath typePath, final String desc, final boolean visible) { + return visitTypeAnnotation("visitTypeAnnotation", typeRef, typePath, + desc, visible); + } + + public ASMifier visitTypeAnnotation(final String method, final int typeRef, + final TypePath typePath, final String desc, final boolean visible) { + buf.setLength(0); + buf.append("{\n").append("av0 = ").append(name).append(".") + .append(method).append("("); + buf.append(typeRef); + buf.append(", TypePath.fromString(\"").append(typePath).append("\"), "); + appendConstant(desc); + buf.append(", ").append(visible).append(");\n"); + text.add(buf.toString()); + ASMifier a = createASMifier("av", 0); + text.add(a.getText()); + text.add("}\n"); + return a; + } + public void visitAttribute(final Attribute attr) { buf.setLength(0); buf.append("// ATTRIBUTE ").append(attr.type).append('\n'); @@ -809,7 +935,7 @@ public void visitAttribute(final Attribute attr) { // ------------------------------------------------------------------------ protected ASMifier createASMifier(final String name, final int id) { - return new ASMifier(Opcodes.ASM4, name, id); + return new ASMifier(Opcodes.ASM5, name, id); } /** @@ -950,6 +1076,13 @@ void appendAccess(final int access) { buf.append("ACC_DEPRECATED"); first = false; } + if ((access & Opcodes.ACC_MANDATED) != 0) { + if (!first) { + buf.append(" + "); + } + buf.append("ACC_MANDATED"); + first = false; + } if (first) { buf.append('0'); } diff --git a/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java b/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java index f00a8f04a26e..70441d1df418 100644 --- a/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java +++ b/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java @@ -49,7 +49,7 @@ public CheckAnnotationAdapter(final AnnotationVisitor av) { } CheckAnnotationAdapter(final AnnotationVisitor av, final boolean named) { - super(Opcodes.ASM4, av); + super(Opcodes.ASM5, av); this.named = named; } @@ -70,7 +70,7 @@ public void visit(final String name, final Object value) { } if (value instanceof Type) { int sort = ((Type) value).getSort(); - if (sort != Type.OBJECT && sort != Type.ARRAY) { + if (sort == Type.METHOD) { throw new IllegalArgumentException("Invalid annotation value"); } } diff --git a/src/asm/scala/tools/asm/util/CheckClassAdapter.java b/src/asm/scala/tools/asm/util/CheckClassAdapter.java index 0bfa143a95df..9909208cc4af 100644 --- a/src/asm/scala/tools/asm/util/CheckClassAdapter.java +++ b/src/asm/scala/tools/asm/util/CheckClassAdapter.java @@ -46,6 +46,8 @@ import scala.tools.asm.MethodVisitor; import scala.tools.asm.Opcodes; import scala.tools.asm.Type; +import scala.tools.asm.TypePath; +import scala.tools.asm.TypeReference; import scala.tools.asm.tree.ClassNode; import scala.tools.asm.tree.MethodNode; import scala.tools.asm.tree.analysis.Analyzer; @@ -91,9 +93,9 @@ * insnNumber locals : stack): * *
    - * scala.tools.asm.tree.analysis.AnalyzerException: Error at instruction 71: Expected I, but found .
    - *   at scala.tools.asm.tree.analysis.Analyzer.analyze(Analyzer.java:289)
    - *   at scala.tools.asm.util.CheckClassAdapter.verify(CheckClassAdapter.java:135)
    + * org.objectweb.asm.tree.analysis.AnalyzerException: Error at instruction 71: Expected I, but found .
    + *   at org.objectweb.asm.tree.analysis.Analyzer.analyze(Analyzer.java:289)
    + *   at org.objectweb.asm.util.CheckClassAdapter.verify(CheckClassAdapter.java:135)
      * ...
      * remove()V
      * 00000 LinkedBlockingQueue$Itr . . . . . . . .  :
    @@ -106,7 +108,7 @@
      * 00071 LinkedBlockingQueue$Itr . I . . . . . .  :
      *   ILOAD 1
      * 00072 ?
    - *   INVOKESPECIAL java/lang/Integer. (I)V
    + *   INVOKESPECIAL java/lang/Integer.<init> (I)V
      * ...
     * </pre>
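A short sketch of how a listing like the one above is produced; java.util.ArrayList is just a convenient class to feed in, and the null loader falls back to the default:

    import java.io.PrintWriter;
    import scala.tools.asm.ClassReader;
    import scala.tools.asm.util.CheckClassAdapter;

    public class VerifyExample {
        public static void main(String[] args) throws Exception {
            ClassReader cr = new ClassReader("java.util.ArrayList");
            // dump=false: only print the instruction/frame listing for methods that fail
            CheckClassAdapter.verify(cr, null, false, new PrintWriter(System.err));
        }
    }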
    * @@ -215,7 +217,7 @@ public static void verify(final ClassReader cr, final ClassLoader loader, List interfaces = new ArrayList(); for (Iterator i = cn.interfaces.iterator(); i.hasNext();) { - interfaces.add(Type.getObjectType(i.next().toString())); + interfaces.add(Type.getObjectType(i.next())); } for (int i = 0; i < methods.size(); ++i) { @@ -328,9 +330,14 @@ public CheckClassAdapter(final ClassVisitor cv) { * false to not perform any data flow check (see * {@link CheckMethodAdapter}). This option requires valid * maxLocals and maxStack values. + * @throws IllegalStateException + * If a subclass calls this constructor. */ public CheckClassAdapter(final ClassVisitor cv, final boolean checkDataFlow) { - this(Opcodes.ASM4, cv, checkDataFlow); + this(Opcodes.ASM5, cv, checkDataFlow); + if (getClass() != CheckClassAdapter.class) { + throw new IllegalStateException(); + } } /** @@ -338,7 +345,7 @@ public CheckClassAdapter(final ClassVisitor cv, final boolean checkDataFlow) { * * @param api * the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. + * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. * @param cv * the class visitor to which this adapter must delegate calls. * @param checkDataFlow @@ -440,7 +447,15 @@ public void visitInnerClass(final String name, final String outerName, CheckMethodAdapter.checkInternalName(outerName, "outer class name"); } if (innerName != null) { - CheckMethodAdapter.checkIdentifier(innerName, "inner class name"); + int start = 0; + while (start < innerName.length() + && Character.isDigit(innerName.charAt(start))) { + start++; + } + if (start == 0 || start < innerName.length()) { + CheckMethodAdapter.checkIdentifier(innerName, start, -1, + "inner class name"); + } } checkAccess(access, Opcodes.ACC_PUBLIC + Opcodes.ACC_PRIVATE + Opcodes.ACC_PROTECTED + Opcodes.ACC_STATIC @@ -516,6 +531,23 @@ public AnnotationVisitor visitAnnotation(final String desc, return new CheckAnnotationAdapter(super.visitAnnotation(desc, visible)); } + @Override + public AnnotationVisitor visitTypeAnnotation(final int typeRef, + final TypePath typePath, final String desc, final boolean visible) { + checkState(); + int sort = typeRef >>> 24; + if (sort != TypeReference.CLASS_TYPE_PARAMETER + && sort != TypeReference.CLASS_TYPE_PARAMETER_BOUND + && sort != TypeReference.CLASS_EXTENDS) { + throw new IllegalArgumentException("Invalid type reference sort 0x" + + Integer.toHexString(sort)); + } + checkTypeRefAndPath(typeRef, typePath); + CheckMethodAdapter.checkDesc(desc, false); + return new CheckAnnotationAdapter(super.visitTypeAnnotation(typeRef, + typePath, desc, visible)); + } + @Override public void visitAttribute(final Attribute attr) { checkState(); @@ -660,6 +692,77 @@ public static void checkFieldSignature(final String signature) { } } + /** + * Checks the reference to a type in a type annotation. + * + * @param typeRef + * a reference to an annotated type. + * @param typePath + * the path to the annotated type argument, wildcard bound, array + * element type, or static inner type within 'typeRef'. May be + * null if the annotation targets 'typeRef' as a whole. 
+ */ + static void checkTypeRefAndPath(int typeRef, TypePath typePath) { + int mask = 0; + switch (typeRef >>> 24) { + case TypeReference.CLASS_TYPE_PARAMETER: + case TypeReference.METHOD_TYPE_PARAMETER: + case TypeReference.METHOD_FORMAL_PARAMETER: + mask = 0xFFFF0000; + break; + case TypeReference.FIELD: + case TypeReference.METHOD_RETURN: + case TypeReference.METHOD_RECEIVER: + case TypeReference.LOCAL_VARIABLE: + case TypeReference.RESOURCE_VARIABLE: + case TypeReference.INSTANCEOF: + case TypeReference.NEW: + case TypeReference.CONSTRUCTOR_REFERENCE: + case TypeReference.METHOD_REFERENCE: + mask = 0xFF000000; + break; + case TypeReference.CLASS_EXTENDS: + case TypeReference.CLASS_TYPE_PARAMETER_BOUND: + case TypeReference.METHOD_TYPE_PARAMETER_BOUND: + case TypeReference.THROWS: + case TypeReference.EXCEPTION_PARAMETER: + mask = 0xFFFFFF00; + break; + case TypeReference.CAST: + case TypeReference.CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT: + case TypeReference.METHOD_INVOCATION_TYPE_ARGUMENT: + case TypeReference.CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT: + case TypeReference.METHOD_REFERENCE_TYPE_ARGUMENT: + mask = 0xFF0000FF; + break; + default: + throw new IllegalArgumentException("Invalid type reference sort 0x" + + Integer.toHexString(typeRef >>> 24)); + } + if ((typeRef & ~mask) != 0) { + throw new IllegalArgumentException("Invalid type reference 0x" + + Integer.toHexString(typeRef)); + } + if (typePath != null) { + for (int i = 0; i < typePath.getLength(); ++i) { + int step = typePath.getStep(i); + if (step != TypePath.ARRAY_ELEMENT + && step != TypePath.INNER_TYPE + && step != TypePath.TYPE_ARGUMENT + && step != TypePath.WILDCARD_BOUND) { + throw new IllegalArgumentException( + "Invalid type path step " + i + " in " + typePath); + } + if (step != TypePath.TYPE_ARGUMENT + && typePath.getStepArgument(i) != 0) { + throw new IllegalArgumentException( + "Invalid type path step argument for step " + i + + " in " + typePath); + } + } + } + } + /** * Checks the formal type parameters of a class or method signature. * diff --git a/src/asm/scala/tools/asm/util/CheckFieldAdapter.java b/src/asm/scala/tools/asm/util/CheckFieldAdapter.java index 4657605936d7..e682df47aff2 100644 --- a/src/asm/scala/tools/asm/util/CheckFieldAdapter.java +++ b/src/asm/scala/tools/asm/util/CheckFieldAdapter.java @@ -33,6 +33,8 @@ import scala.tools.asm.Attribute; import scala.tools.asm.FieldVisitor; import scala.tools.asm.Opcodes; +import scala.tools.asm.TypePath; +import scala.tools.asm.TypeReference; /** * A {@link FieldVisitor} that checks that its methods are properly used. @@ -48,9 +50,14 @@ public class CheckFieldAdapter extends FieldVisitor { * * @param fv * the field visitor to which this adapter must delegate calls. + * @throws IllegalStateException + * If a subclass calls this constructor. */ public CheckFieldAdapter(final FieldVisitor fv) { - this(Opcodes.ASM4, fv); + this(Opcodes.ASM5, fv); + if (getClass() != CheckFieldAdapter.class) { + throw new IllegalStateException(); + } } /** @@ -58,7 +65,7 @@ public CheckFieldAdapter(final FieldVisitor fv) { * * @param api * the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. + * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. * @param fv * the field visitor to which this adapter must delegate calls. 
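checkTypeRefAndPath above is package-private, so the following fragment only illustrates the packed int layouts it masks; the indices are arbitrary example values:

    import scala.tools.asm.TypePath;
    import scala.tools.asm.TypeReference;

    // sort in bits 24-31, type parameter index in bits 16-23; mask 0xFFFF0000 for this sort
    int onTypeParam = TypeReference.newTypeParameterReference(
            TypeReference.CLASS_TYPE_PARAMETER, 1).getValue();
    // CLASS_EXTENDS with the supertype index in bits 8-23; mask 0xFFFFFF00
    int onSuperInterface = TypeReference.newSuperTypeReference(0).getValue();
    // a path step into type argument 0, as accepted by the path validation loop
    TypePath intoFirstTypeArg = TypePath.fromString("0;");
    // stray low bits would be rejected, since (typeRef & ~mask) must be zero
    int invalid = onTypeParam | 0x7F;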
*/ @@ -74,6 +81,21 @@ public AnnotationVisitor visitAnnotation(final String desc, return new CheckAnnotationAdapter(super.visitAnnotation(desc, visible)); } + @Override + public AnnotationVisitor visitTypeAnnotation(final int typeRef, + final TypePath typePath, final String desc, final boolean visible) { + checkEnd(); + int sort = typeRef >>> 24; + if (sort != TypeReference.FIELD) { + throw new IllegalArgumentException("Invalid type reference sort 0x" + + Integer.toHexString(sort)); + } + CheckClassAdapter.checkTypeRefAndPath(typeRef, typePath); + CheckMethodAdapter.checkDesc(desc, false); + return new CheckAnnotationAdapter(super.visitTypeAnnotation(typeRef, + typePath, desc, visible)); + } + @Override public void visitAttribute(final Attribute attr) { checkEnd(); diff --git a/src/asm/scala/tools/asm/util/CheckMethodAdapter.java b/src/asm/scala/tools/asm/util/CheckMethodAdapter.java index 9da01c9d6efd..131dfa5e5b14 100644 --- a/src/asm/scala/tools/asm/util/CheckMethodAdapter.java +++ b/src/asm/scala/tools/asm/util/CheckMethodAdapter.java @@ -46,6 +46,8 @@ import scala.tools.asm.MethodVisitor; import scala.tools.asm.Opcodes; import scala.tools.asm.Type; +import scala.tools.asm.TypePath; +import scala.tools.asm.TypeReference; import scala.tools.asm.tree.MethodNode; import scala.tools.asm.tree.analysis.Analyzer; import scala.tools.asm.tree.analysis.BasicValue; @@ -390,10 +392,15 @@ public CheckMethodAdapter(final MethodVisitor mv) { * the method visitor to which this adapter must delegate calls. * @param labels * a map of already visited labels (in other methods). + * @throws IllegalStateException + * If a subclass calls this constructor. */ public CheckMethodAdapter(final MethodVisitor mv, final Map labels) { - this(Opcodes.ASM4, mv, labels); + this(Opcodes.ASM5, mv, labels); + if (getClass() != CheckMethodAdapter.class) { + throw new IllegalStateException(); + } } /** @@ -434,7 +441,7 @@ protected CheckMethodAdapter(final int api, final MethodVisitor mv, public CheckMethodAdapter(final int access, final String name, final String desc, final MethodVisitor cmv, final Map labels) { - this(new MethodNode(access, name, desc, null, null) { + this(new MethodNode(Opcodes.ASM5, access, name, desc, null, null) { @Override public void visitEnd() { Analyzer a = new Analyzer( @@ -461,6 +468,16 @@ public void visitEnd() { this.access = access; } + @Override + public void visitParameter(String name, int access) { + if (name != null) { + checkUnqualifiedName(version, name, "name"); + } + CheckClassAdapter.checkAccess(access, Opcodes.ACC_FINAL + + Opcodes.ACC_MANDATED + Opcodes.ACC_SYNTHETIC); + super.visitParameter(name, access); + } + @Override public AnnotationVisitor visitAnnotation(final String desc, final boolean visible) { @@ -469,6 +486,26 @@ public AnnotationVisitor visitAnnotation(final String desc, return new CheckAnnotationAdapter(super.visitAnnotation(desc, visible)); } + @Override + public AnnotationVisitor visitTypeAnnotation(final int typeRef, + final TypePath typePath, final String desc, final boolean visible) { + checkEndMethod(); + int sort = typeRef >>> 24; + if (sort != TypeReference.METHOD_TYPE_PARAMETER + && sort != TypeReference.METHOD_TYPE_PARAMETER_BOUND + && sort != TypeReference.METHOD_RETURN + && sort != TypeReference.METHOD_RECEIVER + && sort != TypeReference.METHOD_FORMAL_PARAMETER + && sort != TypeReference.THROWS) { + throw new IllegalArgumentException("Invalid type reference sort 0x" + + Integer.toHexString(sort)); + } + CheckClassAdapter.checkTypeRefAndPath(typeRef, 
typePath); + CheckMethodAdapter.checkDesc(desc, false); + return new CheckAnnotationAdapter(super.visitTypeAnnotation(typeRef, + typePath, desc, visible)); + } + @Override public AnnotationVisitor visitAnnotationDefault() { checkEndMethod(); @@ -647,9 +684,30 @@ public void visitFieldInsn(final int opcode, final String owner, ++insnCount; } + @Deprecated @Override - public void visitMethodInsn(final int opcode, final String owner, - final String name, final String desc) { + public void visitMethodInsn(int opcode, String owner, String name, + String desc) { + if (api >= Opcodes.ASM5) { + super.visitMethodInsn(opcode, owner, name, desc); + return; + } + doVisitMethodInsn(opcode, owner, name, desc, + opcode == Opcodes.INVOKEINTERFACE); + } + + @Override + public void visitMethodInsn(int opcode, String owner, String name, + String desc, boolean itf) { + if (api < Opcodes.ASM5) { + super.visitMethodInsn(opcode, owner, name, desc, itf); + return; + } + doVisitMethodInsn(opcode, owner, name, desc, itf); + } + + private void doVisitMethodInsn(int opcode, final String owner, + final String name, final String desc, final boolean itf) { checkStartCode(); checkEndCode(); checkOpcode(opcode, 5); @@ -658,7 +716,21 @@ public void visitMethodInsn(final int opcode, final String owner, } checkInternalName(owner, "owner"); checkMethodDesc(desc); - super.visitMethodInsn(opcode, owner, name, desc); + if (opcode == Opcodes.INVOKEVIRTUAL && itf) { + throw new IllegalArgumentException( + "INVOKEVIRTUAL can't be used with interfaces"); + } + if (opcode == Opcodes.INVOKEINTERFACE && !itf) { + throw new IllegalArgumentException( + "INVOKEINTERFACE can't be used with classes"); + } + // Calling super.visitMethodInsn requires to call the correct version + // depending on this.api (otherwise infinite loops can occur). To + // simplify and to make it easier to automatically remove the backward + // compatibility code, we inline the code of the overridden method here. 
+ if (mv != null) { + mv.visitMethodInsn(opcode, owner, name, desc, itf); + } ++insnCount; } @@ -796,6 +868,29 @@ public void visitMultiANewArrayInsn(final String desc, final int dims) { ++insnCount; } + @Override + public AnnotationVisitor visitInsnAnnotation(final int typeRef, + final TypePath typePath, final String desc, final boolean visible) { + checkStartCode(); + checkEndCode(); + int sort = typeRef >>> 24; + if (sort != TypeReference.INSTANCEOF && sort != TypeReference.NEW + && sort != TypeReference.CONSTRUCTOR_REFERENCE + && sort != TypeReference.METHOD_REFERENCE + && sort != TypeReference.CAST + && sort != TypeReference.CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT + && sort != TypeReference.METHOD_INVOCATION_TYPE_ARGUMENT + && sort != TypeReference.CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT + && sort != TypeReference.METHOD_REFERENCE_TYPE_ARGUMENT) { + throw new IllegalArgumentException("Invalid type reference sort 0x" + + Integer.toHexString(sort)); + } + CheckClassAdapter.checkTypeRefAndPath(typeRef, typePath); + CheckMethodAdapter.checkDesc(desc, false); + return new CheckAnnotationAdapter(super.visitInsnAnnotation(typeRef, + typePath, desc, visible)); + } + @Override public void visitTryCatchBlock(final Label start, final Label end, final Label handler, final String type) { @@ -820,6 +915,22 @@ public void visitTryCatchBlock(final Label start, final Label end, handlers.add(end); } + @Override + public AnnotationVisitor visitTryCatchAnnotation(final int typeRef, + final TypePath typePath, final String desc, final boolean visible) { + checkStartCode(); + checkEndCode(); + int sort = typeRef >>> 24; + if (sort != TypeReference.EXCEPTION_PARAMETER) { + throw new IllegalArgumentException("Invalid type reference sort 0x" + + Integer.toHexString(sort)); + } + CheckClassAdapter.checkTypeRefAndPath(typeRef, typePath); + CheckMethodAdapter.checkDesc(desc, false); + return new CheckAnnotationAdapter(super.visitTryCatchAnnotation( + typeRef, typePath, desc, visible)); + } + @Override public void visitLocalVariable(final String name, final String desc, final String signature, final Label start, final Label end, @@ -840,6 +951,40 @@ public void visitLocalVariable(final String name, final String desc, super.visitLocalVariable(name, desc, signature, start, end, index); } + @Override + public AnnotationVisitor visitLocalVariableAnnotation(int typeRef, + TypePath typePath, Label[] start, Label[] end, int[] index, + String desc, boolean visible) { + checkStartCode(); + checkEndCode(); + int sort = typeRef >>> 24; + if (sort != TypeReference.LOCAL_VARIABLE + && sort != TypeReference.RESOURCE_VARIABLE) { + throw new IllegalArgumentException("Invalid type reference sort 0x" + + Integer.toHexString(sort)); + } + CheckClassAdapter.checkTypeRefAndPath(typeRef, typePath); + checkDesc(desc, false); + if (start == null || end == null || index == null + || end.length != start.length || index.length != start.length) { + throw new IllegalArgumentException( + "Invalid start, end and index arrays (must be non null and of identical length"); + } + for (int i = 0; i < start.length; ++i) { + checkLabel(start[i], true, "start label"); + checkLabel(end[i], true, "end label"); + checkUnsignedShort(index[i], "Invalid variable index"); + int s = labels.get(start[i]).intValue(); + int e = labels.get(end[i]).intValue(); + if (e < s) { + throw new IllegalArgumentException( + "Invalid start and end labels (end must be greater than start)"); + } + } + return super.visitLocalVariableAnnotation(typeRef, typePath, start, + end, index, 
desc, visible); + } + @Override public void visitLineNumber(final int line, final Label start) { checkStartCode(); @@ -1202,7 +1347,7 @@ static void checkInternalName(final String name, final int start, checkIdentifier(name, begin, slash, null); begin = slash + 1; } while (slash != max); - } catch (IllegalArgumentException _) { + } catch (IllegalArgumentException unused) { throw new IllegalArgumentException( "Invalid " + msg @@ -1280,7 +1425,7 @@ static int checkDesc(final String desc, final int start, } try { checkInternalName(desc, start + 1, index, null); - } catch (IllegalArgumentException _) { + } catch (IllegalArgumentException unused) { throw new IllegalArgumentException("Invalid descriptor: " + desc); } diff --git a/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java b/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java index e69302b8a6e1..54c9033c90a6 100644 --- a/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java +++ b/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java @@ -113,7 +113,7 @@ public class CheckSignatureAdapter extends SignatureVisitor { * null. */ public CheckSignatureAdapter(final int type, final SignatureVisitor sv) { - this(Opcodes.ASM4, type, sv); + this(Opcodes.ASM5, type, sv); } /** @@ -121,7 +121,7 @@ public CheckSignatureAdapter(final int type, final SignatureVisitor sv) { * * @param api * the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. + * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. * @param type * the type of signature to be checked. See * {@link #CLASS_SIGNATURE}, {@link #METHOD_SIGNATURE} and diff --git a/src/asm/scala/tools/asm/util/Printer.java b/src/asm/scala/tools/asm/util/Printer.java index 86e0f9e12284..773f129ad997 100644 --- a/src/asm/scala/tools/asm/util/Printer.java +++ b/src/asm/scala/tools/asm/util/Printer.java @@ -37,6 +37,7 @@ import scala.tools.asm.Handle; import scala.tools.asm.Label; import scala.tools.asm.Opcodes; +import scala.tools.asm.TypePath; /** * An abstract converter from visit events to text. @@ -116,7 +117,7 @@ public abstract class Printer { /** * The ASM API version implemented by this class. The value of this field - * must be one of {@link Opcodes#ASM4}. + * must be one of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. */ protected final int api; @@ -174,6 +175,15 @@ public abstract void visitOuterClass(final String owner, final String name, public abstract Printer visitClassAnnotation(final String desc, final boolean visible); + /** + * Class type annotation. See + * {@link scala.tools.asm.ClassVisitor#visitTypeAnnotation}. + */ + public Printer visitClassTypeAnnotation(final int typeRef, + final TypePath typePath, final String desc, final boolean visible) { + throw new RuntimeException("Must be overridden"); + } + /** * Class attribute. See * {@link scala.tools.asm.ClassVisitor#visitAttribute}. @@ -248,6 +258,15 @@ public abstract void visitEnum(final String name, final String desc, public abstract Printer visitFieldAnnotation(final String desc, final boolean visible); + /** + * Field type annotation. See + * {@link scala.tools.asm.FieldVisitor#visitTypeAnnotation}. + */ + public Printer visitFieldTypeAnnotation(final int typeRef, + final TypePath typePath, final String desc, final boolean visible) { + throw new RuntimeException("Must be overridden"); + } + /** * Field attribute. See * {@link scala.tools.asm.FieldVisitor#visitAttribute}. 
@@ -263,6 +282,14 @@ public abstract Printer visitFieldAnnotation(final String desc, // Methods // ------------------------------------------------------------------------ + /** + * Method parameter. See + * {@link scala.tools.asm.MethodVisitor#visitParameter(String, int)}. + */ + public void visitParameter(String name, int access) { + throw new RuntimeException("Must be overridden"); + } + /** * Method default annotation. See * {@link scala.tools.asm.MethodVisitor#visitAnnotationDefault}. @@ -276,6 +303,15 @@ public abstract Printer visitFieldAnnotation(final String desc, public abstract Printer visitMethodAnnotation(final String desc, final boolean visible); + /** + * Method type annotation. See + * {@link scala.tools.asm.MethodVisitor#visitTypeAnnotation}. + */ + public Printer visitMethodTypeAnnotation(final int typeRef, + final TypePath typePath, final String desc, final boolean visible) { + throw new RuntimeException("Must be overridden"); + } + /** * Method parameter annotation. See * {@link scala.tools.asm.MethodVisitor#visitParameterAnnotation}. @@ -336,8 +372,33 @@ public abstract void visitFieldInsn(final int opcode, final String owner, * Method instruction. See * {@link scala.tools.asm.MethodVisitor#visitMethodInsn}. */ - public abstract void visitMethodInsn(final int opcode, final String owner, - final String name, final String desc); + @Deprecated + public void visitMethodInsn(final int opcode, final String owner, + final String name, final String desc) { + if (api >= Opcodes.ASM5) { + boolean itf = opcode == Opcodes.INVOKEINTERFACE; + visitMethodInsn(opcode, owner, name, desc, itf); + return; + } + throw new RuntimeException("Must be overridden"); + } + + /** + * Method instruction. See + * {@link scala.tools.asm.MethodVisitor#visitMethodInsn}. + */ + public void visitMethodInsn(final int opcode, final String owner, + final String name, final String desc, final boolean itf) { + if (api < Opcodes.ASM5) { + if (itf != (opcode == Opcodes.INVOKEINTERFACE)) { + throw new IllegalArgumentException( + "INVOKESPECIAL/STATIC on interfaces require ASM 5"); + } + visitMethodInsn(opcode, owner, name, desc); + return; + } + throw new RuntimeException("Must be overridden"); + } /** * Method instruction. See @@ -390,6 +451,15 @@ public abstract void visitLookupSwitchInsn(final Label dflt, public abstract void visitMultiANewArrayInsn(final String desc, final int dims); + /** + * Instruction type annotation. See + * {@link scala.tools.asm.MethodVisitor#visitInsnAnnotation}. + */ + public Printer visitInsnAnnotation(final int typeRef, + final TypePath typePath, final String desc, final boolean visible) { + throw new RuntimeException("Must be overridden"); + } + /** * Method exception handler. See * {@link scala.tools.asm.MethodVisitor#visitTryCatchBlock}. @@ -397,6 +467,15 @@ public abstract void visitMultiANewArrayInsn(final String desc, public abstract void visitTryCatchBlock(final Label start, final Label end, final Label handler, final String type); + /** + * Try catch block type annotation. See + * {@link scala.tools.asm.MethodVisitor#visitTryCatchAnnotation}. + */ + public Printer visitTryCatchAnnotation(final int typeRef, + final TypePath typePath, final String desc, final boolean visible) { + throw new RuntimeException("Must be overridden"); + } + /** * Method debug info. See * {@link scala.tools.asm.MethodVisitor#visitLocalVariable}. 
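The two visitMethodInsn overloads above delegate to each other depending on the api field. A hedged sketch of the ASM5 call form a client emits through any such visitor (Textifier, MethodNode, a checker, and so on); the owners and descriptors are just examples:

    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    static void emitCalls(MethodVisitor mv) {
        mv.visitMethodInsn(Opcodes.INVOKEINTERFACE, "java/util/List", "size", "()I", true);
        mv.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/Integer", "parseInt",
                "(Ljava/lang/String;)I", false);
        // static or special calls on interface owners are only expressible with itf=true
        // and require an ASM5 visitor; the deprecated 4-argument overload assumes itf
        // only for INVOKEINTERFACE, and mismatched combinations (INVOKEVIRTUAL with
        // itf=true, INVOKEINTERFACE with itf=false) are rejected by CheckMethodAdapter
        // and by MethodNode.check(ASM4) as shown earlier in this patch
    }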
@@ -405,6 +484,16 @@ public abstract void visitLocalVariable(final String name, final String desc, final String signature, final Label start, final Label end, final int index); + /** + * Local variable type annotation. See + * {@link scala.tools.asm.MethodVisitor#visitTryCatchAnnotation}. + */ + public Printer visitLocalVariableAnnotation(final int typeRef, + final TypePath typePath, final Label[] start, final Label[] end, + final int[] index, final String desc, final boolean visible) { + throw new RuntimeException("Must be overridden"); + } + /** * Method debug info. See * {@link scala.tools.asm.MethodVisitor#visitLineNumber}. diff --git a/src/asm/scala/tools/asm/util/Textifier.java b/src/asm/scala/tools/asm/util/Textifier.java index a5c4f6779e44..373e46f5ed5e 100644 --- a/src/asm/scala/tools/asm/util/Textifier.java +++ b/src/asm/scala/tools/asm/util/Textifier.java @@ -40,6 +40,8 @@ import scala.tools.asm.Label; import scala.tools.asm.Opcodes; import scala.tools.asm.Type; +import scala.tools.asm.TypePath; +import scala.tools.asm.TypeReference; import scala.tools.asm.signature.SignatureReader; /** @@ -135,15 +137,26 @@ public class Textifier extends Printer { */ protected Map labelNames; + /** + * Class access flags + */ + private int access; + private int valueNumber = 0; /** * Constructs a new {@link Textifier}. Subclasses must not use this * constructor. Instead, they must use the {@link #Textifier(int)} * version. + * + * @throws IllegalStateException + * If a subclass calls this constructor. */ public Textifier() { - this(Opcodes.ASM4); + this(Opcodes.ASM5); + if (getClass() != Textifier.class) { + throw new IllegalStateException(); + } } /** @@ -151,7 +164,7 @@ public Textifier() { * * @param api * the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. + * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. 
*/ protected Textifier(final int api) { super(api); @@ -208,6 +221,7 @@ public static void main(final String[] args) throws Exception { public void visit(final int version, final int access, final String name, final String signature, final String superName, final String[] interfaces) { + this.access = access; int major = version & 0xFFFF; int minor = version >>> 16; buf.setLength(0); @@ -293,6 +307,13 @@ public Textifier visitClassAnnotation(final String desc, return visitAnnotation(desc, visible); } + @Override + public Printer visitClassTypeAnnotation(int typeRef, TypePath typePath, + String desc, boolean visible) { + text.add("\n"); + return visitTypeAnnotation(typeRef, typePath, desc, visible); + } + @Override public void visitClassAttribute(final Attribute attr) { text.add("\n"); @@ -393,7 +414,7 @@ public Textifier visitMethod(final int access, final String name, } buf.append(tab); - appendAccess(access); + appendAccess(access & ~Opcodes.ACC_VOLATILE); if ((access & Opcodes.ACC_NATIVE) != 0) { buf.append("native "); } @@ -403,6 +424,11 @@ public Textifier visitMethod(final int access, final String name, if ((access & Opcodes.ACC_BRIDGE) != 0) { buf.append("bridge "); } + if ((this.access & Opcodes.ACC_INTERFACE) != 0 + && (access & Opcodes.ACC_ABSTRACT) == 0 + && (access & Opcodes.ACC_STATIC) == 0) { + buf.append("default "); + } buf.append(name); appendDescriptor(METHOD_DESCRIPTOR, desc); @@ -616,6 +642,12 @@ public Textifier visitFieldAnnotation(final String desc, return visitAnnotation(desc, visible); } + @Override + public Printer visitFieldTypeAnnotation(int typeRef, TypePath typePath, + String desc, boolean visible) { + return visitTypeAnnotation(typeRef, typePath, desc, visible); + } + @Override public void visitFieldAttribute(final Attribute attr) { visitAttribute(attr); @@ -629,6 +661,16 @@ public void visitFieldEnd() { // Methods // ------------------------------------------------------------------------ + @Override + public void visitParameter(final String name, final int access) { + buf.setLength(0); + buf.append(tab2).append("// parameter "); + appendAccess(access); + buf.append(' ').append((name == null) ? 
"" : name) + .append('\n'); + text.add(buf.toString()); + } + @Override public Textifier visitAnnotationDefault() { text.add(tab2 + "default="); @@ -644,6 +686,12 @@ public Textifier visitMethodAnnotation(final String desc, return visitAnnotation(desc, visible); } + @Override + public Printer visitMethodTypeAnnotation(int typeRef, TypePath typePath, + String desc, boolean visible) { + return visitTypeAnnotation(typeRef, typePath, desc, visible); + } + @Override public Textifier visitParameterAnnotation(final int parameter, final String desc, final boolean visible) { @@ -761,9 +809,30 @@ public void visitFieldInsn(final int opcode, final String owner, text.add(buf.toString()); } + @Deprecated @Override public void visitMethodInsn(final int opcode, final String owner, final String name, final String desc) { + if (api >= Opcodes.ASM5) { + super.visitMethodInsn(opcode, owner, name, desc); + return; + } + doVisitMethodInsn(opcode, owner, name, desc, + opcode == Opcodes.INVOKEINTERFACE); + } + + @Override + public void visitMethodInsn(final int opcode, final String owner, + final String name, final String desc, final boolean itf) { + if (api < Opcodes.ASM5) { + super.visitMethodInsn(opcode, owner, name, desc, itf); + return; + } + doVisitMethodInsn(opcode, owner, name, desc, itf); + } + + private void doVisitMethodInsn(final int opcode, final String owner, + final String name, final String desc, final boolean itf) { buf.setLength(0); buf.append(tab2).append(OPCODES[opcode]).append(' '); appendDescriptor(INTERNAL_NAME, owner); @@ -781,26 +850,35 @@ public void visitInvokeDynamicInsn(String name, String desc, Handle bsm, buf.append(name); appendDescriptor(METHOD_DESCRIPTOR, desc); buf.append(" ["); + buf.append('\n'); + buf.append(tab3); appendHandle(bsm); + buf.append('\n'); buf.append(tab3).append("// arguments:"); if (bsmArgs.length == 0) { buf.append(" none"); } else { - buf.append('\n').append(tab3); + buf.append('\n'); for (int i = 0; i < bsmArgs.length; i++) { + buf.append(tab3); Object cst = bsmArgs[i]; if (cst instanceof String) { Printer.appendString(buf, (String) cst); } else if (cst instanceof Type) { - buf.append(((Type) cst).getDescriptor()).append(".class"); + Type type = (Type) cst; + if(type.getSort() == Type.METHOD){ + appendDescriptor(METHOD_DESCRIPTOR, type.getDescriptor()); + } else { + buf.append(type.getDescriptor()).append(".class"); + } } else if (cst instanceof Handle) { appendHandle((Handle) cst); } else { buf.append(cst); } - buf.append(", "); + buf.append(", \n"); } - buf.setLength(buf.length() - 2); + buf.setLength(buf.length() - 3); } buf.append('\n'); buf.append(tab2).append("]\n"); @@ -889,6 +967,12 @@ public void visitMultiANewArrayInsn(final String desc, final int dims) { text.add(buf.toString()); } + @Override + public Printer visitInsnAnnotation(int typeRef, TypePath typePath, + String desc, boolean visible) { + return visitTypeAnnotation(typeRef, typePath, desc, visible); + } + @Override public void visitTryCatchBlock(final Label start, final Label end, final Label handler, final String type) { @@ -905,6 +989,25 @@ public void visitTryCatchBlock(final Label start, final Label end, text.add(buf.toString()); } + @Override + public Printer visitTryCatchAnnotation(int typeRef, TypePath typePath, + String desc, boolean visible) { + buf.setLength(0); + buf.append(tab2).append("TRYCATCHBLOCK @"); + appendDescriptor(FIELD_DESCRIPTOR, desc); + buf.append('('); + text.add(buf.toString()); + Textifier t = createTextifier(); + text.add(t.getText()); + buf.setLength(0); + 
buf.append(") : "); + appendTypeReference(typeRef); + buf.append(", ").append(typePath); + buf.append(visible ? "\n" : " // invisible\n"); + text.add(buf.toString()); + return t; + } + @Override public void visitLocalVariable(final String name, final String desc, final String signature, final Label start, final Label end, @@ -931,6 +1034,33 @@ public void visitLocalVariable(final String name, final String desc, text.add(buf.toString()); } + @Override + public Printer visitLocalVariableAnnotation(int typeRef, TypePath typePath, + Label[] start, Label[] end, int[] index, String desc, + boolean visible) { + buf.setLength(0); + buf.append(tab2).append("LOCALVARIABLE @"); + appendDescriptor(FIELD_DESCRIPTOR, desc); + buf.append('('); + text.add(buf.toString()); + Textifier t = createTextifier(); + text.add(t.getText()); + buf.setLength(0); + buf.append(") : "); + appendTypeReference(typeRef); + buf.append(", ").append(typePath); + for (int i = 0; i < start.length; ++i) { + buf.append(" [ "); + appendLabel(start[i]); + buf.append(" - "); + appendLabel(end[i]); + buf.append(" - ").append(index[i]).append(" ]"); + } + buf.append(visible ? "\n" : " // invisible\n"); + text.add(buf.toString()); + return t; + } + @Override public void visitLineNumber(final int line, final Label start) { buf.setLength(0); @@ -980,6 +1110,39 @@ public Textifier visitAnnotation(final String desc, final boolean visible) { return t; } + /** + * Prints a disassembled view of the given type annotation. + * + * @param typeRef + * a reference to the annotated type. See {@link TypeReference}. + * @param typePath + * the path to the annotated type argument, wildcard bound, array + * element type, or static inner type within 'typeRef'. May be + * null if the annotation targets 'typeRef' as a whole. + * @param desc + * the class descriptor of the annotation class. + * @param visible + * true if the annotation is visible at runtime. + * @return a visitor to visit the annotation values. + */ + public Textifier visitTypeAnnotation(final int typeRef, + final TypePath typePath, final String desc, final boolean visible) { + buf.setLength(0); + buf.append(tab).append('@'); + appendDescriptor(FIELD_DESCRIPTOR, desc); + buf.append('('); + text.add(buf.toString()); + Textifier t = createTextifier(); + text.add(t.getText()); + buf.setLength(0); + buf.append(") : "); + appendTypeReference(typeRef); + buf.append(", ").append(typePath); + buf.append(visible ? "\n" : " // invisible\n"); + text.add(buf.toString()); + return t; + } + /** * Prints a disassembled view of the given attribute. * @@ -1061,10 +1224,10 @@ protected void appendLabel(final Label l) { * a handle, non null. 
*/ protected void appendHandle(final Handle h) { - buf.append('\n').append(tab3); int tag = h.getTag(); buf.append("// handle kind 0x").append(Integer.toHexString(tag)) .append(" : "); + boolean isMethodHandle = false; switch (tag) { case Opcodes.H_GETFIELD: buf.append("GETFIELD"); @@ -1080,18 +1243,23 @@ protected void appendHandle(final Handle h) { break; case Opcodes.H_INVOKEINTERFACE: buf.append("INVOKEINTERFACE"); + isMethodHandle = true; break; case Opcodes.H_INVOKESPECIAL: buf.append("INVOKESPECIAL"); + isMethodHandle = true; break; case Opcodes.H_INVOKESTATIC: buf.append("INVOKESTATIC"); + isMethodHandle = true; break; case Opcodes.H_INVOKEVIRTUAL: buf.append("INVOKEVIRTUAL"); + isMethodHandle = true; break; case Opcodes.H_NEWINVOKESPECIAL: buf.append("NEWINVOKESPECIAL"); + isMethodHandle = true; break; } buf.append('\n'); @@ -1099,9 +1267,13 @@ protected void appendHandle(final Handle h) { appendDescriptor(INTERNAL_NAME, h.getOwner()); buf.append('.'); buf.append(h.getName()); - buf.append('('); + if(!isMethodHandle){ + buf.append('('); + } appendDescriptor(HANDLE_DESCRIPTOR, h.getDesc()); - buf.append(')').append('\n'); + if(!isMethodHandle){ + buf.append(')'); + } } /** @@ -1145,6 +1317,9 @@ private void appendAccess(final int access) { if ((access & Opcodes.ACC_SYNTHETIC) != 0) { buf.append("synthetic "); } + if ((access & Opcodes.ACC_MANDATED) != 0) { + buf.append("mandated "); + } if ((access & Opcodes.ACC_ENUM) != 0) { buf.append("enum "); } @@ -1156,6 +1331,90 @@ private void appendComa(final int i) { } } + private void appendTypeReference(final int typeRef) { + TypeReference ref = new TypeReference(typeRef); + switch (ref.getSort()) { + case TypeReference.CLASS_TYPE_PARAMETER: + buf.append("CLASS_TYPE_PARAMETER ").append( + ref.getTypeParameterIndex()); + break; + case TypeReference.METHOD_TYPE_PARAMETER: + buf.append("METHOD_TYPE_PARAMETER ").append( + ref.getTypeParameterIndex()); + break; + case TypeReference.CLASS_EXTENDS: + buf.append("CLASS_EXTENDS ").append(ref.getSuperTypeIndex()); + break; + case TypeReference.CLASS_TYPE_PARAMETER_BOUND: + buf.append("CLASS_TYPE_PARAMETER_BOUND ") + .append(ref.getTypeParameterIndex()).append(", ") + .append(ref.getTypeParameterBoundIndex()); + break; + case TypeReference.METHOD_TYPE_PARAMETER_BOUND: + buf.append("METHOD_TYPE_PARAMETER_BOUND ") + .append(ref.getTypeParameterIndex()).append(", ") + .append(ref.getTypeParameterBoundIndex()); + break; + case TypeReference.FIELD: + buf.append("FIELD"); + break; + case TypeReference.METHOD_RETURN: + buf.append("METHOD_RETURN"); + break; + case TypeReference.METHOD_RECEIVER: + buf.append("METHOD_RECEIVER"); + break; + case TypeReference.METHOD_FORMAL_PARAMETER: + buf.append("METHOD_FORMAL_PARAMETER ").append( + ref.getFormalParameterIndex()); + break; + case TypeReference.THROWS: + buf.append("THROWS ").append(ref.getExceptionIndex()); + break; + case TypeReference.LOCAL_VARIABLE: + buf.append("LOCAL_VARIABLE"); + break; + case TypeReference.RESOURCE_VARIABLE: + buf.append("RESOURCE_VARIABLE"); + break; + case TypeReference.EXCEPTION_PARAMETER: + buf.append("EXCEPTION_PARAMETER ").append( + ref.getTryCatchBlockIndex()); + break; + case TypeReference.INSTANCEOF: + buf.append("INSTANCEOF"); + break; + case TypeReference.NEW: + buf.append("NEW"); + break; + case TypeReference.CONSTRUCTOR_REFERENCE: + buf.append("CONSTRUCTOR_REFERENCE"); + break; + case TypeReference.METHOD_REFERENCE: + buf.append("METHOD_REFERENCE"); + break; + case TypeReference.CAST: + buf.append("CAST 
").append(ref.getTypeArgumentIndex()); + break; + case TypeReference.CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT: + buf.append("CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT ").append( + ref.getTypeArgumentIndex()); + break; + case TypeReference.METHOD_INVOCATION_TYPE_ARGUMENT: + buf.append("METHOD_INVOCATION_TYPE_ARGUMENT ").append( + ref.getTypeArgumentIndex()); + break; + case TypeReference.CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT: + buf.append("CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT ").append( + ref.getTypeArgumentIndex()); + break; + case TypeReference.METHOD_REFERENCE_TYPE_ARGUMENT: + buf.append("METHOD_REFERENCE_TYPE_ARGUMENT ").append( + ref.getTypeArgumentIndex()); + break; + } + } + private void appendFrameTypes(final int n, final Object[] o) { for (int i = 0; i < n; ++i) { if (i > 0) { diff --git a/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java b/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java index 33e7cf0b267b..7a9dbfef06d4 100644 --- a/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java +++ b/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java @@ -47,7 +47,7 @@ public TraceAnnotationVisitor(final Printer p) { } public TraceAnnotationVisitor(final AnnotationVisitor av, final Printer p) { - super(Opcodes.ASM4, av); + super(Opcodes.ASM5, av); this.p = p; } diff --git a/src/asm/scala/tools/asm/util/TraceClassVisitor.java b/src/asm/scala/tools/asm/util/TraceClassVisitor.java index ff7a017482e5..842d2866728a 100644 --- a/src/asm/scala/tools/asm/util/TraceClassVisitor.java +++ b/src/asm/scala/tools/asm/util/TraceClassVisitor.java @@ -37,6 +37,7 @@ import scala.tools.asm.FieldVisitor; import scala.tools.asm.MethodVisitor; import scala.tools.asm.Opcodes; +import scala.tools.asm.TypePath; /** * A {@link ClassVisitor} that prints the classes it visits with a @@ -130,7 +131,7 @@ public TraceClassVisitor(final ClassVisitor cv, final PrintWriter pw) { */ public TraceClassVisitor(final ClassVisitor cv, final Printer p, final PrintWriter pw) { - super(Opcodes.ASM4, cv); + super(Opcodes.ASM5, cv); this.pw = pw; this.p = p; } @@ -165,6 +166,16 @@ public AnnotationVisitor visitAnnotation(final String desc, return new TraceAnnotationVisitor(av, p); } + @Override + public AnnotationVisitor visitTypeAnnotation(int typeRef, + TypePath typePath, String desc, boolean visible) { + Printer p = this.p.visitClassTypeAnnotation(typeRef, typePath, desc, + visible); + AnnotationVisitor av = cv == null ? 
null : cv.visitTypeAnnotation( + typeRef, typePath, desc, visible); + return new TraceAnnotationVisitor(av, p); + } + @Override public void visitAttribute(final Attribute attr) { p.visitClassAttribute(attr); diff --git a/src/asm/scala/tools/asm/util/TraceFieldVisitor.java b/src/asm/scala/tools/asm/util/TraceFieldVisitor.java index 9547a70008ed..1d0743a42430 100644 --- a/src/asm/scala/tools/asm/util/TraceFieldVisitor.java +++ b/src/asm/scala/tools/asm/util/TraceFieldVisitor.java @@ -33,6 +33,7 @@ import scala.tools.asm.Attribute; import scala.tools.asm.FieldVisitor; import scala.tools.asm.Opcodes; +import scala.tools.asm.TypePath; /** * A {@link FieldVisitor} that prints the fields it visits with a @@ -49,7 +50,7 @@ public TraceFieldVisitor(final Printer p) { } public TraceFieldVisitor(final FieldVisitor fv, final Printer p) { - super(Opcodes.ASM4, fv); + super(Opcodes.ASM5, fv); this.p = p; } @@ -62,6 +63,16 @@ public AnnotationVisitor visitAnnotation(final String desc, return new TraceAnnotationVisitor(av, p); } + @Override + public AnnotationVisitor visitTypeAnnotation(int typeRef, + TypePath typePath, String desc, boolean visible) { + Printer p = this.p.visitFieldTypeAnnotation(typeRef, typePath, desc, + visible); + AnnotationVisitor av = fv == null ? null : fv.visitTypeAnnotation( + typeRef, typePath, desc, visible); + return new TraceAnnotationVisitor(av, p); + } + @Override public void visitAttribute(final Attribute attr) { p.visitFieldAttribute(attr); diff --git a/src/asm/scala/tools/asm/util/TraceMethodVisitor.java b/src/asm/scala/tools/asm/util/TraceMethodVisitor.java index 9034567c8f64..db5f05100371 100644 --- a/src/asm/scala/tools/asm/util/TraceMethodVisitor.java +++ b/src/asm/scala/tools/asm/util/TraceMethodVisitor.java @@ -35,6 +35,7 @@ import scala.tools.asm.Label; import scala.tools.asm.MethodVisitor; import scala.tools.asm.Opcodes; +import scala.tools.asm.TypePath; /** * A {@link MethodVisitor} that prints the methods it visits with a @@ -51,10 +52,16 @@ public TraceMethodVisitor(final Printer p) { } public TraceMethodVisitor(final MethodVisitor mv, final Printer p) { - super(Opcodes.ASM4, mv); + super(Opcodes.ASM5, mv); this.p = p; } + @Override + public void visitParameter(String name, int access) { + p.visitParameter(name, access); + super.visitParameter(name, access); + } + @Override public AnnotationVisitor visitAnnotation(final String desc, final boolean visible) { @@ -64,6 +71,16 @@ public AnnotationVisitor visitAnnotation(final String desc, return new TraceAnnotationVisitor(av, p); } + @Override + public AnnotationVisitor visitTypeAnnotation(int typeRef, + TypePath typePath, String desc, boolean visible) { + Printer p = this.p.visitMethodTypeAnnotation(typeRef, typePath, desc, + visible); + AnnotationVisitor av = mv == null ? 
null : mv.visitTypeAnnotation( + typeRef, typePath, desc, visible); + return new TraceAnnotationVisitor(av, p); + } + @Override public void visitAttribute(final Attribute attr) { p.visitMethodAttribute(attr); @@ -130,11 +147,31 @@ public void visitFieldInsn(final int opcode, final String owner, super.visitFieldInsn(opcode, owner, name, desc); } + @Deprecated @Override - public void visitMethodInsn(final int opcode, final String owner, - final String name, final String desc) { + public void visitMethodInsn(int opcode, String owner, String name, + String desc) { + if (api >= Opcodes.ASM5) { + super.visitMethodInsn(opcode, owner, name, desc); + return; + } p.visitMethodInsn(opcode, owner, name, desc); - super.visitMethodInsn(opcode, owner, name, desc); + if (mv != null) { + mv.visitMethodInsn(opcode, owner, name, desc); + } + } + + @Override + public void visitMethodInsn(int opcode, String owner, String name, + String desc, boolean itf) { + if (api < Opcodes.ASM5) { + super.visitMethodInsn(opcode, owner, name, desc, itf); + return; + } + p.visitMethodInsn(opcode, owner, name, desc, itf); + if (mv != null) { + mv.visitMethodInsn(opcode, owner, name, desc, itf); + } } @Override @@ -188,6 +225,16 @@ public void visitMultiANewArrayInsn(final String desc, final int dims) { super.visitMultiANewArrayInsn(desc, dims); } + @Override + public AnnotationVisitor visitInsnAnnotation(int typeRef, + TypePath typePath, String desc, boolean visible) { + Printer p = this.p + .visitInsnAnnotation(typeRef, typePath, desc, visible); + AnnotationVisitor av = mv == null ? null : mv.visitInsnAnnotation( + typeRef, typePath, desc, visible); + return new TraceAnnotationVisitor(av, p); + } + @Override public void visitTryCatchBlock(final Label start, final Label end, final Label handler, final String type) { @@ -195,6 +242,16 @@ public void visitTryCatchBlock(final Label start, final Label end, super.visitTryCatchBlock(start, end, handler, type); } + @Override + public AnnotationVisitor visitTryCatchAnnotation(int typeRef, + TypePath typePath, String desc, boolean visible) { + Printer p = this.p.visitTryCatchAnnotation(typeRef, typePath, desc, + visible); + AnnotationVisitor av = mv == null ? null : mv.visitTryCatchAnnotation( + typeRef, typePath, desc, visible); + return new TraceAnnotationVisitor(av, p); + } + @Override public void visitLocalVariable(final String name, final String desc, final String signature, final Label start, final Label end, @@ -203,6 +260,18 @@ public void visitLocalVariable(final String name, final String desc, super.visitLocalVariable(name, desc, signature, start, end, index); } + @Override + public AnnotationVisitor visitLocalVariableAnnotation(int typeRef, + TypePath typePath, Label[] start, Label[] end, int[] index, + String desc, boolean visible) { + Printer p = this.p.visitLocalVariableAnnotation(typeRef, typePath, + start, end, index, desc, visible); + AnnotationVisitor av = mv == null ? 
null : mv + .visitLocalVariableAnnotation(typeRef, typePath, start, end, + index, desc, visible); + return new TraceAnnotationVisitor(av, p); + } + @Override public void visitLineNumber(final int line, final Label start) { p.visitLineNumber(line, start); diff --git a/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java b/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java index 1e23c7ef1a34..f99ec2b0c242 100644 --- a/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java +++ b/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java @@ -75,13 +75,13 @@ public final class TraceSignatureVisitor extends SignatureVisitor { private String separator = ""; public TraceSignatureVisitor(final int access) { - super(Opcodes.ASM4); + super(Opcodes.ASM5); isInterface = (access & Opcodes.ACC_INTERFACE) != 0; this.declaration = new StringBuffer(); } private TraceSignatureVisitor(final StringBuffer buf) { - super(Opcodes.ASM4); + super(Opcodes.ASM5); this.declaration = buf; } diff --git a/src/build/bnd/scala-actors.bnd b/src/build/bnd/scala-actors.bnd index 8d0555777ff7..69885fc2bf7f 100644 --- a/src/build/bnd/scala-actors.bnd +++ b/src/build/bnd/scala-actors.bnd @@ -3,3 +3,5 @@ Bundle-SymbolicName: org.scala-lang.scala-actors ver: @VERSION@ Bundle-Version: ${ver} Export-Package: *;version=${ver} +Import-Package: scala.*;version="${range;[==,=+);${ver}}",* +Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7 diff --git a/src/build/bnd/scala-compiler-doc.bnd b/src/build/bnd/scala-compiler-doc.bnd index 4910e5fcb008..9d6d0304d1f2 100644 --- a/src/build/bnd/scala-compiler-doc.bnd +++ b/src/build/bnd/scala-compiler-doc.bnd @@ -3,4 +3,5 @@ Bundle-SymbolicName: org.scala-lang.modules.scala-compiler-doc_@SCALA_BINARY_VER ver: @SCALA_COMPILER_DOC_VERSION@ Bundle-Version: ${ver} Export-Package: *;version=${ver} -Import-Package: * +Import-Package: scala.*;version="${range;[==,=+);@VERSION@}",* +Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7 diff --git a/src/build/bnd/scala-compiler-interactive.bnd b/src/build/bnd/scala-compiler-interactive.bnd index 34d2f2956d2d..07e3de35b0cd 100644 --- a/src/build/bnd/scala-compiler-interactive.bnd +++ b/src/build/bnd/scala-compiler-interactive.bnd @@ -3,4 +3,5 @@ Bundle-SymbolicName: org.scala-lang.modules.scala-compiler-interactive_@SCALA_BI ver: @SCALA_COMPILER_INTERACTIVE_VERSION@ Bundle-Version: ${ver} Export-Package: *;version=${ver} -Import-Package: * +Import-Package: scala.*;version="${range;[==,=+);@VERSION@}",* +Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7 diff --git a/src/build/bnd/scala-compiler.bnd b/src/build/bnd/scala-compiler.bnd index dc30513db407..2bd24d780d7b 100644 --- a/src/build/bnd/scala-compiler.bnd +++ b/src/build/bnd/scala-compiler.bnd @@ -5,4 +5,8 @@ Bundle-Version: ${ver} Export-Package: *;version=${ver} Import-Package: jline.*;resolution:=optional, \ org.apache.tools.ant.*;resolution:=optional, \ + scala.util.parsing.*;version="${range;[====,====];@PARSER_COMBINATORS_VERSION@}";resolution:=optional, \ + scala.xml.*;version="${range;[====,====];@XML_VERSION@}";resolution:=optional, \ + scala.*;version="${range;[==,=+);${ver}}", \ * +Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7 diff --git a/src/build/bnd/scala-continuations-library.bnd b/src/build/bnd/scala-continuations-library.bnd index bb505b60a970..b36718cc5b46 100644 --- a/src/build/bnd/scala-continuations-library.bnd +++ b/src/build/bnd/scala-continuations-library.bnd @@ -1,5 +1,7 @@ Bundle-Name: Scala Delimited Continuations Library 
Bundle-SymbolicName: org.scala-lang.plugins.scala-continuations-library -ver: @VERSION@ +ver: @CONTINUATIONS_LIBRARY_VERSION@ Bundle-Version: ${ver} Export-Package: *;version=${ver} +Import-Package: scala.*;version="${range;[==,=+);@VERSION@}",* +Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7 diff --git a/src/build/bnd/scala-continuations-plugin.bnd b/src/build/bnd/scala-continuations-plugin.bnd index cd66614a22c4..2f2464b4529a 100644 --- a/src/build/bnd/scala-continuations-plugin.bnd +++ b/src/build/bnd/scala-continuations-plugin.bnd @@ -1,5 +1,7 @@ Bundle-Name: Scala Delimited Continuations Compiler Plugin Bundle-SymbolicName: org.scala-lang.plugins.scala-continuations-plugin -ver: @VERSION@ +ver: @CONTINUATIONS_PLUGIN_VERSION@ Bundle-Version: ${ver} Export-Package: *;version=${ver} +Import-Package: scala.*;version="${range;[==,=+);@VERSION@}",* +Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7 diff --git a/src/build/bnd/scala-library.bnd b/src/build/bnd/scala-library.bnd index 03aff45672fe..7eb4fa4b2aa1 100644 --- a/src/build/bnd/scala-library.bnd +++ b/src/build/bnd/scala-library.bnd @@ -4,3 +4,4 @@ ver: @VERSION@ Bundle-Version: ${ver} Export-Package: *;version=${ver} Import-Package: sun.misc;resolution:=optional, * +Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7 diff --git a/src/build/bnd/scala-parser-combinators.bnd b/src/build/bnd/scala-parser-combinators.bnd index 6ffc3b2760e1..ef8646cbd0a3 100644 --- a/src/build/bnd/scala-parser-combinators.bnd +++ b/src/build/bnd/scala-parser-combinators.bnd @@ -1,5 +1,7 @@ Bundle-Name: Scala Parser Combinators Library Bundle-SymbolicName: org.scala-lang.modules.scala-parser-combinators -ver: @VERSION@ +ver: @PARSER_COMBINATORS_VERSION@ Bundle-Version: ${ver} Export-Package: *;version=${ver} +Import-Package: scala.*;version="${range;[==,=+);@VERSION@}",* +Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7 diff --git a/src/build/bnd/scala-reflect.bnd b/src/build/bnd/scala-reflect.bnd index 6cda346d3ade..e4bc54e52ed6 100644 --- a/src/build/bnd/scala-reflect.bnd +++ b/src/build/bnd/scala-reflect.bnd @@ -3,4 +3,7 @@ Bundle-SymbolicName: org.scala-lang.scala-reflect ver: @VERSION@ Bundle-Version: ${ver} Export-Package: *;version=${ver} -Import-Package: scala.tools.nsc;resolution:=optional, * +Import-Package: scala.*;version="${range;[==,=+);${ver}}", \ + scala.tools.nsc;resolution:=optional;version="${range;[==,=+);${ver}}", \ + * +Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7 diff --git a/src/build/bnd/scala-swing.bnd b/src/build/bnd/scala-swing.bnd index 7cccb1343bc5..f8b50baa9190 100644 --- a/src/build/bnd/scala-swing.bnd +++ b/src/build/bnd/scala-swing.bnd @@ -1,5 +1,7 @@ Bundle-Name: Scala Swing Bundle-SymbolicName: org.scala-lang.modules.scala-swing -ver: @VERSION@ +ver: @SCALA_SWING_VERSION@ Bundle-Version: ${ver} Export-Package: *;version=${ver} +Import-Package: scala.*;version="${range;[==,=+);@VERSION@}",* +Bundle-RequiredExecutionEnvironment: JavaSE-1.6,JavaSE-1.7 diff --git a/src/build/bnd/scala-xml.bnd b/src/build/bnd/scala-xml.bnd index 5d64c05e659c..01bf0144eb56 100644 --- a/src/build/bnd/scala-xml.bnd +++ b/src/build/bnd/scala-xml.bnd @@ -1,5 +1,7 @@ Bundle-Name: Scala XML Library Bundle-SymbolicName: org.scala-lang.modules.scala-xml -ver: @VERSION@ +ver: @XML_VERSION@ Bundle-Version: ${ver} Export-Package: *;version=${ver} +Import-Package: scala.*;version="${range;[==,=+);@VERSION@}",* +Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7 diff --git 
a/src/build/maven/scala-dist-pom.xml b/src/build/maven/scala-dist-pom.xml index 9a566d231bef..9477e1428538 100644 --- a/src/build/maven/scala-dist-pom.xml +++ b/src/build/maven/scala-dist-pom.xml @@ -39,6 +39,17 @@ scala-compiler @VERSION@ + + org.scala-lang + scalap + @VERSION@ + + + org.scala-lang.plugins + + scala-continuations-plugin_@SCALA_FULL_VERSION@ + @CONTINUATIONS_PLUGIN_VERSION@ + org.scala-lang.plugins scala-continuations-library_@SCALA_BINARY_VERSION@ diff --git a/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala b/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala index 1413065a2760..b8384851dafe 100644 --- a/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala +++ b/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala @@ -12,7 +12,7 @@ abstract class DefaultMacroCompiler extends Resolvers import treeInfo._ import definitions._ val runDefinitions = currentRun.runDefinitions - import runDefinitions.{Predef_???, _} + import runDefinitions.Predef_??? val typer: global.analyzer.Typer val context = typer.context @@ -53,7 +53,7 @@ abstract class DefaultMacroCompiler extends Resolvers (EmptyTree, TermName(""), Nil) } val bundleImplRef = MacroImplRefCompiler( - atPos(macroDdef.rhs.pos)(gen.mkTypeApply(Select(New(maybeBundleRef, List(List(Ident(Predef_???)))), methName), targs)), + atPos(macroDdef.rhs.pos)(gen.mkTypeApply(Select(New(maybeBundleRef, List(List(Literal(Constant(null))))), methName), targs)), isImplBundle = true ) val vanillaResult = tryCompile(vanillaImplRef) diff --git a/src/compiler/scala/reflect/macros/compiler/Errors.scala b/src/compiler/scala/reflect/macros/compiler/Errors.scala index cc4508e6964d..98fd091e9cd5 100644 --- a/src/compiler/scala/reflect/macros/compiler/Errors.scala +++ b/src/compiler/scala/reflect/macros/compiler/Errors.scala @@ -11,7 +11,6 @@ trait Errors extends Traces { import analyzer._ import definitions._ import treeInfo._ - import typer.TyperErrorGen._ import typer.infer.InferErrorGen._ import runDefinitions._ def globalSettings = global.settings diff --git a/src/compiler/scala/reflect/macros/compiler/Resolvers.scala b/src/compiler/scala/reflect/macros/compiler/Resolvers.scala index 4484c234aa25..d3f49390ea60 100644 --- a/src/compiler/scala/reflect/macros/compiler/Resolvers.scala +++ b/src/compiler/scala/reflect/macros/compiler/Resolvers.scala @@ -1,18 +1,12 @@ package scala.reflect.macros package compiler -import scala.reflect.internal.Flags._ -import scala.reflect.macros.TypecheckException - trait Resolvers { self: DefaultMacroCompiler => import global._ import analyzer._ - import definitions._ import treeInfo._ - import gen._ - import runDefinitions._ trait Resolver { self: MacroImplRefCompiler => diff --git a/src/compiler/scala/reflect/macros/compiler/Validators.scala b/src/compiler/scala/reflect/macros/compiler/Validators.scala index a146818ae34d..fc932f2b187c 100644 --- a/src/compiler/scala/reflect/macros/compiler/Validators.scala +++ b/src/compiler/scala/reflect/macros/compiler/Validators.scala @@ -9,7 +9,7 @@ trait Validators { import global._ import analyzer._ import definitions._ - import runDefinitions.{Predef_???, _} + import runDefinitions.Predef_??? 
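The Import-Package clauses added to the bnd descriptors above all rely on bnd's version-mask macro. A rough Scala illustration of what ${range;[==,=+);2.11.2} expands to (importRange is a sketch of the mask semantics, not bnd code):

    // "[==,=+)": '=' copies a version segment and '+' bumps it, so the lower
    // bound keeps major.minor and the exclusive upper bound increments the minor.
    def importRange(version: String): String = {
      val Array(major, minor) = version.split("\\.").take(2)
      s"[$major.$minor,$major.${minor.toInt + 1})"
    }

    importRange("2.11.2")   // "[2.11,2.12)"

The effect is that bundles built against 2.11.x only resolve against scala.* packages in [2.11,2.12), so an OSGi container cannot wire them to a different Scala binary version.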
trait Validator { self: MacroImplRefCompiler => diff --git a/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala b/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala index df7aa4d2bee8..7088058145d9 100644 --- a/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala +++ b/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala @@ -12,5 +12,5 @@ trait Infrastructure { def compilerSettings: List[String] = universe.settings.recreateArgs - def classPath: List[java.net.URL] = global.classPath.asURLs + def classPath: List[java.net.URL] = global.classPath.asURLs.toList } diff --git a/src/compiler/scala/reflect/macros/contexts/Names.scala b/src/compiler/scala/reflect/macros/contexts/Names.scala index 299af40b94ea..5a5bb428b53a 100644 --- a/src/compiler/scala/reflect/macros/contexts/Names.scala +++ b/src/compiler/scala/reflect/macros/contexts/Names.scala @@ -33,8 +33,9 @@ trait Names { // // TODO: hopefully SI-7823 will provide an ultimate answer to this problem. // In the meanwhile I will also keep open the original issue: SI-6879 "c.freshName is broken". + val prefix = if (name.endsWith("$")) name else name + "$" // SI-8425 val sortOfUniqueSuffix = freshNameCreator.newName(nme.FRESH_SUFFIX) - name + "$" + sortOfUniqueSuffix + prefix + sortOfUniqueSuffix } def freshName[NameType <: Name](name: NameType): NameType = diff --git a/src/compiler/scala/reflect/macros/contexts/Parsers.scala b/src/compiler/scala/reflect/macros/contexts/Parsers.scala index 88cfea8157d1..f4584f36277a 100644 --- a/src/compiler/scala/reflect/macros/contexts/Parsers.scala +++ b/src/compiler/scala/reflect/macros/contexts/Parsers.scala @@ -9,12 +9,15 @@ trait Parsers { def parse(code: String) = { val sreporter = new StoreReporter() - val unit = new CompilationUnit(newSourceFile(code, "")) { override def reporter = sreporter } - val parser = newUnitParser(unit) - val tree = gen.mkTreeOrBlock(parser.parseStatsOrPackages()) - sreporter.infos.foreach { - case sreporter.Info(pos, msg, sreporter.ERROR) => throw ParseException(pos, msg) - } - tree + val oldReporter = global.reporter + try { + global.reporter = sreporter + val parser = newUnitParser(new CompilationUnit(newSourceFile(code, ""))) + val tree = gen.mkTreeOrBlock(parser.parseStatsOrPackages()) + sreporter.infos.foreach { + case sreporter.Info(pos, msg, sreporter.ERROR) => throw ParseException(pos, msg) + } + tree + } finally global.reporter = oldReporter } } \ No newline at end of file diff --git a/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala index ecdd48db22c0..be114efbc008 100644 --- a/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala +++ b/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala @@ -14,7 +14,7 @@ trait JavaReflectionRuntimes { def resolveJavaReflectionRuntime(classLoader: ClassLoader): MacroRuntime = { val implClass = Class.forName(className, true, classLoader) - val implMeths = implClass.getDeclaredMethods.find(_.getName == methName) + val implMeths = implClass.getMethods.find(_.getName == methName) // relies on the fact that macro impls cannot be overloaded // so every methName can resolve to at maximum one method val implMeth = implMeths getOrElse { throw new NoSuchMethodException(s"$className.$methName") } diff --git a/src/compiler/scala/reflect/macros/util/Helpers.scala b/src/compiler/scala/reflect/macros/util/Helpers.scala index bddc42d1f990..961c41dab5c7 100644 --- 
a/src/compiler/scala/reflect/macros/util/Helpers.scala +++ b/src/compiler/scala/reflect/macros/util/Helpers.scala @@ -54,14 +54,10 @@ trait Helpers { * * @see Metalevels.scala for more information and examples about metalevels */ - def increaseMetalevel(pre: Type, tp: Type): Type = { - val runDefinitions = currentRun.runDefinitions - import runDefinitions._ - + def increaseMetalevel(pre: Type, tp: Type): Type = transparentShallowTransform(RepeatedParamClass, tp) { case tp => typeRef(pre, MacroContextExprClass, List(tp)) } - } /** Transforms c.Expr[T] types into c.Tree and leaves the rest unchanged. */ diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala b/src/compiler/scala/reflect/quasiquotes/Holes.scala similarity index 94% rename from src/compiler/scala/tools/reflect/quasiquotes/Holes.scala rename to src/compiler/scala/reflect/quasiquotes/Holes.scala index 8376fca4ade5..38b05f9d4b08 100644 --- a/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala +++ b/src/compiler/scala/reflect/quasiquotes/Holes.scala @@ -1,4 +1,4 @@ -package scala.tools.reflect +package scala.reflect package quasiquotes import scala.collection.{immutable, mutable} @@ -43,13 +43,13 @@ trait Holes { self: Quasiquotes => tpe <:< NothingClass.tpe || tpe <:< NullClass.tpe private def extractIterableTParam(tpe: Type) = IterableTParam.asSeenFrom(tpe, IterableClass) - private def stripIterable(tpe: Type, limit: Option[Rank] = None): (Rank, Type) = - if (limit.map { _ == NoDot }.getOrElse { false }) (NoDot, tpe) + private def stripIterable(tpe: Type, limit: Rank = DotDotDot): (Rank, Type) = + if (limit == NoDot) (NoDot, tpe) else if (tpe != null && !isIterableType(tpe)) (NoDot, tpe) else if (isBottomType(tpe)) (NoDot, tpe) else { val targ = extractIterableTParam(tpe) - val (rank, innerTpe) = stripIterable(targ, limit.map { _.pred }) + val (rank, innerTpe) = stripIterable(targ, limit.pred) (rank.succ, innerTpe) } private def iterableTypeFromRank(n: Rank, tpe: Type): Type = { @@ -76,10 +76,12 @@ trait Holes { self: Quasiquotes => class ApplyHole(annotatedRank: Rank, unquotee: Tree) extends Hole { val (strippedTpe, tpe): (Type, Type) = { - val (strippedRank, strippedTpe) = stripIterable(unquotee.tpe, limit = Some(annotatedRank)) + val (strippedRank, strippedTpe) = stripIterable(unquotee.tpe, limit = annotatedRank) if (isBottomType(strippedTpe)) cantSplice() - else if (isNativeType(strippedTpe)) (strippedTpe, iterableTypeFromRank(annotatedRank, strippedTpe)) - else if (isLiftableType(strippedTpe)) (strippedTpe, iterableTypeFromRank(annotatedRank, treeType)) + else if (isNativeType(strippedTpe)) { + if (strippedRank != NoDot && !(strippedTpe <:< treeType) && !isLiftableType(strippedTpe)) cantSplice() + else (strippedTpe, iterableTypeFromRank(annotatedRank, strippedTpe)) + } else if (isLiftableType(strippedTpe)) (strippedTpe, iterableTypeFromRank(annotatedRank, treeType)) else cantSplice() } @@ -191,7 +193,7 @@ trait Holes { self: Quasiquotes => val (iterableRank, _) = stripIterable(tpe) if (iterableRank.value < rank.value) c.abort(pat.pos, s"Can't extract $tpe with $rank, consider using $iterableRank") - val (_, strippedTpe) = stripIterable(tpe, limit = Some(rank)) + val (_, strippedTpe) = stripIterable(tpe, limit = rank) if (strippedTpe <:< treeType) treeNoUnlift else unlifters.spawn(strippedTpe, rank).map { diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala b/src/compiler/scala/reflect/quasiquotes/Parsers.scala similarity index 92% rename from 
src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala rename to src/compiler/scala/reflect/quasiquotes/Parsers.scala index b68022afd971..97ec7dbfc3a0 100644 --- a/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala +++ b/src/compiler/scala/reflect/quasiquotes/Parsers.scala @@ -1,4 +1,4 @@ -package scala.tools.reflect +package scala.reflect package quasiquotes import scala.tools.nsc.ast.parser.{Parsers => ScalaParser} @@ -69,9 +69,16 @@ trait Parsers { self: Quasiquotes => override def makeTupleType(trees: List[Tree]): Tree = TupleTypePlaceholder(trees) // q"{ $x }" - override def makeBlock(stats: List[Tree]): Tree = stats match { - case (head @ Ident(name)) :: Nil if isHole(name) => Block(Nil, head) - case _ => super.makeBlock(stats) + override def makeBlock(stats: List[Tree]): Tree = method match { + case nme.apply => + stats match { + // we don't want to eagerly flatten trees with placeholders as they + // might have to be wrapped into a block depending on their value + case (head @ Ident(name)) :: Nil if isHole(name) => Block(Nil, head) + case _ => gen.mkBlock(stats, doFlatten = true) + } + case nme.unapply => gen.mkBlock(stats, doFlatten = false) + case other => global.abort("unreachable") } // tq"$a => $b" @@ -83,7 +90,7 @@ trait Parsers { self: Quasiquotes => case _ => super.makePatDef(mods, pat, rhs) } } - import treeBuilder.{global => _, unit => _, _} + import treeBuilder.{global => _, unit => _} // q"def foo($x)" override def param(owner: Name, implicitmod: Int, caseParam: Boolean): ValDef = diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala b/src/compiler/scala/reflect/quasiquotes/Placeholders.scala similarity index 99% rename from src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala rename to src/compiler/scala/reflect/quasiquotes/Placeholders.scala index b287971815cf..a5b42f8a1fe2 100644 --- a/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala +++ b/src/compiler/scala/reflect/quasiquotes/Placeholders.scala @@ -1,4 +1,4 @@ -package scala.tools.reflect +package scala.reflect package quasiquotes import java.util.UUID.randomUUID diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala b/src/compiler/scala/reflect/quasiquotes/Quasiquotes.scala similarity index 95% rename from src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala rename to src/compiler/scala/reflect/quasiquotes/Quasiquotes.scala index 396688c43759..72e6000e9fe6 100644 --- a/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala +++ b/src/compiler/scala/reflect/quasiquotes/Quasiquotes.scala @@ -1,4 +1,4 @@ -package scala.tools.reflect +package scala.reflect package quasiquotes import scala.reflect.macros.runtime.Context @@ -51,7 +51,7 @@ abstract class Quasiquotes extends Parsers def sreified = reified .toString - .replace("scala.reflect.runtime.`package`.universe.build.", "") + .replace("scala.reflect.runtime.`package`.universe.internal.reificationSupport.", "") .replace("scala.reflect.runtime.`package`.universe.", "") .replace("scala.collection.immutable.", "") debug(s"reified tree:\n$sreified\n") diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala b/src/compiler/scala/reflect/quasiquotes/Reifiers.scala similarity index 93% rename from src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala rename to src/compiler/scala/reflect/quasiquotes/Reifiers.scala index 5eae3b6e6ff7..cc98717c4e17 100644 --- a/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala +++ 
b/src/compiler/scala/reflect/quasiquotes/Reifiers.scala @@ -1,4 +1,4 @@ -package scala.tools.reflect +package scala.reflect package quasiquotes import java.lang.UnsupportedOperationException @@ -8,7 +8,6 @@ import scala.reflect.internal.Flags._ trait Reifiers { self: Quasiquotes => import global._ import global.build._ - import global.treeInfo._ import global.definitions._ import Rank._ import universeTypes._ @@ -131,6 +130,10 @@ trait Reifiers { self: Quasiquotes => case Placeholder(Hole(tree, NoDot)) if isReifyingPatterns => tree case Placeholder(hole @ Hole(_, rank @ Dot())) => c.abort(hole.pos, s"Can't $action with $rank here") case TuplePlaceholder(args) => reifyTuple(args) + // Due to greediness of syntactic applied we need to pre-emptively peek inside. + // `rest` will always be non-empty due to the rule on top of this one. + case SyntacticApplied(id @ Ident(nme.QUASIQUOTE_TUPLE), first :: rest) => + mirrorBuildCall(nme.SyntacticApplied, reifyTreePlaceholder(Apply(id, first)), reify(rest)) case TupleTypePlaceholder(args) => reifyTupleType(args) case FunctionTypePlaceholder(argtpes, restpe) => reifyFunctionType(argtpes, restpe) case CasePlaceholder(hole) => hole.tree @@ -185,14 +188,32 @@ trait Reifiers { self: Quasiquotes => reifyBuildCall(nme.SyntacticApplied, fun, argss) case SyntacticTypeApplied(fun, targs) if targs.nonEmpty => reifyBuildCall(nme.SyntacticTypeApplied, fun, targs) + case SyntacticAppliedType(tpt, targs) if targs.nonEmpty => + reifyBuildCall(nme.SyntacticAppliedType, tpt, targs) case SyntacticFunction(args, body) => reifyBuildCall(nme.SyntacticFunction, args, body) - case SyntacticIdent(name, isBackquoted) => - reifyBuildCall(nme.SyntacticIdent, name, isBackquoted) case SyntacticEmptyTypeTree() => reifyBuildCall(nme.SyntacticEmptyTypeTree) case SyntacticImport(expr, selectors) => reifyBuildCall(nme.SyntacticImport, expr, selectors) + case SyntacticPartialFunction(cases) => + reifyBuildCall(nme.SyntacticPartialFunction, cases) + case SyntacticMatch(scrutinee, cases) => + reifyBuildCall(nme.SyntacticMatch, scrutinee, cases) + case SyntacticTermIdent(name, isBackquoted) => + reifyBuildCall(nme.SyntacticTermIdent, name, isBackquoted) + case SyntacticTypeIdent(name) => + reifyBuildCall(nme.SyntacticTypeIdent, name) + case SyntacticCompoundType(parents, defns) => + reifyBuildCall(nme.SyntacticCompoundType, parents, defns) + case SyntacticSingletonType(ref) => + reifyBuildCall(nme.SyntacticSingletonType, ref) + case SyntacticTypeProjection(qual, name) => + reifyBuildCall(nme.SyntacticTypeProjection, qual, name) + case SyntacticAnnotatedType(tpt, annot) => + reifyBuildCall(nme.SyntacticAnnotatedType, tpt, annot) + case SyntacticExistentialType(tpt, where) => + reifyBuildCall(nme.SyntacticExistentialType, tpt, where) case Q(tree) if fillListHole.isDefinedAt(tree) => mirrorBuildCall(nme.SyntacticBlock, fillListHole(tree)) case Q(other) => @@ -205,8 +226,6 @@ trait Reifiers { self: Quasiquotes => reifyBuildCall(nme.SyntacticBlock, Nil) case Try(block, catches, finalizer) => reifyBuildCall(nme.SyntacticTry, block, catches, finalizer) - case Match(selector, cases) => - reifyBuildCall(nme.SyntacticMatch, selector, cases) case CaseDef(pat, guard, body) if fillListHole.isDefinedAt(body) => mirrorCall(nme.CaseDef, reify(pat), reify(guard), mirrorBuildCall(nme.SyntacticBlock, fillListHole(body))) // parser emits trees with scala package symbol to ensure @@ -215,6 +234,9 @@ trait Reifiers { self: Quasiquotes => // correctness of the trees produced by quasiquotes case Select(id @ 
Ident(nme.scala_), name) if id.symbol == ScalaPackage => reifyBuildCall(nme.ScalaDot, name) + case Select(qual, name) => + val ctor = if (name.isTypeName) nme.SyntacticSelectType else nme.SyntacticSelectTerm + reifyBuildCall(ctor, qual, name) case _ => super.reifyTreeSyntactically(tree) } diff --git a/src/compiler/scala/reflect/reify/Reifier.scala b/src/compiler/scala/reflect/reify/Reifier.scala index b1cc79738908..a3e0f02dcc16 100644 --- a/src/compiler/scala/reflect/reify/Reifier.scala +++ b/src/compiler/scala/reflect/reify/Reifier.scala @@ -21,7 +21,6 @@ abstract class Reifier extends States import global._ import definitions._ private val runDefinitions = currentRun.runDefinitions - import runDefinitions._ val typer: global.analyzer.Typer val universe: Tree diff --git a/src/compiler/scala/reflect/reify/Taggers.scala b/src/compiler/scala/reflect/reify/Taggers.scala index 093c2bee22d0..0863ee38f9c9 100644 --- a/src/compiler/scala/reflect/reify/Taggers.scala +++ b/src/compiler/scala/reflect/reify/Taggers.scala @@ -79,8 +79,7 @@ abstract class Taggers { try materializer catch { case ReificationException(pos, msg) => - c.error(pos.asInstanceOf[c.Position], msg) // this cast is a very small price for the sanity of exception handling - EmptyTree + c.abort(pos.asInstanceOf[c.Position], msg) // this cast is a very small price for the sanity of exception handling case UnexpectedReificationException(pos, err, cause) if cause != null => throw cause } diff --git a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala index 4512b2cb6fd7..de9fec0df5b9 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala @@ -5,10 +5,6 @@ trait GenUtils { self: Reifier => import global._ - import treeInfo._ - import definitions._ - private val runDefinitions = currentRun.runDefinitions - import runDefinitions._ def reifyList(xs: List[Any]): Tree = mkList(xs map reify) diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala index 1747405f0330..13bf0ef4c601 100644 --- a/src/compiler/scala/tools/ant/Scalac.scala +++ b/src/compiler/scala/tools/ant/Scalac.scala @@ -97,7 +97,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared { /** Defines valid values for the `target` property. */ object Target extends PermissibleValue { - val values = List("jvm-1.5", "jvm-1.6", "jvm-1.7") + val values = List("jvm-1.5", "jvm-1.6", "jvm-1.7", "jvm-1.8") } /** Defines valid values for the `deprecation` and `unchecked` properties. */ @@ -479,7 +479,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared { /** Tests if a file exists and prints a warning in case it doesn't. Always * returns the file, even if it doesn't exist. - * @param file A file to test for existance. + * @param file A file to test for existence. * @return The same file. 
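The reifier cases added above (SyntacticApplied peeking into tuple placeholders, SyntacticAppliedType, SyntacticTermIdent/SyntacticTypeIdent, SyntacticSelectTerm/SyntacticSelectType and the new type extractors) back the richer quasiquote matching in 2.11. A small usage illustration, assumed rather than taken from the patch:

    import scala.reflect.runtime.universe._

    // Construction and pattern matching both go through the syntactic extractors.
    val tree = q"scala.collection.immutable.List.apply[Int](1, 2, 3)"
    val q"$fun[..$targs](..$args)" = tree
    // fun   : the Select tree for List.apply
    // targs : List(tq"Int")
    // args  : the three integer literals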
*/ protected def existing(file: File): File = { if (!file.exists) diff --git a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl old mode 100644 new mode 100755 index 88fee71843a7..7acb3632d29e --- a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl +++ b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl @@ -86,10 +86,14 @@ fi TOOL_CLASSPATH="@classpath@" if [[ -z "$TOOL_CLASSPATH" ]]; then for ext in "$SCALA_HOME"/lib/* ; do - if [[ -z "$TOOL_CLASSPATH" ]]; then - TOOL_CLASSPATH="$ext" - else - TOOL_CLASSPATH="${TOOL_CLASSPATH}${SEP}${ext}" + file_extension="${ext##*.}" + # SI-8967 Only consider directories and files named '*.jar' + if [[ -d "$ext" || $file_extension == "jar" ]]; then + if [[ -z "$TOOL_CLASSPATH" ]]; then + TOOL_CLASSPATH="$ext" + else + TOOL_CLASSPATH="${TOOL_CLASSPATH}${SEP}${ext}" + fi fi done fi @@ -144,6 +148,10 @@ classpathArgs () { fi } +# SI-8358, SI-8368 -- the default should really be false, +# but I don't want to flip the default during 2.11's RC cycle +OVERRIDE_USEJAVACP="-Dscala.usejavacp=true" + while [[ $# -gt 0 ]]; do case "$1" in -D*) @@ -151,6 +159,8 @@ while [[ $# -gt 0 ]]; do # need it, e.g. communicating with a server compiler. java_args=("${java_args[@@]}" "$1") scala_args=("${scala_args[@@]}" "$1") + # respect user-supplied -Dscala.usejavacp + case "$1" in -Dscala.usejavacp*) OVERRIDE_USEJAVACP="";; esac shift ;; -J*) @@ -199,7 +209,7 @@ execCommand \ "${java_args[@@]}" \ $(classpathArgs) \ -Dscala.home="$SCALA_HOME" \ - -Dscala.usejavacp=true \ + $OVERRIDE_USEJAVACP \ "$EMACS_OPT" \ $WINDOWS_OPT \ @properties@ @class@ @toolflags@ "$@@" diff --git a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl index 8441f3af230d..50e44fb669ed 100644 --- a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl +++ b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl @@ -25,6 +25,10 @@ shift :notoolcp +rem SI-8358, SI-8368 -- the default should really be false, +rem but I don't want to flip the default during 2.11's RC cycle +set _OVERRIDE_USEJAVACP="-Dscala.usejavacp=true" + rem We keep in _JAVA_PARAMS all -J-prefixed and -D-prefixed arguments set _JAVA_PARAMS= @@ -45,6 +49,10 @@ if "%_TEST_PARAM:~0,2%"=="-J" ( ) if "%_TEST_PARAM:~0,2%"=="-D" ( + rem Only match beginning of the -D option. The relevant bit is 17 chars long. + if "%_TEST_PARAM:~0,17%"=="-Dscala.usejavacp" ( + set _OVERRIDE_USEJAVACP= + ) rem test if this was double-quoted property "-Dprop=42" for /F "delims== tokens=1-2" %%G in ("%_TEST_PARAM%") DO ( if not "%%G" == "%_TEST_PARAM%" ( @@ -120,13 +128,13 @@ if defined _JAVA_PARAMS set _JAVA_OPTS=%_JAVA_OPTS% %_JAVA_PARAMS% set _TOOL_CLASSPATH=@classpath@ if "%_TOOL_CLASSPATH%"=="" ( - for %%f in ("!_SCALA_HOME!\lib\*") do call :add_cpath "%%f" + for %%f in ("!_SCALA_HOME!\lib\*.jar") do call :add_cpath "%%f" for /d %%f in ("!_SCALA_HOME!\lib\*") do call :add_cpath "%%f" ) if not "%_LINE_TOOLCP%"=="" call :add_cpath "%_LINE_TOOLCP%" -set _PROPS=-Dscala.home="!_SCALA_HOME!" -Denv.emacs="%EMACS%" -Dscala.usejavacp=true @properties@ +set _PROPS=-Dscala.home="!_SCALA_HOME!" 
-Denv.emacs="%EMACS%" %_OVERRIDE_USEJAVACP% @properties@ rem echo "%_JAVACMD%" %_JAVA_OPTS% %_PROPS% -cp "%_TOOL_CLASSPATH%" @class@ @toolflags@ %* "%_JAVACMD%" %_JAVA_OPTS% %_PROPS% -cp "%_TOOL_CLASSPATH%" @class@ @toolflags@ %* diff --git a/src/compiler/scala/tools/cmd/gen/AnyVals.scala b/src/compiler/scala/tools/cmd/gen/AnyVals.scala index 842851b4f60d..e78589908ca2 100644 --- a/src/compiler/scala/tools/cmd/gen/AnyVals.scala +++ b/src/compiler/scala/tools/cmd/gen/AnyVals.scala @@ -111,8 +111,8 @@ import scala.language.implicitConversions""" " */"), Op(">>", "/**\n" + - " * Returns this value bit-shifted left by the specified number of bits,\n" + - " * filling in the right bits with the same value as the left-most bit of this.\n" + + " * Returns this value bit-shifted right by the specified number of bits,\n" + + " * filling in the left bits with the same value as the left-most bit of this.\n" + " * The effect of this is to retain the sign of the value.\n" + " * @example {{{\n" + " * -21 >> 3 == -3\n" + diff --git a/src/compiler/scala/tools/nsc/ClassPathMemoryConsumptionTester.scala b/src/compiler/scala/tools/nsc/ClassPathMemoryConsumptionTester.scala new file mode 100644 index 000000000000..2faf6c6272ec --- /dev/null +++ b/src/compiler/scala/tools/nsc/ClassPathMemoryConsumptionTester.scala @@ -0,0 +1,77 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package scala.tools.nsc + +import scala.io.StdIn.readLine + +/** + * Simple application to check out amount of memory used by chosen classpath representation. + * It allows us to create many scalac-like calls based on specified parameters, where each main retains Global. + * And we need additional tool (e.g. profiler) to measure memory consumption itself. + */ +object ClassPathMemoryConsumptionTester { + + private class TestSettings extends Settings { + val requiredInstances = IntSetting("-requiredInstances", + "Determine how many times classpath should be loaded", 10, Some((1, 10000)), (_: String) => None) + } + + private class MainRetainsGlobal extends scala.tools.nsc.MainClass { + var retainedGlobal: Global = _ + override def doCompile(compiler: Global) { + retainedGlobal = compiler + super.doCompile(compiler) + } + } + + def main(args: Array[String]): Unit = { + if (args contains "-help") usage() + else doTest(args) + } + + private def doTest(args: Array[String]) = { + val settings = loadSettings(args.toList) + + val mains = (1 to settings.requiredInstances.value) map (_ => new MainRetainsGlobal) + + // we need original settings without additional params to be able to use them later + val baseArgs = argsWithoutRequiredInstances(args) + + println(s"Loading classpath ${settings.requiredInstances.value} times") + val startTime = System.currentTimeMillis() + + mains map (_.process(baseArgs)) + + val elapsed = System.currentTimeMillis() - startTime + println(s"Operation finished - elapsed $elapsed ms") + println("Memory consumption can be now measured") + + var textFromStdIn = "" + while (textFromStdIn.toLowerCase != "exit") + textFromStdIn = readLine("Type 'exit' to close application: ") + } + + /** + * Prints usage information + */ + private def usage(): Unit = + println( """Use classpath and sourcepath options like in the case of e.g. 'scala' command. 
+ | There's also one additional option: + | -requiredInstances Determine how many times classpath should be loaded + """.stripMargin.trim) + + private def loadSettings(args: List[String]) = { + val settings = new TestSettings() + settings.processArguments(args, processAll = true) + if (settings.classpath.isDefault) + settings.classpath.value = sys.props("java.class.path") + settings + } + + private def argsWithoutRequiredInstances(args: Array[String]) = { + val instancesIndex = args.indexOf("-requiredInstances") + if (instancesIndex == -1) args + else args.dropRight(args.length - instancesIndex) ++ args.drop(instancesIndex + 2) + } +} diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala index c2caed70a06e..1a6843a24971 100644 --- a/src/compiler/scala/tools/nsc/CompilationUnits.scala +++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala @@ -8,7 +8,6 @@ package scala.tools.nsc import scala.reflect.internal.util.{ SourceFile, NoSourceFile, FreshNameCreator } import scala.collection.mutable import scala.collection.mutable.{ LinkedHashSet, ListBuffer } -import scala.tools.nsc.reporters.Reporter trait CompilationUnits { global: Global => @@ -123,31 +122,20 @@ trait CompilationUnits { global: Global => */ val icode: LinkedHashSet[icodes.IClass] = new LinkedHashSet - def reporter = global.reporter + @deprecated("Call global.reporter.echo directly instead.", "2.11.2") + final def echo(pos: Position, msg: String): Unit = reporter.echo(pos, msg) + @deprecated("Call global.reporter.error (or typer.context.error) directly instead.", "2.11.2") + final def error(pos: Position, msg: String): Unit = reporter.error(pos, msg) + @deprecated("Call global.reporter.warning (or typer.context.warning) directly instead.", "2.11.2") + final def warning(pos: Position, msg: String): Unit = reporter.warning(pos, msg) - def echo(pos: Position, msg: String) = - reporter.echo(pos, msg) + @deprecated("Call global.currentRun.reporting.deprecationWarning directly instead.", "2.11.2") + final def deprecationWarning(pos: Position, msg: String): Unit = currentRun.reporting.deprecationWarning(pos, msg) + @deprecated("Call global.currentRun.reporting.uncheckedWarning directly instead.", "2.11.2") + final def uncheckedWarning(pos: Position, msg: String): Unit = currentRun.reporting.uncheckedWarning(pos, msg) - def error(pos: Position, msg: String) = - reporter.error(pos, msg) - - def warning(pos: Position, msg: String) = - reporter.warning(pos, msg) - - def deprecationWarning(pos: Position, msg: String) = - currentRun.deprecationWarnings0.warn(pos, msg) - - def uncheckedWarning(pos: Position, msg: String) = - currentRun.uncheckedWarnings0.warn(pos, msg) - - def inlinerWarning(pos: Position, msg: String) = - currentRun.inlinerWarnings.warn(pos, msg) - - def incompleteInputError(pos: Position, msg:String) = - reporter.incompleteInputError(pos, msg) - - def comment(pos: Position, msg: String) = - reporter.comment(pos, msg) + @deprecated("This method will be removed. It does nothing.", "2.11.2") + final def comment(pos: Position, msg: String): Unit = {} /** Is this about a .java source file? 
*/ lazy val isJava = source.file.name.endsWith(".java") diff --git a/src/compiler/scala/tools/nsc/CompileClient.scala b/src/compiler/scala/tools/nsc/CompileClient.scala index 3017d8c9ccf8..f25950447302 100644 --- a/src/compiler/scala/tools/nsc/CompileClient.scala +++ b/src/compiler/scala/tools/nsc/CompileClient.scala @@ -43,8 +43,8 @@ class StandardCompileClient extends HasCompileSocket with CompileOutputCommon { info(vmArgs.mkString("[VM arguments: ", " ", "]")) val socket = - if (settings.server.value == "") compileSocket.getOrCreateSocket(vmArgs mkString " ", !shutdown) - else Some(compileSocket.getSocket(settings.server.value)) + if (settings.server.value == "") compileSocket.getOrCreateSocket(vmArgs mkString " ", !shutdown, settings.port.value) + else compileSocket.getSocket(settings.server.value) socket match { case Some(sock) => compileOnServer(sock, fscArgs) diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala index 6f068e179c85..aa02957a6c82 100644 --- a/src/compiler/scala/tools/nsc/CompileServer.scala +++ b/src/compiler/scala/tools/nsc/CompileServer.scala @@ -6,8 +6,9 @@ package scala.tools.nsc import java.io.PrintStream +import io.Directory import scala.tools.nsc.reporters.{Reporter, ConsoleReporter} -import scala.reflect.internal.util.FakePos //Position +import scala.reflect.internal.util.{FakePos, Position} import scala.tools.util.SocketServer import settings.FscSettings @@ -19,7 +20,7 @@ import settings.FscSettings * @author Martin Odersky * @version 1.0 */ -class StandardCompileServer extends SocketServer { +class StandardCompileServer(fixPort: Int = 0) extends SocketServer(fixPort) { lazy val compileSocket: CompileSocket = CompileSocket private var compiler: Global = null @@ -37,7 +38,7 @@ class StandardCompileServer extends SocketServer { /** Create a new compiler instance */ def newGlobal(settings: Settings, reporter: Reporter) = new Global(settings, reporter) { - override def inform(msg: String) = out.println(msg) + override def inform(pos: Position, msg: String) = out.println(msg) } override def timeout() { @@ -152,6 +153,7 @@ class StandardCompileServer extends SocketServer { clearCompiler() case ex: Throwable => warn("Compile server encountered fatal condition: " + ex) + reporter.error(null, "Compile server encountered fatal condition: " + ex.getMessage) shutdown = true throw ex } @@ -165,12 +167,12 @@ class StandardCompileServer extends SocketServer { } -object CompileServer extends StandardCompileServer { +object CompileServer { /** A directory holding redirected output */ - private lazy val redirectDir = (compileSocket.tmpDir / "output-redirects").createDirectory() + //private lazy val redirectDir = (compileSocket.tmpDir / "output-redirects").createDirectory() - private def createRedirect(filename: String) = - new PrintStream((redirectDir / filename).createFile().bufferedOutput()) + private def createRedirect(dir: Directory, filename: String) = + new PrintStream((dir / filename).createFile().bufferedOutput()) def main(args: Array[String]) = execute(() => (), args) @@ -186,21 +188,33 @@ object CompileServer extends StandardCompileServer { */ def execute(startupCallback : () => Unit, args: Array[String]) { val debug = args contains "-v" + var port = 0 + val i = args.indexOf("-p") + if (i >= 0 && args.length > i + 1) { + scala.util.control.Exception.ignoring(classOf[NumberFormatException]) { + port = args(i + 1).toInt + } + } + + // Create instance rather than extend to pass a port parameter. 
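A note on the new fixed-port plumbing in the CompileServer.execute hunk above: the optional `-p <port>` argument is scanned out of the raw args, a malformed value is ignored, and 0 keeps the behaviour of picking a free port. The same parsing idiom in a self-contained, hypothetical form (object and names made up for illustration):

    object PortArgSketch {
      def parse(args: Array[String]): Int = {
        var port = 0
        val i = args.indexOf("-p")
        if (i >= 0 && args.length > i + 1)
          try port = args(i + 1).toInt
          catch { case _: NumberFormatException => () } // malformed value: keep the default 0
        port
      }
      def main(args: Array[String]): Unit = {
        assert(parse(Array("-v", "-p", "4040")) == 4040)
        assert(parse(Array("-p", "oops")) == 0)
        assert(parse(Array("-v")) == 0)
      }
    }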
+ val server = new StandardCompileServer(port) + val redirectDir = (server.compileSocket.tmpDir / "output-redirects").createDirectory() + if (debug) { - echo("Starting CompileServer on port " + port) - echo("Redirect dir is " + redirectDir) + server.echo("Starting CompileServer on port " + server.port) + server.echo("Redirect dir is " + redirectDir) } - Console.withErr(createRedirect("scala-compile-server-err.log")) { - Console.withOut(createRedirect("scala-compile-server-out.log")) { - Console.err.println("...starting server on socket "+port+"...") + Console.withErr(createRedirect(redirectDir, "scala-compile-server-err.log")) { + Console.withOut(createRedirect(redirectDir, "scala-compile-server-out.log")) { + Console.err.println("...starting server on socket "+server.port+"...") Console.err.flush() - compileSocket setPort port + server.compileSocket setPort server.port startupCallback() - run() + server.run() - compileSocket deletePort port + server.compileSocket deletePort server.port } } } diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala index c4f06b59ec53..27a14141faee 100644 --- a/src/compiler/scala/tools/nsc/CompileSocket.scala +++ b/src/compiler/scala/tools/nsc/CompileSocket.scala @@ -32,7 +32,8 @@ trait HasCompileSocket { if (isErrorMessage(line)) noErrors = false - compileSocket.echo(line) + // be consistent with scalac: everything goes to stderr + compileSocket.warn(line) loop() } try loop() @@ -45,6 +46,9 @@ trait HasCompileSocket { class CompileSocket extends CompileOutputCommon { protected lazy val compileClient: StandardCompileClient = CompileClient def verbose = compileClient.verbose + + /* Fixes the port where to start the server, 0 yields some free port */ + var fixPort = 0 /** The prefix of the port identification file, which is followed * by the port number. @@ -63,7 +67,7 @@ class CompileSocket extends CompileOutputCommon { /** The class name of the scala compile server */ protected val serverClass = "scala.tools.nsc.CompileServer" - protected def serverClassArgs = if (verbose) List("-v") else Nil // debug + protected def serverClassArgs = (if (verbose) List("-v") else Nil) ::: (if (fixPort > 0) List("-p", fixPort.toString) else Nil) /** A temporary directory to use */ val tmpDir = { @@ -103,9 +107,14 @@ class CompileSocket extends CompileOutputCommon { def portFile(port: Int) = portsDir / File(port.toString) /** Poll for a server port number; return -1 if none exists yet */ - private def pollPort(): Int = portsDir.list.toList match { + private def pollPort(): Int = if (fixPort > 0) { + if (portsDir.list.toList.exists(_.name == fixPort.toString)) fixPort else -1 + } else portsDir.list.toList match { case Nil => -1 - case x :: xs => try x.name.toInt finally xs foreach (_.delete()) + case x :: xs => try x.name.toInt catch { + case e: Exception => x.delete() + throw e + } } /** Get the port number to which a scala compile server is connected; @@ -151,7 +160,8 @@ class CompileSocket extends CompileOutputCommon { * create a new daemon if necessary. Returns None if the connection * cannot be established. 
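The reworked `pollPort` above (CompileSocket.scala) changes two things: a fixed port only matches its own port file, and a port file whose name cannot be parsed is deleted and the failure propagated. A simplified sketch, with the file system replaced by a plain list of file names and a `delete` callback:

    object PollPortSketch {
      /** -1 means "no server port file yet". */
      def pollPort(portFileNames: List[String], fixPort: Int)(delete: String => Unit): Int =
        if (fixPort > 0) {
          if (portFileNames contains fixPort.toString) fixPort else -1
        } else portFileNames match {
          case Nil => -1
          case x :: _ =>
            try x.toInt
            catch { case e: NumberFormatException => delete(x); throw e } // clean up, then surface the failure
        }
    }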
*/ - def getOrCreateSocket(vmArgs: String, create: Boolean = true): Option[Socket] = { + def getOrCreateSocket(vmArgs: String, create: Boolean = true, fixedPort: Int = 0): Option[Socket] = { + fixPort = fixedPort val maxMillis = 10L * 1000 // try for 10 seconds val retryDelay = 50L val maxAttempts = (maxMillis / retryDelay).toInt @@ -185,14 +195,17 @@ class CompileSocket extends CompileOutputCommon { try { Some(x.toInt) } catch { case _: NumberFormatException => None } - def getSocket(serverAdr: String): Socket = ( - for ((name, portStr) <- splitWhere(serverAdr, _ == ':', doDropIndex = true) ; port <- parseInt(portStr)) yield + def getSocket(serverAdr: String): Option[Socket] = ( + for ((name, portStr) <- splitWhere(serverAdr, _ == ':', doDropIndex = true) ; port <- parseInt(portStr)) yield getSocket(name, port) ) getOrElse fatal("Malformed server address: %s; exiting" format serverAdr) - def getSocket(hostName: String, port: Int): Socket = - Socket(hostName, port).opt getOrElse fatal("Unable to establish connection to server %s:%d; exiting".format(hostName, port)) - + def getSocket(hostName: String, port: Int): Option[Socket] = { + val sock = Socket(hostName, port).opt + if (sock.isEmpty) warn("Unable to establish connection to server %s:%d".format(hostName, port)) + sock + } + def getPassword(port: Int): String = { val ff = portFile(port) val f = ff.bufferedReader() diff --git a/src/compiler/scala/tools/nsc/CompilerCommand.scala b/src/compiler/scala/tools/nsc/CompilerCommand.scala index bab0768ca9f3..9b8e9fa3304f 100644 --- a/src/compiler/scala/tools/nsc/CompilerCommand.scala +++ b/src/compiler/scala/tools/nsc/CompilerCommand.scala @@ -20,9 +20,12 @@ class CompilerCommand(arguments: List[String], val settings: Settings) { def ok = processArgumentsResult._1 def files = processArgumentsResult._2 - /** The name of the command */ + /** The name of the command. */ def cmdName = "scalac" + /** A descriptive alias for version and help messages. */ + def cmdDesc = "compiler" + private def explainAdvanced = "\n" + """ |-- Notes on option parsing -- |Boolean settings are always false unless set. @@ -85,7 +88,11 @@ class CompilerCommand(arguments: List[String], val settings: Settings) { def getInfoMessage(global: Global): String = { import settings._ - if (help) usageMsg + global.pluginOptionsHelp + import Properties.{ versionString, copyrightString } //versionFor + def versionFor(command: String) = f"Scala $command $versionString -- $copyrightString" + + if (version) versionFor(cmdDesc) + else if (help) usageMsg + global.pluginOptionsHelp else if (Xhelp) xusageMsg else if (Yhelp) yusageMsg else if (showPlugins) global.pluginDescriptions @@ -96,7 +103,15 @@ class CompilerCommand(arguments: List[String], val settings: Settings) { val components = global.phaseNames // global.phaseDescriptors // one initializes s"Phase graph of ${components.size} components output to ${genPhaseGraph.value}*.dot." 
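In the CompileSocket hunk above, `getSocket` now signals failure by returning `None` (after a warning) instead of exiting fatally, and CompileClient pattern matches on the result. A hedged sketch of that caller-side shape, using `java.net.Socket` directly as a stand-in for the compiler's socket helper:

    object SocketOptionSketch {
      import java.net.Socket
      import scala.util.Try

      def connect(host: String, port: Int): Option[Socket] =
        Try(new Socket(host, port)).toOption // None rather than a fatal exit

      def compileVia(host: String, port: Int): Boolean =
        connect(host, port) match {
          case Some(sock) =>
            try true // talk to the compile server here
            finally sock.close()
          case None =>
            Console.err.println(s"Unable to establish connection to server $host:$port")
            false
        }
    }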
} - else "" + // would be nicer if we could ask all the options for their helpful messages + else { + val sb = new StringBuilder + allSettings foreach { + case s: MultiChoiceSetting[_] if s.isHelping => sb append s.help + case _ => + } + sb.toString + } } /** diff --git a/src/compiler/scala/tools/nsc/Driver.scala b/src/compiler/scala/tools/nsc/Driver.scala index 3ac27a42e864..6befa76b3f10 100644 --- a/src/compiler/scala/tools/nsc/Driver.scala +++ b/src/compiler/scala/tools/nsc/Driver.scala @@ -2,26 +2,24 @@ package scala package tools.nsc import scala.tools.nsc.reporters.ConsoleReporter -import Properties.{ versionString, copyrightString, residentPromptString } +import Properties.{ versionMsg, residentPromptString } import scala.reflect.internal.util.FakePos abstract class Driver { val prompt = residentPromptString - val versionMsg = "Scala compiler " + - versionString + " -- " + - copyrightString - var reporter: ConsoleReporter = _ protected var command: CompilerCommand = _ protected var settings: Settings = _ - protected def scalacError(msg: String) { + protected def scalacError(msg: String): Unit = { reporter.error(FakePos("scalac"), msg + "\n scalac -help gives more information") } - protected def processSettingsHook(): Boolean = true + protected def processSettingsHook(): Boolean = { + if (settings.version) { reporter echo versionMsg ; false } else true + } protected def newCompiler(): Global @@ -37,14 +35,12 @@ abstract class Driver { } def process(args: Array[String]) { - val ss = new Settings(scalacError) - reporter = new ConsoleReporter(ss) + val ss = new Settings(scalacError) + reporter = new ConsoleReporter(ss) command = new CompilerCommand(args.toList, ss) settings = command.settings - if (settings.version) { - reporter.echo(versionMsg) - } else if (processSettingsHook()) { + if (processSettingsHook()) { val compiler = newCompiler() try { if (reporter.hasErrors) @@ -68,5 +64,4 @@ abstract class Driver { process(args) sys.exit(if (reporter.hasErrors) 1 else 0) } - } diff --git a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala index e71022228580..dbdeec809f75 100644 --- a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala +++ b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala @@ -19,9 +19,10 @@ extends CompilerCommand(args, settings) { def this(args: List[String]) = this(args, str => Console.println("Error: " + str)) - /** name of the associated compiler command */ override def cmdName = "scala" - def compCmdName = "scalac" + override def cmdDesc = "code runner" + + def compCmdName = "scalac" // super.cmdName // change CompilerCommand behavior override def shouldProcessArguments: Boolean = false @@ -50,17 +51,16 @@ extends CompilerCommand(args, settings) { case Nil => AsRepl case hd :: _ => waysToRun find (_.name == settings.howtorun.value) getOrElse guessHowToRun(hd) } - private def interpolate(s: String) = s.trim.replaceAll("@cmd@", cmdName).replaceAll("@compileCmd@", compCmdName) + "\n" - - def shortUsageMsg = interpolate(""" -Usage: @cmd@ [ ] - or @cmd@ -help -All options to @compileCmd@ (see @compileCmd@ -help) are also allowed. -""") + def shortUsageMsg = +s"""|Usage: $cmdName [ ] + | or $cmdName -help + | + |All options to $compCmdName (see $compCmdName -help) are also allowed. 
+""".stripMargin - override def usageMsg = shortUsageMsg + interpolate(""" -The first given argument other than options to @cmd@ designates + override def usageMsg = f"""$shortUsageMsg +The first given argument other than options to $cmdName designates what to run. Runnable targets are: - a file containing scala source @@ -68,7 +68,7 @@ what to run. Runnable targets are: - a runnable jar file with a valid Main-Class attribute - or if no argument is given, the repl (interactive shell) is started -Options to @cmd@ which reach the java runtime: +Options to $cmdName which reach the java runtime: -Dname=prop passed directly to java to set system properties -J -J is stripped and passed to java as-is @@ -86,8 +86,7 @@ A file argument will be run as a scala script unless it contains only self-contained compilation units (classes and objects) and exactly one runnable main method. In that case the file will be compiled and the main method invoked. This provides a bridge between scripts and standard -scala source. - """) + "\n" +scala source.%n""" } object GenericRunnerCommand { diff --git a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala index ad75d02bff2a..1289d55c3726 100644 --- a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala +++ b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala @@ -5,10 +5,11 @@ package scala.tools.nsc -import scala.tools.util.PathResolver +import java.net.URL +import scala.tools.util.PathResolverFactory class GenericRunnerSettings(error: String => Unit) extends Settings(error) { - def classpathURLs = new PathResolver(this).asURLs + def classpathURLs: Seq[URL] = PathResolverFactory.create(this).resultAsURLs val howtorun = ChoiceSetting( diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index c5d0c8506a3b..1c9dbad4dddc 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -8,18 +8,17 @@ package tools package nsc import java.io.{ File, FileOutputStream, PrintWriter, IOException, FileNotFoundException } +import java.net.URL import java.nio.charset.{ Charset, CharsetDecoder, IllegalCharsetNameException, UnsupportedCharsetException } -import java.util.UUID._ import scala.compat.Platform.currentTime import scala.collection.{ mutable, immutable } import io.{ SourceReader, AbstractFile, Path } import reporters.{ Reporter, ConsoleReporter } -import util.{ ClassPath, MergedClassPath, StatisticsInfo, returning, stackTraceString } +import util.{ ClassFileLookup, ClassPath, MergedClassPath, StatisticsInfo, returning } import scala.reflect.ClassTag -import scala.reflect.internal.util.{ OffsetPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile } -import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat } -import scala.reflect.io.VirtualFile -import symtab.{ Flags, SymbolTable, SymbolLoaders, SymbolTrackers } +import scala.reflect.internal.util.{ SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile } +import scala.reflect.internal.pickling.PickleBuffer +import symtab.{ Flags, SymbolTable, SymbolTrackers } import symtab.classfile.Pickler import plugins.Plugins import ast._ @@ -28,13 +27,15 @@ import typechecker._ import transform.patmat.PatternMatching import transform._ import backend.icode.{ ICodes, GenICode, ICodeCheckers } -import backend.{ ScalaPrimitives, Platform, JavaPlatform } +import backend.{ ScalaPrimitives, JavaPlatform } import backend.jvm.GenBCode import 
backend.jvm.GenASM import backend.opt.{ Inliners, InlineExceptionHandlers, ConstantOptimization, ClosureElimination, DeadCodeElimination } import backend.icode.analysis._ import scala.language.postfixOps import scala.tools.nsc.ast.{TreeGen => AstTreeGen} +import scala.tools.nsc.classpath.FlatClassPath +import scala.tools.nsc.settings.ClassPathRepresentationType class Global(var currentSettings: Settings, var reporter: Reporter) extends SymbolTable @@ -44,7 +45,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter) with Trees with Printers with DocComments - with Positions { self => + with Positions + with Reporting + with Parsing { self => // the mirror -------------------------------------------------- @@ -56,7 +59,12 @@ class Global(var currentSettings: Settings, var reporter: Reporter) class GlobalMirror extends Roots(NoSymbol) { val universe: self.type = self - def rootLoader: LazyType = new loaders.PackageLoader(classPath) + def rootLoader: LazyType = { + settings.YclasspathImpl.value match { + case ClassPathRepresentationType.Flat => new loaders.PackageLoaderUsingFlatClassPath(FlatClassPath.RootPackage, flatClassPath) + case ClassPathRepresentationType.Recursive => new loaders.PackageLoader(recursiveClassPath) + } + } override def toString = "compiler mirror" } implicit val MirrorTag: ClassTag[Mirror] = ClassTag[Mirror](classOf[GlobalMirror]) @@ -102,7 +110,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter) type PlatformClassPath = ClassPath[AbstractFile] type OptClassPath = Option[PlatformClassPath] - def classPath: PlatformClassPath = platform.classPath + def classPath: ClassFileLookup[AbstractFile] = settings.YclasspathImpl.value match { + case ClassPathRepresentationType.Flat => flatClassPath + case ClassPathRepresentationType.Recursive => recursiveClassPath + } + + private def recursiveClassPath: ClassPath[AbstractFile] = platform.classPath + + private def flatClassPath: FlatClassPath = platform.flatClassPath // sub-components -------------------------------------------------- @@ -217,6 +232,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter) /** Called from parser, which signals hereby that a method definition has been parsed. */ def signalParseProgress(pos: Position) {} + /** Called by ScalaDocAnalyzer when a doc comment has been parsed. */ + def signalParsedDocComment(comment: String, pos: Position) = { + // TODO: this is all very broken (only works for scaladoc comments, not regular ones) + // --> add hooks to parser and refactor Interactive global to handle comments directly + // in any case don't use reporter for parser hooks + reporter.comment(pos, comment) + } + /** Register new context; called for every created context */ def registerContext(c: analyzer.Context) { @@ -227,19 +250,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) */ def registerTopLevelSym(sym: Symbol) {} -// ------------------ Reporting ------------------------------------- - - // not deprecated yet, but a method called "error" imported into - // nearly every trait really must go. For now using globalError. 
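In the Global.scala hunk above, both the root loader and `classPath` branch on the same `YclasspathImpl` setting to choose between the new flat classpath and the classic recursive one. A hypothetical, stand-alone rendering of that dispatch (the lookup trait below is invented for illustration):

    object ClassPathDispatchSketch {
      sealed trait Representation
      case object Flat      extends Representation
      case object Recursive extends Representation

      // Stand-in for ClassFileLookup / FlatClassPath; not the real API.
      trait Lookup { def findClassFile(name: String): Option[String] }

      def classPathFor(repr: Representation, flat: Lookup, recursive: Lookup): Lookup =
        repr match {
          case Flat      => flat      // new flat representation
          case Recursive => recursive // classic recursive ClassPath
        }
    }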
- def error(msg: String) = globalError(msg) - - override def inform(msg: String) = inform(NoPosition, msg) - override def globalError(msg: String) = globalError(NoPosition, msg) - override def warning(msg: String) = warning(NoPosition, msg) - - def globalError(pos: Position, msg: String) = reporter.error(pos, msg) - def warning(pos: Position, msg: String) = if (settings.fatalWarnings) globalError(pos, msg) else reporter.warning(pos, msg) - def inform(pos: Position, msg: String) = reporter.echo(pos, msg) +// ------------------ Debugging ------------------------------------- // Getting in front of Predef's asserts to supplement with more info. // This has the happy side effect of masking the one argument forms @@ -262,12 +273,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) require(requirement, "") } - // Needs to call error to make sure the compile fails. - override def abort(msg: String): Nothing = { - error(msg) - super.abort(msg) - } - @inline final def ifDebug(body: => Unit) { if (settings.debug) body @@ -290,8 +295,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) log(s"!!!$pos_s $msg") // such warnings always at least logged } - def informComplete(msg: String): Unit = reporter.withoutTruncating(inform(msg)) - def logError(msg: String, t: Throwable): Unit = () override def shouldLogAtThisPhase = settings.log.isSetByUser && ( @@ -329,7 +332,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) None } - val charset = ( if (settings.encoding.isSetByUser) Some(settings.encoding.value) else None ) flatMap loadCharset getOrElse { + val charset = settings.encoding.valueSetByUser flatMap loadCharset getOrElse { settings.encoding.value = defaultEncoding // A mandatory charset Charset.forName(defaultEncoding) } @@ -344,16 +347,16 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } } - ( if (settings.sourceReader.isSetByUser) Some(settings.sourceReader.value) else None ) flatMap loadReader getOrElse { + settings.sourceReader.valueSetByUser flatMap loadReader getOrElse { new SourceReader(charset.newDecoder(), reporter) } } - if (settings.verbose || settings.Ylogcp) { - // Uses the "do not truncate" inform - informComplete("[search path for source files: " + classPath.sourcepaths.mkString(",") + "]") - informComplete("[search path for class files: " + classPath.asClasspathString + "]") - } + if (settings.verbose || settings.Ylogcp) + reporter.echo( + s"[search path for source files: ${classPath.asSourcePathString}]\n" + + s"[search path for class files: ${classPath.asClassPathString}]" + ) // The current division between scala.reflect.* and scala.tools.nsc.* is pretty // clunky. 
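The charset and source-reader initialisation above now goes through the `valueSetByUser` option, so a value the user set but that cannot be loaded falls back to the default encoding. A small, hypothetical analogue of that fallback chain:

    object CharsetFallbackSketch {
      import java.nio.charset.Charset
      import scala.util.Try

      def pickCharset(valueSetByUser: Option[String], default: String = "UTF-8"): Charset =
        valueSetByUser
          .flatMap(name => Try(Charset.forName(name)).toOption) // unusable names are dropped
          .getOrElse(Charset.forName(default))
    }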
It is often difficult to have a setting influence something without having @@ -414,7 +417,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) reporter.cancelled || unit.isJava && this.id > maxJavaPhase } - final def applyPhase(unit: CompilationUnit) { + final def withCurrentUnit(unit: CompilationUnit)(task: => Unit) { if ((unit ne null) && unit.exists) lastSeenSourceFile = unit.source @@ -426,7 +429,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) currentRun.currentUnit = unit if (!cancelled(unit)) { currentRun.informUnitStarting(this, unit) - apply(unit) + task } currentRun.advanceUnit() } finally { @@ -434,6 +437,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter) currentRun.currentUnit = unit0 } } + + final def applyPhase(unit: CompilationUnit) = withCurrentUnit(unit)(apply(unit)) } // phaseName = "parser" @@ -850,52 +855,63 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } reverse } + // ------------ REPL utilities --------------------------------- + + /** Extend classpath of `platform` and rescan updated packages. */ + def extendCompilerClassPath(urls: URL*): Unit = { + if (settings.YclasspathImpl.value == ClassPathRepresentationType.Flat) + throw new UnsupportedOperationException("Flat classpath doesn't support extending the compiler classpath") + + val newClassPath = platform.classPath.mergeUrlsIntoClassPath(urls: _*) + platform.currentClassPath = Some(newClassPath) + // Reload all specified jars into this compiler instance + invalidateClassPathEntries(urls.map(_.getPath): _*) + } + // ------------ Invalidations --------------------------------- /** Is given package class a system package class that cannot be invalidated? */ private def isSystemPackageClass(pkg: Symbol) = - pkg == RootClass || - pkg == definitions.ScalaPackageClass || { - val pkgname = pkg.fullName - (pkgname startsWith "scala.") && !(pkgname startsWith "scala.tools") - } + pkg == RootClass || (pkg.hasTransOwner(definitions.ScalaPackageClass) && !pkg.hasTransOwner(this.rootMirror.staticPackage("scala.tools").moduleClass.asClass)) /** Invalidates packages that contain classes defined in a classpath entry, and * rescans that entry. - * @param paths Fully qualified names that refer to directories or jar files that are - * a entries on the classpath. - * First, causes the classpath entry referred to by `path` to be rescanned, so that - * any new files or deleted files or changes in subpackages are picked up. - * Second, invalidates any packages for which one of the following considitions is met: - - * - the classpath entry contained during the last compilation run classfiles - * that represent a member in the package - * - the classpath entry now contains classfiles - * that represent a member in the package + * + * First, the classpath entry referred to by one of the `paths` is rescanned, + * so that any new files or changes in subpackages are picked up. + * Second, any packages for which one of the following conditions is met is invalidated: + * - the classpath entry contained during the last compilation run now contains classfiles + * that represent a member in the package; + * - the classpath entry now contains classfiles that represent a member in the package; * - the set of subpackages has changed. * * The invalidated packages are reset in their entirety; all member classes and member packages * are re-accessed using the new classpath. 
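Earlier in this Global.scala hunk, `applyPhase` becomes the trivial case of the new `withCurrentUnit` loan method: install the unit as current, run an arbitrary task, and restore the previous state even on failure. The same shape, stripped of compiler types:

    object WithCurrentUnitSketch {
      private var currentUnit: String = "<none>"

      def withCurrentUnit[A](unit: String)(task: => A): A = {
        val saved = currentUnit
        currentUnit = unit
        try task
        finally currentUnit = saved // restored even if the task throws
      }

      def applyPhase(unit: String): Unit =
        withCurrentUnit(unit) { println(s"running phase on $currentUnit") }
    }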
- * Not invalidated are system packages that the compiler needs to access as parts - * of standard definitions. The criterion what is a system package is currently: - * any package rooted in "scala", with the exception of packages rooted in "scala.tools". - * This can be refined later. - * @return A pair consisting of - * - a list of invalidated packages - * - a list of of packages that should have been invalidated but were not because - * they are system packages. + * + * System packages that the compiler needs to access as part of standard definitions + * are not invalidated. A system package is: + * Any package rooted in "scala", with the exception of packages rooted in "scala.tools". + * + * @param paths Fully-qualified names that refer to directories or jar files that are + * entries on the classpath. */ - def invalidateClassPathEntries(paths: String*): (List[ClassSymbol], List[ClassSymbol]) = { + def invalidateClassPathEntries(paths: String*): Unit = { + if (settings.YclasspathImpl.value == ClassPathRepresentationType.Flat) + throw new UnsupportedOperationException("Flat classpath doesn't support the classpath invalidation") + + implicit object ClassPathOrdering extends Ordering[PlatformClassPath] { + def compare(a:PlatformClassPath, b:PlatformClassPath) = a.asClassPathString compare b.asClassPathString + } val invalidated, failed = new mutable.ListBuffer[ClassSymbol] classPath match { case cp: MergedClassPath[_] => def assoc(path: String): List[(PlatformClassPath, PlatformClassPath)] = { - val dir = AbstractFile getDirectory path + val dir = AbstractFile.getDirectory(path) val canonical = dir.canonicalPath def matchesCanonical(e: ClassPath[_]) = e.origin match { case Some(opath) => - (AbstractFile getDirectory opath).canonicalPath == canonical + AbstractFile.getDirectory(opath).canonicalPath == canonical case None => false } @@ -903,21 +919,20 @@ class Global(var currentSettings: Settings, var reporter: Reporter) case Some(oldEntry) => List(oldEntry -> cp.context.newClassPath(dir)) case None => - println(s"canonical = $canonical, origins = ${cp.entries map (_.origin)}") - error(s"cannot invalidate: no entry named $path in classpath $classPath") + error(s"Error adding entry to classpath. 
During invalidation, no entry named $path in classpath $classPath") List() } } - val subst = Map(paths flatMap assoc: _*) + val subst = immutable.TreeMap(paths flatMap assoc: _*) if (subst.nonEmpty) { platform updateClassPath subst informProgress(s"classpath updated on entries [${subst.keys mkString ","}]") def mkClassPath(elems: Iterable[PlatformClassPath]): PlatformClassPath = if (elems.size == 1) elems.head - else new MergedClassPath(elems, classPath.context) + else new MergedClassPath(elems, recursiveClassPath.context) val oldEntries = mkClassPath(subst.keys) val newEntries = mkClassPath(subst.values) - reSync(RootClass, Some(classPath), Some(oldEntries), Some(newEntries), invalidated, failed) + mergeNewEntries(newEntries, RootClass, Some(recursiveClassPath), Some(oldEntries), invalidated, failed) } } def show(msg: String, syms: scala.collection.Traversable[Symbol]) = @@ -925,36 +940,32 @@ class Global(var currentSettings: Settings, var reporter: Reporter) informProgress(s"$msg: ${syms map (_.fullName) mkString ","}") show("invalidated packages", invalidated) show("could not invalidate system packages", failed) - (invalidated.toList, failed.toList) } - /** Re-syncs symbol table with classpath + /** Merges new classpath entries into the symbol table + * + * @param newEntries The new classpath entries * @param root The root symbol to be resynced (a package class) - * @param allEntries Optionally, the corresponding package in the complete current classPath - * @param oldEntries Optionally, the corresponding package in the old classPath entries - * @param newEntries Optionally, the corresponding package in the new classPath entries + * @param allEntries Optionally, the corresponding package in the complete current classpath + * @param oldEntries Optionally, the corresponding package in the old classpath entries * @param invalidated A listbuffer collecting the invalidated package classes * @param failed A listbuffer collecting system package classes which could not be invalidated - * The resyncing strategy is determined by the absence or presence of classes and packages. - * If either oldEntries or newEntries contains classes, root is invalidated, provided a corresponding package - * exists in allEntries, or otherwise is removed. - * Otherwise, the action is determined by the following matrix, with columns: * - * old new all sym action - * + + + + recurse into all child packages of old ++ new - * + - + + invalidate root - * + - - + remove root from its scope - * - + + + invalidate root - * - + + - create and enter root - * - - * * no action + * The merging strategy is determined by the absence or presence of classes and packages. * - * Here, old, new, all mean classpaths and sym means symboltable. + is presence of an - * entry in its column, - is absence, * is don't care. + * If either oldEntries or newEntries contains classes, root is invalidated provided that a corresponding package + * exists in allEntries. Otherwise it is removed. + * Otherwise, the action is determined by the following matrix, with columns: + * + * old sym action + * + + recurse into all child packages of newEntries + * - + invalidate root + * - - create and enter root * - * Note that new <= all and old <= sym, so the matrix above covers all possibilities. + * Here, old means classpath, and sym means symboltable. + is presence of an entry in its column, - is absence. 
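The decision table documented above for `mergeNewEntries` can be encoded directly. In this sketch, Boolean flags stand in for "package present in the old classpath entries" and "package present in the symbol table", and only the no-classes-found branch described in the comment is covered:

    object MergeDecisionSketch {
      sealed trait Action
      case object RecurseIntoNewEntries extends Action
      case object InvalidateRoot        extends Action
      case object CreateAndEnterRoot    extends Action

      def decide(inOldEntries: Boolean, inSymbolTable: Boolean): Action =
        (inOldEntries, inSymbolTable) match {
          case (true,  true)  => RecurseIntoNewEntries
          case (false, true)  => InvalidateRoot
          case (false, false) => CreateAndEnterRoot
          case (true,  false) => InvalidateRoot // defensive: an old entry implies an existing symbol
        }
    }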
*/ - private def reSync(root: ClassSymbol, - allEntries: OptClassPath, oldEntries: OptClassPath, newEntries: OptClassPath, + private def mergeNewEntries(newEntries: PlatformClassPath, root: ClassSymbol, + allEntries: OptClassPath, oldEntries: OptClassPath, invalidated: mutable.ListBuffer[ClassSymbol], failed: mutable.ListBuffer[ClassSymbol]) { ifDebug(informProgress(s"syncing $root, $oldEntries -> $newEntries")) @@ -967,11 +978,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } invalidated += root } - def packageNames(cp: PlatformClassPath): Set[String] = cp.packages.toSet map getName def subPackage(cp: PlatformClassPath, name: String): OptClassPath = cp.packages find (cp1 => getName(cp1) == name) - val classesFound = hasClasses(oldEntries) || hasClasses(newEntries) + val classesFound = hasClasses(oldEntries) || newEntries.classes.nonEmpty if (classesFound && !isSystemPackageClass(root)) { invalidateOrRemove(root) } else { @@ -979,69 +989,27 @@ class Global(var currentSettings: Settings, var reporter: Reporter) if (root.isRoot) invalidateOrRemove(EmptyPackageClass) else failed += root } - (oldEntries, newEntries) match { - case (Some(oldcp) , Some(newcp)) => - for (pstr <- packageNames(oldcp) ++ packageNames(newcp)) { - val pname = newTermName(pstr) - val pkg = (root.info decl pname) orElse { - // package was created by external agent, create symbol to track it - assert(!subPackage(oldcp, pstr).isDefined) - loaders.enterPackage(root, pstr, new loaders.PackageLoader(allEntries.get)) - } - reSync( - pkg.moduleClass.asInstanceOf[ClassSymbol], - subPackage(allEntries.get, pstr), subPackage(oldcp, pstr), subPackage(newcp, pstr), - invalidated, failed) + if (!oldEntries.isDefined) invalidateOrRemove(root) + else + for (pstr <- newEntries.packages.map(getName)) { + val pname = newTermName(pstr) + val pkg = (root.info decl pname) orElse { + // package does not exist in symbol table, create symbol to track it + assert(!subPackage(oldEntries.get, pstr).isDefined) + loaders.enterPackage(root, pstr, new loaders.PackageLoader(allEntries.get)) } - case (Some(oldcp), None) => - invalidateOrRemove(root) - case (None, Some(newcp)) => - invalidateOrRemove(root) - case (None, None) => - } + mergeNewEntries(subPackage(newEntries, pstr).get, pkg.moduleClass.asClass, + subPackage(allEntries.get, pstr), subPackage(oldEntries.get, pstr), + invalidated, failed) + } } } - /** Invalidate contents of setting -Yinvalidate */ - def doInvalidation() = settings.Yinvalidate.value match { - case "" => - case entry => invalidateClassPathEntries(entry) - } - // ----------- Runs --------------------------------------- private var curRun: Run = null private var curRunId = 0 - /** A hook that lets subclasses of `Global` define whether a package or class should be kept loaded for the - * next compiler run. If the parameter `sym` is a class or object, and `clearOnNextRun(sym)` returns `true`, - * then the symbol is unloaded and reset to its state before the last compiler run. If the parameter `sym` is - * a package, and clearOnNextRun(sym)` returns `true`, the package is recursively searched for - * classes to drop. - * - * Example: Let's say I want a compiler that drops all classes corresponding to the current project - * between runs. Then `keepForNextRun` of a toplevel class or object should return `true` if the - * class or object does not form part of the current project, `false` otherwise. 
For a package, - * clearOnNextRun should return `true` if no class in that package forms part of the current project, - * `false` otherwise. - * - * @param sym A class symbol, object symbol, package, or package class. - */ - @deprecated("use invalidateClassPathEntries instead", "2.10.0") - def clearOnNextRun(sym: Symbol) = false - /* To try out clearOnNext run on the scala.tools.nsc project itself - * replace `false` above with the following code - - settings.Xexperimental.value && { sym.isRoot || { - sym.fullName match { - case "scala" | "scala.tools" | "scala.tools.nsc" => true - case _ => sym.owner.fullName.startsWith("scala.tools.nsc") - } - }} - - * Then, fsc -Xexperimental clears the nsc project between successive runs of `fsc`. - */ - object typeDeconstruct extends { val global: Global.this.type = Global.this } with typechecker.StructuredTypeStrings @@ -1111,45 +1079,41 @@ class Global(var currentSettings: Settings, var reporter: Reporter) /** Don't want to introduce new errors trying to report errors, * so swallow exceptions. */ - override def supplementErrorMessage(errorMessage: String): String = { - if (currentRun.supplementedError) errorMessage - else try { - currentRun.supplementedError = true - val tree = analyzer.lastTreeToTyper - val sym = tree.symbol - val tpe = tree.tpe - val site = lastSeenContext.enclClassOrMethod.owner - val pos_s = if (tree.pos.isDefined) s"line ${tree.pos.line} of ${tree.pos.source.file}" else "" - val context_s = try { - // Taking 3 before, 3 after the fingered line. - val start = 0 max (tree.pos.line - 3) - val xs = scala.reflect.io.File(tree.pos.source.file.file).lines drop start take 7 - val strs = xs.zipWithIndex map { case (line, idx) => f"${start + idx}%6d $line" } - strs.mkString("== Source file context for tree position ==\n\n", "\n", "") - } - catch { case t: Exception => devWarning("" + t) ; "" } - - val info1 = formatExplain( - "while compiling" -> currentSource.path, - "during phase" -> ( if (globalPhase eq phase) phase else "globalPhase=%s, enteringPhase=%s".format(globalPhase, phase) ), - "library version" -> scala.util.Properties.versionString, - "compiler version" -> Properties.versionString, - "reconstructed args" -> settings.recreateArgs.mkString(" ") - ) - val info2 = formatExplain( - "last tree to typer" -> tree.summaryString, - "tree position" -> pos_s, - "tree tpe" -> tpe, - "symbol" -> Option(sym).fold("null")(_.debugLocationString), - "symbol definition" -> Option(sym).fold("null")(s => s.defString + s" (a ${s.shortSymbolClass})"), - "symbol package" -> sym.enclosingPackage.fullName, - "symbol owners" -> ownerChainString(sym), - "call site" -> (site.fullLocationString + " in " + site.enclosingPackage) - ) - ("\n " + errorMessage + "\n" + info1) :: info2 :: context_s :: Nil mkString "\n\n" + override def supplementTyperState(errorMessage: String): String = try { + val tree = analyzer.lastTreeToTyper + val sym = tree.symbol + val tpe = tree.tpe + val site = lastSeenContext.enclClassOrMethod.owner + val pos_s = if (tree.pos.isDefined) s"line ${tree.pos.line} of ${tree.pos.source.file}" else "" + val context_s = try { + // Taking 3 before, 3 after the fingered line. 
+ val start = 0 max (tree.pos.line - 3) + val xs = scala.reflect.io.File(tree.pos.source.file.file).lines drop start take 7 + val strs = xs.zipWithIndex map { case (line, idx) => f"${start + idx}%6d $line" } + strs.mkString("== Source file context for tree position ==\n\n", "\n", "") } - catch { case _: Exception | _: TypeError => errorMessage } - } + catch { case t: Exception => devWarning("" + t) ; "" } + + val info1 = formatExplain( + "while compiling" -> currentSource.path, + "during phase" -> ( if (globalPhase eq phase) phase else "globalPhase=%s, enteringPhase=%s".format(globalPhase, phase) ), + "library version" -> scala.util.Properties.versionString, + "compiler version" -> Properties.versionString, + "reconstructed args" -> settings.recreateArgs.mkString(" ") + ) + val info2 = formatExplain( + "last tree to typer" -> tree.summaryString, + "tree position" -> pos_s, + "tree tpe" -> tpe, + "symbol" -> Option(sym).fold("null")(_.debugLocationString), + "symbol definition" -> Option(sym).fold("null")(s => s.defString + s" (a ${s.shortSymbolClass})"), + "symbol package" -> sym.enclosingPackage.fullName, + "symbol owners" -> ownerChainString(sym), + "call site" -> (site.fullLocationString + " in " + site.enclosingPackage) + ) + ("\n " + errorMessage + "\n" + info1) :: info2 :: context_s :: Nil mkString "\n\n" + } catch { case _: Exception | _: TypeError => errorMessage } + /** The id of the currently active run */ @@ -1161,17 +1125,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) inform("[running phase " + ph.name + " on " + currentRun.size + " compilation units]") } - /** Collects for certain classes of warnings during this run. */ - class ConditionalWarning(what: String, option: Settings#BooleanSetting) { - val warnings = mutable.LinkedHashMap[Position, String]() - def warn(pos: Position, msg: String) = - if (option) reporter.warning(pos, msg) - else if (!(warnings contains pos)) warnings += ((pos, msg)) - def summarize() = - if (warnings.nonEmpty && (option.isDefault || settings.fatalWarnings)) - warning("there were %d %s warning(s); re-run with %s for details".format(warnings.size, what, option.name)) - } - def newSourceFile(code: String, filename: String = "") = new BatchSourceFile(filename, code) @@ -1189,7 +1142,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) /** A Run is a single execution of the compiler on a set of units. */ - class Run extends RunContextApi { + class Run extends RunContextApi with RunReporting with RunParsing { /** Have been running into too many init order issues with Run * during erroneous conditions. Moved all these vals up to the * top of the file so at least they're not trivially null. @@ -1198,24 +1151,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter) /** The currently compiled unit; set from GlobalPhase */ var currentUnit: CompilationUnit = NoCompilationUnit - // This change broke sbt; I gave it the thrilling name of uncheckedWarnings0 so - // as to recover uncheckedWarnings for its ever-fragile compiler interface. 
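The crash-report supplement above prints the offending source line with three lines of context on either side, each prefixed by a right-aligned line number. The same slice-and-format step in isolation, over an in-memory vector of lines:

    object SourceContextSketch {
      def contextLines(lines: Vector[String], fingeredLine: Int): String = {
        val start = 0 max (fingeredLine - 3)
        val slice = lines.drop(start).take(7) // three before, the line itself, three after
        slice.zipWithIndex
          .map { case (line, idx) => f"${start + idx}%6d $line" }
          .mkString("== Source file context for tree position ==\n\n", "\n", "")
      }
    }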
- val deprecationWarnings0 = new ConditionalWarning("deprecation", settings.deprecation) - val uncheckedWarnings0 = new ConditionalWarning("unchecked", settings.unchecked) - val featureWarnings = new ConditionalWarning("feature", settings.feature) - val inlinerWarnings = new ConditionalWarning("inliner", settings.YinlinerWarnings) - val allConditionalWarnings = List(deprecationWarnings0, uncheckedWarnings0, featureWarnings, inlinerWarnings) - - def uncheckedWarnings: List[(Position, String)] = uncheckedWarnings0.warnings.toList // used in sbt - def deprecationWarnings: List[(Position, String)] = deprecationWarnings0.warnings.toList // used in sbt - - var reportedFeature = Set[Symbol]() - - /** Has any macro expansion used a fallback during this run? */ - var seenMacroExpansionsFallingBack = false - - /** Have we already supplemented the error message of a compiler crash? */ - private[nsc] final var supplementedError = false + // used in sbt + def uncheckedWarnings: List[(Position, String)] = reporting.uncheckedWarnings + // used in sbt + def deprecationWarnings: List[(Position, String)] = reporting.deprecationWarnings private class SyncedCompilationBuffer { self => private val underlying = new mutable.ArrayBuffer[CompilationUnit] @@ -1236,7 +1175,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } private val unitbuf = new SyncedCompilationBuffer - + val compiledFiles = new mutable.HashSet[String] /** A map from compiled top-level symbols to their source files */ @@ -1340,47 +1279,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) first } - /** Reset all classes contained in current project, as determined by - * the clearOnNextRun hook - */ - @deprecated("use invalidateClassPathEntries instead", "2.10.0") - def resetProjectClasses(root: Symbol): Unit = try { - def unlink(sym: Symbol) = - if (sym != NoSymbol) root.info.decls.unlink(sym) - if (settings.verbose) inform("[reset] recursing in "+root) - val toReload = mutable.Set[String]() - for (sym <- root.info.decls) { - if (sym.isInitialized && clearOnNextRun(sym)) - if (sym.hasPackageFlag) { - resetProjectClasses(sym.moduleClass) - openPackageModule(sym.moduleClass) - } else { - unlink(sym) - unlink(root.info.decls.lookup( - if (sym.isTerm) sym.name.toTypeName else sym.name.toTermName)) - toReload += sym.fullName - // note: toReload could be set twice with the same name - // but reinit must happen only once per name. That's why - // the following classPath.findClass { ... } code cannot be moved here. - } - } - for (fullname <- toReload) - classPath.findClass(fullname) match { - case Some(classRep) => - if (settings.verbose) inform("[reset] reinit "+fullname) - loaders.initializeFromClassPath(root, classRep) - case _ => - } - } catch { - case ex: Throwable => - // this handler should not be nessasary, but it seems that `fsc` - // eats exceptions if they appear here. Need to find out the cause for - // this and fix it. - inform("[reset] exception happened: "+ex) - ex.printStackTrace() - throw ex - } - // --------------- Miscellania ------------------------------- /** Progress tracking. 
Measured in "progress units" which are 1 per @@ -1413,6 +1311,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) refreshProgress() } + // for sbt def cancel() { reporter.cancelled = true } private def currentProgress = (phasec * size) + unitc @@ -1444,7 +1343,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val erasurePhase = phaseNamed("erasure") val posterasurePhase = phaseNamed("posterasure") // val lazyvalsPhase = phaseNamed("lazyvals") - // val lambdaliftPhase = phaseNamed("lambdalift") + val lambdaliftPhase = phaseNamed("lambdalift") // val constructorsPhase = phaseNamed("constructors") val flattenPhase = phaseNamed("flatten") val mixinPhase = phaseNamed("mixin") @@ -1478,10 +1377,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter) private def checkDeprecatedSettings(unit: CompilationUnit) { // issue warnings for any usage of deprecated settings settings.userSetSettings filter (_.isDeprecated) foreach { s => - unit.deprecationWarning(NoPosition, s.name + " is deprecated: " + s.deprecationMessage.get) + currentRun.reporting.deprecationWarning(NoPosition, s.name + " is deprecated: " + s.deprecationMessage.get) } if (settings.target.value.contains("jvm-1.5")) - unit.deprecationWarning(NoPosition, settings.target.name + ":" + settings.target.value + " is deprecated: use target for Java 1.6 or above.") + currentRun.reporting.deprecationWarning(NoPosition, settings.target.name + ":" + settings.target.value + " is deprecated: use target for Java 1.6 or above.") } /* An iterator returning all the units being compiled in this run */ @@ -1491,18 +1390,17 @@ class Global(var currentSettings: Settings, var reporter: Reporter) made to the underlying structure. */ def units: Iterator[CompilationUnit] = unitbuf.iterator - + def registerPickle(sym: Symbol): Unit = () /** does this run compile given class, module, or case factory? */ // NOTE: Early initialized members temporarily typechecked before the enclosing class, see typedPrimaryConstrBody! - // Here we work around that wrinkle by claiming that a top-level, early-initialized member is compiled in + // Here we work around that wrinkle by claiming that a early-initialized member is compiled in // *every* run. This approximation works because this method is exclusively called with `this` == `currentRun`. 
def compiles(sym: Symbol): Boolean = if (sym == NoSymbol) false else if (symSource.isDefinedAt(sym)) true - else if (sym.isTopLevel && sym.isEarlyInitialized) true - else if (!sym.isTopLevel) compiles(sym.enclosingTopLevelClass) + else if (!sym.isTopLevel) compiles(sym.enclosingTopLevelClassOrDummy) else if (sym.isModuleClass) compiles(sym.sourceModule) else false @@ -1562,28 +1460,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } } - def reportCompileErrors() { - if (!reporter.hasErrors && reporter.hasWarnings && settings.fatalWarnings) - globalError("No warnings can be incurred under -Xfatal-warnings.") - if (reporter.hasErrors) { - for ((sym, file) <- symSource.iterator) { - sym.reset(new loaders.SourcefileLoader(file)) - if (sym.isTerm) - sym.moduleClass reset loaders.moduleClassLoader - } - } - else { - allConditionalWarnings foreach (_.summarize()) - - if (seenMacroExpansionsFallingBack) - warning("some macros could not be expanded and code fell back to overridden methods;"+ - "\nrecompiling with generated classfiles on the classpath might help.") - // todo: migrationWarnings - } - } - - /** Caching member symbols that are def-s in Defintions because they might change from Run to Run. */ + /** Caching member symbols that are def-s in Definitions because they might change from Run to Run. */ val runDefinitions: definitions.RunDefinitions = new definitions.RunDefinitions /** Compile list of source files, @@ -1594,7 +1472,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) def checkDeprecations() = { checkDeprecatedSettings(newCompilationUnit("")) - reportCompileErrors() + reporting.summarizeErrors() } val units = sources map scripted map (new CompilationUnit(_)) @@ -1609,8 +1487,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) compileUnitsInternal(units, fromPhase) private def compileUnitsInternal(units: List[CompilationUnit], fromPhase: Phase) { - doInvalidation() - units foreach addUnit val startTime = currentTime @@ -1618,7 +1494,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) checkDeprecatedSettings(unitbuf.head) globalPhase = fromPhase - while (globalPhase.hasNext && !reporter.hasErrors) { + while (globalPhase.hasNext && !reporter.hasErrors) { val startTime = currentTime phase = globalPhase globalPhase.run() @@ -1658,12 +1534,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter) runCheckers() // output collected statistics - if (settings.Ystatistics) + if (settings.YstatisticsEnabled) statistics.print(phase) advancePhase() } + reporting.summarizeErrors() + if (traceSymbolActivity) units map (_.body) foreach (traceSymbols recordSymbolsInTree _) @@ -1671,19 +1549,19 @@ class Global(var currentSettings: Settings, var reporter: Reporter) if (settings.Yshow.isDefault) showMembers() - reportCompileErrors() + if (reporter.hasErrors) { + for ((sym, file) <- symSource.iterator) { + sym.reset(new loaders.SourcefileLoader(file)) + if (sym.isTerm) + sym.moduleClass reset loaders.moduleClassLoader + } + } symSource.keys foreach (x => resetPackageClass(x.owner)) + informTime("total", startTime) // Clear any sets or maps created via perRunCaches. perRunCaches.clearAll() - - // Reset project - if (!stopPhase("namer")) { - enteringPhase(namerPhase) { - resetProjectClasses(RootClass) - } - } } /** Compile list of abstract files. 
*/ @@ -1731,10 +1609,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } } - /** Reset package class to state at typer (not sure what this - * is needed for?) + /** Reset package class to state at typer (not sure what this is needed for?) */ - private def resetPackageClass(pclazz: Symbol) { + private def resetPackageClass(pclazz: Symbol): Unit = if (typerPhase != NoPhase) { enteringPhase(firstPhase) { pclazz.setInfo(enteringPhase(typerPhase)(pclazz.info)) } diff --git a/src/compiler/scala/tools/nsc/MainBench.scala b/src/compiler/scala/tools/nsc/MainBench.scala index 03190a63f31b..f01de0cbe118 100644 --- a/src/compiler/scala/tools/nsc/MainBench.scala +++ b/src/compiler/scala/tools/nsc/MainBench.scala @@ -24,7 +24,7 @@ object MainBench extends Driver with EvalLoop { var start = System.nanoTime() for (i <- 0 until NIter) { if (i == NIter-1) { - theCompiler.settings.Ystatistics.value = true + theCompiler.settings.Ystatistics.default.get foreach theCompiler.settings.Ystatistics.add Statistics.enabled = true } process(args) diff --git a/src/compiler/scala/tools/nsc/ObjectRunner.scala b/src/compiler/scala/tools/nsc/ObjectRunner.scala index 95264aeda6f1..7c14f4943f44 100644 --- a/src/compiler/scala/tools/nsc/ObjectRunner.scala +++ b/src/compiler/scala/tools/nsc/ObjectRunner.scala @@ -18,14 +18,14 @@ trait CommonRunner { * @throws NoSuchMethodException * @throws InvocationTargetException */ - def run(urls: List[URL], objectName: String, arguments: Seq[String]) { + def run(urls: Seq[URL], objectName: String, arguments: Seq[String]) { (ScalaClassLoader fromURLs urls).run(objectName, arguments) } /** Catches exceptions enumerated by run (in the case of InvocationTargetException, * unwrapping it) and returns it any thrown in Left(x). */ - def runAndCatch(urls: List[URL], objectName: String, arguments: Seq[String]): Either[Throwable, Boolean] = { + def runAndCatch(urls: Seq[URL], objectName: String, arguments: Seq[String]): Either[Throwable, Boolean] = { try { run(urls, objectName, arguments) ; Right(true) } catch { case e: Throwable => Left(unwrap(e)) } } diff --git a/src/compiler/scala/tools/nsc/Parsing.scala b/src/compiler/scala/tools/nsc/Parsing.scala new file mode 100644 index 000000000000..9e5999ce4f04 --- /dev/null +++ b/src/compiler/scala/tools/nsc/Parsing.scala @@ -0,0 +1,35 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2014 LAMP/EPFL, Typesafe Inc. + * @author Adriaan Moors + */ + +package scala +package tools.nsc + +import scala.reflect.internal.Positions + +/** Similar to Reporting: gather global functionality specific to parsing. 
+ */ +trait Parsing { self : Positions with Reporting => + def currentRun: RunParsing + + trait RunParsing { + val parsing: PerRunParsing = new PerRunParsing + } + + class PerRunParsing { + // for repl + private[this] var incompleteHandler: (Position, String) => Unit = null + def withIncompleteHandler[T](handler: (Position, String) => Unit)(thunk: => T) = { + val saved = incompleteHandler + incompleteHandler = handler + try thunk + finally incompleteHandler = saved + } + + def incompleteHandled = incompleteHandler != null + def incompleteInputError(pos: Position, msg: String): Unit = + if (incompleteHandled) incompleteHandler(pos, msg) + else reporter.error(pos, msg) + } +} \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/PhaseAssembly.scala b/src/compiler/scala/tools/nsc/PhaseAssembly.scala index cfb4cd23a1ad..e1cfa639607d 100644 --- a/src/compiler/scala/tools/nsc/PhaseAssembly.scala +++ b/src/compiler/scala/tools/nsc/PhaseAssembly.scala @@ -18,7 +18,7 @@ trait PhaseAssembly { /** * Aux datastructure for solving the constraint system - * The depency graph container with helper methods for node and edge creation + * The dependency graph container with helper methods for node and edge creation */ private class DependencyGraph { @@ -199,7 +199,7 @@ trait PhaseAssembly { // Add all phases in the set to the graph val graph = phasesSetToDepGraph(phasesSet) - val dot = if (settings.genPhaseGraph.isSetByUser) Some(settings.genPhaseGraph.value) else None + val dot = settings.genPhaseGraph.valueSetByUser // Output the phase dependency graph at this stage def dump(stage: Int) = dot foreach (n => graphToDotFile(graph, s"$n-$stage.dot")) diff --git a/src/compiler/scala/tools/nsc/Properties.scala b/src/compiler/scala/tools/nsc/Properties.scala index ed5fda9c3f43..9f160e248527 100644 --- a/src/compiler/scala/tools/nsc/Properties.scala +++ b/src/compiler/scala/tools/nsc/Properties.scala @@ -11,9 +11,12 @@ object Properties extends scala.util.PropertiesTrait { protected def propCategory = "compiler" protected def pickJarBasedOn = classOf[Global] - // settings based on jar properties + // settings based on jar properties, falling back to System prefixed by "scala." def residentPromptString = scalaPropOrElse("resident.prompt", "\nnsc> ") def shellPromptString = scalaPropOrElse("shell.prompt", "\nscala> ") + // message to display at EOF (which by default ends with + // a newline so as not to break the user's terminal) + def shellInterruptedString = scalaPropOrElse("shell.interrupted", f":quit$lineSeparator") // derived values def isEmacsShell = propOrEmpty("env.emacs") != "" diff --git a/src/compiler/scala/tools/nsc/Reporting.scala b/src/compiler/scala/tools/nsc/Reporting.scala new file mode 100644 index 000000000000..4d7e9e753f7e --- /dev/null +++ b/src/compiler/scala/tools/nsc/Reporting.scala @@ -0,0 +1,107 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2014 LAMP/EPFL, Typesafe Inc. + * @author Adriaan Moors + */ + +package scala +package tools +package nsc + +import scala.collection.{ mutable, immutable } +import scala.reflect.internal.util.StringOps.countElementsAsString + +/** Provides delegates to the reporter doing the actual work. 
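The new `PerRunParsing` above lets the REPL temporarily install a handler for incomplete input; without one, an incomplete parse is reported as an ordinary error. A trimmed-down sketch of that routing, with `Position` replaced by `Int` and the reporter by stderr:

    object IncompleteInputSketch {
      private var handler: (Int, String) => Unit = null

      def withIncompleteHandler[T](h: (Int, String) => Unit)(thunk: => T): T = {
        val saved = handler
        handler = h
        try thunk
        finally handler = saved
      }

      def incompleteInputError(pos: Int, msg: String): Unit =
        if (handler != null) handler(pos, msg)                // e.g. the REPL prompts for more input
        else Console.err.println(s"error at $pos: $msg")      // plain parse error otherwise
    }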
+ * PerRunReporting implements per-Run stateful info tracking and reporting + * + * TODO: make reporting configurable + */ +trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions with CompilationUnits with scala.reflect.internal.Symbols => + def settings: Settings + + // not deprecated yet, but a method called "error" imported into + // nearly every trait really must go. For now using globalError. + def error(msg: String) = globalError(msg) + + // a new instance of this class is created for every Run (access the current instance via `currentRun.reporting`) + protected def PerRunReporting = new PerRunReporting + class PerRunReporting extends PerRunReportingBase { + /** Collects for certain classes of warnings during this run. */ + private class ConditionalWarning(what: String, option: Settings#BooleanSetting) { + val warnings = mutable.LinkedHashMap[Position, String]() + def warn(pos: Position, msg: String) = + if (option) reporter.warning(pos, msg) + else if (!(warnings contains pos)) warnings += ((pos, msg)) + def summarize() = + if (warnings.nonEmpty && (option.isDefault || option)) { + val numWarnings = warnings.size + val warningVerb = if (numWarnings == 1) "was" else "were" + val warningCount = countElementsAsString(numWarnings, s"$what warning") + + reporter.warning(NoPosition, s"there $warningVerb $warningCount; re-run with ${option.name} for details") + } + } + + // This change broke sbt; I gave it the thrilling name of uncheckedWarnings0 so + // as to recover uncheckedWarnings for its ever-fragile compiler interface. + private val _deprecationWarnings = new ConditionalWarning("deprecation", settings.deprecation) + private val _uncheckedWarnings = new ConditionalWarning("unchecked", settings.unchecked) + private val _featureWarnings = new ConditionalWarning("feature", settings.feature) + private val _inlinerWarnings = new ConditionalWarning("inliner", settings.YinlinerWarnings) + private val _allConditionalWarnings = List(_deprecationWarnings, _uncheckedWarnings, _featureWarnings, _inlinerWarnings) + + // TODO: remove in favor of the overload that takes a Symbol, give that argument a default (NoSymbol) + def deprecationWarning(pos: Position, msg: String): Unit = _deprecationWarnings.warn(pos, msg) + def uncheckedWarning(pos: Position, msg: String): Unit = _uncheckedWarnings.warn(pos, msg) + def featureWarning(pos: Position, msg: String): Unit = _featureWarnings.warn(pos, msg) + def inlinerWarning(pos: Position, msg: String): Unit = _inlinerWarnings.warn(pos, msg) + + def deprecationWarnings = _deprecationWarnings.warnings.toList + def uncheckedWarnings = _uncheckedWarnings.warnings.toList + def featureWarnings = _featureWarnings.warnings.toList + def inlinerWarnings = _inlinerWarnings.warnings.toList + + def allConditionalWarnings = _allConditionalWarnings flatMap (_.warnings) + + // behold! 
the symbol that caused the deprecation warning (may not be deprecated itself) + def deprecationWarning(pos: Position, sym: Symbol, msg: String): Unit = _deprecationWarnings.warn(pos, msg) + def deprecationWarning(pos: Position, sym: Symbol): Unit = { + val suffix = sym.deprecationMessage match { case Some(msg) => ": "+ msg case _ => "" } + deprecationWarning(pos, sym, s"$sym${sym.locationString} is deprecated$suffix") + } + + private[this] var reportedFeature = Set[Symbol]() + def featureWarning(pos: Position, featureName: String, featureDesc: String, featureTrait: Symbol, construct: => String = "", required: Boolean): Unit = { + val req = if (required) "needs to" else "should" + val fqname = "scala.language." + featureName + val explain = ( + if (reportedFeature contains featureTrait) "" else + s"""| + |This can be achieved by adding the import clause 'import $fqname' + |or by setting the compiler option -language:$featureName. + |See the Scala docs for value $fqname for a discussion + |why the feature $req be explicitly enabled.""".stripMargin + ) + reportedFeature += featureTrait + + val msg = s"$featureDesc $req be enabled\nby making the implicit value $fqname visible.$explain" replace ("#", construct) + if (required) reporter.error(pos, msg) + else featureWarning(pos, msg) + } + + /** Has any macro expansion used a fallback during this run? */ + var seenMacroExpansionsFallingBack = false + + def summarizeErrors(): Unit = if (!reporter.hasErrors) { + _allConditionalWarnings foreach (_.summarize()) + + if (seenMacroExpansionsFallingBack) + reporter.warning(NoPosition, "some macros could not be expanded and code fell back to overridden methods;"+ + "\nrecompiling with generated classfiles on the classpath might help.") + + // todo: migrationWarnings + + if (settings.fatalWarnings && reporter.hasWarnings) + reporter.error(NoPosition, "No warnings can be incurred under -Xfatal-warnings.") + } + } +} diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala index c2d62db5585a..6d24b31531b4 100644 --- a/src/compiler/scala/tools/nsc/ScriptRunner.scala +++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala @@ -6,9 +6,12 @@ package scala package tools.nsc -import io.{ Directory, File, Path } +import io.{ AbstractFile, Directory, File, Path } import java.io.IOException +import scala.tools.nsc.classpath.DirectoryFlatClassPath import scala.tools.nsc.reporters.{Reporter,ConsoleReporter} +import scala.tools.nsc.settings.ClassPathRepresentationType +import scala.tools.nsc.util.ClassPath.DefaultJavaContext import util.Exceptional.unwrap /** An object that runs Scala code in script files. @@ -111,6 +114,14 @@ class ScriptRunner extends HasCompileSocket { else None } + def hasClassToRun(d: Directory): Boolean = { + val cp = settings.YclasspathImpl.value match { + case ClassPathRepresentationType.Recursive => DefaultJavaContext.newClassPath(AbstractFile.getDirectory(d)) + case ClassPathRepresentationType.Flat => DirectoryFlatClassPath(d.jfile) + } + cp.findClass(mainClass).isDefined + } + /* The script runner calls sys.exit to communicate a return value, but this must * not take place until there are no non-daemon threads running. Tickets #1955, #2006. 
*/ @@ -124,15 +135,21 @@ class ScriptRunner extends HasCompileSocket { compile match { case Some(compiledPath) => - try io.Jar.create(jarFile, compiledPath, mainClass) - catch { case _: Exception => jarFile.delete() } - - if (jarOK) { - compiledPath.deleteRecursively() - handler(jarFile.toAbsolute.path) + if (!hasClassToRun(compiledPath)) { + // it compiled ok, but there is nothing to run; + // running an empty script should succeed + true + } else { + try io.Jar.create(jarFile, compiledPath, mainClass) + catch { case _: Exception => jarFile.delete() } + + if (jarOK) { + compiledPath.deleteRecursively() + handler(jarFile.toAbsolute.path) + } + // jar failed; run directly from the class files + else handler(compiledPath.path) } - // jar failed; run directly from the class files - else handler(compiledPath.path) case _ => false } } @@ -140,8 +157,8 @@ class ScriptRunner extends HasCompileSocket { if (jarOK) handler(jarFile.toAbsolute.path) // pre-compiled jar is current else recompile() // jar old - recompile the script. } - // don't use a cache jar at all--just use the class files - else compile exists (cp => handler(cp.path)) + // don't use a cache jar at all--just use the class files, if they exist + else compile exists (cp => !hasClassToRun(cp) || handler(cp.path)) } } diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala index 6d9b41ec45b7..02a199f7ac47 100755 --- a/src/compiler/scala/tools/nsc/ast/DocComments.scala +++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala @@ -59,14 +59,21 @@ trait DocComments { self: Global => comment.defineVariables(sym) } + + def replaceInheritDocToInheritdoc(docStr: String):String = { + docStr.replaceAll("""\{@inheritDoc\p{Zs}*\}""", "@inheritdoc") + } + /** The raw doc comment of symbol `sym`, minus usecase and define sections, augmented by * missing sections of an inherited doc comment. * If a symbol does not have a doc comment but some overridden version of it does, * the doc comment of the overridden version is copied instead. */ def cookedDocComment(sym: Symbol, docStr: String = ""): String = cookedDocComments.getOrElseUpdate(sym, { - val ownComment = if (docStr.length == 0) docComments get sym map (_.template) getOrElse "" + var ownComment = if (docStr.length == 0) docComments get sym map (_.template) getOrElse "" else DocComment(docStr).template + ownComment = replaceInheritDocToInheritdoc(ownComment) + superComment(sym) match { case None => if (ownComment.indexOf("@inheritdoc") != -1) diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala index 0731d78a9b3c..689e6405d0eb 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala @@ -74,6 +74,11 @@ abstract class TreeInfo extends scala.reflect.internal.TreeInfo { } } + // TODO these overrides, and the slow trickle of bugs that they solve (e.g. SI-8479), + // suggest that we should pursue an alternative design in which the DocDef nodes + // are eliminated from the tree before typer, and instead are modelled as tree + // attachments. + /** Is tree legal as a member definition of an interface? 
*/ override def isInterfaceMember(tree: Tree): Boolean = tree match { @@ -81,6 +86,11 @@ abstract class TreeInfo extends scala.reflect.internal.TreeInfo { case _ => super.isInterfaceMember(tree) } + override def isConstructorWithDefault(t: Tree) = t match { + case DocDef(_, definition) => isConstructorWithDefault(definition) + case _ => super.isConstructorWithDefault(t) + } + /** Is tree a pure (i.e. non-side-effecting) definition? */ override def isPureDef(tree: Tree): Boolean = tree match { diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala index d3f495f28032..96939e616cd2 100755 --- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala @@ -346,12 +346,11 @@ trait MarkupParsers { // parse more XML ? if (charComingAfter(xSpaceOpt()) == '<') { - xSpaceOpt() - while (ch == '<') { + do { + xSpaceOpt() nextch() ts append element - xSpaceOpt() - } + } while (charComingAfter(xSpaceOpt()) == '<') handle.makeXMLseq(r2p(start, start, curOffset), ts) } else { @@ -426,11 +425,10 @@ trait MarkupParsers { if (ch != '/') ts append xPattern // child else return false // terminate - case '{' => // embedded Scala patterns - while (ch == '{') { - nextch() + case '{' if xCheckEmbeddedBlock => // embedded Scala patterns, if not double brace + do { ts ++= xScalaPatterns - } + } while (xCheckEmbeddedBlock) assert(!xEmbeddedBlock, "problem with embedded block") case SU => diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 9e631febeecf..466381000376 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -154,8 +154,8 @@ self => def unit = global.currentUnit // suppress warnings; silent abort on errors - def warning(offset: Offset, msg: String) {} - def deprecationWarning(offset: Offset, msg: String) {} + def warning(offset: Offset, msg: String): Unit = () + def deprecationWarning(offset: Offset, msg: String): Unit = () def syntaxError(offset: Offset, msg: String): Unit = throw new MalformedInput(offset, msg) def incompleteInputError(msg: String): Unit = throw new MalformedInput(source.content.length - 1, msg) @@ -204,13 +204,11 @@ self => override def newScanner() = new UnitScanner(unit, patches) - override def warning(offset: Offset, msg: String) { - unit.warning(o2p(offset), msg) - } + override def warning(offset: Offset, msg: String): Unit = + reporter.warning(o2p(offset), msg) - override def deprecationWarning(offset: Offset, msg: String) { - unit.deprecationWarning(o2p(offset), msg) - } + override def deprecationWarning(offset: Offset, msg: String): Unit = + currentRun.reporting.deprecationWarning(o2p(offset), msg) private var smartParsing = false @inline private def withSmartParsing[T](body: => T): T = { @@ -224,17 +222,17 @@ self => val syntaxErrors = new ListBuffer[(Int, String)] def showSyntaxErrors() = for ((offset, msg) <- syntaxErrors) - unit.error(o2p(offset), msg) + reporter.error(o2p(offset), msg) - override def syntaxError(offset: Offset, msg: String) { + override def syntaxError(offset: Offset, msg: String): Unit = { if (smartParsing) syntaxErrors += ((offset, msg)) - else unit.error(o2p(offset), msg) + else reporter.error(o2p(offset), msg) } - override def incompleteInputError(msg: String) { + override def incompleteInputError(msg: String): Unit = { val offset = source.content.length - 1 
if (smartParsing) syntaxErrors += ((offset, msg)) - else unit.incompleteInputError(o2p(offset), msg) + else currentRun.parsing.incompleteInputError(o2p(offset), msg) } /** parse unit. If there are inbalanced braces, @@ -335,7 +333,7 @@ self => */ private var inScalaPackage = false private var currentPackage = "" - def resetPackage() { + def resetPackage(): Unit = { inScalaPackage = false currentPackage = "" } @@ -514,7 +512,7 @@ self => finally inFunReturnType = saved } - protected def skip(targetToken: Token) { + protected def skip(targetToken: Token): Unit = { var nparens = 0 var nbraces = 0 while (true) { @@ -544,27 +542,25 @@ self => } def warning(offset: Offset, msg: String): Unit def incompleteInputError(msg: String): Unit - private def syntaxError(pos: Position, msg: String, skipIt: Boolean) { - syntaxError(pos pointOrElse in.offset, msg, skipIt) - } def syntaxError(offset: Offset, msg: String): Unit - def syntaxError(msg: String, skipIt: Boolean) { + + private def syntaxError(pos: Position, msg: String, skipIt: Boolean): Unit = + syntaxError(pos pointOrElse in.offset, msg, skipIt) + def syntaxError(msg: String, skipIt: Boolean): Unit = syntaxError(in.offset, msg, skipIt) - } - def syntaxError(offset: Offset, msg: String, skipIt: Boolean) { + def syntaxError(offset: Offset, msg: String, skipIt: Boolean): Unit = { if (offset > lastErrorOffset) { syntaxError(offset, msg) - // no more errors on this token. - lastErrorOffset = in.offset + lastErrorOffset = in.offset // no more errors on this token. } if (skipIt) skip(UNDEF) } - def warning(msg: String) { warning(in.offset, msg) } + def warning(msg: String): Unit = warning(in.offset, msg) - def syntaxErrorOrIncomplete(msg: String, skipIt: Boolean) { + def syntaxErrorOrIncomplete(msg: String, skipIt: Boolean): Unit = { if (in.token == EOF) incompleteInputError(msg) else @@ -654,9 +650,10 @@ self => def isIdentExcept(except: Name) = isIdent && in.name != except def isIdentOf(name: Name) = isIdent && in.name == name - def isUnaryOp = isIdent && raw.isUnary(in.name) - def isRawStar = isIdent && in.name == raw.STAR - def isRawBar = isIdent && in.name == raw.BAR + def isUnaryOp = isIdent && raw.isUnary(in.name) + def isRawStar = isRawIdent && in.name == raw.STAR + def isRawBar = isRawIdent && in.name == raw.BAR + def isRawIdent = in.token == IDENTIFIER def isIdent = in.token == IDENTIFIER || in.token == BACKQUOTED_IDENT def isMacro = in.token == IDENTIFIER && in.name == nme.MACROkw @@ -718,7 +715,7 @@ self => /** Convert tree to formal parameter. */ def convertToParam(tree: Tree): ValDef = atPos(tree.pos) { - def removeAsPlaceholder(name: Name) { + def removeAsPlaceholder(name: Name): Unit = { placeholderParams = placeholderParams filter (_.name != name) } def errorParam = makeParam(nme.ERROR, errorTypeTree setPos o2p(tree.pos.end)) @@ -1001,19 +998,30 @@ self => } def infixTypeRest(t: Tree, mode: InfixMode.Value): Tree = { - if (isIdent && in.name != nme.STAR) { - val opOffset = in.offset + // Detect postfix star for repeated args. + // Only RPAREN can follow, but accept COMMA and EQUALS for error's sake. + // Take RBRACE as a paren typo. 
+ def checkRepeatedParam = if (isRawStar) { + lookingAhead (in.token match { + case RPAREN | COMMA | EQUALS | RBRACE => t + case _ => EmptyTree + }) + } else EmptyTree + def asInfix = { + val opOffset = in.offset val leftAssoc = treeInfo.isLeftAssoc(in.name) - if (mode != InfixMode.FirstOp) checkAssoc(opOffset, in.name, leftAssoc = mode == InfixMode.LeftOp) - val op = identForType() - val tycon = atPos(opOffset) { Ident(op) } + if (mode != InfixMode.FirstOp) + checkAssoc(opOffset, in.name, leftAssoc = mode == InfixMode.LeftOp) + val tycon = atPos(opOffset) { Ident(identForType()) } newLineOptWhenFollowing(isTypeIntroToken) def mkOp(t1: Tree) = atPos(t.pos.start, opOffset) { AppliedTypeTree(tycon, List(t, t1)) } if (leftAssoc) infixTypeRest(mkOp(compoundType()), InfixMode.LeftOp) else mkOp(infixType(InfixMode.RightOp)) - } else t + } + if (isIdent) checkRepeatedParam orElse asInfix + else t } /** {{{ @@ -1051,7 +1059,7 @@ self => def identOrMacro(): Name = if (isMacro) rawIdent() else ident() def selector(t: Tree): Tree = { - val point = in.offset + val point = if(isIdent) in.offset else in.lastOffset //SI-8459 //assert(t.pos.isDefined, t) if (t != EmptyTree) Select(t, ident(skipIt = false)) setPos r2p(t.pos.start, point, in.lastOffset) @@ -1221,15 +1229,15 @@ self => skipIt = true)(EmptyTree) // Like Swiss cheese, with holes def stringCheese: Tree = atPos(in.offset) { - val start = in.offset + val start = in.offset val interpolator = in.name.encoded // ident() for INTERPOLATIONID val partsBuf = new ListBuffer[Tree] - val exprBuf = new ListBuffer[Tree] + val exprsBuf = new ListBuffer[Tree] in.nextToken() while (in.token == STRINGPART) { partsBuf += literal() - exprBuf += ( + exprsBuf += ( if (inPattern) dropAnyBraces(pattern()) else in.token match { case IDENTIFIER => atPos(in.offset)(Ident(ident())) @@ -1242,11 +1250,13 @@ self => } if (in.token == STRINGLIT) partsBuf += literal() + // Documenting that it is intentional that the ident is not rooted for purposes of virtualization + //val t1 = atPos(o2p(start)) { Select(Select (Ident(nme.ROOTPKG), nme.scala_), nme.StringContext) } val t1 = atPos(o2p(start)) { Ident(nme.StringContext) } val t2 = atPos(start) { Apply(t1, partsBuf.toList) } t2 setPos t2.pos.makeTransparent val t3 = Select(t2, interpolator) setPos t2.pos - atPos(start) { Apply(t3, exprBuf.toList) } + atPos(start) { Apply(t3, exprsBuf.toList) } } if (inPattern) stringCheese else withPlaceholders(stringCheese, isAny = true) // strinterpolator params are Any* by definition @@ -1254,21 +1264,21 @@ self => /* ------------- NEW LINES ------------------------------------------------- */ - def newLineOpt() { + def newLineOpt(): Unit = { if (in.token == NEWLINE) in.nextToken() } - def newLinesOpt() { + def newLinesOpt(): Unit = { if (in.token == NEWLINE || in.token == NEWLINES) in.nextToken() } - def newLineOptWhenFollowedBy(token: Offset) { + def newLineOptWhenFollowedBy(token: Offset): Unit = { // note: next is defined here because current == NEWLINE if (in.token == NEWLINE && in.next.token == token) newLineOpt() } - def newLineOptWhenFollowing(p: Token => Boolean) { + def newLineOptWhenFollowing(p: Token => Boolean): Unit = { // note: next is defined here because current == NEWLINE if (in.token == NEWLINE && p(in.next.token)) newLineOpt() } @@ -1549,7 +1559,7 @@ self => } /** {{{ - * PrefixExpr ::= [`-' | `+' | `~' | `!' 
| `&'] SimpleExpr + * PrefixExpr ::= [`-' | `+' | `~' | `!'] SimpleExpr * }}} */ def prefixExpr(): Tree = { diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index e8d46704c32b..92833d647b64 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -113,7 +113,7 @@ trait Scanners extends ScannersCommon { case SU | CR | LF => case _ => nextChar() ; skipLineComment() } - private def maybeOpen() { + private def maybeOpen(): Unit = { putCommentChar() if (ch == '*') { putCommentChar() @@ -137,7 +137,7 @@ trait Scanners extends ScannersCommon { def skipDocComment(): Unit = skipNestedComments() def skipBlockComment(): Unit = skipNestedComments() - private def skipToCommentEnd(isLineComment: Boolean) { + private def skipToCommentEnd(isLineComment: Boolean): Unit = { nextChar() if (isLineComment) skipLineComment() else { @@ -185,7 +185,7 @@ trait Scanners extends ScannersCommon { /** append Unicode character to "cbuf" buffer */ - protected def putChar(c: Char) { + protected def putChar(c: Char): Unit = { // assert(cbuf.size < 10000, cbuf) cbuf.append(c) } @@ -196,7 +196,7 @@ trait Scanners extends ScannersCommon { protected def emitIdentifierDeprecationWarnings = true /** Clear buffer and set name and token */ - private def finishNamed(idtoken: Token = IDENTIFIER) { + private def finishNamed(idtoken: Token = IDENTIFIER): Unit = { name = newTermName(cbuf.toString) cbuf.clear() token = idtoken @@ -215,7 +215,7 @@ trait Scanners extends ScannersCommon { } /** Clear buffer and set string */ - private def setStrVal() { + private def setStrVal(): Unit = { strVal = cbuf.toString cbuf.clear() } @@ -270,7 +270,7 @@ trait Scanners extends ScannersCommon { /** Produce next token, filling TokenData fields of Scanner. */ - def nextToken() { + def nextToken(): Unit = { val lastToken = token // Adapt sepRegions according to last token (lastToken: @switch) match { @@ -341,7 +341,7 @@ trait Scanners extends ScannersCommon { prev copyFrom this val nextLastOffset = charOffset - 1 fetchToken() - def resetOffset() { + def resetOffset(): Unit = { offset = prev.offset lastOffset = prev.lastOffset } @@ -399,7 +399,7 @@ trait Scanners extends ScannersCommon { /** read next token, filling TokenData fields of Scanner. 
*/ - protected final def fetchToken() { + protected final def fetchToken(): Unit = { offset = charOffset - 1 (ch: @switch) match { @@ -453,18 +453,15 @@ trait Scanners extends ScannersCommon { getOperatorRest() } case '0' => - def fetchZero() = { - putChar(ch) + def fetchLeadingZero(): Unit = { nextChar() - if (ch == 'x' || ch == 'X') { - nextChar() - base = 16 - } else { - base = 8 + ch match { + case 'x' | 'X' => base = 16 ; nextChar() + case _ => base = 8 // single decimal zero, perhaps } - getNumber() } - fetchZero() + fetchLeadingZero() + getNumber() case '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' => base = 10 getNumber() @@ -604,7 +601,7 @@ trait Scanners extends ScannersCommon { // Identifiers --------------------------------------------------------------- - private def getBackquotedIdent() { + private def getBackquotedIdent(): Unit = { nextChar() getLitChars('`') if (ch == '`') { @@ -664,7 +661,7 @@ trait Scanners extends ScannersCommon { else finishNamed() } - private def getIdentOrOperatorRest() { + private def getIdentOrOperatorRest(): Unit = { if (isIdentifierPart(ch)) getIdentRest() else ch match { @@ -688,9 +685,11 @@ trait Scanners extends ScannersCommon { setStrVal() nextChar() token = STRINGLIT - } else syntaxError("unclosed string literal") + } else unclosedStringLit() } + private def unclosedStringLit(): Unit = syntaxError("unclosed string literal") + private def getRawStringLit(): Unit = { if (ch == '\"') { nextRawChar() @@ -764,7 +763,7 @@ trait Scanners extends ScannersCommon { if (multiLine) incompleteInputError("unclosed multi-line string literal") else - syntaxError("unclosed string literal") + unclosedStringLit() } else { putChar(ch) @@ -857,7 +856,7 @@ trait Scanners extends ScannersCommon { /** read fractional part and exponent of floating point number * if one is present. */ - protected def getFraction() { + protected def getFraction(): Unit = { token = DOUBLELIT while ('0' <= ch && ch <= '9') { putChar(ch) @@ -900,62 +899,61 @@ trait Scanners extends ScannersCommon { */ def charVal: Char = if (strVal.length > 0) strVal.charAt(0) else 0 - /** Convert current strVal, base to long value + /** Convert current strVal, base to long value. * This is tricky because of max negative value. + * + * Conversions in base 10 and 16 are supported. As a permanent migration + * path, attempts to write base 8 literals except `0` emit a verbose error. */ def intVal(negated: Boolean): Long = { - if (token == CHARLIT && !negated) { - charVal.toLong - } else { - var value: Long = 0 - val divider = if (base == 10) 1 else 2 - val limit: Long = - if (token == LONGLIT) Long.MaxValue else Int.MaxValue - var i = 0 + def malformed: Long = { + if (base == 8) syntaxError("Decimal integer literals may not have a leading zero. 
(Octal syntax is obsolete.)") + else syntaxError("malformed integer number") + 0 + } + def tooBig: Long = { + syntaxError("integer number too large") + 0 + } + def intConvert: Long = { val len = strVal.length - while (i < len) { - val d = digit2int(strVal charAt i, base) - if (d < 0) { - syntaxError("malformed integer number") - return 0 - } - if (value < 0 || - limit / (base / divider) < value || - limit - (d / divider) < value * (base / divider) && - !(negated && limit == value * base - 1 + d)) { - syntaxError("integer number too large") - return 0 - } - value = value * base + d - i += 1 + if (len == 0) { + if (base != 8) syntaxError("missing integer number") // e.g., 0x; + 0 + } else { + val divider = if (base == 10) 1 else 2 + val limit: Long = if (token == LONGLIT) Long.MaxValue else Int.MaxValue + @tailrec def convert(value: Long, i: Int): Long = + if (i >= len) value + else { + val d = digit2int(strVal charAt i, base) + if (d < 0) + malformed + else if (value < 0 || + limit / (base / divider) < value || + limit - (d / divider) < value * (base / divider) && + !(negated && limit == value * base - 1 + d)) + tooBig + else + convert(value * base + d, i + 1) + } + val result = convert(0, 0) + if (base == 8) malformed else if (negated) -result else result } - if (negated) -value else value } + if (token == CHARLIT && !negated) charVal.toLong else intConvert } def intVal: Long = intVal(negated = false) /** Convert current strVal, base to double value - */ + */ def floatVal(negated: Boolean): Double = { - - val limit: Double = - if (token == DOUBLELIT) Double.MaxValue else Float.MaxValue + val limit: Double = if (token == DOUBLELIT) Double.MaxValue else Float.MaxValue try { val value: Double = java.lang.Double.valueOf(strVal).doubleValue() - def isDeprecatedForm = { - val idx = strVal indexOf '.' - (idx == strVal.length - 1) || ( - (idx >= 0) - && (idx + 1 < strVal.length) - && (!Character.isDigit(strVal charAt (idx + 1))) - ) - } if (value > limit) syntaxError("floating point number too large") - if (isDeprecatedForm) - syntaxError("floating point number is missing digit after dot") - if (negated) -value else value } catch { case _: NumberFormatException => @@ -966,93 +964,50 @@ trait Scanners extends ScannersCommon { def floatVal: Double = floatVal(negated = false) - def checkNoLetter() { + def checkNoLetter(): Unit = { if (isIdentifierPart(ch) && ch >= ' ') syntaxError("Invalid literal number") } - /** Read a number into strVal and set base - */ - protected def getNumber() { - val base1 = if (base < 10) 10 else base - // Read 8,9's even if format is octal, produce a malformed number error afterwards. - // At this point, we have already read the first digit, so to tell an innocent 0 apart - // from an octal literal 0123... (which we want to disallow), we check whether there - // are any additional digits coming after the first one we have already read. - var notSingleZero = false - while (digit2int(ch, base1) >= 0) { - putChar(ch) - nextChar() - notSingleZero = true - } - token = INTLIT - - /* When we know for certain it's a number after using a touch of lookahead */ - def restOfNumber() = { - putChar(ch) - nextChar() + /** Read a number into strVal. + * + * The `base` can be 8, 10 or 16, where base 8 flags a leading zero. + * For ints, base 8 is legal only for the case of exactly one zero. 
+ */ + protected def getNumber(): Unit = { + // consume digits of a radix + def consumeDigits(radix: Int): Unit = + while (digit2int(ch, radix) >= 0) { + putChar(ch) + nextChar() + } + // adding decimal point is always OK because `Double valueOf "0."` is OK + def restOfNonIntegralNumber(): Unit = { + putChar('.') + if (ch == '.') nextChar() getFraction() } - def restOfUncertainToken() = { - def isEfd = ch match { case 'e' | 'E' | 'f' | 'F' | 'd' | 'D' => true ; case _ => false } - def isL = ch match { case 'l' | 'L' => true ; case _ => false } - - if (base <= 10 && isEfd) - getFraction() - else { - // Checking for base == 8 is not enough, because base = 8 is set - // as soon as a 0 is read in `case '0'` of method fetchToken. - if (base == 8 && notSingleZero) syntaxError("Non-zero integral values may not have a leading zero.") - setStrVal() - if (isL) { - nextChar() - token = LONGLIT - } - else checkNoLetter() + // after int: 5e7f, 42L, 42.toDouble but not 42b. Repair 0d. + def restOfNumber(): Unit = { + ch match { + case 'e' | 'E' | 'f' | 'F' | + 'd' | 'D' => if (cbuf.isEmpty) putChar('0'); restOfNonIntegralNumber() + case 'l' | 'L' => token = LONGLIT ; setStrVal() ; nextChar() + case _ => token = INTLIT ; setStrVal() ; checkNoLetter() } } - if (base > 10 || ch != '.') - restOfUncertainToken() - else { - val lookahead = lookaheadReader - val c = lookahead.getc() - - /* Prohibit 1. */ - if (!isDigit(c)) - return setStrVal() - - val isDefinitelyNumber = (c: @switch) match { - /** Another digit is a giveaway. */ - case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' => - true + // consume leading digits, provisionally an Int + consumeDigits(if (base == 16) 16 else 10) - /* Backquoted idents like 22.`foo`. */ - case '`' => - return setStrVal() /** Note the early return */ - - /* These letters may be part of a literal, or a method invocation on an Int. - */ - case 'd' | 'D' | 'f' | 'F' => - !isIdentifierPart(lookahead.getc()) - - /* A little more special handling for e.g. 5e7 */ - case 'e' | 'E' => - val ch = lookahead.getc() - !isIdentifierPart(ch) || (isDigit(ch) || ch == '+' || ch == '-') - - case x => - !isIdentifierStart(x) - } - if (isDefinitelyNumber) restOfNumber() - else restOfUncertainToken() - } + val detectedFloat: Boolean = base != 16 && ch == '.' 
&& isDigit(lookaheadReader.getc) + if (detectedFloat) restOfNonIntegralNumber() else restOfNumber() } /** Parse character literal if current character is followed by \', * or follow with given op and return a symbol literal token */ - def charLitOr(op: () => Unit) { + def charLitOr(op: () => Unit): Unit = { putChar(ch) nextChar() if (ch == '\'') { @@ -1068,21 +1023,19 @@ trait Scanners extends ScannersCommon { // Errors ----------------------------------------------------------------- - /** generate an error at the given offset - */ - def syntaxError(off: Offset, msg: String) { + /** generate an error at the given offset */ + def syntaxError(off: Offset, msg: String): Unit = { error(off, msg) token = ERROR } - /** generate an error at the current token offset - */ + /** generate an error at the current token offset */ def syntaxError(msg: String): Unit = syntaxError(offset, msg) def deprecationWarning(msg: String): Unit = deprecationWarning(offset, msg) /** signal an error where the input ended in the middle of a token */ - def incompleteInputError(msg: String) { + def incompleteInputError(msg: String): Unit = { incompleteInputError(offset, msg) token = EOF } @@ -1134,7 +1087,7 @@ trait Scanners extends ScannersCommon { /** Initialization method: read first char, then first token */ - def init() { + def init(): Unit = { nextChar() nextToken() } @@ -1259,9 +1212,9 @@ trait Scanners extends ScannersCommon { class UnitScanner(val unit: CompilationUnit, patches: List[BracePatch]) extends SourceFileScanner(unit.source) { def this(unit: CompilationUnit) = this(unit, List()) - override def deprecationWarning(off: Offset, msg: String) = unit.deprecationWarning(unit.position(off), msg) - override def error (off: Offset, msg: String) = unit.error(unit.position(off), msg) - override def incompleteInputError(off: Offset, msg: String) = unit.incompleteInputError(unit.position(off), msg) + override def deprecationWarning(off: Offset, msg: String) = currentRun.reporting.deprecationWarning(unit.position(off), msg) + override def error (off: Offset, msg: String) = reporter.error(unit.position(off), msg) + override def incompleteInputError(off: Offset, msg: String) = currentRun.parsing.incompleteInputError(unit.position(off), msg) private var bracePatches: List[BracePatch] = patches @@ -1490,6 +1443,6 @@ trait Scanners extends ScannersCommon { // when skimming through the source file trying to heal braces override def emitIdentifierDeprecationWarnings = false - override def error(offset: Offset, msg: String) {} + override def error(offset: Offset, msg: String): Unit = () } } diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala index 1abc0c860cae..8cd915bf22fb 100755 --- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala @@ -184,7 +184,8 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) { ) val uri1 = attrMap(z) match { - case Apply(_, List(uri @ Literal(Constant(_)))) => mkAssign(uri) + case Apply(Select(New(Select(Select(Select(Ident(nme.ROOTPKG), nme.scala_), nme.xml), tpnme.Text)), nme.CONSTRUCTOR), List(uri @ Literal(Constant(_)))) => + mkAssign(uri) case Select(_, nme.Nil) => mkAssign(const(null)) // allow for xmlns="" -- bug #1626 case x => mkAssign(x) } diff --git a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala index 
3a695c6f59c7..df2073785b84 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala @@ -83,7 +83,7 @@ abstract class SyntaxAnalyzer extends SubComponent with Parsers with MarkupParse private def initialUnitBody(unit: CompilationUnit): Tree = { if (unit.isJava) new JavaUnitParser(unit).parse() - else if (global.reporter.incompleteHandled) newUnitParser(unit).parse() + else if (currentRun.parsing.incompleteHandled) newUnitParser(unit).parse() else newUnitParser(unit).smartParse() } diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala index 32b5a98b9865..6bd123c51f93 100644 --- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala +++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala @@ -7,7 +7,10 @@ package scala.tools.nsc package backend import io.AbstractFile -import util.{ClassPath,MergedClassPath,DeltaClassPath} +import scala.tools.nsc.classpath.FlatClassPath +import scala.tools.nsc.settings.ClassPathRepresentationType +import scala.tools.nsc.util.{ ClassPath, DeltaClassPath, MergedClassPath } +import scala.tools.util.FlatClassPathResolver import scala.tools.util.PathResolver trait JavaPlatform extends Platform { @@ -16,13 +19,23 @@ trait JavaPlatform extends Platform { import global._ import definitions._ - private var currentClassPath: Option[MergedClassPath[AbstractFile]] = None + private[nsc] var currentClassPath: Option[MergedClassPath[AbstractFile]] = None def classPath: ClassPath[AbstractFile] = { + assert(settings.YclasspathImpl.value == ClassPathRepresentationType.Recursive, + "To use recursive classpath representation you must enable it with -YclasspathImpl:recursive compiler option.") + if (currentClassPath.isEmpty) currentClassPath = Some(new PathResolver(settings).result) currentClassPath.get } + private[nsc] lazy val flatClassPath: FlatClassPath = { + assert(settings.YclasspathImpl.value == ClassPathRepresentationType.Flat, + "To use flat classpath representation you must enable it with -YclasspathImpl:flat compiler option.") + + new FlatClassPathResolver(settings).result + } + /** Update classpath with a substituted subentry */ def updateClassPath(subst: Map[ClassPath[AbstractFile], ClassPath[AbstractFile]]) = currentClassPath = Some(new DeltaClassPath(currentClassPath.get, subst)) @@ -55,8 +68,6 @@ trait JavaPlatform extends Platform { (sym isNonBottomSubClass BoxedBooleanClass) } - def doLoad(cls: ClassPath[AbstractFile]#ClassRep): Boolean = true - def needCompile(bin: AbstractFile, src: AbstractFile) = src.lastModified >= bin.lastModified } diff --git a/src/compiler/scala/tools/nsc/backend/Platform.scala b/src/compiler/scala/tools/nsc/backend/Platform.scala index 499f8a9290c1..c3bc213be1e3 100644 --- a/src/compiler/scala/tools/nsc/backend/Platform.scala +++ b/src/compiler/scala/tools/nsc/backend/Platform.scala @@ -8,6 +8,7 @@ package backend import util.ClassPath import io.AbstractFile +import scala.tools.nsc.classpath.FlatClassPath /** The platform dependent pieces of Global. */ @@ -15,9 +16,12 @@ trait Platform { val symbolTable: symtab.SymbolTable import symbolTable._ - /** The compiler classpath. */ + /** The old, recursive implementation of compiler classpath. */ def classPath: ClassPath[AbstractFile] + /** The new implementation of compiler classpath. 
*/ + private[nsc] def flatClassPath: FlatClassPath + /** Update classpath with a substitution that maps entries to entries */ def updateClassPath(subst: Map[ClassPath[AbstractFile], ClassPath[AbstractFile]]) @@ -30,14 +34,6 @@ trait Platform { /** The various ways a boxed primitive might materialize at runtime. */ def isMaybeBoxed(sym: Symbol): Boolean - /** - * Tells whether a class should be loaded and entered into the package - * scope. On .NET, this method returns `false` for all synthetic classes - * (anonymous classes, implementation classes, module classes), their - * symtab is encoded in the pickle of another class. - */ - def doLoad(cls: ClassPath[AbstractFile]#ClassRep): Boolean - /** * Tells whether a class with both a binary and a source representation * (found in classpath and in sourcepath) should be re-compiled. Behaves diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala index f9551697d286..ad1975ef2370 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala @@ -300,14 +300,16 @@ trait BasicBlocks { if (!closed) instructionList = instructionList map (x => map.getOrElse(x, x)) else - instrs.zipWithIndex collect { - case (oldInstr, i) if map contains oldInstr => - // SI-6288 clone important here because `replaceInstruction` assigns - // a position to `newInstr`. Without this, a single instruction can - // be added twice, and the position last position assigned clobbers - // all previous positions in other usages. - val newInstr = map(oldInstr).clone() - code.touched |= replaceInstruction(i, newInstr) + instrs.iterator.zipWithIndex foreach { + case (oldInstr, i) => + if (map contains oldInstr) { + // SI-6288 clone important here because `replaceInstruction` assigns + // a position to `newInstr`. Without this, a single instruction can + // be added twice, and the position last position assigned clobbers + // all previous positions in other usages. 
+ val newInstr = map(oldInstr).clone() + code.touched |= replaceInstruction(i, newInstr) + } } ////////////////////// Emit ////////////////////// diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 1cea4beddae0..72aa44d8d948 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -13,13 +13,12 @@ import scala.collection.{ mutable, immutable } import scala.collection.mutable.{ ListBuffer, Buffer } import scala.tools.nsc.symtab._ import scala.annotation.switch -import PartialFunction._ /** * @author Iulian Dragos * @version 1.0 */ -abstract class GenICode extends SubComponent { +abstract class GenICode extends SubComponent { import global._ import icodes._ import icodes.opcodes._ @@ -30,6 +29,9 @@ abstract class GenICode extends SubComponent { } import platform.isMaybeBoxed + private val bCodeICodeCommon: jvm.BCodeICodeCommon[global.type] = new jvm.BCodeICodeCommon(global) + import bCodeICodeCommon._ + val phaseName = "icode" override def newPhase(prev: Phase) = new ICodePhase(prev) @@ -678,7 +680,7 @@ abstract class GenICode extends SubComponent { val dims = arr.dimensions var elemKind = arr.elementKind if (args.length > dims) - unit.error(tree.pos, "too many arguments for array constructor: found " + args.length + + reporter.error(tree.pos, "too many arguments for array constructor: found " + args.length + " but array has only " + dims + " dimension(s)") if (args.length != dims) for (i <- args.length until dims) elemKind = ARRAY(elemKind) @@ -1075,7 +1077,7 @@ abstract class GenICode extends SubComponent { () case (_, UNIT) => ctx.bb.emit(DROP(from), pos) - // otherwise we'd better be doing a primtive -> primitive coercion or there's a problem + // otherwise we'd better be doing a primitive -> primitive coercion or there's a problem case _ if !from.isRefOrArrayType && !to.isRefOrArrayType => coerce(from, to) case _ => @@ -1136,7 +1138,7 @@ abstract class GenICode extends SubComponent { // a package here, check if there's a package object. val sym = ( if (!tree.symbol.isPackageClass) tree.symbol - else tree.symbol.info.member(nme.PACKAGE) match { + else tree.symbol.info.packageObject match { case NoSymbol => abort("Cannot use package as value: " + tree) case s => devWarning(s"Found ${tree.symbol} where a package object is required. Converting to ${s.moduleClass}") @@ -1326,15 +1328,6 @@ abstract class GenICode extends SubComponent { List(tree) } - /** Some useful equality helpers. - */ - def isNull(t: Tree) = cond(t) { case Literal(Constant(null)) => true } - def isLiteral(t: Tree) = cond(t) { case Literal(_) => true } - def isNonNullExpr(t: Tree) = isLiteral(t) || ((t.symbol ne null) && t.symbol.isModule) - - /* If l or r is constant null, returns the other ; otherwise null */ - def ifOneIsNull(l: Tree, r: Tree) = if (isNull(l)) r else if (isNull(r)) l else null - /** * Find the label denoted by `lsym` and enter it in context `ctx`. * @@ -1431,11 +1424,18 @@ abstract class GenICode extends SubComponent { def genZandOrZor(and: Boolean): Boolean = { val ctxInterm = ctx.newBlock() - val branchesReachable = if (and) genCond(lhs, ctx, ctxInterm, elseCtx) + val lhsBranchesReachable = if (and) genCond(lhs, ctx, ctxInterm, elseCtx) else genCond(lhs, ctx, thenCtx, ctxInterm) - ctxInterm.bb killUnless branchesReachable + // If lhs is known to throw, we can kill the just created ctxInterm. 
+ ctxInterm.bb killUnless lhsBranchesReachable + + val rhsBranchesReachable = genCond(rhs, ctxInterm, thenCtx, elseCtx) - genCond(rhs, ctxInterm, thenCtx, elseCtx) + // Reachable means "it does not always throw", i.e. "it might not throw". + // In an expression (a && b) or (a || b), the b branch might not be evaluated. + // Such an expression is therefore known to throw only if both expressions throw. Or, + // successors are reachable if either of the two is reachable (SI-8625). + lhsBranchesReachable || rhsBranchesReachable } def genRefEq(isEq: Boolean) = { val f = genEqEqPrimitive(lhs, rhs, ctx) _ diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala index bc35a9e7defd..10f0c6ee00b2 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala @@ -113,7 +113,8 @@ abstract class ICodes extends AnyRef global.loaders.lookupMemberAtTyperPhaseIfPossible(sym, name) lazy val symbolTable: global.type = global lazy val loaders: global.loaders.type = global.loaders - def classPath: util.ClassPath[AbstractFile] = ICodes.this.global.platform.classPath + + def classFileLookup: util.ClassFileLookup[AbstractFile] = global.classPath } /** A phase which works on icode. */ diff --git a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala index f81c42d836bb..27bf83648440 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala @@ -60,7 +60,7 @@ trait Primitives { self: ICodes => // type : (buf,el) => buf // range: lf,rg <- { BOOL, Ix, Ux, Rx, REF, STR } - // jvm : It should call the appropiate 'append' method on StringBuffer + // jvm : It should call the appropriate 'append' method on StringBuffer case class StringConcat(el: TypeKind) extends Primitive /** Signals the beginning of a series of concatenations. diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala index 676ee1268310..b0ad5bdaf97d 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala @@ -332,13 +332,13 @@ abstract class TypeFlowAnalysis { `remainingCALLs` also caches info about the typestack just before the callsite, so as to spare computing them again at inlining time. Besides caching, a further optimization involves skipping those basic blocks whose in-flow and out-flow isn't needed anyway (as explained next). - A basic block lacking a callsite in `remainingCALLs`, when visisted by the standard algorithm, won't cause any inlining. + A basic block lacking a callsite in `remainingCALLs`, when visited by the standard algorithm, won't cause any inlining. But as we know from the way type-flows are computed, computing the in- and out-flow for a basic block relies in general on those of other basic blocks. In detail, we want to focus on that sub-graph of the CFG such that control flow may reach a remaining candidate callsite. Those basic blocks not in that subgraph can be skipped altogether. That's why: - `forwardAnalysis()` in `MTFAGrowable` now checks for inclusion of a basic block in `relevantBBs` - same check is performed before adding a block to the worklist, and as part of choosing successors. 
- The bookkeeping supporting on-the-fly pruning of irrelevant blocks requires overridding most methods of the dataflow-analysis. + The bookkeeping supporting on-the-fly pruning of irrelevant blocks requires overriding most methods of the dataflow-analysis. The rest of the story takes place in Inliner, which does not visit all of the method's basic blocks but only on those represented in `remainingCALLs`. diff --git a/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala new file mode 100644 index 000000000000..75aa0fc98469 --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala @@ -0,0 +1,108 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2014 LAMP/EPFL + * @author Martin Odersky + */ + +package scala.tools.nsc.backend.jvm + +import scala.tools.asm.tree.{InsnList, AbstractInsnNode, ClassNode, MethodNode} +import java.io.{StringWriter, PrintWriter} +import scala.tools.asm.util.{TraceClassVisitor, TraceMethodVisitor, Textifier} +import scala.tools.asm.ClassReader +import scala.collection.convert.decorateAsScala._ + +object AsmUtils { + + /** + * Print the bytecode of methods generated by GenBCode to the standard output. Only methods + * whose name contains `traceMethodPattern` are traced. + */ + final val traceMethodEnabled = false + final val traceMethodPattern = "" + + /** + * Print the bytecode of classes generated by GenBCode to the standard output. + */ + final val traceClassEnabled = false + final val traceClassPattern = "" + + /** + * Print the bytedcode of classes as they are serialized by the ASM library. The serialization + * performed by `asm.ClassWriter` can change the code generated by GenBCode. For example, it + * introduces stack map frames, it computes the maximal stack sizes, and it replaces dead + * code by NOPs (see also https://github.com/scala/scala/pull/3726#issuecomment-42861780). + */ + final val traceSerializedClassEnabled = false + final val traceSerializedClassPattern = "" + + def traceMethod(mnode: MethodNode): Unit = { + println(s"Bytecode for method ${mnode.name}") + println(textify(mnode)) + } + + def traceClass(cnode: ClassNode): Unit = { + println(s"Bytecode for class ${cnode.name}") + println(textify(cnode)) + } + + def traceClass(bytes: Array[Byte]): Unit = traceClass(readClass(bytes)) + + def readClass(bytes: Array[Byte]): ClassNode = { + val node = new ClassNode() + new ClassReader(bytes).accept(node, 0) + node + } + + /** + * Returns a human-readable representation of the cnode ClassNode. + */ + def textify(cnode: ClassNode): String = { + val trace = new TraceClassVisitor(new PrintWriter(new StringWriter)) + cnode.accept(trace) + val sw = new StringWriter + val pw = new PrintWriter(sw) + trace.p.print(pw) + sw.toString + } + + /** + * Returns a human-readable representation of the code in the mnode MethodNode. + */ + def textify(mnode: MethodNode): String = { + val trace = new TraceClassVisitor(new PrintWriter(new StringWriter)) + mnode.accept(trace) + val sw = new StringWriter + val pw = new PrintWriter(sw) + trace.p.print(pw) + sw.toString + } + + /** + * Returns a human-readable representation of the given instruction. + */ + def textify(insn: AbstractInsnNode): String = { + val trace = new TraceMethodVisitor(new Textifier) + insn.accept(trace) + val sw = new StringWriter + val pw = new PrintWriter(sw) + trace.p.print(pw) + sw.toString.trim + } + + /** + * Returns a human-readable representation of the given instruction sequence. 
+ */ + def textify(insns: Iterator[AbstractInsnNode]): String = { + val trace = new TraceMethodVisitor(new Textifier) + insns.foreach(_.accept(trace)) + val sw: StringWriter = new StringWriter + val pw: PrintWriter = new PrintWriter(sw) + trace.p.print(pw) + sw.toString.trim + } + + /** + * Returns a human-readable representation of the given instruction sequence. + */ + def textify(insns: InsnList): String = textify(insns.iterator().asScala) +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala new file mode 100644 index 000000000000..a5f33aa7862d --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala @@ -0,0 +1,193 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2014 LAMP/EPFL + * @author Martin Odersky + */ + +package scala.tools.nsc.backend.jvm + +import scala.tools.nsc.Global + +/** + * This trait contains code shared between GenBCode and GenASM that depends on types defined in + * the compiler cake (Global). + */ +final class BCodeAsmCommon[G <: Global](val global: G) { + import global._ + import definitions._ + + val ExcludedForwarderFlags = { + import scala.tools.nsc.symtab.Flags._ + // Should include DEFERRED but this breaks findMember. + SPECIALIZED | LIFTED | PROTECTED | STATIC | EXPANDEDNAME | BridgeAndPrivateFlags | MACRO + } + + /** + * True if `classSym` is an anonymous class or a local class. I.e., false if `classSym` is a + * member class. This method is used to decide if we should emit an EnclosingMethod attribute. + * It is also used to decide whether the "owner" field in the InnerClass attribute should be + * null. + */ + def isAnonymousOrLocalClass(classSym: Symbol): Boolean = { + assert(classSym.isClass, s"not a class: $classSym") + // Here used to be an `assert(!classSym.isDelambdafyFunction)`: delambdafy lambda classes are + // always top-level. However, SI-8900 shows an example where the weak name-based implementation + // of isDelambdafyFunction failed (for a function declared in a package named "lambda"). + classSym.isAnonymousClass || !classSym.originalOwner.isClass + } + + /** + * Returns the enclosing method for non-member classes. In the following example + * + * class A { + * def f = { + * class B { + * class C + * } + * } + * } + * + * the method returns Some(f) for B, but None for C, because C is a member class. For non-member + * classes that are not enclosed by a method, it returns None: + * + * class A { + * { class B } + * } + * + * In this case, for B, we return None. + * + * The EnclosingMethod attribute needs to be added to non-member classes (see doc in BTypes). + * This is a source-level property, so we need to use the originalOwner chain to reconstruct it. + */ + private def enclosingMethodForEnclosingMethodAttribute(classSym: Symbol): Option[Symbol] = { + assert(classSym.isClass, classSym) + def enclosingMethod(sym: Symbol): Option[Symbol] = { + if (sym.isClass || sym == NoSymbol) None + else if (sym.isMethod) Some(sym) + else enclosingMethod(sym.originalOwner) + } + enclosingMethod(classSym.originalOwner) + } + + /** + * The enclosing class for emitting the EnclosingMethod attribute. Since this is a source-level + * property, this method looks at the originalOwner chain. See doc in BTypes. 
+ */ + private def enclosingClassForEnclosingMethodAttribute(classSym: Symbol): Symbol = { + assert(classSym.isClass, classSym) + def enclosingClass(sym: Symbol): Symbol = { + if (sym.isClass) sym + else enclosingClass(sym.originalOwner) + } + enclosingClass(classSym.originalOwner) + } + + final case class EnclosingMethodEntry(owner: String, name: String, methodDescriptor: String) + + /** + * Data for emitting an EnclosingMethod attribute. None if `classSym` is a member class (not + * an anonymous or local class). See doc in BTypes. + * + * The class is parametrized by two functions to obtain a bytecode class descriptor for a class + * symbol, and to obtain a method signature descriptor fro a method symbol. These function depend + * on the implementation of GenASM / GenBCode, so they need to be passed in. + */ + def enclosingMethodAttribute(classSym: Symbol, classDesc: Symbol => String, methodDesc: Symbol => String): Option[EnclosingMethodEntry] = { + if (isAnonymousOrLocalClass(classSym)) { + val methodOpt = enclosingMethodForEnclosingMethodAttribute(classSym) + debuglog(s"enclosing method for $classSym is $methodOpt (in ${methodOpt.map(_.enclClass)})") + Some(EnclosingMethodEntry( + classDesc(enclosingClassForEnclosingMethodAttribute(classSym)), + methodOpt.map(_.javaSimpleName.toString).orNull, + methodOpt.map(methodDesc).orNull)) + } else { + None + } + } + + /** + * This is basically a re-implementation of sym.isStaticOwner, but using the originalOwner chain. + * + * The problem is that we are interested in a source-level property. Various phases changed the + * symbol's properties in the meantime, mostly lambdalift modified (destructively) the owner. + * Therefore, `sym.isStatic` is not what we want. For example, in + * object T { def f { object U } } + * the owner of U is T, so UModuleClass.isStatic is true. Phase travel does not help here. + */ + def isOriginallyStaticOwner(sym: Symbol): Boolean = { + sym.isPackageClass || sym.isModuleClass && isOriginallyStaticOwner(sym.originalOwner) + } + + /** + * The member classes of a class symbol. Note that the result of this method depends on the + * current phase, for example, after lambdalift, all local classes become member of the enclosing + * class. 
+ */ + def memberClassesOf(classSymbol: Symbol): List[Symbol] = classSymbol.info.decls.collect({ + case sym if sym.isClass => + sym + case sym if sym.isModule => + val r = exitingPickler(sym.moduleClass) + assert(r != NoSymbol, sym.fullLocationString) + r + })(collection.breakOut) + + lazy val AnnotationRetentionPolicyModule = AnnotationRetentionPolicyAttr.companionModule + lazy val AnnotationRetentionPolicySourceValue = AnnotationRetentionPolicyModule.tpe.member(TermName("SOURCE")) + lazy val AnnotationRetentionPolicyClassValue = AnnotationRetentionPolicyModule.tpe.member(TermName("CLASS")) + lazy val AnnotationRetentionPolicyRuntimeValue = AnnotationRetentionPolicyModule.tpe.member(TermName("RUNTIME")) + + /** Whether an annotation should be emitted as a Java annotation + * .initialize: if 'annot' is read from pickle, atp might be un-initialized + */ + def shouldEmitAnnotation(annot: AnnotationInfo) = { + annot.symbol.initialize.isJavaDefined && + annot.matches(ClassfileAnnotationClass) && + retentionPolicyOf(annot) != AnnotationRetentionPolicySourceValue && + annot.args.isEmpty + } + + def isRuntimeVisible(annot: AnnotationInfo): Boolean = { + annot.atp.typeSymbol.getAnnotation(AnnotationRetentionAttr) match { + case Some(retentionAnnot) => + retentionAnnot.assocs.contains(nme.value -> LiteralAnnotArg(Constant(AnnotationRetentionPolicyRuntimeValue))) + case _ => + // SI-8926: if the annotation class symbol doesn't have a @RetentionPolicy annotation, the + // annotation is emitted with visibility `RUNTIME` + true + } + } + + private def retentionPolicyOf(annot: AnnotationInfo): Symbol = + annot.atp.typeSymbol.getAnnotation(AnnotationRetentionAttr).map(_.assocs).map(assoc => + assoc.collectFirst { + case (`nme`.value, LiteralAnnotArg(Constant(value: Symbol))) => value + }).flatten.getOrElse(AnnotationRetentionPolicyClassValue) + + def implementedInterfaces(classSym: Symbol): List[Symbol] = { + // Additional interface parents based on annotations and other cues + def newParentForAnnotation(ann: AnnotationInfo): Option[Type] = ann.symbol match { + case RemoteAttr => Some(RemoteInterfaceClass.tpe) + case _ => None + } + + def isInterfaceOrTrait(sym: Symbol) = sym.isInterface || sym.isTrait + + val allParents = classSym.info.parents ++ classSym.annotations.flatMap(newParentForAnnotation) + + // We keep the superClass when computing minimizeParents to eliminate more interfaces. 
+ // Example: T can be eliminated from D + // trait T + // class C extends T + // class D extends C with T + val interfaces = erasure.minimizeParents(allParents) match { + case superClass :: ifs if !isInterfaceOrTrait(superClass.typeSymbol) => + ifs + case ifs => + // minimizeParents removes the superclass if it's redundant, for example: + // trait A + // class C extends Object with A // minimizeParents removes Object + ifs + } + interfaces.map(_.typeSymbol) + } +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala index 53142fbd8760..89d7acaa1144 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala @@ -9,7 +9,6 @@ package tools.nsc package backend package jvm -import scala.collection.{ mutable, immutable } import scala.annotation.switch import scala.tools.asm @@ -23,6 +22,9 @@ import scala.tools.asm abstract class BCodeBodyBuilder extends BCodeSkelBuilder { import global._ import definitions._ + import bTypes._ + import bCodeICodeCommon._ + import coreBTypes._ /* * Functionality to build the body of ASM MethodNode, except for `synchronized` and `try` expressions. @@ -45,16 +47,16 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { def emit(opc: Int) { mnode.visitInsn(opc) } def emitZeroOf(tk: BType) { - (tk.sort: @switch) match { - case asm.Type.BOOLEAN => bc.boolconst(false) - case asm.Type.BYTE | - asm.Type.SHORT | - asm.Type.CHAR | - asm.Type.INT => bc.iconst(0) - case asm.Type.LONG => bc.lconst(0) - case asm.Type.FLOAT => bc.fconst(0) - case asm.Type.DOUBLE => bc.dconst(0) - case asm.Type.VOID => () + tk match { + case BOOL => bc.boolconst(false) + case BYTE | + SHORT | + CHAR | + INT => bc.iconst(0) + case LONG => bc.lconst(0) + case FLOAT => bc.fconst(0) + case DOUBLE => bc.dconst(0) + case UNIT => () case _ => emit(asm.Opcodes.ACONST_NULL) } } @@ -90,7 +92,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { val thrownKind = tpeTK(expr) // `throw null` is valid although scala.Null (as defined in src/libray-aux) isn't a subtype of Throwable. // Similarly for scala.Nothing (again, as defined in src/libray-aux). - assert(thrownKind.isNullType || thrownKind.isNothingType || exemplars.get(thrownKind).isSubtypeOf(ThrowableReference)) + assert(thrownKind.isNullType || thrownKind.isNothingType || thrownKind.asClassBType.isSubtypeOf(ThrowableReference)) genLoad(expr, thrownKind) lineNumber(expr) emit(asm.Opcodes.ATHROW) // ICode enters here into enterIgnoreMode, we'll rely instead on DCE at ClassNode level. 
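[Editorial note, not part of the patch above.] The BCodeBodyBuilder hunks above replace switches over ASM's integer sort codes (asm.Type.BOOLEAN, asm.Type.INT, ...) with pattern matches over the BType hierarchy (BOOL, INT, UNIT, ...), as in emitZeroOf. The following is a minimal, self-contained sketch of that style of refactor; Kind, BoolK, zeroOf and the other names are made up for illustration and are not the compiler's BType classes.

    // A sealed ADT of value kinds stands in for integer sort codes: the match is
    // checked for exhaustiveness by the compiler and needs no magic constants.
    sealed trait Kind
    case object BoolK   extends Kind
    case object IntK    extends Kind
    case object LongK   extends Kind
    case object FloatK  extends Kind
    case object DoubleK extends Kind
    case object UnitK   extends Kind
    case object RefK    extends Kind

    def zeroOf(k: Kind): Any = k match {
      case BoolK   => false
      case IntK    => 0
      case LongK   => 0L
      case FloatK  => 0.0f
      case DoubleK => 0.0d
      case UnitK   => ()
      case RefK    => null
    }

    // zeroOf(LongK) == 0L, zeroOf(RefK) == null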
@@ -121,7 +123,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { // binary operation case rarg :: Nil => - resKind = maxType(tpeTK(larg), tpeTK(rarg)) + resKind = tpeTK(larg).maxType(tpeTK(rarg)) if (scalaPrimitives.isShiftOp(code) || scalaPrimitives.isBitwiseOp(code)) { assert(resKind.isIntegralType || (resKind == BOOL), s"$resKind incompatible with arithmetic modulo operation.") @@ -165,7 +167,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { // load argument on stack assert(args.length == 1, s"Too many arguments for array get operation: $tree"); genLoad(args.head, INT) - generatedType = k.getComponentType + generatedType = k.asArrayBType.componentType bc.aload(elementType) } else if (scalaPrimitives.isArraySet(code)) { @@ -280,9 +282,10 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { val Local(tk, _, idx, isSynth) = locals.getOrMakeLocal(sym) if (rhs == EmptyTree) { emitZeroOf(tk) } else { genLoad(rhs, tk) } + val localVarStart = currProgramPoint() bc.store(idx, tk) if (!isSynth) { // there are case ValDef's emitted by patmat - varsInScope ::= (sym -> currProgramPoint()) + varsInScope ::= (sym -> localVarStart) } generatedType = UNIT @@ -319,7 +322,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { mnode.visitVarInsn(asm.Opcodes.ALOAD, 0) generatedType = if (tree.symbol == ArrayClass) ObjectReference - else brefType(thisName) // inner class (if any) for claszSymbol already tracked. + else classBTypeFromSymbol(claszSymbol) } case Select(Ident(nme.EMPTY_PACKAGE_NAME), module) => @@ -417,7 +420,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { if (hostClass == null) internalName(field.owner) else internalName(hostClass) val fieldJName = field.javaSimpleName.toString - val fieldDescr = symInfoTK(field).getDescriptor + val fieldDescr = symInfoTK(field).descriptor val isStatic = field.isStaticMember val opc = if (isLoad) { if (isStatic) asm.Opcodes.GETSTATIC else asm.Opcodes.GETFIELD } @@ -457,9 +460,10 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { case ClazzTag => val toPush: BType = { - val kind = toTypeKind(const.typeValue) - if (kind.isValueType) classLiteral(kind) - else kind + toTypeKind(const.typeValue) match { + case kind: PrimitiveBType => boxedClassOfPrimitive(kind) + case kind => kind + } } mnode.visitLdcInsn(toPush.toASMType) @@ -467,7 +471,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { val sym = const.symbolValue val ownerName = internalName(sym.owner) val fieldName = sym.javaSimpleName.toString - val fieldDesc = toTypeKind(sym.tpe.underlying).getDescriptor + val fieldDesc = toTypeKind(sym.tpe.underlying).descriptor mnode.visitFieldInsn( asm.Opcodes.GETSTATIC, ownerName, @@ -502,7 +506,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { case nextCleanup :: rest => if (saveReturnValue) { if (insideCleanupBlock) { - cunit.warning(r.pos, "Return statement found in finally-clause, discarding its return-value in favor of that of a more deeply nested return.") + reporter.warning(r.pos, "Return statement found in finally-clause, discarding its return-value in favor of that of a more deeply nested return.") bc drop returnType } else { // regarding return value, the protocol is: in place of a `return-stmt`, a sequence of `adapt, store, jump` are inserted. 
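[Editorial aside, not part of the patch] The reporter.warning call above concerns user code that returns from a finally clause while the value of an enclosing return is still pending. A tiny reproduction of that situation is sketched below; per the warning text, the finally-clause return's value is discarded in favour of the more deeply nested one. The object name is made up for the example.

object ReturnInFinallySketch {
  def f(): Int =
    try {
      return 1
    } finally {
      return 2 // expected to trigger: "Return statement found in finally-clause, ..."
    }

  def main(args: Array[String]): Unit =
    println(f())
}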
@@ -539,26 +543,28 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { def genTypeApply(): BType = { genLoadQualifier(fun) - if (l.isValueType && r.isValueType) + // TODO @lry make pattern match + if (l.isPrimitive && r.isPrimitive) genConversion(l, r, cast) - else if (l.isValueType) { + else if (l.isPrimitive) { bc drop l if (cast) { - mnode.visitTypeInsn(asm.Opcodes.NEW, classCastExceptionReference.getInternalName) + mnode.visitTypeInsn(asm.Opcodes.NEW, classCastExceptionReference.internalName) bc dup ObjectReference emit(asm.Opcodes.ATHROW) } else { bc boolconst false } } - else if (r.isValueType && cast) { + else if (r.isPrimitive && cast) { abort(s"Erasure should have added an unboxing operation to prevent this cast. Tree: $app") } - else if (r.isValueType) { - bc isInstance classLiteral(r) + else if (r.isPrimitive) { + bc isInstance boxedClassOfPrimitive(r.asPrimitiveBType) } else { - genCast(r, cast) + assert(r.isRef, r) // ensure that it's not a method + genCast(r.asRefBType, cast) } if (cast) r else BOOL @@ -578,7 +584,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { mnode.visitVarInsn(asm.Opcodes.ALOAD, 0) genLoadArguments(args, paramTKs(app)) genCallMethod(fun.symbol, invokeStyle, pos = app.pos) - generatedType = asmMethodType(fun.symbol).getReturnType + generatedType = asmMethodType(fun.symbol).returnType // 'new' constructor call: Note: since constructors are // thought to return an instance of what they construct, @@ -589,34 +595,34 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { assert(ctor.isClassConstructor, s"'new' call to non-constructor: ${ctor.name}") generatedType = tpeTK(tpt) - assert(generatedType.isRefOrArrayType, s"Non reference type cannot be instantiated: $generatedType") + assert(generatedType.isRef, s"Non reference type cannot be instantiated: $generatedType") generatedType match { - case arr if generatedType.isArray => + case arr @ ArrayBType(componentType) => genLoadArguments(args, paramTKs(app)) - val dims = arr.getDimensions - var elemKind = arr.getElementType + val dims = arr.dimension + var elemKind = arr.elementType val argsSize = args.length if (argsSize > dims) { - cunit.error(app.pos, s"too many arguments for array constructor: found ${args.length} but array has only $dims dimension(s)") + reporter.error(app.pos, s"too many arguments for array constructor: found ${args.length} but array has only $dims dimension(s)") } if (argsSize < dims) { /* In one step: * elemKind = new BType(BType.ARRAY, arr.off + argsSize, arr.len - argsSize) * however the above does not enter a TypeName for each nested arrays in chrs. 
*/ - for (i <- args.length until dims) elemKind = arrayOf(elemKind) + for (i <- args.length until dims) elemKind = ArrayBType(elemKind) } (argsSize : @switch) match { case 1 => bc newarray elemKind case _ => - val descr = ('[' * argsSize) + elemKind.getDescriptor // denotes the same as: arrayN(elemKind, argsSize).getDescriptor + val descr = ('[' * argsSize) + elemKind.descriptor // denotes the same as: arrayN(elemKind, argsSize).descriptor mnode.visitMultiANewArrayInsn(descr, argsSize) } - case rt if generatedType.hasObjectSort => - assert(exemplar(ctor.owner).c == rt, s"Symbol ${ctor.owner.fullName} is different from $rt") - mnode.visitTypeInsn(asm.Opcodes.NEW, rt.getInternalName) + case rt: ClassBType => + assert(classBTypeFromSymbol(ctor.owner) == rt, s"Symbol ${ctor.owner.fullName} is different from $rt") + mnode.visitTypeInsn(asm.Opcodes.NEW, rt.internalName) bc dup generatedType genLoadArguments(args, paramTKs(app)) genCallMethod(ctor, icodes.opcodes.Static(onInstance = true)) @@ -628,16 +634,16 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { case Apply(fun @ _, List(expr)) if currentRun.runDefinitions.isBox(fun.symbol) => val nativeKind = tpeTK(expr) genLoad(expr, nativeKind) - val MethodNameAndType(mname, mdesc) = asmBoxTo(nativeKind) - bc.invokestatic(BoxesRunTime.getInternalName, mname, mdesc) + val MethodNameAndType(mname, methodType) = asmBoxTo(nativeKind) + bc.invokestatic(BoxesRunTime.internalName, mname, methodType.descriptor) generatedType = boxResultType(fun.symbol) // was toTypeKind(fun.symbol.tpe.resultType) case Apply(fun @ _, List(expr)) if currentRun.runDefinitions.isUnbox(fun.symbol) => genLoad(expr) val boxType = unboxResultType(fun.symbol) // was toTypeKind(fun.symbol.owner.linkedClassOfClass.tpe) generatedType = boxType - val MethodNameAndType(mname, mdesc) = asmUnboxTo(boxType) - bc.invokestatic(BoxesRunTime.getInternalName, mname, mdesc) + val MethodNameAndType(mname, methodType) = asmUnboxTo(boxType) + bc.invokestatic(BoxesRunTime.internalName, mname, methodType.descriptor) case app @ Apply(fun, args) => val sym = fun.symbol @@ -682,7 +688,12 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { case _ => } if ((targetTypeKind != null) && (sym == definitions.Array_clone) && invokeStyle.isDynamic) { - val target: String = targetTypeKind.getInternalName + // An invokevirtual points to a CONSTANT_Methodref_info which in turn points to a + // CONSTANT_Class_info of the receiver type. 
+ // The JVMS is not explicit about this, but that receiver type may be an array type + // descriptor (instead of a class internal name): + // invokevirtual #2; //Method "[I".clone:()Ljava/lang/Object + val target: String = targetTypeKind.asRefBType.classOrArrayType bc.invokevirtual(target, "clone", "()Ljava/lang/Object;") } else { @@ -693,7 +704,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { genNormalMethodCall() - generatedType = asmMethodType(sym).getReturnType + generatedType = asmMethodType(sym).returnType } } @@ -705,7 +716,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { val ArrayValue(tpt @ TypeTree(), elems) = av val elmKind = tpeTK(tpt) - val generatedType = arrayOf(elmKind) + val generatedType = ArrayBType(elmKind) lineNumber(av) bc iconst elems.length @@ -798,16 +809,60 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { } def adapt(from: BType, to: BType) { - if (!conforms(from, to)) { + if (!from.conformsTo(to)) { to match { case UNIT => bc drop from case _ => bc.emitT2T(from, to) } } else if (from.isNothingType) { - emit(asm.Opcodes.ATHROW) // ICode enters here into enterIgnoreMode, we'll rely instead on DCE at ClassNode level. + /* There are two possibilities for from.isNothingType: emitting a "throw e" expressions and + * loading a (phantom) value of type Nothing. + * + * The Nothing type in Scala's type system does not exist in the JVM. In bytecode, Nothing + * is mapped to scala.runtime.Nothing$. To the JVM, a call to Predef.??? looks like it would + * return an object of type Nothing$. We need to do something with that phantom object on + * the stack. "Phantom" because it never exists: such methods always throw, but the JVM does + * not know that. + * + * Note: The two verifiers (old: type inference, new: type checking) have different + * requirements. Very briefly: + * + * Old (http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.10.2.1): at + * each program point, no matter what branches were taken to get there + * - Stack is same size and has same typed values + * - Local and stack values need to have consistent types + * - In practice, the old verifier seems to ignore unreachable code and accept any + * instructions after an ATHROW. For example, there can be another ATHROW (without + * loading another throwable first). + * + * New (http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.10.1) + * - Requires consistent stack map frames. GenBCode generates stack frames if -target:jvm-1.6 + * or higher. + * - In practice: the ASM library computes stack map frames for us (ClassWriter). Emitting + * correct frames after an ATHROW is probably complex, so ASM uses the following strategy: + * - Every time when generating an ATHROW, a new basic block is started. + * - During classfile writing, such basic blocks are found to be dead: no branches go there + * - Eliminating dead code would probably require complex shifts in the output byte buffer + * - But there's an easy solution: replace all code in the dead block with with + * `nop; nop; ... nop; athrow`, making sure the bytecode size stays the same + * - The corresponding stack frame can be easily generated: on entering a dead the block, + * the frame requires a single Throwable on the stack. + * - Since there are no branches to the dead block, the frame requirements are never violated. + * + * To summarize the above: it does matter what we emit after an ATHROW. 
+ * + * NOW: if we end up here because we emitted a load of a (phantom) value of type Nothing$, + * there was no ATHROW emitted. So, we have to make the verifier happy and do something + * with that value. Since Nothing$ extends Throwable, the easiest is to just emit an ATHROW. + * + * If we ended up here because we generated a "throw e" expression, we know the last + * emitted instruction was an ATHROW. As explained above, it is OK to emit a second ATHROW, + * the verifiers will be happy. + */ + emit(asm.Opcodes.ATHROW) } else if (from.isNullType) { bc drop from - mnode.visitInsn(asm.Opcodes.ACONST_NULL) + emit(asm.Opcodes.ACONST_NULL) } else (from, to) match { case (BYTE, LONG) | (SHORT, LONG) | (CHAR, LONG) | (INT, LONG) => bc.emitT2T(INT, LONG) @@ -860,7 +915,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { def genLoadModule(tree: Tree): BType = { val module = ( if (!tree.symbol.isPackageClass) tree.symbol - else tree.symbol.info.member(nme.PACKAGE) match { + else tree.symbol.info.packageObject match { case NoSymbol => abort(s"SI-5604: Cannot use package as value: $tree") case s => abort(s"SI-5604: found package class where package object expected: $tree") } @@ -875,12 +930,12 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { if (claszSymbol == module.moduleClass && jMethodName != "readResolve" && !inStaticMethod) { mnode.visitVarInsn(asm.Opcodes.ALOAD, 0) } else { - val mbt = symInfoTK(module) + val mbt = symInfoTK(module).asClassBType mnode.visitFieldInsn( asm.Opcodes.GETSTATIC, - mbt.getInternalName /* + "$" */ , + mbt.internalName /* + "$" */ , strMODULE_INSTANCE_FIELD, - mbt.getDescriptor // for nostalgics: toTypeKind(module.tpe).getDescriptor + mbt.descriptor // for nostalgics: toTypeKind(module.tpe).descriptor ) } } @@ -893,7 +948,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { } } - def genCast(to: BType, cast: Boolean) { + def genCast(to: RefBType, cast: Boolean) { if (cast) { bc checkCast to } else { bc isInstance to } } @@ -939,7 +994,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { def genCallMethod(method: Symbol, style: InvokeStyle, hostClass0: Symbol = null, pos: Position = NoPosition) { val siteSymbol = claszSymbol - val hostSymbol = if (hostClass0 == null) method.owner else hostClass0; + val hostSymbol = if (hostClass0 == null) method.owner else hostClass0 val methodOwner = method.owner // info calls so that types are up to date; erasure may add lateINTERFACE to traits hostSymbol.info ; methodOwner.info @@ -957,18 +1012,17 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { || methodOwner == definitions.ObjectClass ) val receiver = if (useMethodOwner) methodOwner else hostSymbol - val bmOwner = asmClassType(receiver) - val jowner = bmOwner.getInternalName + val jowner = internalName(receiver) val jname = method.javaSimpleName.toString val bmType = asmMethodType(method) - val mdescr = bmType.getDescriptor + val mdescr = bmType.descriptor def initModule() { // we initialize the MODULE$ field immediately after the super ctor if (!isModuleInitialized && jMethodName == INSTANCE_CONSTRUCTOR_NAME && jname == INSTANCE_CONSTRUCTOR_NAME && - isStaticModule(siteSymbol)) { + isStaticModuleClass(siteSymbol)) { isModuleInitialized = true mnode.visitVarInsn(asm.Opcodes.ALOAD, 0) mnode.visitFieldInsn( @@ -1020,22 +1074,11 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { tree :: Nil } - /* Some useful equality helpers. 
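[Editorial aside, not part of the patch] The long comment above explains why adapt() emits an ATHROW when a (phantom) value of type scala/runtime/Nothing$ has to be adapted to some other type. A tiny user-level program that reaches that code path, using the Predef.??? example the comment itself mentions (object and method names made up):

object NothingAdaptSketch {
  def f(flag: Boolean): Int =
    if (flag) 1
    else ???  // ??? has type Nothing, i.e. a phantom Nothing$ value on the stack;
              // per the comment above, the backend adapts it to Int by emitting an ATHROW

  def main(args: Array[String]): Unit =
    println(f(flag = true)) // prints 1; f(false) would throw NotImplementedError
}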
*/ - def isNull(t: Tree) = { - t match { - case Literal(Constant(null)) => true - case _ => false - } - } - - /* If l or r is constant null, returns the other ; otherwise null */ - def ifOneIsNull(l: Tree, r: Tree) = if (isNull(l)) r else if (isNull(r)) l else null - /* Emit code to compare the two top-most stack values using the 'op' operator. */ private def genCJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType) { if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT bc.emitIF_ICMP(op, success) - } else if (tk.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_) + } else if (tk.isRef) { // REFERENCE(_) | ARRAY(_) bc.emitIF_ACMP(op, success) } else { (tk: @unchecked) match { @@ -1056,7 +1099,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { private def genCZJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType) { if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT bc.emitIF(op, success) - } else if (tk.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_) + } else if (tk.isRef) { // REFERENCE(_) | ARRAY(_) // @unchecked because references aren't compared with GT, GE, LT, LE. (op : @unchecked) match { case icodes.EQ => bc emitIFNULL success @@ -1102,7 +1145,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { genCZJUMP(success, failure, op, ObjectReference) } else { - val tk = maxType(tpeTK(l), tpeTK(r)) + val tk = tpeTK(l).maxType(tpeTK(r)) genLoad(l, tk) genLoad(r, tk) genCJUMP(success, failure, op, tk) @@ -1141,7 +1184,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { case ZOR => genZandOrZor(and = false) case code => // TODO !!!!!!!!!! isReferenceType, in the sense of TypeKind? (ie non-array, non-boxed, non-nothing, may be null) - if (scalaPrimitives.isUniversalEqualityOp(code) && tpeTK(lhs).hasObjectSort) { + if (scalaPrimitives.isUniversalEqualityOp(code) && tpeTK(lhs).isClass) { // `lhs` has reference type if (code == EQ) genEqEqPrimitive(lhs, rhs, success, failure) else genEqEqPrimitive(lhs, rhs, failure, success) @@ -1200,9 +1243,15 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { // expr == null -> expr eq null genLoad(l, ObjectReference) genCZJUMP(success, failure, icodes.EQ, ObjectReference) + } else if (isNonNullExpr(l)) { + // SI-7852 Avoid null check if L is statically non-null. + genLoad(l, ObjectReference) + genLoad(r, ObjectReference) + genCallMethod(Object_equals, icodes.opcodes.Dynamic) + genCZJUMP(success, failure, icodes.NE, BOOL) } else { // l == r -> if (l eq null) r eq null else l.equals(r) - val eqEqTempLocal = locals.makeLocal(AnyRefReference, nme.EQEQ_LOCAL_VAR.toString) + val eqEqTempLocal = locals.makeLocal(ObjectReference, nme.EQEQ_LOCAL_VAR.toString) val lNull = new asm.Label val lNonNull = new asm.Label diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeGlue.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeGlue.scala deleted file mode 100644 index cc3265c5f9fb..000000000000 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeGlue.scala +++ /dev/null @@ -1,716 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2012 LAMP/EPFL - * @author Martin Odersky - */ - -package scala -package tools.nsc -package backend.jvm - -import scala.tools.asm -import scala.annotation.switch -import scala.collection.{ immutable, mutable } - -/* - * Immutable representations of bytecode-level types. 
- * - * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded - * @version 1.0 - * - */ -abstract class BCodeGlue extends SubComponent { - - import global._ - - object BType { - - import global.chrs - - // ------------- sorts ------------- - - val VOID : Int = 0 - val BOOLEAN: Int = 1 - val CHAR : Int = 2 - val BYTE : Int = 3 - val SHORT : Int = 4 - val INT : Int = 5 - val FLOAT : Int = 6 - val LONG : Int = 7 - val DOUBLE : Int = 8 - val ARRAY : Int = 9 - val OBJECT : Int = 10 - val METHOD : Int = 11 - - // ------------- primitive types ------------- - - val VOID_TYPE = new BType(VOID, ('V' << 24) | (5 << 16) | (0 << 8) | 0, 1) - val BOOLEAN_TYPE = new BType(BOOLEAN, ('Z' << 24) | (0 << 16) | (5 << 8) | 1, 1) - val CHAR_TYPE = new BType(CHAR, ('C' << 24) | (0 << 16) | (6 << 8) | 1, 1) - val BYTE_TYPE = new BType(BYTE, ('B' << 24) | (0 << 16) | (5 << 8) | 1, 1) - val SHORT_TYPE = new BType(SHORT, ('S' << 24) | (0 << 16) | (7 << 8) | 1, 1) - val INT_TYPE = new BType(INT, ('I' << 24) | (0 << 16) | (0 << 8) | 1, 1) - val FLOAT_TYPE = new BType(FLOAT, ('F' << 24) | (2 << 16) | (2 << 8) | 1, 1) - val LONG_TYPE = new BType(LONG, ('J' << 24) | (1 << 16) | (1 << 8) | 2, 1) - val DOUBLE_TYPE = new BType(DOUBLE, ('D' << 24) | (3 << 16) | (3 << 8) | 2, 1) - - /* - * Returns the Java type corresponding to the given type descriptor. - * - * @param off the offset of this descriptor in the chrs buffer. - * @return the Java type corresponding to the given type descriptor. - * - * can-multi-thread - */ - def getType(off: Int): BType = { - var len = 0 - chrs(off) match { - case 'V' => VOID_TYPE - case 'Z' => BOOLEAN_TYPE - case 'C' => CHAR_TYPE - case 'B' => BYTE_TYPE - case 'S' => SHORT_TYPE - case 'I' => INT_TYPE - case 'F' => FLOAT_TYPE - case 'J' => LONG_TYPE - case 'D' => DOUBLE_TYPE - case '[' => - len = 1 - while (chrs(off + len) == '[') { - len += 1 - } - if (chrs(off + len) == 'L') { - len += 1 - while (chrs(off + len) != ';') { - len += 1 - } - } - new BType(ARRAY, off, len + 1) - case 'L' => - len = 1 - while (chrs(off + len) != ';') { - len += 1 - } - new BType(OBJECT, off + 1, len - 1) - // case '(': - case _ => - assert(chrs(off) == '(') - var resPos = off + 1 - while (chrs(resPos) != ')') { resPos += 1 } - val resType = getType(resPos + 1) - val len = resPos - off + 1 + resType.len; - new BType( - METHOD, - off, - if (resType.hasObjectSort) { - len + 2 // "+ 2" accounts for the "L ... ;" in a descriptor for a non-array reference. - } else { - len - } - ) - } - } - - /* Params denote an internal name. - * can-multi-thread - */ - def getObjectType(index: Int, length: Int): BType = { - val sort = if (chrs(index) == '[') ARRAY else OBJECT; - new BType(sort, index, length) - } - - /* - * @param methodDescriptor a method descriptor. - * - * must-single-thread - */ - def getMethodType(methodDescriptor: String): BType = { - val n = global.newTypeName(methodDescriptor) - new BType(BType.METHOD, n.start, n.length) // TODO assert isValidMethodDescriptor - } - - /* - * Returns the Java method type corresponding to the given argument and return types. - * - * @param returnType the return type of the method. - * @param argumentTypes the argument types of the method. - * @return the Java type corresponding to the given argument and return types. 
- * - * must-single-thread - */ - def getMethodType(returnType: BType, argumentTypes: Array[BType]): BType = { - val n = global.newTypeName(getMethodDescriptor(returnType, argumentTypes)) - new BType(BType.METHOD, n.start, n.length) - } - - /* - * Returns the Java types corresponding to the argument types of method descriptor whose first argument starts at idx0. - * - * @param idx0 index into chrs of the first argument. - * @return the Java types corresponding to the argument types of the given method descriptor. - * - * can-multi-thread - */ - private def getArgumentTypes(idx0: Int): Array[BType] = { - assert(chrs(idx0 - 1) == '(', "doesn't look like a method descriptor.") - val args = new Array[BType](getArgumentCount(idx0)) - var off = idx0 - var size = 0 - while (chrs(off) != ')') { - args(size) = getType(off) - off += args(size).len - if (args(size).sort == OBJECT) { off += 2 } - // debug: assert("LVZBSCIJFD[)".contains(chrs(off))) - size += 1 - } - // debug: var check = 0; while (check < args.length) { assert(args(check) != null); check += 1 } - args - } - - /* - * Returns the number of argument types of this method type, whose first argument starts at idx0. - * - * @param idx0 index into chrs of the first argument. - * @return the number of argument types of this method type. - * - * can-multi-thread - */ - private def getArgumentCount(idx0: Int): Int = { - assert(chrs(idx0 - 1) == '(', "doesn't look like a method descriptor.") - var off = idx0 - var size = 0 - var keepGoing = true - while (keepGoing) { - val car = chrs(off) - off += 1 - if (car == ')') { - keepGoing = false - } else if (car == 'L') { - while (chrs(off) != ';') { off += 1 } - off += 1 - size += 1 - } else if (car != '[') { - size += 1 - } - } - - size - } - - /* - * Returns the Java type corresponding to the return type of the given - * method descriptor. - * - * @param methodDescriptor a method descriptor. - * @return the Java type corresponding to the return type of the given method descriptor. - * - * must-single-thread - */ - def getReturnType(methodDescriptor: String): BType = { - val n = global.newTypeName(methodDescriptor) - val delta = n.pos(')') // `delta` is relative to the Name's zero-based start position, not a valid index into chrs. - assert(delta < n.length, s"not a valid method descriptor: $methodDescriptor") - getType(n.start + delta + 1) - } - - /* - * Returns the descriptor corresponding to the given argument and return types. - * Note: no BType is created here for the resulting method descriptor, - * if that's desired the invoker is responsible for that. - * - * @param returnType the return type of the method. - * @param argumentTypes the argument types of the method. - * @return the descriptor corresponding to the given argument and return types. - * - * can-multi-thread - */ - def getMethodDescriptor( - returnType: BType, - argumentTypes: Array[BType]): String = - { - val buf = new StringBuffer() - buf.append('(') - var i = 0 - while (i < argumentTypes.length) { - argumentTypes(i).getDescriptor(buf) - i += 1 - } - buf.append(')') - returnType.getDescriptor(buf) - buf.toString() - } - - } // end of object BType - - /* - * Based on ASM's Type class. Namer's chrs is used in this class for the same purposes as the `buf` char array in asm.Type. 
- * - * All methods of this classs can-multi-thread - */ - final class BType(val sort: Int, val off: Int, val len: Int) { - - import global.chrs - - /* - * can-multi-thread - */ - def toASMType: scala.tools.asm.Type = { - import scala.tools.asm - // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match" - (sort: @switch) match { - case asm.Type.VOID => asm.Type.VOID_TYPE - case asm.Type.BOOLEAN => asm.Type.BOOLEAN_TYPE - case asm.Type.CHAR => asm.Type.CHAR_TYPE - case asm.Type.BYTE => asm.Type.BYTE_TYPE - case asm.Type.SHORT => asm.Type.SHORT_TYPE - case asm.Type.INT => asm.Type.INT_TYPE - case asm.Type.FLOAT => asm.Type.FLOAT_TYPE - case asm.Type.LONG => asm.Type.LONG_TYPE - case asm.Type.DOUBLE => asm.Type.DOUBLE_TYPE - case asm.Type.ARRAY | - asm.Type.OBJECT => asm.Type.getObjectType(getInternalName) - case asm.Type.METHOD => asm.Type.getMethodType(getDescriptor) - } - } - - /* - * Unlike for ICode's REFERENCE, isBoxedType(t) implies isReferenceType(t) - * Also, `isReferenceType(RT_NOTHING) == true` , similarly for RT_NULL. - * Use isNullType() , isNothingType() to detect Nothing and Null. - * - * can-multi-thread - */ - def hasObjectSort = (sort == BType.OBJECT) - - /* - * Returns the number of dimensions of this array type. This method should - * only be used for an array type. - * - * @return the number of dimensions of this array type. - * - * can-multi-thread - */ - def getDimensions: Int = { - var i = 1 - while (chrs(off + i) == '[') { - i += 1 - } - i - } - - /* - * Returns the (ultimate) element type of this array type. - * This method should only be used for an array type. - * - * @return Returns the type of the elements of this array type. - * - * can-multi-thread - */ - def getElementType: BType = { - assert(isArray, s"Asked for the element type of a non-array type: $this") - BType.getType(off + getDimensions) - } - - /* - * Returns the internal name of the class corresponding to this object or - * array type. The internal name of a class is its fully qualified name (as - * returned by Class.getName(), where '.' are replaced by '/'. This method - * should only be used for an object or array type. - * - * @return the internal name of the class corresponding to this object type. - * - * can-multi-thread - */ - def getInternalName: String = { - new String(chrs, off, len) - } - - /* - * @return the suffix of the internal name until the last '/' (if '/' present), internal name otherwise. - * - * can-multi-thread - */ - def getSimpleName: String = { - assert(hasObjectSort, s"not of object sort: $toString") - val iname = getInternalName - val idx = iname.lastIndexOf('/') - if (idx == -1) iname - else iname.substring(idx + 1) - } - - /* - * Returns the argument types of methods of this type. - * This method should only be used for method types. - * - * @return the argument types of methods of this type. - * - * can-multi-thread - */ - def getArgumentTypes: Array[BType] = { - BType.getArgumentTypes(off + 1) - } - - /* - * Returns the return type of methods of this type. - * This method should only be used for method types. - * - * @return the return type of methods of this type. 
- * - * can-multi-thread - */ - def getReturnType: BType = { - assert(chrs(off) == '(', s"doesn't look like a method descriptor: $toString") - var resPos = off + 1 - while (chrs(resPos) != ')') { resPos += 1 } - BType.getType(resPos + 1) - } - - // ------------------------------------------------------------------------ - // Inspector methods - // ------------------------------------------------------------------------ - - def isPrimitiveOrVoid = (sort < BType.ARRAY) // can-multi-thread - def isValueType = (sort < BType.ARRAY) // can-multi-thread - def isArray = (sort == BType.ARRAY) // can-multi-thread - def isUnitType = (sort == BType.VOID) // can-multi-thread - - def isRefOrArrayType = { hasObjectSort || isArray } // can-multi-thread - def isNonUnitValueType = { isValueType && !isUnitType } // can-multi-thread - - def isNonSpecial = { !isValueType && !isArray && !isPhantomType } // can-multi-thread - def isNothingType = { (this == RT_NOTHING) || (this == CT_NOTHING) } // can-multi-thread - def isNullType = { (this == RT_NULL) || (this == CT_NULL) } // can-multi-thread - def isPhantomType = { isNothingType || isNullType } // can-multi-thread - - /* - * can-multi-thread - */ - def isBoxed = { - this match { - case BOXED_UNIT | BOXED_BOOLEAN | BOXED_CHAR | - BOXED_BYTE | BOXED_SHORT | BOXED_INT | - BOXED_FLOAT | BOXED_LONG | BOXED_DOUBLE - => true - case _ - => false - } - } - - /* On the JVM, - * BOOL, BYTE, CHAR, SHORT, and INT - * are like Ints for the purpose of lub calculation. - * - * can-multi-thread - */ - def isIntSizedType = { - (sort : @switch) match { - case BType.BOOLEAN | BType.CHAR | - BType.BYTE | BType.SHORT | BType.INT - => true - case _ - => false - } - } - - /* On the JVM, similar to isIntSizedType except that BOOL isn't integral while LONG is. - * - * can-multi-thread - */ - def isIntegralType = { - (sort : @switch) match { - case BType.CHAR | - BType.BYTE | BType.SHORT | BType.INT | - BType.LONG - => true - case _ - => false - } - } - - /* On the JVM, FLOAT and DOUBLE. - * - * can-multi-thread - */ - def isRealType = { (sort == BType.FLOAT ) || (sort == BType.DOUBLE) } - - def isNumericType = (isIntegralType || isRealType) // can-multi-thread - - /* Is this type a category 2 type in JVM terms? (ie, is it LONG or DOUBLE?) - * - * can-multi-thread - */ - def isWideType = (getSize == 2) - - /* - * Element vs. Component type of an array: - * Quoting from the JVMS, Sec. 2.4 "Reference Types and Values" - * - * An array type consists of a component type with a single dimension (whose - * length is not given by the type). The component type of an array type may itself be - * an array type. If, starting from any array type, one considers its component type, - * and then (if that is also an array type) the component type of that type, and so on, - * eventually one must reach a component type that is not an array type; this is called - * the element type of the array type. The element type of an array type is necessarily - * either a primitive type, or a class type, or an interface type. - * - */ - - /* The type of items this array holds. - * - * can-multi-thread - */ - def getComponentType: BType = { - assert(isArray, s"Asked for the component type of a non-array type: $this") - BType.getType(off + 1) - } - - // ------------------------------------------------------------------------ - // Conversion to type descriptors - // ------------------------------------------------------------------------ - - /* - * @return the descriptor corresponding to this Java type. 
- * - * can-multi-thread - */ - def getDescriptor: String = { - val buf = new StringBuffer() - getDescriptor(buf) - buf.toString() - } - - /* - * Appends the descriptor corresponding to this Java type to the given string buffer. - * - * @param buf the string buffer to which the descriptor must be appended. - * - * can-multi-thread - */ - private def getDescriptor(buf: StringBuffer) { - if (isPrimitiveOrVoid) { - // descriptor is in byte 3 of 'off' for primitive types (buf == null) - buf.append(((off & 0xFF000000) >>> 24).asInstanceOf[Char]) - } else if (sort == BType.OBJECT) { - buf.append('L') - buf.append(chrs, off, len) - buf.append(';') - } else { // sort == ARRAY || sort == METHOD - buf.append(chrs, off, len) - } - } - - // ------------------------------------------------------------------------ - // Corresponding size and opcodes - // ------------------------------------------------------------------------ - - /* - * Returns the size of values of this type. - * This method must not be used for method types. - * - * @return the size of values of this type, i.e., 2 for long and - * double, 0 for void and 1 otherwise. - * - * can-multi-thread - */ - def getSize: Int = { - // the size is in byte 0 of 'off' for primitive types (buf == null) - if (isPrimitiveOrVoid) (off & 0xFF) else 1 - } - - /* - * Returns a JVM instruction opcode adapted to this Java type. This method - * must not be used for method types. - * - * @param opcode a JVM instruction opcode. This opcode must be one of ILOAD, - * ISTORE, IALOAD, IASTORE, IADD, ISUB, IMUL, IDIV, IREM, INEG, ISHL, - * ISHR, IUSHR, IAND, IOR, IXOR and IRETURN. - * @return an opcode that is similar to the given opcode, but adapted to - * this Java type. For example, if this type is float and - * opcode is IRETURN, this method returns FRETURN. - * - * can-multi-thread - */ - def getOpcode(opcode: Int): Int = { - import scala.tools.asm.Opcodes - if (opcode == Opcodes.IALOAD || opcode == Opcodes.IASTORE) { - // the offset for IALOAD or IASTORE is in byte 1 of 'off' for - // primitive types (buf == null) - opcode + (if (isPrimitiveOrVoid) (off & 0xFF00) >> 8 else 4) - } else { - // the offset for other instructions is in byte 2 of 'off' for - // primitive types (buf == null) - opcode + (if (isPrimitiveOrVoid) (off & 0xFF0000) >> 16 else 4) - } - } - - // ------------------------------------------------------------------------ - // Equals, hashCode and toString - // ------------------------------------------------------------------------ - - /* - * Tests if the given object is equal to this type. - * - * @param o the object to be compared to this type. - * @return true if the given object is equal to this type. - * - * can-multi-thread - */ - override def equals(o: Any): Boolean = { - if (!(o.isInstanceOf[BType])) { - return false - } - val t = o.asInstanceOf[BType] - if (this eq t) { - return true - } - if (sort != t.sort) { - return false - } - if (sort >= BType.ARRAY) { - if (len != t.len) { - return false - } - // sort checked already - if (off == t.off) { - return true - } - var i = 0 - while (i < len) { - if (chrs(off + i) != chrs(t.off + i)) { - return false - } - i += 1 - } - // If we reach here, we could update the largest of (this.off, t.off) to match the other, so as to simplify future == comparisons. - // But that would require a var rather than val. - } - true - } - - /* - * @return a hash code value for this type. 
- * - * can-multi-thread - */ - override def hashCode(): Int = { - var hc = 13 * sort; - if (sort >= BType.ARRAY) { - var i = off - val end = i + len - while (i < end) { - hc = 17 * (hc + chrs(i)) - i += 1 - } - } - hc - } - - /* - * @return the descriptor of this type. - * - * can-multi-thread - */ - override def toString: String = { getDescriptor } - - } - - /* - * Creates a TypeName and the BType token for it. - * This method does not add to `innerClassBufferASM`, use `internalName()` or `asmType()` or `toTypeKind()` for that. - * - * must-single-thread - */ - def brefType(iname: String): BType = { brefType(newTypeName(iname.toCharArray(), 0, iname.length())) } - - /* - * Creates a BType token for the TypeName received as argument. - * This method does not add to `innerClassBufferASM`, use `internalName()` or `asmType()` or `toTypeKind()` for that. - * - * can-multi-thread - */ - def brefType(iname: TypeName): BType = { BType.getObjectType(iname.start, iname.length) } - - // due to keyboard economy only - val UNIT = BType.VOID_TYPE - val BOOL = BType.BOOLEAN_TYPE - val CHAR = BType.CHAR_TYPE - val BYTE = BType.BYTE_TYPE - val SHORT = BType.SHORT_TYPE - val INT = BType.INT_TYPE - val LONG = BType.LONG_TYPE - val FLOAT = BType.FLOAT_TYPE - val DOUBLE = BType.DOUBLE_TYPE - - val BOXED_UNIT = brefType("java/lang/Void") - val BOXED_BOOLEAN = brefType("java/lang/Boolean") - val BOXED_BYTE = brefType("java/lang/Byte") - val BOXED_SHORT = brefType("java/lang/Short") - val BOXED_CHAR = brefType("java/lang/Character") - val BOXED_INT = brefType("java/lang/Integer") - val BOXED_LONG = brefType("java/lang/Long") - val BOXED_FLOAT = brefType("java/lang/Float") - val BOXED_DOUBLE = brefType("java/lang/Double") - - /* - * RT_NOTHING and RT_NULL exist at run-time only. - * They are the bytecode-level manifestation (in method signatures only) of what shows up as NothingClass resp. NullClass in Scala ASTs. - * Therefore, when RT_NOTHING or RT_NULL are to be emitted, - * a mapping is needed: the internal names of NothingClass and NullClass can't be emitted as-is. - */ - val RT_NOTHING = brefType("scala/runtime/Nothing$") - val RT_NULL = brefType("scala/runtime/Null$") - val CT_NOTHING = brefType("scala/Nothing") // TODO needed? - val CT_NULL = brefType("scala/Null") // TODO needed? - - val srBooleanRef = brefType("scala/runtime/BooleanRef") - val srByteRef = brefType("scala/runtime/ByteRef") - val srCharRef = brefType("scala/runtime/CharRef") - val srIntRef = brefType("scala/runtime/IntRef") - val srLongRef = brefType("scala/runtime/LongRef") - val srFloatRef = brefType("scala/runtime/FloatRef") - val srDoubleRef = brefType("scala/runtime/DoubleRef") - - /* Map from type kinds to the Java reference types. - * Useful when pushing class literals onto the operand stack (ldc instruction taking a class literal). 
- * @see Predef.classOf - * @see genConstant() - */ - val classLiteral = immutable.Map[BType, BType]( - UNIT -> BOXED_UNIT, - BOOL -> BOXED_BOOLEAN, - BYTE -> BOXED_BYTE, - SHORT -> BOXED_SHORT, - CHAR -> BOXED_CHAR, - INT -> BOXED_INT, - LONG -> BOXED_LONG, - FLOAT -> BOXED_FLOAT, - DOUBLE -> BOXED_DOUBLE - ) - - case class MethodNameAndType(mname: String, mdesc: String) - - val asmBoxTo: Map[BType, MethodNameAndType] = { - Map( - BOOL -> MethodNameAndType("boxToBoolean", "(Z)Ljava/lang/Boolean;" ) , - BYTE -> MethodNameAndType("boxToByte", "(B)Ljava/lang/Byte;" ) , - CHAR -> MethodNameAndType("boxToCharacter", "(C)Ljava/lang/Character;") , - SHORT -> MethodNameAndType("boxToShort", "(S)Ljava/lang/Short;" ) , - INT -> MethodNameAndType("boxToInteger", "(I)Ljava/lang/Integer;" ) , - LONG -> MethodNameAndType("boxToLong", "(J)Ljava/lang/Long;" ) , - FLOAT -> MethodNameAndType("boxToFloat", "(F)Ljava/lang/Float;" ) , - DOUBLE -> MethodNameAndType("boxToDouble", "(D)Ljava/lang/Double;" ) - ) - } - - val asmUnboxTo: Map[BType, MethodNameAndType] = { - Map( - BOOL -> MethodNameAndType("unboxToBoolean", "(Ljava/lang/Object;)Z") , - BYTE -> MethodNameAndType("unboxToByte", "(Ljava/lang/Object;)B") , - CHAR -> MethodNameAndType("unboxToChar", "(Ljava/lang/Object;)C") , - SHORT -> MethodNameAndType("unboxToShort", "(Ljava/lang/Object;)S") , - INT -> MethodNameAndType("unboxToInt", "(Ljava/lang/Object;)I") , - LONG -> MethodNameAndType("unboxToLong", "(Ljava/lang/Object;)J") , - FLOAT -> MethodNameAndType("unboxToFloat", "(Ljava/lang/Object;)F") , - DOUBLE -> MethodNameAndType("unboxToDouble", "(Ljava/lang/Object;)D") - ) - } -} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index 359e5d6c2931..8d1c37532ece 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -8,8 +8,7 @@ package tools.nsc package backend.jvm import scala.tools.asm -import scala.annotation.switch -import scala.collection.{ immutable, mutable } +import scala.collection.mutable import scala.tools.nsc.io.AbstractFile /* @@ -19,9 +18,10 @@ import scala.tools.nsc.io.AbstractFile * @version 1.0 * */ -abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { - +abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { import global._ + import bTypes._ + import coreBTypes._ /* * must-single-thread @@ -38,7 +38,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { outputDirectory(csym) } catch { case ex: Throwable => - cunit.error(cunit.body.pos, s"Couldn't create file for class $cName\n${ex.getMessage}") + reporter.error(cunit.body.pos, s"Couldn't create file for class $cName\n${ex.getMessage}") null } } @@ -53,80 +53,24 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { // https://issues.scala-lang.org/browse/SI-3872 // ----------------------------------------------------------------------------------------- - /* - * can-multi-thread - */ - def firstCommonSuffix(as: List[Tracked], bs: List[Tracked]): BType = { - var chainA = as - var chainB = bs - var fcs: Tracked = null - do { - if (chainB contains chainA.head) fcs = chainA.head - else if (chainA contains chainB.head) fcs = chainB.head - else { - chainA = chainA.tail - chainB = chainB.tail - } - } while (fcs == null) - fcs.c - } - /* An `asm.ClassWriter` that uses `jvmWiseLUB()` * The internal name of the least common ancestor of the types given 
by inameA and inameB. * It's what ASM needs to know in order to compute stack map frames, http://asm.ow2.org/doc/developer-guide.html#controlflow */ final class CClassWriter(flags: Int) extends asm.ClassWriter(flags) { - /* - * This method is thread re-entrant because chrs never grows during its operation (that's because all TypeNames being looked up have already been entered). - * To stress this point, rather than using `newTypeName()` we use `lookupTypeName()` - * - * can-multi-thread + /** + * This method is thread-safe: it depends only on the BTypes component, which does not depend + * on global. TODO @lry move to a different place where no global is in scope, on bTypes. */ override def getCommonSuperClass(inameA: String, inameB: String): String = { - val a = brefType(lookupTypeName(inameA.toCharArray)) - val b = brefType(lookupTypeName(inameB.toCharArray)) - val lca = jvmWiseLUB(a, b) - val lcaName = lca.getInternalName // don't call javaName because that side-effects innerClassBuffer. - assert(lcaName != "scala/Any") - - lcaName // ASM caches the answer during the lifetime of a ClassWriter. We outlive that. Not sure whether caching on our side would improve things. - } - - } - - /* - * Finding the least upper bound in agreement with the bytecode verifier (given two internal names handed out by ASM) - * Background: - * http://gallium.inria.fr/~xleroy/publi/bytecode-verification-JAR.pdf - * http://comments.gmane.org/gmane.comp.java.vm.languages/2293 - * https://issues.scala-lang.org/browse/SI-3872 - * - * can-multi-thread - */ - def jvmWiseLUB(a: BType, b: BType): BType = { - - assert(a.isNonSpecial, s"jvmWiseLUB() received a non-plain-class $a") - assert(b.isNonSpecial, s"jvmWiseLUB() received a non-plain-class $b") - - val ta = exemplars.get(a) - val tb = exemplars.get(b) - - val res = (ta.isInterface, tb.isInterface) match { - case (true, true) => - // exercised by test/files/run/t4761.scala - if (tb.isSubtypeOf(ta.c)) ta.c - else if (ta.isSubtypeOf(tb.c)) tb.c - else ObjectReference - case (true, false) => - if (tb.isSubtypeOf(a)) a else ObjectReference - case (false, true) => - if (ta.isSubtypeOf(b)) b else ObjectReference - case _ => - firstCommonSuffix(ta :: ta.superClasses, tb :: tb.superClasses) + val a = classBTypeFromInternalName(inameA) + val b = classBTypeFromInternalName(inameB) + val lub = a.jvmWiseLUB(b) + val lubName = lub.internalName + assert(lubName != "scala/Any") + lubName // ASM caches the answer during the lifetime of a ClassWriter. We outlive that. Not sure whether caching on our side would improve things. 
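[Editorial aside, not part of the patch] For context, here is a standalone sketch of the question getCommonSuperClass has to answer for ASM's stack-map-frame computation: the internal name of the JVM-wise least upper bound of two class types. This reflective version is similar in spirit to ASM's default implementation and is not what the compiler does; the override above answers the same question from the ClassBType model, so classes that are still being compiled never have to be loaded. All names below are invented for the sketch.

object CommonSuperClassSketch {
  /** JVM-wise LUB of two class internal names, computed via runtime reflection. */
  def commonSuperClass(inameA: String, inameB: String): String = {
    def load(iname: String): Class[_] =
      Class.forName(iname.replace('/', '.'), false, getClass.getClassLoader)
    val a = load(inameA)
    val b = load(inameB)
    if (a.isAssignableFrom(b)) inameA
    else if (b.isAssignableFrom(a)) inameB
    else if (a.isInterface || b.isInterface) "java/lang/Object" // interfaces meet at Object
    else {
      var c: Class[_] = a
      do c = c.getSuperclass while (!c.isAssignableFrom(b)) // climb a's superclasses
      c.getName.replace('.', '/')
    }
  }

  def main(args: Array[String]): Unit = {
    println(commonSuperClass("java/lang/Integer", "java/lang/Long"))   // java/lang/Number
    println(commonSuperClass("java/lang/String",  "java/lang/Integer")) // java/lang/Object
  }
}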
} - assert(res.isNonSpecial, "jvmWiseLUB() returned a non-plain-class.") - res } /* @@ -139,7 +83,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { */ def apply(sym: Symbol, csymCompUnit: CompilationUnit): Boolean = { def fail(msg: String, pos: Position = sym.pos) = { - csymCompUnit.warning(sym.pos, + reporter.warning(sym.pos, sym.name + s" has a main method with parameter type Array[String], but ${sym.fullName('.')} will not be a runnable program.\n Reason: $msg" // TODO: make this next claim true, if possible @@ -228,7 +172,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { def fieldSymbols(cls: Symbol): List[Symbol] = { for (f <- cls.info.decls.toList ; if !f.isMethod && f.isTerm && !f.isModule - ) yield f; + ) yield f } /* @@ -238,6 +182,13 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { cd.impl.body collect { case dd: DefDef => dd.symbol } } + /* + * must-single-thread + */ + def serialVUID(csym: Symbol): Option[Long] = csym getAnnotation definitions.SerialVersionUIDAttr collect { + case AnnotationInfo(_, _, (_, LiteralAnnotArg(const)) :: Nil) => const.longValue + } + /* * Populates the InnerClasses JVM attribute with `refedInnerClasses`. * In addition to inner classes mentioned somewhere in `jclass` (where `jclass` is a class file being emitted) @@ -252,38 +203,16 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { * * can-multi-thread */ - final def addInnerClassesASM(jclass: asm.ClassVisitor, refedInnerClasses: Iterable[BType]) { - // used to detect duplicates. - val seen = mutable.Map.empty[String, String] - // result without duplicates, not yet sorted. - val result = mutable.Set.empty[InnerClassEntry] - - for(s: BType <- refedInnerClasses; - e: InnerClassEntry <- exemplars.get(s).innersChain) { - - assert(e.name != null, "saveInnerClassesFor() is broken.") // documentation - val doAdd = seen.get(e.name) match { - // TODO is it ok for prevOName to be null? (Someone should really document the invariants of the InnerClasses bytecode attribute) - case Some(prevOName) => - // this occurs e.g. when innerClassBuffer contains both class Thread$State, object Thread$State, - // i.e. for them it must be the case that oname == java/lang/Thread - assert(prevOName == e.outerName, "duplicate") - false - case None => true - } + final def addInnerClassesASM(jclass: asm.ClassVisitor, refedInnerClasses: List[ClassBType]) { + val allNestedClasses = refedInnerClasses.flatMap(_.enclosingNestedClassesChain).distinct - if (doAdd) { - seen += (e.name -> e.outerName) - result += e - } - - } - // sorting ensures inner classes are listed after their enclosing class thus satisfying the Eclipse Java compiler - for(e <- result.toList sortBy (_.name.toString)) { - jclass.visitInnerClass(e.name, e.outerName, e.innerName, e.access) + // sorting ensures nested classes are listed after their enclosing class thus satisfying the Eclipse Java compiler + for (nestedClass <- allNestedClasses.sortBy(_.internalName.toString)) { + // Extract the innerClassEntry - we know it exists, enclosingNestedClassesChain only returns nested classes. 
+ val Some(e) = nestedClass.innerClassAttributeEntry + jclass.visitInnerClass(e.name, e.outerName, e.innerName, e.flags) } - - } // end of method addInnerClassesASM() + } /* * Custom attribute (JVMS 4.7.1) "ScalaSig" used as marker only @@ -314,8 +243,8 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { * can-multi-thread */ def createJAttribute(name: String, b: Array[Byte], offset: Int, len: Int): asm.Attribute = { - val dest = new Array[Byte](len); - System.arraycopy(b, offset, dest, 0, len); + val dest = new Array[Byte](len) + System.arraycopy(b, offset, dest, 0, len) new asm.CustomAttr(name, dest) } @@ -376,9 +305,9 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { def debugLevel = settings.debuginfo.indexOfChoice - val emitSource = debugLevel >= 1 - val emitLines = debugLevel >= 2 - val emitVars = debugLevel >= 3 + final val emitSource = debugLevel >= 1 + final val emitLines = debugLevel >= 2 + final val emitVars = debugLevel >= 3 /* * Contains class-symbols that: @@ -387,203 +316,160 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { * * In other words, the lifetime of `innerClassBufferASM` is associated to "the class being generated". */ - val innerClassBufferASM = mutable.Set.empty[BType] + final val innerClassBufferASM = mutable.Set.empty[ClassBType] - /* - * Tracks (if needed) the inner class given by `sym`. - * - * must-single-thread + /** + * The class internal name for a given class symbol. If the symbol describes a nested class, the + * ClassBType is added to the innerClassBufferASM. */ - final def internalName(sym: Symbol): String = { asmClassType(sym).getInternalName } + final def internalName(sym: Symbol): String = { + // For each java class, the scala compiler creates a class and a module (thus a module class). + // If the `sym` is a java module class, we use the java class instead. This ensures that we + // register the class (instead of the module class) in innerClassBufferASM. + // The two symbols have the same name, so the resulting internalName is the same. + val classSym = if (sym.isJavaDefined && sym.isModuleClass) sym.linkedClassOfClass else sym + getClassBTypeAndRegisterInnerClass(classSym).internalName + } - /* - * Tracks (if needed) the inner class given by `sym`. + private def assertClassNotArray(sym: Symbol): Unit = { + assert(sym.isClass, sym) + assert(sym != definitions.ArrayClass || isCompilingArray, sym) + } + + private def assertClassNotArrayNotPrimitive(sym: Symbol): Unit = { + assertClassNotArray(sym) + assert(!primitiveTypeMap.contains(sym) || isCompilingPrimitive, sym) + } + + /** + * The ClassBType for a class symbol. If the class is nested, the ClassBType is added to the + * innerClassBufferASM. * - * must-single-thread + * The class symbol scala.Nothing is mapped to the class scala.runtime.Nothing$. Similarly, + * scala.Null is mapped to scala.runtime.Null$. This is because there exist no class files + * for the Nothing / Null. If used for example as a parameter type, we use the runtime classes + * in the classfile method signature. + * + * Note that the referenced class symbol may be an implementation class. For example when + * compiling a mixed-in method that forwards to the static method in the implementation class, + * the class descriptor of the receiver (the implementation class) is obtained by creating the + * ClassBType. 
*/ - final def asmClassType(sym: Symbol): BType = { - assert( - hasInternalName(sym), - { - val msg0 = if (sym.isAbstractType) "An AbstractTypeSymbol (SI-7122) " else "A symbol "; - msg0 + s"has reached the bytecode emitter, for which no JVM-level internal name can be found: ${sym.fullName}" - } - ) - val phantOpt = phantomTypeMap.get(sym) - if (phantOpt.isDefined) { - return phantOpt.get - } - val tracked = exemplar(sym) - val tk = tracked.c - if (tracked.isInnerClass) { - innerClassBufferASM += tk - } + final def getClassBTypeAndRegisterInnerClass(sym: Symbol): ClassBType = { + assertClassNotArrayNotPrimitive(sym) - tk + if (sym == definitions.NothingClass) RT_NOTHING + else if (sym == definitions.NullClass) RT_NULL + else { + val r = classBTypeFromSymbol(sym) + if (r.isNestedClass) innerClassBufferASM += r + r + } } - /* - * Returns the BType for the given type. - * Tracks (if needed) the inner class given by `t`. + /** + * This method returns the BType for a type reference, for example a parameter type. * - * must-single-thread + * If the result is a ClassBType for a nested class, it is added to the innerClassBufferASM. + * + * If `t` references a class, toTypeKind ensures that the class is not an implementation class. + * See also comment on getClassBTypeAndRegisterInnerClass, which is invoked for implementation + * classes. */ final def toTypeKind(t: Type): BType = { + import definitions.ArrayClass - /* Interfaces have to be handled delicately to avoid introducing spurious errors, - * but if we treat them all as AnyRef we lose too much information. + /** + * Primitive types are represented as TypeRefs to the class symbol of, for example, scala.Int. + * The `primitiveTypeMap` maps those class symbols to the corresponding PrimitiveBType. */ - def newReference(sym0: Symbol): BType = { - assert(!primitiveTypeMap.contains(sym0), "Use primitiveTypeMap instead.") - assert(sym0 != definitions.ArrayClass, "Use arrayOf() instead.") - - if (sym0 == definitions.NullClass) return RT_NULL; - if (sym0 == definitions.NothingClass) return RT_NOTHING; - - val sym = ( - if (!sym0.isPackageClass) sym0 - else sym0.info.member(nme.PACKAGE) match { - case NoSymbol => abort(s"SI-5604: Cannot use package as value: ${sym0.fullName}") - case s => abort(s"SI-5604: found package class where package object expected: $s") - } - ) - - // Can't call .toInterface (at this phase) or we trip an assertion. - // See PackratParser#grow for a method which fails with an apparent mismatch - // between "object PackratParsers$class" and "trait PackratParsers" - if (sym.isImplClass) { - // pos/spec-List.scala is the sole failure if we don't check for NoSymbol - val traitSym = sym.owner.info.decl(tpnme.interfaceName(sym.name)) - if (traitSym != NoSymbol) { - // this tracks the inner class in innerClassBufferASM, if needed. 
- return asmClassType(traitSym) - } - } - - assert(hasInternalName(sym), s"Invoked for a symbol lacking JVM internal name: ${sym.fullName}") - assert(!phantomTypeMap.contains(sym), "phantom types not supposed to reach here.") - - val tracked = exemplar(sym) - val tk = tracked.c - if (tracked.isInnerClass) { - innerClassBufferASM += tk - } - - tk + def primitiveOrClassToBType(sym: Symbol): BType = { + assertClassNotArray(sym) + assert(!sym.isImplClass, sym) + primitiveTypeMap.getOrElse(sym, getClassBTypeAndRegisterInnerClass(sym)) } - def primitiveOrRefType(sym: Symbol): BType = { - assert(sym != definitions.ArrayClass, "Use primitiveOrArrayOrRefType() instead.") - - primitiveTypeMap.getOrElse(sym, newReference(sym)) - } - - def primitiveOrRefType2(sym: Symbol): BType = { - primitiveTypeMap.get(sym) match { - case Some(pt) => pt - case None => - sym match { - case definitions.NullClass => RT_NULL - case definitions.NothingClass => RT_NOTHING - case _ if sym.isClass => newReference(sym) - case _ => - assert(sym.isType, sym) // it must be compiling Array[a] - ObjectReference - } - } + /** + * When compiling Array.scala, the type parameter T is not erased and shows up in method + * signatures, e.g. `def apply(i: Int): T`. A TyperRef to T is replaced by ObjectReference. + */ + def nonClassTypeRefToBType(sym: Symbol): ClassBType = { + assert(sym.isType && isCompilingArray, sym) + ObjectReference } - import definitions.ArrayClass - - // Call to .normalize fixes #3003 (follow type aliases). Otherwise, primitiveOrArrayOrRefType() would return ObjectReference. - t.normalize match { - - case ThisType(sym) => - if (sym == ArrayClass) ObjectReference - else phantomTypeMap.getOrElse(sym, exemplar(sym).c) - - case SingleType(_, sym) => primitiveOrRefType(sym) - - case _: ConstantType => toTypeKind(t.underlying) - - case TypeRef(_, sym, args) => - if (sym == ArrayClass) arrayOf(toTypeKind(args.head)) - else primitiveOrRefType2(sym) - - case ClassInfoType(_, _, sym) => - assert(sym != ArrayClass, "ClassInfoType to ArrayClass!") - primitiveOrRefType(sym) - - // !!! Iulian says types which make no sense after erasure should not reach here, which includes the ExistentialType, AnnotatedType, RefinedType. - case ExistentialType(_, t) => toTypeKind(t) // TODO shouldn't get here but the following does: akka-actor/src/main/scala/akka/util/WildcardTree.scala - case AnnotatedType(_, w) => toTypeKind(w) // TODO test/files/jvm/annotations.scala causes an AnnotatedType to reach here. - case RefinedType(parents, _) => parents map toTypeKind reduceLeft jvmWiseLUB - - // For sure WildcardTypes shouldn't reach here either, but when debugging such situations this may come in handy. - // case WildcardType => REFERENCE(ObjectClass) - case norm => abort( - s"Unknown type: $t, $norm [${t.getClass}, ${norm.getClass}] TypeRef? ${t.isInstanceOf[TypeRef]}" - ) + t.dealiasWiden match { + case TypeRef(_, ArrayClass, List(arg)) => ArrayBType(toTypeKind(arg)) // Array type such as Array[Int] (kept by erasure) + case TypeRef(_, sym, _) if !sym.isClass => nonClassTypeRefToBType(sym) // See comment on nonClassTypeRefToBType + case TypeRef(_, sym, _) => primitiveOrClassToBType(sym) // Common reference to a type such as scala.Int or java.lang.String + case ClassInfoType(_, _, sym) => primitiveOrClassToBType(sym) // We get here, for example, for genLoadModule, which invokes toTypeKind(moduleClassSymbol.info) + + /* AnnotatedType should (probably) be eliminated by erasure. 
However we know it happens for + * meta-annotated annotations (@(ann @getter) val x = 0), so we don't emit a warning. + * The type in the AnnotationInfo is an AnnotatedTpe. Tested in jvm/annotations.scala. + */ + case a @ AnnotatedType(_, t) => + debuglog(s"typeKind of annotated type $a") + toTypeKind(t) + + /* ExistentialType should (probably) be eliminated by erasure. We know they get here for + * classOf constants: + * class C[T] + * class T { final val k = classOf[C[_]] } + */ + case e @ ExistentialType(_, t) => + debuglog(s"typeKind of existential type $e") + toTypeKind(t) + + /* The cases below should probably never occur. They are kept for now to avoid introducing + * new compiler crashes, but we added a warning. The compiler / library bootstrap and the + * test suite don't produce any warning. + */ + + case tp => + currentUnit.warning(tp.typeSymbol.pos, + s"an unexpected type representation reached the compiler backend while compiling $currentUnit: $tp. " + + "If possible, please file a bug on issues.scala-lang.org.") + + tp match { + case ThisType(ArrayClass) => ObjectReference // was introduced in 9b17332f11 to fix SI-999, but this code is not reached in its test, or any other test + case ThisType(sym) => getClassBTypeAndRegisterInnerClass(sym) + case SingleType(_, sym) => primitiveOrClassToBType(sym) + case ConstantType(_) => toTypeKind(t.underlying) + case RefinedType(parents, _) => parents.map(toTypeKind(_).asClassBType).reduceLeft((a, b) => a.jvmWiseLUB(b)) + } } - - } // end of method toTypeKind() + } /* * must-single-thread */ - def asmMethodType(msym: Symbol): BType = { + final def asmMethodType(msym: Symbol): MethodBType = { assert(msym.isMethod, s"not a method-symbol: $msym") val resT: BType = - if (msym.isClassConstructor || msym.isConstructor) BType.VOID_TYPE - else toTypeKind(msym.tpe.resultType); - BType.getMethodType( resT, mkArray(msym.tpe.paramTypes map toTypeKind) ) - } - - /* - * Returns all direct member inner classes of `csym`, - * thus making sure they get entries in the InnerClasses JVM attribute - * even if otherwise not mentioned in the class being built. - * - * must-single-thread - */ - final def trackMemberClasses(csym: Symbol, lateClosuresBTs: List[BType]): List[BType] = { - val lateInnerClasses = exitingErasure { - for (sym <- List(csym, csym.linkedClassOfClass); memberc <- sym.info.decls.map(innerClassSymbolFor) if memberc.isClass) - yield memberc - } - // as a precaution, do the following outside the above `exitingErasure` otherwise funny internal names might be computed. - val result = for(memberc <- lateInnerClasses) yield { - val tracked = exemplar(memberc) - val memberCTK = tracked.c - assert(tracked.isInnerClass, s"saveInnerClassesFor() says this was no inner-class after all: ${memberc.fullName}") - - memberCTK - } - - exemplar(csym).directMemberClasses = result - - result + if (msym.isClassConstructor || msym.isConstructor) UNIT + else toTypeKind(msym.tpe.resultType) + MethodBType(msym.tpe.paramTypes map toTypeKind, resT) } - /* - * Tracks (if needed) the inner class given by `t`. - * - * must-single-thread + /** + * The jvm descriptor of a type. If `t` references a nested class, its ClassBType is added to + * the innerClassBufferASM. */ - final def descriptor(t: Type): String = { toTypeKind(t).getDescriptor } + final def descriptor(t: Type): String = { toTypeKind(t).descriptor } - /* - * Tracks (if needed) the inner class given by `sym`. - * - * must-single-thread + /** + * The jvm descriptor for a symbol. 
If `sym` represents a nested class, its ClassBType is added + * to the innerClassBufferASM. */ - final def descriptor(sym: Symbol): String = { asmClassType(sym).getDescriptor } + final def descriptor(sym: Symbol): String = { getClassBTypeAndRegisterInnerClass(sym).descriptor } } // end of trait BCInnerClassGen trait BCAnnotGen extends BCInnerClassGen { import genASM.{ubytesToCharArray, arrEncode} + import bCodeAsmCommon.{shouldEmitAnnotation, isRuntimeVisible} /* * can-multi-thread @@ -648,17 +534,6 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { } } - /* Whether an annotation should be emitted as a Java annotation - * .initialize: if 'annot' is read from pickle, atp might be un-initialized - * - * must-single-thread - */ - private def shouldEmitAnnotation(annot: AnnotationInfo) = - annot.symbol.initialize.isJavaDefined && - annot.matches(definitions.ClassfileAnnotationClass) && - annot.args.isEmpty && - !annot.matches(definitions.DeprecatedAttr) - /* * In general, * must-single-thread @@ -678,7 +553,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { for(annot <- annotations; if shouldEmitAnnotation(annot)) { val AnnotationInfo(typ, args, assocs) = annot assert(args.isEmpty, args) - val av = cw.visitAnnotation(descriptor(typ), true) + val av = cw.visitAnnotation(descriptor(typ), isRuntimeVisible(annot)) emitAssocs(av, assocs) } } @@ -690,7 +565,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { for(annot <- annotations; if shouldEmitAnnotation(annot)) { val AnnotationInfo(typ, args, assocs) = annot assert(args.isEmpty, args) - val av = mw.visitAnnotation(descriptor(typ), true) + val av = mw.visitAnnotation(descriptor(typ), isRuntimeVisible(annot)) emitAssocs(av, assocs) } } @@ -702,7 +577,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { for(annot <- annotations; if shouldEmitAnnotation(annot)) { val AnnotationInfo(typ, args, assocs) = annot assert(args.isEmpty, args) - val av = fw.visitAnnotation(descriptor(typ), true) + val av = fw.visitAnnotation(descriptor(typ), isRuntimeVisible(annot)) emitAssocs(av, assocs) } } @@ -717,7 +592,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { annot <- annots) { val AnnotationInfo(typ, args, assocs) = annot assert(args.isEmpty, args) - val pannVisitor: asm.AnnotationVisitor = jmethod.visitParameterAnnotation(idx, descriptor(typ), true) + val pannVisitor: asm.AnnotationVisitor = jmethod.visitParameterAnnotation(idx, descriptor(typ), isRuntimeVisible(annot)) emitAssocs(pannVisitor, assocs) } } @@ -740,13 +615,6 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { trait BCForwardersGen extends BCAnnotGen with BCJGenSigGen { - // ----------------------------------------------------------------------------------------- - // Static forwarders (related to mirror classes but also present in - // a plain class lacking companion module, for details see `isCandidateForForwarders`). - // ----------------------------------------------------------------------------------------- - - val ExcludedForwarderFlags = genASM.ExcludedForwarderFlags - /* Adds a @remote annotation, actual use unknown. * * Invoked from genMethod() and addForwarder(). @@ -782,7 +650,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { */ // TODO: evaluate the other flags we might be dropping on the floor here. // TODO: ACC_SYNTHETIC ? 
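      /* A minimal sketch of what addForwarder produces, using hypothetical names.
       * For a module such as
       *
       *   object Echo { def greet(name: String): String = "hi, " + name }
       *
       * the companion (or mirror) class `Echo` gets a public static method with the same
       * erased signature that simply delegates to the module instance; roughly, in Java terms:
       *
       *   public static String greet(String name) { return Echo$.MODULE$.greet(name); }
       *
       * `Echo` and `greet` are illustrative only; the access flags and the delegation call
       * are what the surrounding code computes and emits.
       */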
- val flags = PublicStatic | ( + val flags = GenBCode.PublicStatic | ( if (m.isVarargsMethod) asm.Opcodes.ACC_VARARGS else 0 ) @@ -793,7 +661,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { val thrownExceptions: List[String] = getExceptions(throws) val jReturnType = toTypeKind(methodInfo.resultType) - val mdesc = BType.getMethodType(jReturnType, mkArray(paramJavaTypes)).getDescriptor + val mdesc = MethodBType(paramJavaTypes, jReturnType).descriptor val mirrorMethodName = m.javaSimpleName.toString val mirrorMethod: asm.MethodVisitor = jclass.visitMethod( flags, @@ -812,13 +680,13 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { var index = 0 for(jparamType <- paramJavaTypes) { - mirrorMethod.visitVarInsn(jparamType.getOpcode(asm.Opcodes.ILOAD), index) - assert(jparamType.sort != BType.METHOD, jparamType) - index += jparamType.getSize + mirrorMethod.visitVarInsn(jparamType.typedOpcode(asm.Opcodes.ILOAD), index) + assert(!jparamType.isInstanceOf[MethodBType], jparamType) + index += jparamType.size } - mirrorMethod.visitMethodInsn(asm.Opcodes.INVOKEVIRTUAL, moduleName, mirrorMethodName, asmMethodType(m).getDescriptor) - mirrorMethod.visitInsn(jReturnType.getOpcode(asm.Opcodes.IRETURN)) + mirrorMethod.visitMethodInsn(asm.Opcodes.INVOKEVIRTUAL, moduleName, mirrorMethodName, asmMethodType(m).descriptor, false) + mirrorMethod.visitInsn(jReturnType.typedOpcode(asm.Opcodes.IRETURN)) mirrorMethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments mirrorMethod.visitEnd() @@ -842,7 +710,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { } debuglog(s"Potentially conflicting names for forwarders: $conflictingNames") - for (m <- moduleClass.info.membersBasedOnFlags(ExcludedForwarderFlags, symtab.Flags.METHOD)) { + for (m <- moduleClass.info.membersBasedOnFlags(bCodeAsmCommon.ExcludedForwarderFlags, symtab.Flags.METHOD)) { if (m.isType || m.isDeferred || (m.owner eq definitions.ObjectClass) || m.isConstructor) debuglog(s"No forwarder for '$m' from $jclassName to '$moduleClass'") else if (conflictingNames(m.name)) @@ -880,13 +748,6 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { // The particular value in use for `MIN_SWITCH_DENSITY` reflects a heuristic. val MIN_SWITCH_DENSITY = 0.7 - /* - * must-single-thread - */ - def serialVUID(csym: Symbol): Option[Long] = csym getAnnotation definitions.SerialVersionUIDAttr collect { - case AnnotationInfo(_, Literal(const) :: _, _) => const.longValue - } - /* * Add public static final field serialVersionUID with value `id` * @@ -895,72 +756,18 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { def addSerialVUID(id: Long, jclass: asm.ClassVisitor) { // add static serialVersionUID field if `clasz` annotated with `@SerialVersionUID(uid: Long)` jclass.visitField( - PublicStaticFinal, + GenBCode.PublicStaticFinal, "serialVersionUID", "J", null, // no java-generic-signature new java.lang.Long(id) ).visitEnd() } - - /* - * @param owner internal name of the enclosing class of the class. - * - * @param name the name of the method that contains the class. - - * @param methodType the method that contains the class. - */ - case class EnclMethodEntry(owner: String, name: String, methodType: BType) - - /* - * @return null if the current class is not internal to a method - * - * Quoting from JVMS 4.7.7 The EnclosingMethod Attribute - * A class must have an EnclosingMethod attribute if and only if it is a local class or an anonymous class. 
- * A class may have no more than one EnclosingMethod attribute. - * - * must-single-thread - */ - def getEnclosingMethodAttribute(clazz: Symbol): EnclMethodEntry = { // JVMS 4.7.7 - - def newEEE(eClass: Symbol, m: Symbol) = { - EnclMethodEntry( - internalName(eClass), - m.javaSimpleName.toString, - asmMethodType(m) - ) - } - - var res: EnclMethodEntry = null - val sym = clazz.originalEnclosingMethod - if (sym.isMethod) { - debuglog(s"enclosing method for $clazz is $sym (in ${sym.enclClass})") - res = newEEE(sym.enclClass, sym) - } else if (clazz.isAnonymousClass) { - val enclClass = clazz.rawowner - assert(enclClass.isClass, enclClass) - val sym = enclClass.primaryConstructor - if (sym == NoSymbol) { - log(s"Ran out of room looking for an enclosing method for $clazz: no constructor here: $enclClass.") - } else { - debuglog(s"enclosing method for $clazz is $sym (in $enclClass)") - res = newEEE(enclClass, sym) - } - } - - res - } - } // end of trait BCClassGen - /* basic functionality for class file building of plain, mirror, and beaninfo classes. */ - abstract class JBuilder extends BCInnerClassGen { - - } // end of class JBuilder - /* functionality for building plain and mirror classes */ abstract class JCommonBuilder - extends JBuilder + extends BCInnerClassGen with BCAnnotGen with BCForwardersGen with BCPickles { } @@ -979,41 +786,38 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { * * must-single-thread */ - def genMirrorClass(modsym: Symbol, cunit: CompilationUnit): asm.tree.ClassNode = { - assert(modsym.companionClass == NoSymbol, modsym) + def genMirrorClass(moduleClass: Symbol, cunit: CompilationUnit): asm.tree.ClassNode = { + assert(moduleClass.isModuleClass) + assert(moduleClass.companionClass == NoSymbol, moduleClass) innerClassBufferASM.clear() this.cunit = cunit - val moduleName = internalName(modsym) // + "$" - val mirrorName = moduleName.substring(0, moduleName.length() - 1) - val flags = (asm.Opcodes.ACC_SUPER | asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_FINAL) + val bType = mirrorClassClassBType(moduleClass) val mirrorClass = new asm.tree.ClassNode mirrorClass.visit( classfileVersion, - flags, - mirrorName, + bType.info.flags, + bType.internalName, null /* no java-generic-signature */, - JAVA_LANG_OBJECT.getInternalName, + ObjectReference.internalName, EMPTY_STRING_ARRAY ) - if (emitSource) { - mirrorClass.visitSource("" + cunit.source, - null /* SourceDebugExtension */) - } + if (emitSource) + mirrorClass.visitSource("" + cunit.source, null /* SourceDebugExtension */) - val ssa = getAnnotPickle(mirrorName, modsym.companionSymbol) + val ssa = getAnnotPickle(bType.internalName, moduleClass.companionSymbol) mirrorClass.visitAttribute(if (ssa.isDefined) pickleMarkerLocal else pickleMarkerForeign) - emitAnnotations(mirrorClass, modsym.annotations ++ ssa) + emitAnnotations(mirrorClass, moduleClass.annotations ++ ssa) - addForwarders(isRemote(modsym), mirrorClass, mirrorName, modsym) + addForwarders(isRemote(moduleClass), mirrorClass, bType.internalName, moduleClass) - innerClassBufferASM ++= trackMemberClasses(modsym, Nil /* TODO what about Late-Closure-Classes */ ) + innerClassBufferASM ++= bType.info.nestedClasses addInnerClassesASM(mirrorClass, innerClassBufferASM.toList) mirrorClass.visitEnd() - ("" + modsym.name) // this side-effect is necessary, really. + ("" + moduleClass.name) // this side-effect is necessary, really. 
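      /* A minimal sketch of what genMirrorClass emits, with hypothetical names. For a top-level
       *
       *   object Util { def twice(i: Int): Int = i * 2 }
       *
       * that has no companion class, a mirror class `Util` is generated next to the module
       * class `Util$`; it holds only static forwarders, roughly equivalent to the Java source:
       *
       *   public final class Util {
       *     public static int twice(int i) { return Util$.MODULE$.twice(i); }
       *   }
       *
       * so Java callers can write Util.twice(3) without going through MODULE$.
       */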
mirrorClass } @@ -1021,7 +825,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { } // end of class JMirrorBuilder /* builder of bean info classes */ - class JBeanInfoBuilder extends JBuilder { + class JBeanInfoBuilder extends BCInnerClassGen { /* * Generate a bean info class that describes the given class. @@ -1036,10 +840,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { innerClassBufferASM.clear() - val flags = mkFlags( - javaFlags(cls), - if (isDeprecated(cls)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag - ) + val flags = javaFlags(cls) val beanInfoName = (internalName(cls) + "BeanInfo") val beanInfoClass = new asm.tree.ClassNode @@ -1085,12 +886,11 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { EMPTY_STRING_ARRAY // no throwable exceptions ) - val stringArrayJType: BType = arrayOf(JAVA_LANG_STRING) - val conJType: BType = - BType.getMethodType( - BType.VOID_TYPE, - Array(exemplar(definitions.ClassClass).c, stringArrayJType, stringArrayJType) - ) + val stringArrayJType: BType = ArrayBType(StringReference) + val conJType: BType = MethodBType( + classBTypeFromSymbol(definitions.ClassClass) :: stringArrayJType :: stringArrayJType :: Nil, + UNIT + ) def push(lst: List[String]) { var fi = 0 @@ -1099,7 +899,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { constructor.visitLdcInsn(new java.lang.Integer(fi)) if (f == null) { constructor.visitInsn(asm.Opcodes.ACONST_NULL) } else { constructor.visitLdcInsn(f) } - constructor.visitInsn(JAVA_LANG_STRING.getOpcode(asm.Opcodes.IASTORE)) + constructor.visitInsn(StringReference.typedOpcode(asm.Opcodes.IASTORE)) fi += 1 } } @@ -1108,27 +908,27 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { constructor.visitVarInsn(asm.Opcodes.ALOAD, 0) // push the class - constructor.visitLdcInsn(exemplar(cls).c) + constructor.visitLdcInsn(classBTypeFromSymbol(cls).toASMType) // push the string array of field information constructor.visitLdcInsn(new java.lang.Integer(fieldList.length)) - constructor.visitTypeInsn(asm.Opcodes.ANEWARRAY, JAVA_LANG_STRING.getInternalName) + constructor.visitTypeInsn(asm.Opcodes.ANEWARRAY, StringReference.internalName) push(fieldList) // push the string array of method information constructor.visitLdcInsn(new java.lang.Integer(methodList.length)) - constructor.visitTypeInsn(asm.Opcodes.ANEWARRAY, JAVA_LANG_STRING.getInternalName) + constructor.visitTypeInsn(asm.Opcodes.ANEWARRAY, StringReference.internalName) push(methodList) // invoke the superclass constructor, which will do the // necessary java reflection and create Method objects. 
- constructor.visitMethodInsn(asm.Opcodes.INVOKESPECIAL, "scala/beans/ScalaBeanInfo", INSTANCE_CONSTRUCTOR_NAME, conJType.getDescriptor) + constructor.visitMethodInsn(asm.Opcodes.INVOKESPECIAL, "scala/beans/ScalaBeanInfo", INSTANCE_CONSTRUCTOR_NAME, conJType.descriptor, false) constructor.visitInsn(asm.Opcodes.RETURN) constructor.visitMaxs(0, 0) // just to follow protocol, dummy arguments constructor.visitEnd() - innerClassBufferASM ++= trackMemberClasses(cls, Nil /* TODO what about Late-Closure-Classes */ ) + innerClassBufferASM ++= classBTypeFromSymbol(cls).info.nestedClasses addInnerClassesASM(beanInfoClass, innerClassBufferASM.toList) beanInfoClass.visitEnd() @@ -1160,11 +960,11 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { */ def legacyAddCreatorCode(clinit: asm.MethodVisitor, cnode: asm.tree.ClassNode, thisName: String) { // this tracks the inner class in innerClassBufferASM, if needed. - val androidCreatorType = asmClassType(AndroidCreatorClass) - val tdesc_creator = androidCreatorType.getDescriptor + val androidCreatorType = getClassBTypeAndRegisterInnerClass(AndroidCreatorClass) + val tdesc_creator = androidCreatorType.descriptor cnode.visitField( - PublicStaticFinal, + GenBCode.PublicStaticFinal, "CREATOR", tdesc_creator, null, // no java-generic-signature @@ -1182,12 +982,13 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { ) // INVOKEVIRTUAL `moduleName`.CREATOR() : android.os.Parcelable$Creator; - val bt = BType.getMethodType(androidCreatorType, Array.empty[BType]) + val bt = MethodBType(Nil, androidCreatorType) clinit.visitMethodInsn( asm.Opcodes.INVOKEVIRTUAL, moduleName, "CREATOR", - bt.getDescriptor + bt.descriptor, + false ) // PUTSTATIC `thisName`.CREATOR; @@ -1200,5 +1001,4 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { } } // end of trait JAndroidBuilder - } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeICodeCommon.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeICodeCommon.scala new file mode 100644 index 000000000000..50d20921d50c --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeICodeCommon.scala @@ -0,0 +1,25 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2014 LAMP/EPFL + * @author Martin Odersky + */ + +package scala.tools.nsc.backend.jvm + +import scala.tools.nsc.Global +import PartialFunction._ + +/** + * This trait contains code shared between GenBCode and GenICode that depends on types defined in + * the compiler cake (Global). + */ +final class BCodeICodeCommon[G <: Global](val global: G) { + import global._ + + /** Some useful equality helpers. 
*/ + def isNull(t: Tree) = cond(t) { case Literal(Constant(null)) => true } + def isLiteral(t: Tree) = cond(t) { case Literal(_) => true } + def isNonNullExpr(t: Tree) = isLiteral(t) || ((t.symbol ne null) && t.symbol.isModule) + + /** If l or r is constant null, returns the other ; otherwise null */ + def ifOneIsNull(l: Tree, r: Tree) = if (isNull(l)) r else if (isNull(r)) l else null +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala index c3492b79a9e6..c3db28151b55 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala @@ -9,8 +9,7 @@ package backend.jvm import scala.tools.asm import scala.annotation.switch -import scala.collection.{ immutable, mutable } -import collection.convert.Wrappers.JListWrapper +import scala.collection.mutable /* * A high-level facade to the ASM API for bytecode generation. @@ -19,22 +18,24 @@ import collection.convert.Wrappers.JListWrapper * @version 1.0 * */ -abstract class BCodeIdiomatic extends BCodeGlue { +abstract class BCodeIdiomatic extends SubComponent { + val bTypes = new BTypesFromSymbols[global.type](global) import global._ + import bTypes._ + import coreBTypes._ val classfileVersion: Int = settings.target.value match { case "jvm-1.5" => asm.Opcodes.V1_5 case "jvm-1.6" => asm.Opcodes.V1_6 case "jvm-1.7" => asm.Opcodes.V1_7 + case "jvm-1.8" => asm.Opcodes.V1_8 } val majorVersion: Int = (classfileVersion & 0xFF) val emitStackMapFrame = (majorVersion >= 50) - def mkFlags(args: Int*) = args.foldLeft(0)(_ | _) - - val extraProc: Int = mkFlags( + val extraProc: Int = GenBCode.mkFlags( asm.ClassWriter.COMPUTE_MAXS, if (emitStackMapFrame) asm.ClassWriter.COMPUTE_FRAMES else 0 ) @@ -44,15 +45,6 @@ abstract class BCodeIdiomatic extends BCodeGlue { val CLASS_CONSTRUCTOR_NAME = "" val INSTANCE_CONSTRUCTOR_NAME = "" - val ObjectReference = brefType("java/lang/Object") - val AnyRefReference = ObjectReference - val objArrayReference = arrayOf(ObjectReference) - - val JAVA_LANG_OBJECT = ObjectReference - val JAVA_LANG_STRING = brefType("java/lang/String") - - var StringBuilderReference: BType = null - val EMPTY_STRING_ARRAY = Array.empty[String] val EMPTY_INT_ARRAY = Array.empty[Int] val EMPTY_LABEL_ARRAY = Array.empty[asm.Label] @@ -108,17 +100,6 @@ abstract class BCodeIdiomatic extends BCodeGlue { a } - /* - * The type of 1-dimensional arrays of `elem` type. - * The invoker is responsible for tracking (if needed) the inner class given by the elem BType. - * - * must-single-thread - */ - final def arrayOf(elem: BType): BType = { - assert(!(elem.isUnitType), s"The element type of an array can't be: $elem") - brefType("[" + elem.getDescriptor) - } - /* Just a namespace for utilities that encapsulate MethodVisitor idioms. 
* In the ASM world, org.objectweb.asm.commons.InstructionAdapter plays a similar role, * but the methods here allow choosing when to transition from ICode to ASM types @@ -242,12 +223,12 @@ abstract class BCodeIdiomatic extends BCodeGlue { final def genStringConcat(el: BType) { val jtype = - if (el.isArray || el.hasObjectSort) JAVA_LANG_OBJECT - else el; + if (el.isArray || el.isClass) ObjectReference + else el - val bt = BType.getMethodType(StringBuilderReference, Array(jtype)) + val bt = MethodBType(List(jtype), StringBuilderReference) - invokevirtual(StringBuilderClassName, "append", bt.getDescriptor) + invokevirtual(StringBuilderClassName, "append", bt.descriptor) } /* @@ -268,8 +249,8 @@ abstract class BCodeIdiomatic extends BCodeGlue { final def emitT2T(from: BType, to: BType) { assert( - from.isNonUnitValueType && to.isNonUnitValueType, - s"Cannot emit primitive conversion from $from to $to" + from.isNonVoidPrimitiveType && to.isNonVoidPrimitiveType, + s"Cannot emit primitive conversion from $from to $to - ${global.currentUnit}" ) def pickOne(opcs: Array[Int]) { // TODO index on to.sort @@ -290,37 +271,37 @@ abstract class BCodeIdiomatic extends BCodeGlue { assert(from != BOOL && to != BOOL, s"inconvertible types : $from -> $to") // We're done with BOOL already - (from.sort: @switch) match { + (from: @unchecked) match { // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match" - case asm.Type.BYTE => pickOne(JCodeMethodN.fromByteT2T) - case asm.Type.SHORT => pickOne(JCodeMethodN.fromShortT2T) - case asm.Type.CHAR => pickOne(JCodeMethodN.fromCharT2T) - case asm.Type.INT => pickOne(JCodeMethodN.fromIntT2T) + case BYTE => pickOne(JCodeMethodN.fromByteT2T) + case SHORT => pickOne(JCodeMethodN.fromShortT2T) + case CHAR => pickOne(JCodeMethodN.fromCharT2T) + case INT => pickOne(JCodeMethodN.fromIntT2T) - case asm.Type.FLOAT => + case FLOAT => import asm.Opcodes.{ F2L, F2D, F2I } - (to.sort: @switch) match { - case asm.Type.LONG => emit(F2L) - case asm.Type.DOUBLE => emit(F2D) - case _ => emit(F2I); emitT2T(INT, to) + to match { + case LONG => emit(F2L) + case DOUBLE => emit(F2D) + case _ => emit(F2I); emitT2T(INT, to) } - case asm.Type.LONG => + case LONG => import asm.Opcodes.{ L2F, L2D, L2I } - (to.sort: @switch) match { - case asm.Type.FLOAT => emit(L2F) - case asm.Type.DOUBLE => emit(L2D) - case _ => emit(L2I); emitT2T(INT, to) + to match { + case FLOAT => emit(L2F) + case DOUBLE => emit(L2D) + case _ => emit(L2I); emitT2T(INT, to) } - case asm.Type.DOUBLE => + case DOUBLE => import asm.Opcodes.{ D2L, D2F, D2I } - (to.sort: @switch) match { - case asm.Type.FLOAT => emit(D2F) - case asm.Type.LONG => emit(D2L) - case _ => emit(D2I); emitT2T(INT, to) + to match { + case FLOAT => emit(D2F) + case LONG => emit(D2L) + case _ => emit(D2I); emitT2T(INT, to) } } } // end of emitT2T() @@ -372,24 +353,26 @@ abstract class BCodeIdiomatic extends BCodeGlue { // can-multi-thread final def newarray(elem: BType) { - if (elem.isRefOrArrayType || elem.isPhantomType ) { - /* phantom type at play in `Array(null)`, SI-1513. On the other hand, Array(()) has element type `scala.runtime.BoxedUnit` which hasObjectSort. 
*/ - jmethod.visitTypeInsn(Opcodes.ANEWARRAY, elem.getInternalName) - } else { - val rand = { - // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match" - (elem.sort: @switch) match { - case asm.Type.BOOLEAN => Opcodes.T_BOOLEAN - case asm.Type.BYTE => Opcodes.T_BYTE - case asm.Type.SHORT => Opcodes.T_SHORT - case asm.Type.CHAR => Opcodes.T_CHAR - case asm.Type.INT => Opcodes.T_INT - case asm.Type.LONG => Opcodes.T_LONG - case asm.Type.FLOAT => Opcodes.T_FLOAT - case asm.Type.DOUBLE => Opcodes.T_DOUBLE + elem match { + case c: RefBType => + /* phantom type at play in `Array(null)`, SI-1513. On the other hand, Array(()) has element type `scala.runtime.BoxedUnit` which isObject. */ + jmethod.visitTypeInsn(Opcodes.ANEWARRAY, c.classOrArrayType) + case _ => + assert(elem.isNonVoidPrimitiveType) + val rand = { + // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match" + (elem: @unchecked) match { + case BOOL => Opcodes.T_BOOLEAN + case BYTE => Opcodes.T_BYTE + case SHORT => Opcodes.T_SHORT + case CHAR => Opcodes.T_CHAR + case INT => Opcodes.T_INT + case LONG => Opcodes.T_LONG + case FLOAT => Opcodes.T_FLOAT + case DOUBLE => Opcodes.T_DOUBLE + } } - } - jmethod.visitIntInsn(Opcodes.NEWARRAY, rand) + jmethod.visitIntInsn(Opcodes.NEWARRAY, rand) } } @@ -409,19 +392,19 @@ abstract class BCodeIdiomatic extends BCodeGlue { // can-multi-thread final def invokespecial(owner: String, name: String, desc: String) { - jmethod.visitMethodInsn(Opcodes.INVOKESPECIAL, owner, name, desc) + jmethod.visitMethodInsn(Opcodes.INVOKESPECIAL, owner, name, desc, false) } // can-multi-thread final def invokestatic(owner: String, name: String, desc: String) { - jmethod.visitMethodInsn(Opcodes.INVOKESTATIC, owner, name, desc) + jmethod.visitMethodInsn(Opcodes.INVOKESTATIC, owner, name, desc, false) } // can-multi-thread final def invokeinterface(owner: String, name: String, desc: String) { - jmethod.visitMethodInsn(Opcodes.INVOKEINTERFACE, owner, name, desc) + jmethod.visitMethodInsn(Opcodes.INVOKEINTERFACE, owner, name, desc, true) } // can-multi-thread final def invokevirtual(owner: String, name: String, desc: String) { - jmethod.visitMethodInsn(Opcodes.INVOKEVIRTUAL, owner, name, desc) + jmethod.visitMethodInsn(Opcodes.INVOKEVIRTUAL, owner, name, desc, false) } // can-multi-thread @@ -529,7 +512,7 @@ abstract class BCodeIdiomatic extends BCodeGlue { // can-multi-thread final def emitVarInsn(opc: Int, idx: Int, tk: BType) { assert((opc == Opcodes.ILOAD) || (opc == Opcodes.ISTORE), opc) - jmethod.visitVarInsn(tk.getOpcode(opc), idx) + jmethod.visitVarInsn(tk.typedOpcode(opc), idx) } // ---------------- array load and store ---------------- @@ -538,7 +521,7 @@ abstract class BCodeIdiomatic extends BCodeGlue { final def emitTypeBased(opcs: Array[Int], tk: BType) { assert(tk != UNIT, tk) val opc = { - if (tk.isRefOrArrayType) { opcs(0) } + if (tk.isRef) { opcs(0) } else if (tk.isIntSizedType) { (tk: @unchecked) match { case BOOL | BYTE => opcs(1) @@ -563,11 +546,11 @@ abstract class BCodeIdiomatic extends BCodeGlue { final def emitPrimitive(opcs: Array[Int], tk: BType) { val opc = { // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match" - (tk.sort: @switch) match { - case asm.Type.LONG => opcs(1) - case asm.Type.FLOAT => opcs(2) - case asm.Type.DOUBLE => opcs(3) - case _ => opcs(0) + tk match { + case LONG 
=> opcs(1) + case FLOAT => opcs(2) + case DOUBLE => opcs(3) + case _ => opcs(0) } } emit(opc) @@ -582,15 +565,14 @@ abstract class BCodeIdiomatic extends BCodeGlue { // ---------------- type checks and casts ---------------- // can-multi-thread - final def isInstance(tk: BType) { - jmethod.visitTypeInsn(Opcodes.INSTANCEOF, tk.getInternalName) + final def isInstance(tk: RefBType): Unit = { + jmethod.visitTypeInsn(Opcodes.INSTANCEOF, tk.classOrArrayType) } // can-multi-thread - final def checkCast(tk: BType) { - assert(tk.isRefOrArrayType, s"checkcast on primitive type: $tk") + final def checkCast(tk: RefBType): Unit = { // TODO ICode also requires: but that's too much, right? assert(!isBoxedType(tk), "checkcast on boxed type: " + tk) - jmethod.visitTypeInsn(Opcodes.CHECKCAST, tk.getInternalName) + jmethod.visitTypeInsn(Opcodes.CHECKCAST, tk.classOrArrayType) } } // end of class JCodeMethodN @@ -650,7 +632,7 @@ abstract class BCodeIdiomatic extends BCodeGlue { */ final def coercionTo(code: Int): BType = { import scalaPrimitives._ - (code: @scala.annotation.switch) match { + (code: @switch) match { case B2B | C2B | S2B | I2B | L2B | F2B | D2B => BYTE case B2C | C2C | S2C | I2C | L2C | F2C | D2C => CHAR case B2S | C2S | S2S | I2S | L2S | F2S | D2S => SHORT @@ -661,21 +643,6 @@ abstract class BCodeIdiomatic extends BCodeGlue { } } - final val typeOfArrayOp: Map[Int, BType] = { - import scalaPrimitives._ - Map( - (List(ZARRAY_LENGTH, ZARRAY_GET, ZARRAY_SET) map (_ -> BOOL)) ++ - (List(BARRAY_LENGTH, BARRAY_GET, BARRAY_SET) map (_ -> BYTE)) ++ - (List(SARRAY_LENGTH, SARRAY_GET, SARRAY_SET) map (_ -> SHORT)) ++ - (List(CARRAY_LENGTH, CARRAY_GET, CARRAY_SET) map (_ -> CHAR)) ++ - (List(IARRAY_LENGTH, IARRAY_GET, IARRAY_SET) map (_ -> INT)) ++ - (List(LARRAY_LENGTH, LARRAY_GET, LARRAY_SET) map (_ -> LONG)) ++ - (List(FARRAY_LENGTH, FARRAY_GET, FARRAY_SET) map (_ -> FLOAT)) ++ - (List(DARRAY_LENGTH, DARRAY_GET, DARRAY_SET) map (_ -> DOUBLE)) ++ - (List(OARRAY_LENGTH, OARRAY_GET, OARRAY_SET) map (_ -> ObjectReference)) : _* - ) - } - /* * Collects (in `result`) all LabelDef nodes enclosed (directly or not) by each node it visits. 
* diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala index 360ce58ecc57..142c901c2166 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala @@ -14,6 +14,8 @@ import scala.tools.nsc.symtab._ import scala.annotation.switch import scala.tools.asm +import scala.tools.asm.util.{TraceMethodVisitor, ASMifier} +import java.io.PrintWriter /* * @@ -23,6 +25,9 @@ import scala.tools.asm */ abstract class BCodeSkelBuilder extends BCodeHelpers { import global._ + import bTypes._ + import coreBTypes._ + import bCodeAsmCommon._ /* * There's a dedicated PlainClassBuilder for each CompilationUnit, @@ -92,7 +97,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { claszSymbol = cd.symbol isCZParcelable = isAndroidParcelableClass(claszSymbol) - isCZStaticModule = isStaticModule(claszSymbol) + isCZStaticModule = isStaticModuleClass(claszSymbol) isCZRemote = isRemote(claszSymbol) thisName = internalName(claszSymbol) @@ -113,10 +118,14 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { addClassFields() - innerClassBufferASM ++= trackMemberClasses(claszSymbol, Nil) - + innerClassBufferASM ++= classBTypeFromSymbol(claszSymbol).info.nestedClasses gen(cd.impl) + addInnerClassesASM(cnode, innerClassBufferASM.toList) + + if (AsmUtils.traceClassEnabled && cnode.name.contains(AsmUtils.traceClassPattern)) + AsmUtils.traceClass(cnode) + cnode.innerClasses assert(cd.symbol == claszSymbol, "Someone messed up BCodePhase.claszSymbol during genPlainClass().") } // end of method genPlainClass() @@ -127,40 +136,28 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { private def initJClass(jclass: asm.ClassVisitor) { val ps = claszSymbol.info.parents - val superClass: String = if (ps.isEmpty) JAVA_LANG_OBJECT.getInternalName else internalName(ps.head.typeSymbol); - val ifaces: Array[String] = { - val arrIfacesTr: Array[Tracked] = exemplar(claszSymbol).ifaces - val arrIfaces = new Array[String](arrIfacesTr.length) - var i = 0 - while (i < arrIfacesTr.length) { - val ifaceTr = arrIfacesTr(i) - val bt = ifaceTr.c - if (ifaceTr.isInnerClass) { innerClassBufferASM += bt } - arrIfaces(i) = bt.getInternalName - i += 1 - } - arrIfaces + val superClass: String = if (ps.isEmpty) ObjectReference.internalName else internalName(ps.head.typeSymbol) + val interfaceNames = classBTypeFromSymbol(claszSymbol).info.interfaces map { + case classBType => + if (classBType.isNestedClass) { innerClassBufferASM += classBType } + classBType.internalName } - // `internalName()` tracks inner classes. 
- val flags = mkFlags( - javaFlags(claszSymbol), - if (isDeprecated(claszSymbol)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag - ) + val flags = javaFlags(claszSymbol) val thisSignature = getGenericSignature(claszSymbol, claszSymbol.owner) cnode.visit(classfileVersion, flags, thisName, thisSignature, - superClass, ifaces) + superClass, interfaceNames.toArray) if (emitSource) { cnode.visitSource(cunit.source.toString, null /* SourceDebugExtension */) } - val enclM = getEnclosingMethodAttribute(claszSymbol) - if (enclM != null) { - val EnclMethodEntry(className, methodName, methodType) = enclM - cnode.visitOuterClass(className, methodName, methodType.getDescriptor) + enclosingMethodAttribute(claszSymbol, internalName, asmMethodType(_).descriptor) match { + case Some(EnclosingMethodEntry(className, methodName, methodDescriptor)) => + cnode.visitOuterClass(className, methodName, methodDescriptor) + case _ => () } val ssa = getAnnotPickle(thisName, claszSymbol) @@ -200,7 +197,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { */ private def addModuleInstanceField() { val fv = - cnode.visitField(PublicStaticFinal, // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED + cnode.visitField(GenBCode.PublicStaticFinal, // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED strMODULE_INSTANCE_FIELD, "L" + thisName + ";", null, // no java-generic-signature @@ -216,7 +213,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { private def fabricateStaticInit() { val clinit: asm.MethodVisitor = cnode.visitMethod( - PublicStatic, // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED + GenBCode.PublicStatic, // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED CLASS_CONSTRUCTOR_NAME, "()V", null, // no java-generic-signature @@ -228,7 +225,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { if (isCZStaticModule) { clinit.visitTypeInsn(asm.Opcodes.NEW, thisName) clinit.visitMethodInsn(asm.Opcodes.INVOKESPECIAL, - thisName, INSTANCE_CONSTRUCTOR_NAME, "()V") + thisName, INSTANCE_CONSTRUCTOR_NAME, "()V", false) } if (isCZParcelable) { legacyAddCreatorCode(clinit, cnode, thisName) } clinit.visitInsn(asm.Opcodes.RETURN) @@ -247,15 +244,12 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { */ for (f <- fieldSymbols(claszSymbol)) { val javagensig = getGenericSignature(f, claszSymbol) - val flags = mkFlags( - javaFieldFlags(f), - if (isDeprecated(f)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag - ) + val flags = javaFieldFlags(f) val jfield = new asm.tree.FieldNode( flags, f.javaSimpleName.toString, - symInfoTK(f).getDescriptor, + symInfoTK(f).descriptor, javagensig, null // no initial value ) @@ -352,6 +346,13 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { /* * Bookkeeping for method-local vars and method-params. + * + * TODO: use fewer slots. local variable slots are never re-used in separate blocks. + * In the following example, x and y could use the same slot. 
+ * def foo() = { + * { val x = 1 } + * { val y = "a" } + * } */ object locals { @@ -391,8 +392,8 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { assert(nxtIdx != -1, "not a valid start index") val loc = Local(tk, sym.javaSimpleName.toString, nxtIdx, sym.isSynthetic) slots += (sym -> loc) - assert(tk.getSize > 0, "makeLocal called for a symbol whose type is Unit.") - nxtIdx += tk.getSize + assert(tk.size > 0, "makeLocal called for a symbol whose type is Unit.") + nxtIdx += tk.size loc } @@ -525,7 +526,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { if (isMethSymStaticCtor) CLASS_CONSTRUCTOR_NAME else jMethodName - val mdesc = asmMethodType(methSymbol).getDescriptor + val mdesc = asmMethodType(methSymbol).descriptor mnode = cnode.visitMethod( flags, bytecodeName, @@ -549,7 +550,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { methSymbol = dd.symbol jMethodName = methSymbol.javaSimpleName.toString - returnType = asmMethodType(dd.symbol).getReturnType + returnType = asmMethodType(dd.symbol).returnType isMethSymStaticCtor = methSymbol.isStaticConstructor resetMethodBookkeeping(dd) @@ -563,18 +564,17 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { if (params.size > MaximumJvmParameters) { // SI-7324 - cunit.error(methSymbol.pos, s"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters.") + reporter.error(methSymbol.pos, s"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters.") return } val isNative = methSymbol.hasAnnotation(definitions.NativeAttr) val isAbstractMethod = (methSymbol.isDeferred || methSymbol.owner.isInterface) - val flags = mkFlags( + val flags = GenBCode.mkFlags( javaFlags(methSymbol), if (claszSymbol.isInterface) asm.Opcodes.ACC_ABSTRACT else 0, if (methSymbol.isStrictFP) asm.Opcodes.ACC_STRICT else 0, - if (isNative) asm.Opcodes.ACC_NATIVE else 0, // native methods of objects are generated in mirror classes - if (isDeprecated(methSymbol)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag + if (isNative) asm.Opcodes.ACC_NATIVE else 0 // native methods of objects are generated in mirror classes ) // TODO needed? for(ann <- m.symbol.annotations) { ann.symbol.initialize } @@ -639,6 +639,10 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { // Note we don't invoke visitMax, thus there are no FrameNode among mnode.instructions. // The only non-instruction nodes to be found are LabelNode and LineNumberNode. 
} + + if (AsmUtils.traceMethodEnabled && mnode.name.contains(AsmUtils.traceMethodPattern)) + AsmUtils.traceMethod(mnode) + mnode = null } // end of method genDefDef() @@ -675,8 +679,8 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { val callee = methSymbol.enclClass.primaryConstructor val jname = callee.javaSimpleName.toString val jowner = internalName(callee.owner) - val jtype = asmMethodType(callee).getDescriptor - insnModB = new asm.tree.MethodInsnNode(asm.Opcodes.INVOKESPECIAL, jowner, jname, jtype) + val jtype = asmMethodType(callee).descriptor + insnModB = new asm.tree.MethodInsnNode(asm.Opcodes.INVOKESPECIAL, jowner, jname, jtype, false) } var insnParcA: asm.tree.AbstractInsnNode = null @@ -684,7 +688,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { // android creator code if (isCZParcelable) { // add a static field ("CREATOR") to this class to cache android.os.Parcelable$Creator - val andrFieldDescr = asmClassType(AndroidCreatorClass).getDescriptor + val andrFieldDescr = getClassBTypeAndRegisterInnerClass(AndroidCreatorClass).descriptor cnode.visitField( asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL, "CREATOR", @@ -696,8 +700,8 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { val callee = definitions.getMember(claszSymbol.companionModule, androidFieldName) val jowner = internalName(callee.owner) val jname = callee.javaSimpleName.toString - val jtype = asmMethodType(callee).getDescriptor - insnParcA = new asm.tree.MethodInsnNode(asm.Opcodes.INVOKESTATIC, jowner, jname, jtype) + val jtype = asmMethodType(callee).descriptor + insnParcA = new asm.tree.MethodInsnNode(asm.Opcodes.INVOKESTATIC, jowner, jname, jtype, false) // PUTSTATIC `thisName`.CREATOR; insnParcB = new asm.tree.FieldInsnNode(asm.Opcodes.PUTSTATIC, thisName, "CREATOR", andrFieldDescr) } @@ -713,7 +717,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { def emitLocalVarScope(sym: Symbol, start: asm.Label, end: asm.Label, force: Boolean = false) { val Local(tk, name, idx, isSynth) = locals(sym) if (force || !isSynth) { - mnode.visitLocalVariable(name, tk.getDescriptor, null, start, end, idx) + mnode.visitLocalVariable(name, tk.descriptor, null, start, end, idx) } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala index 9ddb7a3ce832..b94208c1a5a4 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala @@ -9,9 +9,7 @@ package tools.nsc package backend package jvm -import scala.collection.{ mutable, immutable } -import scala.annotation.switch - +import scala.collection.immutable import scala.tools.asm /* @@ -22,7 +20,8 @@ import scala.tools.asm */ abstract class BCodeSyncAndTry extends BCodeBodyBuilder { import global._ - + import bTypes._ + import coreBTypes._ /* * Functionality to lower `synchronized` and `try` expressions. 
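  /* A minimal sketch of the lowering this trait performs. A hypothetical method
   *
   *   def f(lock: AnyRef) = lock.synchronized { work() }
   *
   * compiles to bytecode along the lines of
   *
   *   ALOAD lock; DUP; ASTORE tmp; MONITORENTER
   *   <work()>; ALOAD tmp; MONITOREXIT; GOTO done
   *   handler: ALOAD tmp; MONITOREXIT; ATHROW   // the handler protects the monitored body
   *   done: ...
   *
   * try/finally is treated similarly: the finalizer is duplicated on the normal exit path,
   * on early-return paths, and in a catch-all handler that rethrows.
   */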
@@ -184,7 +183,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder { val caseHandlers: List[EHClause] = for (CaseDef(pat, _, caseBody) <- catches) yield { pat match { - case Typed(Ident(nme.WILDCARD), tpt) => NamelessEH(tpeTK(tpt), caseBody) + case Typed(Ident(nme.WILDCARD), tpt) => NamelessEH(tpeTK(tpt).asClassBType, caseBody) case Ident(nme.WILDCARD) => NamelessEH(ThrowableReference, caseBody) case Bind(_, _) => BoundEH (pat.symbol, caseBody) } @@ -250,7 +249,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder { // (2.a) emit case clause proper val startHandler = currProgramPoint() var endHandler: asm.Label = null - var excType: BType = null + var excType: ClassBType = null registerCleanup(finCleanup) ch match { case NamelessEH(typeToDrop, caseBody) => @@ -269,7 +268,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder { nopIfNeeded(startHandler) endHandler = currProgramPoint() emitLocalVarScope(patSymbol, startHandler, endHandler) - excType = patTK + excType = patTK.asClassBType } unregisterCleanup(finCleanup) // (2.b) mark the try-body as protected by this case clause. @@ -285,7 +284,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder { * ------ */ - // a note on terminology: this is not "postHandlers", despite appearences. + // a note on terminology: this is not "postHandlers", despite appearances. // "postHandlers" as in the source-code view. And from that perspective, both (3.A) and (3.B) are invisible implementation artifacts. if (hasFinally) { nopIfNeeded(startTryBody) @@ -357,10 +356,10 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder { } } - def protect(start: asm.Label, end: asm.Label, handler: asm.Label, excType: BType) { + def protect(start: asm.Label, end: asm.Label, handler: asm.Label, excType: ClassBType) { val excInternalName: String = if (excType == null) null - else excType.getInternalName + else excType.internalName assert(start != end, "protecting a range of zero instructions leads to illegal class format. Solution: add a NOP to that range.") mnode.visitTryCatchBlock(start, end, handler, excInternalName) } @@ -387,7 +386,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder { def mayCleanStack(tree: Tree): Boolean = tree exists { t => t.isInstanceOf[Try] } trait EHClause - case class NamelessEH(typeToDrop: BType, caseBody: Tree) extends EHClause + case class NamelessEH(typeToDrop: ClassBType, caseBody: Tree) extends EHClause case class BoundEH (patSymbol: Symbol, caseBody: Tree) extends EHClause } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeTypes.scala deleted file mode 100644 index 1eca69936a30..000000000000 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeTypes.scala +++ /dev/null @@ -1,880 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2012 LAMP/EPFL - * @author Martin Odersky - */ - -package scala -package tools.nsc -package backend.jvm - -import scala.tools.asm -import scala.collection.{ immutable, mutable } - -/* - * Utilities to mediate between types as represented in Scala ASTs and ASM trees. 
- * - * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded - * @version 1.0 - * - */ -abstract class BCodeTypes extends BCodeIdiomatic { - - import global._ - - // when compiling the Scala library, some assertions don't hold (e.g., scala.Boolean has null superClass although it's not an interface) - val isCompilingStdLib = !(settings.sourcepath.isDefault) - - val srBoxedUnit = brefType("scala/runtime/BoxedUnit") - - // special names - var StringReference : BType = null - var ThrowableReference : BType = null - var jlCloneableReference : BType = null // java/lang/Cloneable - var jlNPEReference : BType = null // java/lang/NullPointerException - var jioSerializableReference : BType = null // java/io/Serializable - var scalaSerializableReference : BType = null // scala/Serializable - var classCastExceptionReference : BType = null // java/lang/ClassCastException - - /* A map from scala primitive type-symbols to BTypes */ - var primitiveTypeMap: Map[Symbol, BType] = null - /* A map from scala type-symbols for Nothing and Null to (runtime version) BTypes */ - var phantomTypeMap: Map[Symbol, BType] = null - /* Maps the method symbol for a box method to the boxed type of the result. - * For example, the method symbol for `Byte.box()`) is mapped to the BType `Ljava/lang/Integer;`. */ - var boxResultType: Map[Symbol, BType] = null - /* Maps the method symbol for an unbox method to the primitive type of the result. - * For example, the method symbol for `Byte.unbox()`) is mapped to the BType BYTE. */ - var unboxResultType: Map[Symbol, BType] = null - - var hashMethodSym: Symbol = null // scala.runtime.ScalaRunTime.hash - - var AndroidParcelableInterface: Symbol = null - var AndroidCreatorClass : Symbol = null // this is an inner class, use asmType() to get hold of its BType while tracking in innerClassBufferASM - - var BeanInfoAttr: Symbol = null - - /* The Object => String overload. */ - var String_valueOf: Symbol = null - - var ArrayInterfaces: Set[Tracked] = null - - // scala.FunctionX and scala.runtim.AbstractFunctionX - val FunctionReference = new Array[Tracked](definitions.MaxFunctionArity + 1) - val AbstractFunctionReference = new Array[Tracked](definitions.MaxFunctionArity + 1) - val abstractFunctionArityMap = mutable.Map.empty[BType, Int] - - var PartialFunctionReference: BType = null // scala.PartialFunction - var AbstractPartialFunctionReference: BType = null // scala.runtime.AbstractPartialFunction - - var BoxesRunTime: BType = null - - /* - * must-single-thread - */ - def initBCodeTypes() { - import definitions._ - - primitiveTypeMap = - Map( - UnitClass -> UNIT, - BooleanClass -> BOOL, - CharClass -> CHAR, - ByteClass -> BYTE, - ShortClass -> SHORT, - IntClass -> INT, - LongClass -> LONG, - FloatClass -> FLOAT, - DoubleClass -> DOUBLE - ) - - phantomTypeMap = - Map( - NothingClass -> RT_NOTHING, - NullClass -> RT_NULL, - NothingClass -> RT_NOTHING, // we map on purpose to RT_NOTHING, getting rid of the distinction compile-time vs. runtime for NullClass. - NullClass -> RT_NULL // ditto. - ) - - boxResultType = - for((csym, msym) <- currentRun.runDefinitions.boxMethod) - yield (msym -> classLiteral(primitiveTypeMap(csym))) - - unboxResultType = - for((csym, msym) <- currentRun.runDefinitions.unboxMethod) - yield (msym -> primitiveTypeMap(csym)) - - // boxed classes are looked up in the `exemplars` map by jvmWiseLUB(). - // Other than that, they aren't needed there (e.g., `isSubtypeOf()` special-cases boxed classes, similarly for others). 
- val boxedClasses = List(BoxedBooleanClass, BoxedCharacterClass, BoxedByteClass, BoxedShortClass, BoxedIntClass, BoxedLongClass, BoxedFloatClass, BoxedDoubleClass) - for(csym <- boxedClasses) { - val key = brefType(csym.javaBinaryName.toTypeName) - val tr = buildExemplar(key, csym) - symExemplars.put(csym, tr) - exemplars.put(tr.c, tr) - } - - // reversePrimitiveMap = (primitiveTypeMap map { case (s, pt) => (s.tpe, pt) } map (_.swap)).toMap - - hashMethodSym = getMember(ScalaRunTimeModule, nme.hash_) - - // TODO avoiding going through through missingHook for every line in the REPL: https://github.com/scala/scala/commit/8d962ed4ddd310cc784121c426a2e3f56a112540 - AndroidParcelableInterface = rootMirror.getClassIfDefined("android.os.Parcelable") - AndroidCreatorClass = rootMirror.getClassIfDefined("android.os.Parcelable$Creator") - - // the following couldn't be an eager vals in Phase constructors: - // that might cause cycles before Global has finished initialization. - BeanInfoAttr = rootMirror.getRequiredClass("scala.beans.BeanInfo") - - String_valueOf = { - getMember(StringModule, nme.valueOf) filter (sym => - sym.info.paramTypes match { - case List(pt) => pt.typeSymbol == ObjectClass - case _ => false - } - ) - } - - exemplar(JavaCloneableClass) - exemplar(JavaSerializableClass) - exemplar(SerializableClass) - - StringReference = exemplar(StringClass).c - StringBuilderReference = exemplar(StringBuilderClass).c - ThrowableReference = exemplar(ThrowableClass).c - jlCloneableReference = exemplar(JavaCloneableClass).c - jlNPEReference = exemplar(NullPointerExceptionClass).c - jioSerializableReference = exemplar(JavaSerializableClass).c - scalaSerializableReference = exemplar(SerializableClass).c - classCastExceptionReference = exemplar(ClassCastExceptionClass).c - - /* - * The bytecode emitter special-cases String concatenation, in that three methods of `JCodeMethodN` - * ( `genStartConcat()` , `genStringConcat()` , and `genEndConcat()` ) - * don't obtain the method descriptor of the callee via `asmMethodType()` (as normally done) - * but directly emit callsites on StringBuilder using literal constant for method descriptors. - * In order to make sure those method descriptors are available as BTypes, they are initialized here. - */ - BType.getMethodType("()V") // necessary for JCodeMethodN.genStartConcat - BType.getMethodType("()Ljava/lang/String;") // necessary for JCodeMethodN.genEndConcat - - PartialFunctionReference = exemplar(PartialFunctionClass).c - for(idx <- 0 to definitions.MaxFunctionArity) { - FunctionReference(idx) = exemplar(FunctionClass(idx)) - AbstractFunctionReference(idx) = exemplar(AbstractFunctionClass(idx)) - abstractFunctionArityMap += (AbstractFunctionReference(idx).c -> idx) - AbstractPartialFunctionReference = exemplar(AbstractPartialFunctionClass).c - } - - // later a few analyses (e.g. refreshInnerClasses) will look up BTypes based on descriptors in instructions - // we make sure those BTypes can be found via lookup as opposed to creating them on the fly. 
- BoxesRunTime = brefType("scala/runtime/BoxesRunTime") - asmBoxTo.values foreach { mnat: MethodNameAndType => BType.getMethodType(mnat.mdesc) } - asmUnboxTo.values foreach { mnat: MethodNameAndType => BType.getMethodType(mnat.mdesc) } - - } - - /* - * must-single-thread - */ - def clearBCodeTypes() { - symExemplars.clear() - exemplars.clear() - } - - val PublicStatic = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC - val PublicStaticFinal = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL - - val strMODULE_INSTANCE_FIELD = nme.MODULE_INSTANCE_FIELD.toString - - // ------------------------------------------------ - // accessory maps tracking the isInterface, innerClasses, superClass, and supportedInterfaces relations, - // allowing answering `conforms()` without resorting to typer. - // ------------------------------------------------ - - val exemplars = new java.util.concurrent.ConcurrentHashMap[BType, Tracked] - val symExemplars = new java.util.concurrent.ConcurrentHashMap[Symbol, Tracked] - - /* - * Typically, a question about a BType can be answered only by using the BType as lookup key in one or more maps. - * A `Tracked` object saves time by holding together information required to answer those questions: - * - * - `sc` denotes the bytecode-level superclass if any, null otherwise - * - * - `ifaces` denotes the interfaces explicitly declared. - * Not included are those transitively supported, but the utility method `allLeafIfaces()` can be used for that. - * - * - `innersChain` denotes the containing classes for a non-package-level class `c`, null otherwise. - * Note: the optimizer may inline anonymous closures, thus eliding those inner classes - * (no physical class file is emitted for elided classes). - * Before committing `innersChain` to bytecode, cross-check with the list of elided classes (SI-6546). - * - * All methods of this class can-multi-thread - */ - case class Tracked(c: BType, flags: Int, sc: Tracked, ifaces: Array[Tracked], innersChain: Array[InnerClassEntry]) { - - // not a case-field because we initialize it only for JVM classes we emit. - private var _directMemberClasses: List[BType] = null - - def directMemberClasses: List[BType] = { - assert(_directMemberClasses != null, s"getter directMemberClasses() invoked too early for $c") - _directMemberClasses - } - - def directMemberClasses_=(bs: List[BType]) { - if (_directMemberClasses != null) { - // TODO we enter here when both mirror class and plain class are emitted for the same ModuleClassSymbol. 
- assert(_directMemberClasses == bs.sortBy(_.off)) - } - _directMemberClasses = bs.sortBy(_.off) - } - - /* `isCompilingStdLib` saves the day when compiling: - * (1) scala.Nothing (the test `c.isNonSpecial` fails for it) - * (2) scala.Boolean (it has null superClass and is not an interface) - */ - assert(c.isNonSpecial || isCompilingStdLib /*(1)*/, s"non well-formed plain-type: $this") - assert( - if (sc == null) { (c == ObjectReference) || isInterface || isCompilingStdLib /*(2)*/ } - else { (c != ObjectReference) && !sc.isInterface } - , "non well-formed plain-type: " + this - ) - assert(ifaces.forall(i => i.c.isNonSpecial && i.isInterface), s"non well-formed plain-type: $this") - - import asm.Opcodes._ - def hasFlags(mask: Int) = (flags & mask) != 0 - def isInterface = hasFlags(ACC_INTERFACE) - def isFinal = hasFlags(ACC_FINAL) - def isInnerClass = { innersChain != null } - def isLambda = { - // ie isLCC || isTraditionalClosureClass - isFinal && (c.getSimpleName.contains(tpnme.ANON_FUN_NAME.toString)) && isFunctionType(c) - } - - /* can-multi-thread */ - def superClasses: List[Tracked] = { - if (sc == null) Nil else sc :: sc.superClasses - } - - /* can-multi-thread */ - def isSubtypeOf(other: BType): Boolean = { - assert(other.isNonSpecial, "so called special cases have to be handled in BCodeTypes.conforms()") - - if (c == other) return true; - - val otherIsIface = exemplars.get(other).isInterface - - if (this.isInterface) { - if (other == ObjectReference) return true; - if (!otherIsIface) return false; - } - else { - if (sc != null && sc.isSubtypeOf(other)) return true; - if (!otherIsIface) return false; - } - - var idx = 0 - while (idx < ifaces.length) { - if (ifaces(idx).isSubtypeOf(other)) return true; - idx += 1 - } - - false - } - - /* - * The `ifaces` field lists only those interfaces declared by `c` - * From the set of all supported interfaces, this method discards those which are supertypes of others in the set. - */ - def allLeafIfaces: Set[Tracked] = { - if (sc == null) { ifaces.toSet } - else { minimizeInterfaces(ifaces.toSet ++ sc.allLeafIfaces) } - } - - /* - * This type may not support in its entirety the interface given by the argument, however it may support some of its super-interfaces. - * We visualize each such supported subset of the argument's functionality as a "branch". This method returns all such branches. - * - * In other words, let Ri be a branch supported by `ib`, - * this method returns all Ri such that this <:< Ri, where each Ri is maximally deep. 
- */ - def supportedBranches(ib: Tracked): Set[Tracked] = { - assert(ib.isInterface, s"Non-interface argument: $ib") - - val result: Set[Tracked] = - if (this.isSubtypeOf(ib.c)) { Set(ib) } - else { ib.ifaces.toSet[Tracked].flatMap( bi => supportedBranches(bi) ) } - - checkAllInterfaces(result) - - result - } - - override def toString = { c.toString } - - } - - /* must-single-thread */ - final def isDeprecated(sym: Symbol): Boolean = { sym.annotations exists (_ matches definitions.DeprecatedAttr) } - - /* must-single-thread */ - final def hasInternalName(sym: Symbol) = { sym.isClass || (sym.isModule && !sym.isMethod) } - - /* must-single-thread */ - def getSuperInterfaces(csym: Symbol): List[Symbol] = { - - // Additional interface parents based on annotations and other cues - def newParentForAttr(ann: AnnotationInfo): Symbol = ann.symbol match { - case definitions.RemoteAttr => definitions.RemoteInterfaceClass - case _ => NoSymbol - } - - /* Drop redundant interfaces (which are implemented by some other parent) from the immediate parents. - * In other words, no two interfaces in the result are related by subtyping. - * This method works on Symbols, a similar one (not duplicate) works on Tracked instances. - */ - def minimizeInterfaces(lstIfaces: List[Symbol]): List[Symbol] = { - var rest = lstIfaces - var leaves = List.empty[Symbol] - while (!rest.isEmpty) { - val candidate = rest.head - val nonLeaf = leaves exists { lsym => lsym isSubClass candidate } - if (!nonLeaf) { - leaves = candidate :: (leaves filterNot { lsym => candidate isSubClass lsym }) - } - rest = rest.tail - } - - leaves - } - - val superInterfaces0: List[Symbol] = csym.mixinClasses - val superInterfaces = existingSymbols(superInterfaces0 ++ csym.annotations.map(newParentForAttr)).distinct - - assert(!superInterfaces.contains(NoSymbol), s"found NoSymbol among: ${superInterfaces.mkString}") - assert(superInterfaces.forall(s => s.isInterface || s.isTrait), s"found non-interface among: ${superInterfaces.mkString}") - - minimizeInterfaces(superInterfaces) - } - - /* - * Records the superClass and supportedInterfaces relations, - * so that afterwards queries can be answered without resorting to typer. - * This method does not add to `innerClassBufferASM`, use `internalName()` or `asmType()` or `toTypeKind()` for that. - * On the other hand, this method does record the inner-class status of the argument, via `buildExemplar()`. - * - * must-single-thread - */ - final def exemplar(csym0: Symbol): Tracked = { - assert(csym0 != NoSymbol, "NoSymbol can't be tracked") - - val csym = { - if (csym0.isJavaDefined && csym0.isModuleClass) csym0.linkedClassOfClass - else if (csym0.isModule) csym0.moduleClass - else csym0 // we track only module-classes and plain-classes - } - - assert(!primitiveTypeMap.contains(csym) || isCompilingStdLib, s"primitive types not tracked here: ${csym.fullName}") - assert(!phantomTypeMap.contains(csym), s"phantom types not tracked here: ${csym.fullName}") - - val opt = symExemplars.get(csym) - if (opt != null) { - return opt - } - - val key = brefType(csym.javaBinaryName.toTypeName) - assert(key.isNonSpecial || isCompilingStdLib, s"Not a class to track: ${csym.fullName}") - - // TODO accomodate the fix for SI-5031 of https://github.com/scala/scala/commit/0527b2549bcada2fda2201daa630369b377d0877 - // TODO Weaken this assertion? buildExemplar() needs to be updated, too. In the meantime, pos/t5031_3 has been moved to test/disabled/pos. 
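The interface-minimization step defined above (`minimizeInterfaces`) is easiest to see on a toy subtype relation. The following self-contained sketch mirrors its loop; the `parents` data and all names are hypothetical, introduced only for illustration:

object MinimizeInterfacesSketch {
  // toy subtype relation: type -> direct parents (hypothetical data)
  val parents = Map(
    "ArrayList"    -> Set("List", "RandomAccess"),
    "List"         -> Set("Collection"),
    "Collection"   -> Set("Iterable"),
    "Iterable"     -> Set.empty[String],
    "RandomAccess" -> Set.empty[String]
  )

  def isSubClass(a: String, b: String): Boolean =
    a == b || parents.getOrElse(a, Set.empty).exists(p => isSubClass(p, b))

  // same shape as minimizeInterfaces: keep only the most specific ("leaf") interfaces
  def minimize(ifaces: List[String]): List[String] = {
    var rest   = ifaces
    var leaves = List.empty[String]
    while (rest.nonEmpty) {
      val candidate = rest.head
      val nonLeaf   = leaves.exists(l => isSubClass(l, candidate))
      if (!nonLeaf)
        leaves = candidate :: leaves.filterNot(l => isSubClass(candidate, l))
      rest = rest.tail
    }
    leaves
  }

  def main(args: Array[String]): Unit =
    // List already implies Collection and Iterable, so only List and RandomAccess survive
    println(minimize(List("Iterable", "Collection", "List", "RandomAccess")))
}

Only the most specific interfaces survive, which is exactly the invariant stated in the comment: no two elements of the result are related by subtyping.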
- val whatWasInExemplars = exemplars.get(key) - assert(whatWasInExemplars == null, "Maps `symExemplars` and `exemplars` got out of synch.") - val tr = buildExemplar(key, csym) - symExemplars.put(csym, tr) - if (csym != csym0) { symExemplars.put(csym0, tr) } - exemplars.put(tr.c, tr) // tr.c is the hash-consed, internalized, canonical representative for csym's key. - tr - } - - val EMPTY_TRACKED_ARRAY = Array.empty[Tracked] - - /* - * must-single-thread - */ - private def buildExemplar(key: BType, csym: Symbol): Tracked = { - val sc = - if (csym.isImplClass) definitions.ObjectClass - else csym.superClass - assert( - if (csym == definitions.ObjectClass) - sc == NoSymbol - else if (csym.isInterface) - sc == definitions.ObjectClass - else - ((sc != NoSymbol) && !sc.isInterface) || isCompilingStdLib, - "superClass out of order" - ) - val ifaces = getSuperInterfaces(csym) map exemplar; - val ifacesArr = - if (ifaces.isEmpty) EMPTY_TRACKED_ARRAY - else { - val arr = new Array[Tracked](ifaces.size) - ifaces.copyToArray(arr) - arr - } - - val flags = mkFlags( - javaFlags(csym), - if (isDeprecated(csym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag - ) - - val tsc = if (sc == NoSymbol) null else exemplar(sc) - - val innersChain = saveInnerClassesFor(csym, key) - - Tracked(key, flags, tsc, ifacesArr, innersChain) - } - - // ---------------- utilities around interfaces represented by Tracked instances. ---------------- - - /* Drop redundant interfaces (those which are implemented by some other). - * In other words, no two interfaces in the result are related by subtyping. - * This method works on Tracked elements, a similar one (not duplicate) works on Symbols. - */ - def minimizeInterfaces(lstIfaces: Set[Tracked]): Set[Tracked] = { - checkAllInterfaces(lstIfaces) - var rest = lstIfaces.toList - var leaves = List.empty[Tracked] - while (!rest.isEmpty) { - val candidate = rest.head - val nonLeaf = leaves exists { leaf => leaf.isSubtypeOf(candidate.c) } - if (!nonLeaf) { - leaves = candidate :: (leaves filterNot { leaf => candidate.isSubtypeOf(leaf.c) }) - } - rest = rest.tail - } - - leaves.toSet - } - - def allInterfaces(is: Iterable[Tracked]): Boolean = { is forall { i => i.isInterface } } - def nonInterfaces(is: Iterable[Tracked]): Iterable[Tracked] = { is filterNot { i => i.isInterface } } - - def checkAllInterfaces(ifaces: Iterable[Tracked]) { - assert(allInterfaces(ifaces), s"Non-interfaces: ${nonInterfaces(ifaces).mkString}") - } - - /* - * Subtype check `a <:< b` on BTypes that takes into account the JVM built-in numeric promotions (e.g. BYTE to INT). - * Its operation can be visualized more easily in terms of the Java bytecode type hierarchy. - * This method used to be called, in the ICode world, TypeKind.<:<() - * - * can-multi-thread - */ - final def conforms(a: BType, b: BType): Boolean = { - if (a.isArray) { // may be null - /* Array subtyping is covariant here, as in Java bytecode. Also necessary for Java interop. 
*/ - if ((b == jlCloneableReference) || - (b == jioSerializableReference) || - (b == AnyRefReference)) { true } - else if (b.isArray) { conforms(a.getComponentType, b.getComponentType) } - else { false } - } - else if (a.isBoxed) { // may be null - if (b.isBoxed) { a == b } - else if (b == AnyRefReference) { true } - else if (!(b.hasObjectSort)) { false } - else { exemplars.get(a).isSubtypeOf(b) } // e.g., java/lang/Double conforms to java/lang/Number - } - else if (a.isNullType) { // known to be null - if (b.isNothingType) { false } - else if (b.isValueType) { false } - else { true } - } - else if (a.isNothingType) { // known to be Nothing - true - } - else if (a.isUnitType) { - b.isUnitType - } - else if (a.hasObjectSort) { // may be null - if (a.isNothingType) { true } - else if (b.hasObjectSort) { exemplars.get(a).isSubtypeOf(b) } - else if (b.isArray) { a.isNullType } // documentation only, because `if(a.isNullType)` (above) covers this case already. - else { false } - } - else { - - def msg = s"(a: $a, b: $b)" - - assert(a.isNonUnitValueType, s"a isn't a non-Unit value type. $msg") - assert(b.isValueType, s"b isn't a value type. $msg") - - (a eq b) || (a match { - case BOOL | BYTE | SHORT | CHAR => b == INT || b == LONG // TODO Actually, BOOL does NOT conform to LONG. Even with adapt(). - case _ => a == b - }) - } - } - - /* The maxValueType of (Char, Byte) and of (Char, Short) is Int, to encompass the negative values of Byte and Short. See ticket #2087. - * - * can-multi-thread - */ - def maxValueType(a: BType, other: BType): BType = { - assert(a.isValueType, "maxValueType() is defined only for 1st arg valuetypes (2nd arg doesn't matter).") - - def uncomparable: Nothing = { - abort(s"Uncomparable BTypes: $a with $other") - } - - if (a.isNothingType) return other; - if (other.isNothingType) return a; - if (a == other) return a; - - a match { - - case UNIT => uncomparable - case BOOL => uncomparable - - case BYTE => - if (other == CHAR) INT - else if (other.isNumericType) other - else uncomparable - - case SHORT => - other match { - case BYTE => SHORT - case CHAR => INT - case INT | LONG | FLOAT | DOUBLE => other - case _ => uncomparable - } - - case CHAR => - other match { - case BYTE | SHORT => INT - case INT | LONG | FLOAT | DOUBLE => other - case _ => uncomparable - } - - case INT => - other match { - case BYTE | SHORT | CHAR => INT - case LONG | FLOAT | DOUBLE => other - case _ => uncomparable - } - - case LONG => - if (other.isIntegralType) LONG - else if (other.isRealType) DOUBLE - else uncomparable - - case FLOAT => - if (other == DOUBLE) DOUBLE - else if (other.isNumericType) FLOAT - else uncomparable - - case DOUBLE => - if (other.isNumericType) DOUBLE - else uncomparable - - case _ => uncomparable - } - } - - /* Takes promotions of numeric primitives into account. - * - * can-multi-thread - */ - final def maxType(a: BType, other: BType): BType = { - if (a.isValueType) { maxValueType(a, other) } - else { - if (a.isNothingType) return other; - if (other.isNothingType) return a; - if (a == other) return a; - // Approximate `lub`. The common type of two references is always AnyRef. - // For 'real' least upper bound wrt to subclassing use method 'lub'. - assert(a.isArray || a.isBoxed || a.hasObjectSort, s"This is not a valuetype and it's not something else, what is it? $a") - // TODO For some reason, ICode thinks `REFERENCE(...).maxType(BOXED(whatever))` is `uncomparable`. Here, that has maxType AnyRefReference. 
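// In this implementation the result is the same for both argument orders: a boxed type is
// not a value type, so either way execution takes the reference branch below and yields
// AnyRefReference.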
- // BTW, when swapping arguments, ICode says BOXED(whatever).maxType(REFERENCE(...)) == AnyRefReference, so I guess the above was an oversight in REFERENCE.maxType() - if (other.isRefOrArrayType) { AnyRefReference } - else { abort(s"Uncomparable BTypes: $a with $other") } - } - } - - /* - * Whether the argument is a subtype of - * scala.PartialFunction[-A, +B] extends (A => B) - * N.B.: this method returns true for a scala.runtime.AbstractPartialFunction - * - * can-multi-thread - */ - def isPartialFunctionType(t: BType): Boolean = { - (t.hasObjectSort) && exemplars.get(t).isSubtypeOf(PartialFunctionReference) - } - - /* - * Whether the argument is a subtype of scala.FunctionX where 0 <= X <= definitions.MaxFunctionArity - * - * can-multi-thread - */ - def isFunctionType(t: BType): Boolean = { - if (!t.hasObjectSort) return false - var idx = 0 - val et: Tracked = exemplars.get(t) - while (idx <= definitions.MaxFunctionArity) { - if (et.isSubtypeOf(FunctionReference(idx).c)) { - return true - } - idx += 1 - } - false - } - - /* - * must-single-thread - */ - def isTopLevelModule(sym: Symbol): Boolean = { - exitingPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass } - } - - /* - * must-single-thread - */ - def isStaticModule(sym: Symbol): Boolean = { - sym.isModuleClass && !sym.isImplClass && !sym.isLifted - } - - // --------------------------------------------------------------------- - // ---------------- InnerClasses attribute (JVMS 4.7.6) ---------------- - // --------------------------------------------------------------------- - - val INNER_CLASSES_FLAGS = - (asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED | - asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_INTERFACE | asm.Opcodes.ACC_ABSTRACT | asm.Opcodes.ACC_FINAL) - - /* - * @param name the internal name of an inner class. - * @param outerName the internal name of the class to which the inner class belongs. - * May be `null` for non-member inner classes (ie for a Java local class or a Java anonymous class). - * @param innerName the (simple) name of the inner class inside its enclosing class. It's `null` for anonymous inner classes. - * @param access the access flags of the inner class as originally declared in the enclosing class. - */ - case class InnerClassEntry(name: String, outerName: String, innerName: String, access: Int) { - assert(name != null, "Null isn't good as class name in an InnerClassEntry.") - } - - /* For given symbol return a symbol corresponding to a class that should be declared as inner class. - * - * For example: - * class A { - * class B - * object C - * } - * - * then method will return: - * NoSymbol for A, - * the same symbol for A.B (corresponding to A$B class), and - * A$C$ symbol for A.C. - * - * must-single-thread - */ - def innerClassSymbolFor(s: Symbol): Symbol = - if (s.isClass) s else if (s.isModule) s.moduleClass else NoSymbol - - /* - * Computes the chain of inner-class (over the is-member-of relation) for the given argument. - * The resulting chain will be cached in `exemplars`. - * - * The chain thus cached is valid during this compiler run, see in contrast - * `innerClassBufferASM` for a cache that is valid only for the class being emitted. - * - * The argument can be any symbol, but given that this method is invoked only from `buildExemplar()`, - * in practice it has been vetted to be a class-symbol. - * - * Returns: - * - * - a non-empty array of entries for an inner-class argument. 
- * The array's first element is the outermost top-level class, - * the array's last element corresponds to csym. - * - * - null otherwise. - * - * This method does not add to `innerClassBufferASM`, use instead `exemplar()` for that. - * - * must-single-thread - */ - final def saveInnerClassesFor(csym: Symbol, csymTK: BType): Array[InnerClassEntry] = { - - val ics = innerClassSymbolFor(csym) - if (ics == NoSymbol) { - return null - } - assert(ics == csym, s"Disagreement between innerClassSymbolFor() and exemplar()'s tracked symbol for the same input: ${csym.fullName}") - - var chain: List[Symbol] = Nil - var x = ics - while (x ne NoSymbol) { - assert(x.isClass, s"not a class symbol: ${x.fullName}") - val isInner = !x.rawowner.isPackageClass - if (isInner) { - chain ::= x - x = innerClassSymbolFor(x.rawowner) - } else { - x = NoSymbol - } - } - - // now that we have all of `ics` , `csym` , and soon the inner-classes-chain, it's too tempting not to cache. - if (chain.isEmpty) { null } - else { - val arr = new Array[InnerClassEntry](chain.size) - (chain map toInnerClassEntry).copyToArray(arr) - - arr - } - } - - /* - * must-single-thread - */ - private def toInnerClassEntry(innerSym: Symbol): InnerClassEntry = { - - /* The outer name for this inner class. Note that it returns null - * when the inner class should not get an index in the constant pool. - * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS. - */ - def outerName(innerSym: Symbol): Name = { - if (innerSym.originalEnclosingMethod != NoSymbol) - null - else { - val outerName = innerSym.rawowner.javaBinaryName - if (isTopLevelModule(innerSym.rawowner)) nme.stripModuleSuffix(outerName) - else outerName - } - } - - def innerName(innerSym: Symbol): String = { - if (innerSym.isAnonymousClass || innerSym.isAnonymousFunction) - null - else - innerSym.rawname + innerSym.moduleSuffix - } - - val flagsWithFinal: Int = mkFlags( - if (innerSym.rawowner.hasModuleFlag) asm.Opcodes.ACC_STATIC else 0, - javaFlags(innerSym), - if (isDeprecated(innerSym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo-access flag - ) & (INNER_CLASSES_FLAGS | asm.Opcodes.ACC_DEPRECATED) - val flags = if (innerSym.isModuleClass) flagsWithFinal & ~asm.Opcodes.ACC_FINAL else flagsWithFinal // For SI-5676, object overriding. - - val jname = innerSym.javaBinaryName.toString // never null - val oname = { // null when method-enclosed - val on = outerName(innerSym) - if (on == null) null else on.toString - } - val iname = { // null for anonymous inner class - val in = innerName(innerSym) - if (in == null) null else in.toString - } - - InnerClassEntry(jname, oname, iname, flags) - } - - // -------------------------------------------- - // ---------------- Java flags ---------------- - // -------------------------------------------- - - /* - * can-multi-thread - */ - final def hasPublicBitSet(flags: Int) = ((flags & asm.Opcodes.ACC_PUBLIC) != 0) - - /* - * must-single-thread - */ - final def isRemote(s: Symbol) = (s hasAnnotation definitions.RemoteAttr) - - /* - * Return the Java modifiers for the given symbol. 
- * Java modifiers for classes: - * - public, abstract, final, strictfp (not used) - * for interfaces: - * - the same as for classes, without 'final' - * for fields: - * - public, private (*) - * - static, final - * for methods: - * - the same as for fields, plus: - * - abstract, synchronized (not used), strictfp (not used), native (not used) - * - * (*) protected cannot be used, since inner classes 'see' protected members, - * and they would fail verification after lifted. - * - * must-single-thread - */ - def javaFlags(sym: Symbol): Int = { - // constructors of module classes should be private - // PP: why are they only being marked private at this stage and not earlier? - val privateFlag = - sym.isPrivate || (sym.isPrimaryConstructor && isTopLevelModule(sym.owner)) - - // Final: the only fields which can receive ACC_FINAL are eager vals. - // Neither vars nor lazy vals can, because: - // - // Source: http://docs.oracle.com/javase/specs/jls/se7/html/jls-17.html#jls-17.5.3 - // "Another problem is that the specification allows aggressive - // optimization of final fields. Within a thread, it is permissible to - // reorder reads of a final field with those modifications of a final - // field that do not take place in the constructor." - // - // A var or lazy val which is marked final still has meaning to the - // scala compiler. The word final is heavily overloaded unfortunately; - // for us it means "not overridable". At present you can't override - // vars regardless; this may change. - // - // The logic does not check .isFinal (which checks flags for the FINAL flag, - // and includes symbols marked lateFINAL) instead inspecting rawflags so - // we can exclude lateFINAL. Such symbols are eligible for inlining, but to - // avoid breaking proxy software which depends on subclassing, we do not - // emit ACC_FINAL. - // Nested objects won't receive ACC_FINAL in order to allow for their overriding. - - val finalFlag = ( - (((sym.rawflags & symtab.Flags.FINAL) != 0) || isTopLevelModule(sym)) - && !sym.enclClass.isInterface - && !sym.isClassConstructor - && !sym.isMutable // lazy vals and vars both - ) - - // Primitives are "abstract final" to prohibit instantiation - // without having to provide any implementations, but that is an - // illegal combination of modifiers at the bytecode level so - // suppress final if abstract if present. 
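As a small aside, the way these access bits compose can be shown with a self-contained sketch; the `orFlags` helper and the literal flag values below are illustrative stand-ins (values as listed in JVMS table 4.1-B), not the compiler's `mkFlags`:

object AccessFlagsSketch {
  val ACC_PUBLIC   = 0x0001
  val ACC_FINAL    = 0x0010
  val ACC_ABSTRACT = 0x0400

  def orFlags(flags: Int*): Int = flags.foldLeft(0)(_ | _)
  def hasFlags(flags: Int, mask: Int): Boolean = (flags & mask) != 0

  def main(args: Array[String]): Unit = {
    val isAbstract = true
    val isFinal    = true
    // "abstract final" is an illegal combination at the class-file level,
    // so ACC_FINAL is suppressed when the abstract flag is set
    val flags = orFlags(
      ACC_PUBLIC,
      if (isAbstract) ACC_ABSTRACT else 0,
      if (isFinal && !isAbstract) ACC_FINAL else 0
    )
    println(f"flags = 0x$flags%04x")       // 0x0401
    println(hasFlags(flags, ACC_ABSTRACT)) // true
    println(hasFlags(flags, ACC_FINAL))    // false
  }
}

A false condition contributes zero, which is a no-op under bitwise OR; that is all the conditional flag construction in `javaFlags` relies on.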
- import asm.Opcodes._ - mkFlags( - if (privateFlag) ACC_PRIVATE else ACC_PUBLIC, - if (sym.isDeferred || sym.hasAbstractFlag) ACC_ABSTRACT else 0, - if (sym.isInterface) ACC_INTERFACE else 0, - if (finalFlag && !sym.hasAbstractFlag) ACC_FINAL else 0, - if (sym.isStaticMember) ACC_STATIC else 0, - if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0, - if (sym.isArtifact) ACC_SYNTHETIC else 0, - if (sym.isClass && !sym.isInterface) ACC_SUPER else 0, - if (sym.hasEnumFlag) ACC_ENUM else 0, - if (sym.isVarargsMethod) ACC_VARARGS else 0, - if (sym.hasFlag(symtab.Flags.SYNCHRONIZED)) ACC_SYNCHRONIZED else 0 - ) - } - - /* - * must-single-thread - */ - def javaFieldFlags(sym: Symbol) = { - javaFlags(sym) | mkFlags( - if (sym hasAnnotation definitions.TransientAttr) asm.Opcodes.ACC_TRANSIENT else 0, - if (sym hasAnnotation definitions.VolatileAttr) asm.Opcodes.ACC_VOLATILE else 0, - if (sym.isMutable) 0 else asm.Opcodes.ACC_FINAL - ) - } - -} // end of class BCodeTypes diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala new file mode 100644 index 000000000000..a9bce82acd63 --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala @@ -0,0 +1,917 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2014 LAMP/EPFL + * @author Martin Odersky + */ + +package scala.tools.nsc +package backend.jvm + +import scala.tools.asm +import asm.Opcodes +import scala.tools.asm.tree.{InnerClassNode, ClassNode} +import opt.ByteCodeRepository +import scala.collection.convert.decorateAsScala._ + +/** + * The BTypes component defines The BType class hierarchy. BTypes encapsulate all type information + * that is required after building the ASM nodes. This includes optimizations, generation of + * InnerClass attributes and generation of stack map frames. + * + * This representation is immutable and independent of the compiler data structures, hence it can + * be queried by concurrent threads. + */ +abstract class BTypes { + import BTypes.InternalName + + // Some core BTypes are required here, in class BType, where no Global instance is available. + // The Global is only available in the subclass BTypesFromSymbols. We cannot depend on the actual + // implementation (CoreBTypesProxy) here because it has members that refer to global.Symbol. + val coreBTypes: CoreBTypesProxyGlobalIndependent[this.type] + import coreBTypes._ + + /** + * Tools for parsing classfiles, used by the inliner. + */ + val byteCodeRepository: ByteCodeRepository + + // Allows to define per-run caches here and in the CallGraph component, which don't have a global + def recordPerRunCache[T <: collection.generic.Clearable](cache: T): T + + /** + * A map from internal names to ClassBTypes. Every ClassBType is added to this map on its + * construction. + * + * This map is used when computing stack map frames. The asm.ClassWriter invokes the method + * `getCommonSuperClass`. In this method we need to obtain the ClassBType for a given internal + * name. The method assumes that every class type that appears in the bytecode exists in the map. + * + * Concurrent because stack map frames are computed when in the class writer, which might run + * on multiple classes concurrently. + */ + val classBTypeFromInternalName: collection.concurrent.Map[InternalName, ClassBType] = recordPerRunCache(collection.concurrent.TrieMap.empty[InternalName, ClassBType]) + + /** + * Parse the classfile for `internalName` and construct the [[ClassBType]]. 
+ */ + def classBTypeFromParsedClassfile(internalName: InternalName): ClassBType = { + classBTypeFromClassNode(byteCodeRepository.classNode(internalName)) + } + + /** + * Construct the [[ClassBType]] for a parsed classfile. + */ + def classBTypeFromClassNode(classNode: ClassNode): ClassBType = { + classBTypeFromInternalName.getOrElse(classNode.name, { + setClassInfo(classNode, ClassBType(classNode.name)) + }) + } + + private def setClassInfo(classNode: ClassNode, classBType: ClassBType): ClassBType = { + val superClass = classNode.superName match { + case null => + assert(classNode.name == ObjectReference.internalName, s"class with missing super type: ${classNode.name}") + None + case superName => + Some(classBTypeFromParsedClassfile(superName)) + } + + val interfaces: List[ClassBType] = classNode.interfaces.asScala.map(classBTypeFromParsedClassfile)(collection.breakOut) + + val flags = classNode.access + + /** + * Find all nested classes of classNode. The innerClasses attribute contains all nested classes + * that are declared inside classNode or used in the bytecode of classNode. So some of them are + * nested in some other class than classNode, and we need to filter them. + * + * For member classes, innerClassNode.outerName is defined, so we compare that to classNode.name. + * + * For local and anonymous classes, innerClassNode.outerName is null. Such classes are required + * to have an EnclosingMethod attribute declaring the outer class. So we keep those local and + * anonymous classes whose outerClass is classNode.name. + * + */ + def nestedInCurrentClass(innerClassNode: InnerClassNode): Boolean = { + (innerClassNode.outerName != null && innerClassNode.outerName == classNode.name) || + (innerClassNode.outerName == null && byteCodeRepository.classNode(innerClassNode.name).outerClass == classNode.name) + } + + val nestedClasses: List[ClassBType] = classNode.innerClasses.asScala.collect({ + case i if nestedInCurrentClass(i) => classBTypeFromParsedClassfile(i.name) + })(collection.breakOut) + + // if classNode is a nested class, it has an innerClass attribute for itself. in this + // case we build the NestedInfo. + val nestedInfo = classNode.innerClasses.asScala.find(_.name == classNode.name) map { + case innerEntry => + val enclosingClass = + if (innerEntry.outerName != null) { + // if classNode is a member class, the outerName is non-null + classBTypeFromParsedClassfile(innerEntry.outerName) + } else { + // for anonymous or local classes, the outerName is null, but the enclosing class is + // stored in the EnclosingMethod attribute (which ASM encodes in classNode.outerClass). + classBTypeFromParsedClassfile(classNode.outerClass) + } + val staticFlag = (innerEntry.access & Opcodes.ACC_STATIC) != 0 + NestedInfo(enclosingClass, Option(innerEntry.outerName), Option(innerEntry.innerName), staticFlag) + } + classBType.info = ClassInfo(superClass, interfaces, flags, nestedClasses, nestedInfo) + classBType + } + + /** + * A BType is either a primitive type, a ClassBType, an ArrayBType of one of these, or a MethodType + * referring to BTypes. 
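 *
 * For a quick illustration (hypothetical snippets, derived from the toString cases below):
 * {{{
 * INT.descriptor                                // "I"
 * ClassBType("java/lang/Object").descriptor     // "Ljava/lang/Object;"
 * ArrayBType(INT).descriptor                    // "[I"
 * MethodBType(List(INT, LONG), UNIT).descriptor // "(IJ)V"
 * }}}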
+ */ + sealed trait BType { + final override def toString: String = this match { + case UNIT => "V" + case BOOL => "Z" + case CHAR => "C" + case BYTE => "B" + case SHORT => "S" + case INT => "I" + case FLOAT => "F" + case LONG => "J" + case DOUBLE => "D" + case ClassBType(internalName) => "L" + internalName + ";" + case ArrayBType(component) => "[" + component + case MethodBType(args, res) => "(" + args.mkString + ")" + res + } + + /** + * @return The Java descriptor of this type. Examples: + * - int: I + * - java.lang.String: Ljava/lang/String; + * - int[]: [I + * - Object m(String s, double d): (Ljava/lang/String;D)Ljava/lang/Object; + */ + final def descriptor = toString + + /** + * @return 0 for void, 2 for long and double, 1 otherwise + */ + final def size: Int = this match { + case UNIT => 0 + case LONG | DOUBLE => 2 + case _ => 1 + } + + final def isPrimitive: Boolean = this.isInstanceOf[PrimitiveBType] + final def isRef: Boolean = this.isInstanceOf[RefBType] + final def isArray: Boolean = this.isInstanceOf[ArrayBType] + final def isClass: Boolean = this.isInstanceOf[ClassBType] + final def isMethod: Boolean = this.isInstanceOf[MethodBType] + + final def isNonVoidPrimitiveType = isPrimitive && this != UNIT + + final def isNullType = this == RT_NULL + final def isNothingType = this == RT_NOTHING + + final def isBoxed = this.isClass && boxedClasses(this.asClassBType) + + final def isIntSizedType = this == BOOL || this == CHAR || this == BYTE || + this == SHORT || this == INT + final def isIntegralType = this == INT || this == BYTE || this == LONG || + this == CHAR || this == SHORT + final def isRealType = this == FLOAT || this == DOUBLE + final def isNumericType = isIntegralType || isRealType + final def isWideType = size == 2 + + /* + * Subtype check `this <:< other` on BTypes that takes into account the JVM built-in numeric + * promotions (e.g. BYTE to INT). Its operation can be visualized more easily in terms of the + * Java bytecode type hierarchy. + */ + final def conformsTo(other: BType): Boolean = { + assert(isRef || isPrimitive, s"conformsTo cannot handle $this") + assert(other.isRef || other.isPrimitive, s"conformsTo cannot handle $other") + + this match { + case ArrayBType(component) => + if (other == ObjectReference || other == jlCloneableReference || other == jioSerializableReference) true + else other match { + case ArrayBType(otherComponoent) => component.conformsTo(otherComponoent) + case _ => false + } + + case classType: ClassBType => + if (isBoxed) { + if (other.isBoxed) this == other + else if (other == ObjectReference) true + else other match { + case otherClassType: ClassBType => classType.isSubtypeOf(otherClassType) // e.g., java/lang/Double conforms to java/lang/Number + case _ => false + } + } else if (isNullType) { + if (other.isNothingType) false + else if (other.isPrimitive) false + else true // Null conforms to all classes (except Nothing) and arrays. + } else if (isNothingType) { + true + } else other match { + case otherClassType: ClassBType => classType.isSubtypeOf(otherClassType) + // case ArrayBType(_) => this.isNullType // documentation only, because `if (isNullType)` above covers this case + case _ => + // isNothingType || // documentation only, because `if (isNothingType)` above covers this case + false + } + + case UNIT => + other == UNIT + case BOOL | BYTE | SHORT | CHAR => + this == other || other == INT || other == LONG // TODO Actually, BOOL does NOT conform to LONG. Even with adapt(). 
+ case _ => + assert(isPrimitive && other.isPrimitive, s"Expected primitive types $this - $other") + this == other + } + } + + /** + * Compute the upper bound of two types. + * Takes promotions of numeric primitives into account. + */ + final def maxType(other: BType): BType = this match { + case pt: PrimitiveBType => pt.maxValueType(other) + + case _: ArrayBType | _: ClassBType => + if (isNothingType) return other + if (other.isNothingType) return this + if (this == other) return this + + assert(other.isRef, s"Cannot compute maxType: $this, $other") + // Approximate `lub`. The common type of two references is always ObjectReference. + ObjectReference + + case _: MethodBType => + throw new AssertionError(s"unexpected method type when computing maxType: $this") + } + + /** + * See documentation of [[typedOpcode]]. + * The numbers are taken from asm.Type.VOID_TYPE ff., the values are those shifted by << 8. + */ + private def loadStoreOpcodeOffset: Int = this match { + case UNIT | INT => 0 + case BOOL | BYTE => 5 + case CHAR => 6 + case SHORT => 7 + case FLOAT => 2 + case LONG => 1 + case DOUBLE => 3 + case _ => 4 + } + + /** + * See documentation of [[typedOpcode]]. + * The numbers are taken from asm.Type.VOID_TYPE ff., the values are those shifted by << 16. + */ + private def typedOpcodeOffset: Int = this match { + case UNIT => 5 + case BOOL | CHAR | BYTE | SHORT | INT => 0 + case FLOAT => 2 + case LONG => 1 + case DOUBLE => 3 + case _ => 4 + } + + /** + * Some JVM opcodes have typed variants. This method returns the correct opcode according to + * the type. + * + * @param opcode A JVM instruction opcode. This opcode must be one of ILOAD, ISTORE, IALOAD, + * IASTORE, IADD, ISUB, IMUL, IDIV, IREM, INEG, ISHL, ISHR, IUSHR, IAND, IOR + * IXOR and IRETURN. + * @return The opcode adapted to this java type. For example, if this type is `float` and + * `opcode` is `IRETURN`, this method returns `FRETURN`. + */ + final def typedOpcode(opcode: Int): Int = { + if (opcode == Opcodes.IALOAD || opcode == Opcodes.IASTORE) + opcode + loadStoreOpcodeOffset + else + opcode + typedOpcodeOffset + } + + /** + * The asm.Type corresponding to this BType. + * + * Note about asm.Type.getObjectType (*): For class types, the method expects the internal + * name, i.e. without the surrounding 'L' and ';'. For array types on the other hand, the + * method expects a full descriptor, for example "[Ljava/lang/String;". 
+ * + * See method asm.Type.getType that creates a asm.Type from a type descriptor + * - for an OBJECT type, the 'L' and ';' are not part of the range of the created Type + * - for an ARRAY type, the full descriptor is part of the range + */ + def toASMType: asm.Type = this match { + case UNIT => asm.Type.VOID_TYPE + case BOOL => asm.Type.BOOLEAN_TYPE + case CHAR => asm.Type.CHAR_TYPE + case BYTE => asm.Type.BYTE_TYPE + case SHORT => asm.Type.SHORT_TYPE + case INT => asm.Type.INT_TYPE + case FLOAT => asm.Type.FLOAT_TYPE + case LONG => asm.Type.LONG_TYPE + case DOUBLE => asm.Type.DOUBLE_TYPE + case ClassBType(internalName) => asm.Type.getObjectType(internalName) // see (*) above + case a: ArrayBType => asm.Type.getObjectType(a.descriptor) + case m: MethodBType => asm.Type.getMethodType(m.descriptor) + } + + def asRefBType : RefBType = this.asInstanceOf[RefBType] + def asArrayBType : ArrayBType = this.asInstanceOf[ArrayBType] + def asClassBType : ClassBType = this.asInstanceOf[ClassBType] + def asPrimitiveBType : PrimitiveBType = this.asInstanceOf[PrimitiveBType] + } + + sealed trait PrimitiveBType extends BType { + + /** + * The upper bound of two primitive types. The `other` type has to be either a primitive + * type or Nothing. + * + * The maxValueType of (Char, Byte) and of (Char, Short) is Int, to encompass the negative + * values of Byte and Short. See ticket #2087. + */ + final def maxValueType(other: BType): BType = { + + def uncomparable: Nothing = throw new AssertionError(s"Cannot compute maxValueType: $this, $other") + + if (!other.isPrimitive && !other.isNothingType) uncomparable + + if (other.isNothingType) return this + if (this == other) return this + + this match { + case BYTE => + if (other == CHAR) INT + else if (other.isNumericType) other + else uncomparable + + case SHORT => + other match { + case BYTE => SHORT + case CHAR => INT + case INT | LONG | FLOAT | DOUBLE => other + case _ => uncomparable + } + + case CHAR => + other match { + case BYTE | SHORT => INT + case INT | LONG | FLOAT | DOUBLE => other + case _ => uncomparable + } + + case INT => + other match { + case BYTE | SHORT | CHAR => INT + case LONG | FLOAT | DOUBLE => other + case _ => uncomparable + } + + case LONG => + if (other.isIntegralType) LONG + else if (other.isRealType) DOUBLE + else uncomparable + + case FLOAT => + if (other == DOUBLE) DOUBLE + else if (other.isNumericType) FLOAT + else uncomparable + + case DOUBLE => + if (other.isNumericType) DOUBLE + else uncomparable + + case UNIT | BOOL => uncomparable + } + } + } + + case object UNIT extends PrimitiveBType + case object BOOL extends PrimitiveBType + case object CHAR extends PrimitiveBType + case object BYTE extends PrimitiveBType + case object SHORT extends PrimitiveBType + case object INT extends PrimitiveBType + case object FLOAT extends PrimitiveBType + case object LONG extends PrimitiveBType + case object DOUBLE extends PrimitiveBType + + sealed trait RefBType extends BType { + /** + * The class or array type of this reference type. Used for ANEWARRAY, MULTIANEWARRAY, + * INSTANCEOF and CHECKCAST instructions. Also used for emitting invokevirtual calls to + * (a: Array[T]).clone() for any T, see genApply. + * + * In contrast to the descriptor, this string does not contain the surrounding 'L' and ';' for + * class types, for example "java/lang/String". + * However, for array types, the full descriptor is used, for example "[Ljava/lang/String;". + * + * This can be verified for example using javap or ASMifier. 
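 *
 * For illustration (hypothetical values, assuming the case classes defined in this file):
 * {{{
 * ClassBType("java/lang/String").descriptor                   // "Ljava/lang/String;"
 * ClassBType("java/lang/String").classOrArrayType             // "java/lang/String"
 * ArrayBType(ClassBType("java/lang/String")).classOrArrayType // "[Ljava/lang/String;"
 * }}}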
+ */ + def classOrArrayType: String = this match { + case ClassBType(internalName) => internalName + case a: ArrayBType => a.descriptor + } + } + + /** + * InnerClass and EnclosingMethod attributes (EnclosingMethod is displayed as OUTERCLASS in asm). + * + * In this summary, "class" means "class or interface". + * + * JLS: http://docs.oracle.com/javase/specs/jls/se8/html/index.html + * JVMS: http://docs.oracle.com/javase/specs/jvms/se8/html/index.html + * + * Terminology + * ----------- + * + * - Nested class (JLS 8): class whose declaration occurs within the body of another class + * + * - Top-level class (JLS 8): non-nested class + * + * - Inner class (JLS 8.1.3): nested class that is not (explicitly or implicitly) static + * + * - Member class (JLS 8.5): class directly enclosed in the body of a class (and not, for + * example, defined in a method). Member classes cannot be anonymous. May be static. + * + * - Local class (JLS 14.3): nested, non-anonymous class that is not a member of a class + * - cannot be static (therefore they are "inner" classes) + * - can be defined in a method, a constructor or in an initializer block + * + * - Initializer block (JLS 8.6 / 8.7): block of statements in a java class + * - static initializer: executed before constructor body + * - instance initializer: executed when class is initialized (instance creation, static + * field access, ...) + * + * - A static nested class can be defined as + * - a static member class (explicitly static), or + * - a member class of an interface (implicitly static) + * - local classes are never static, even if they are defined in a static method. + * + * Note: it is NOT the case that all inner classes (non-static) have an outer pointer. Example: + * class C { static void foo { class D {} } } + * The class D is an inner class (non-static), but javac does not add an outer pointer to it. + * + * InnerClass + * ---------- + * + * The JVMS 4.7.6 requires an entry for every class mentioned in a CONSTANT_Class_info in the + * constant pool (CP) that is not a member of a package (JLS 7.1). + * + * The JLS 13.1, points 9. / 10. requires: a class must reference (in the CP) + * - its immediately enclosing class + * - all of its member classes + * - all local and anonymous classes that are referenced (or declared) elsewhere (method, + * constructor, initializer block, field initializer) + * + * In a comment, the 4.7.6 spec says: this implies an entry in the InnerClass attribute for + * - All enclosing classes (except the outermost, which is top-level) + * - My comment: not sure how this is implied, below (*) a Java counter-example. + * In any case, the Java compiler seems to add all enclosing classes, even if they are not + * otherwise mentioned in the CP. So we should do the same. + * - All nested classes (including anonymous and local, but not transitively) + * + * Fields in the InnerClass entries: + * - inner class: the (nested) class C we are talking about + * - outer class: the class of which C is a member. Has to be null for non-members, i.e. for + * local and anonymous classes. NOTE: this co-incides with the presence of an + * EnclosingMethod attribute (see below) + * - inner name: A string with the simple name of the inner class. Null for anonymous classes. + * - flags: access property flags, details in JVMS, table in 4.7.6. Static flag: see + * discussion below. + * + * + * Note 1: when a nested class is present in the InnerClass attribute, all of its enclosing + * classes have to be present as well (by the rules above). 
Example: + * + * class Outer { class I1 { class I2 { } } } + * class User { Outer.I1.I2 foo() { } } + * + * The return type "Outer.I1.I2" puts "Outer$I1$I2" in the CP, therefore the class is added to the + * InnerClass attribute. For this entry, the "outer class" field will be "Outer$I1". This in turn + * adds "Outer$I1" to the CP, which requires adding that class to the InnerClass attribute. + * (For local / anonymous classes this would not be the case, since the "outer class" attribute + * would be empty. However, no class (other than the enclosing class) can refer to them, as they + * have no name.) + * + * In the current implementation of the Scala compiler, when adding a class to the InnerClass + * attribute, all of its enclosing classes will be added as well. Javac seems to do the same, + * see (*). + * + * + * Note 2: If a class name is mentioned only in a CONSTANT_Utf8_info, but not in a + * CONSTANT_Class_info, the JVMS does not require an entry in the InnerClass attribute. However, + * the Java compiler seems to add such classes anyway. For example, when using an annotation, the + * annotation class is stored as a CONSTANT_Utf8_info in the CP: + * + * @O.Ann void foo() { } + * + * adds "const #13 = Asciz LO$Ann;;" in the constant pool. The "RuntimeInvisibleAnnotations" + * attribute refers to that constant pool entry. Even though there is no other reference to + * `O.Ann`, the java compiler adds an entry for that class to the InnerClass attribute (which + * entails adding a CONSTANT_Class_info for the class). + * + * + * + * EnclosingMethod + * --------------- + * + * JVMS 4.7.7: the attribute must be present "if and only if it represents a local class + * or an anonymous class" (i.e. not for member classes). + * + * The attribute is mis-named, it should be called "EnclosingClass". It has to be defined for all + * local and anonymous classes, no matter if there is an enclosing method or not. Accordingly, the + * "class" field (see below) must be always defined, while the "method" field may be null. + * + * NOTE: When a EnclosingMethod attribute is requried (local and anonymous classes), the "outer" + * field in the InnerClass table must be null. + * + * Fields: + * - class: the enclosing class + * - method: the enclosing method (or constructor). Null if the class is not enclosed by a + * method, i.e. for + * - local or anonymous classes defined in (static or non-static) initializer blocks + * - anonymous classes defined in initializer blocks or field initializers + * + * Note: the field is required for anonymous classes defined within local variable + * initializers (within a method), Java example below (**). + * + * For local and anonymous classes in initializer blocks or field initializers, and + * class-level anonymous classes, the scala compiler sets the "method" field to null. + * + * + * (*) + * public class Test { + * void foo() { + * class Foo1 { + * // constructor statement block + * { + * class Foo2 { + * class Foo3 { } + * } + * } + * } + * } + * } + * + * The class file Test$1Foo1$1Foo2$Foo3 has no reference to the class Test$1Foo1, however it + * still contains an InnerClass attribute for Test$1Foo1. + * Maybe this is just because the Java compiler follows the JVMS comment ("InnerClasses + * information for each enclosing class"). + * + * + * (**) + * void foo() { + * // anonymous class defined in local variable initializer expression. + * Runnable x = true ? 
(new Runnable() { + * public void run() { return; } + * }) : null; + * } + * + * The EnclosingMethod attribute of the anonymous class mentions "foo" in the "method" field. + * + * + * Java Compatibility + * ------------------ + * + * In the InnerClass entry for classes in top-level modules, the "outer class" is emitted as the + * mirror class (or the existing companion class), i.e. C1 is nested in T (not T$). + * For classes nested in a nested object, the "outer class" is the module class: C2 is nested in T$N$ + * object T { + * class C1 + * object N { class C2 } + * } + * + * Reason: java compat. It's a "best effort" "solution". If you want to use "C1" from Java, you + * can write "T.C1", and the Java compiler will translate that to the classfile T$C1. + * + * If we would emit the "outer class" of C1 as "T$", then in Java you'd need to write "T$.C1" + * because the java compiler looks at the InnerClass attribute to find if an inner class exists. + * However, the Java compiler would then translate the '.' to '$' and you'd get the class name + * "T$$C1". This class file obviously does not exist. + * + * Directly using the encoded class name "T$C1" in Java does not work: since the classfile + * describes a nested class, the Java compiler hides it from the classpath and will report + * "cannot find symbol T$C1". This means that the class T.N.C2 cannot be referenced from a + * Java source file in any way. + * + * + * STATIC flag + * ----------- + * + * Java: static member classes have the static flag in the InnerClass attribute, for example B in + * class A { static class B { } } + * + * The spec is not very clear about when the static flag should be emitted. It says: "Marked or + * implicitly static in source." + * + * The presence of the static flag does NOT coincide with the absence of an "outer" field in the + * class. The java compiler never puts the static flag for local classes, even if they don't have + * an outer pointer: + * + * class A { + * void f() { class B {} } + * static void g() { class C {} } + * } + * + * B has an outer pointer, C doesn't. Both B and C are NOT marked static in the InnerClass table. + * + * It seems sane to follow the same principle in the Scala compiler. So: + * + * package p + * object O1 { + * class C1 // static inner class + * object O2 { // static inner module + * def f = { + * class C2 { // non-static inner class, even though there's no outer pointer + * class C3 // non-static, has an outer pointer + * } + * } + * } + * } + * + * Mirror Classes + * -------------- + * + * TODO: innerclass attributes on mirror class, bean info class + */ + + /** + * A ClassBType represents a class or interface type. The necessary information to build a + * ClassBType is extracted from compiler symbols and types, see BTypesFromSymbols. + */ + final case class ClassBType(internalName: InternalName) extends RefBType { + /** + * Write-once variable allows initializing a cyclic graph of infos. This is required for + * nested classes. 
Example: for the definition `class A { class B }` we have + * + * B.info.nestedInfo.outerClass == A + * A.info.nestedClasses contains B + */ + private var _info: ClassInfo = null + + def info: ClassInfo = { + assert(_info != null, s"ClassBType.info not yet assigned: $this") + _info + } + + def info_=(i: ClassInfo): Unit = { + assert(_info == null, s"Cannot set ClassBType.info multiple times: $this") + _info = i + checkInfoConsistency() + } + + classBTypeFromInternalName(internalName) = this + + private def checkInfoConsistency(): Unit = { + // we assert some properties. however, some of the linked ClassBType (members, superClass, + // interfaces) may not yet have an `_info` (initialization of cyclic structures). so we do a + // best-effort verification. + def ifInit(c: ClassBType)(p: ClassBType => Boolean): Boolean = c._info == null || p(c) + + def isJLO(t: ClassBType) = t.internalName == ObjectReference.internalName + + assert(!ClassBType.isInternalPhantomType(internalName), s"Cannot create ClassBType for phantom type $this") + + assert( + if (info.superClass.isEmpty) { isJLO(this) || (isCompilingPrimitive && ClassBType.hasNoSuper(internalName)) } + else if (isInterface) isJLO(info.superClass.get) + else !isJLO(this) && ifInit(info.superClass.get)(!_.isInterface), + s"Invalid superClass in $this: ${info.superClass}" + ) + assert( + info.interfaces.forall(c => ifInit(c)(_.isInterface)), + s"Invalid interfaces in $this: ${info.interfaces}" + ) + + assert(info.nestedClasses.forall(c => ifInit(c)(_.isNestedClass)), info.nestedClasses) + } + + /** + * @return The class name without the package prefix + */ + def simpleName: String = internalName.split("/").last + + def isInterface = (info.flags & asm.Opcodes.ACC_INTERFACE) != 0 + + def superClassesTransitive: List[ClassBType] = info.superClass match { + case None => Nil + case Some(sc) => sc :: sc.superClassesTransitive + } + + def isNestedClass = info.nestedInfo.isDefined + + def enclosingNestedClassesChain: List[ClassBType] = + if (isNestedClass) this :: info.nestedInfo.get.enclosingClass.enclosingNestedClassesChain + else Nil + + def innerClassAttributeEntry: Option[InnerClassEntry] = info.nestedInfo map { + case NestedInfo(_, outerName, innerName, isStaticNestedClass) => + InnerClassEntry( + internalName, + outerName.orNull, + innerName.orNull, + GenBCode.mkFlags( + // the static flag in the InnerClass table has a special meaning, see InnerClass comment + info.flags & ~Opcodes.ACC_STATIC, + if (isStaticNestedClass) Opcodes.ACC_STATIC else 0 + ) & ClassBType.INNER_CLASSES_FLAGS + ) + } + + def isSubtypeOf(other: ClassBType): Boolean = { + if (this == other) return true + + if (isInterface) { + if (other == ObjectReference) return true // interfaces conform to Object + if (!other.isInterface) return false // this is an interface, the other is some class other than object. interfaces cannot extend classes, so the result is false. + // else: this and other are both interfaces. continue to (*) + } else { + val sc = info.superClass + if (sc.isDefined && sc.get.isSubtypeOf(other)) return true // the superclass of this class conforms to other + if (!other.isInterface) return false // this and other are both classes, and the superclass of this does not conform + // else: this is a class, the other is an interface. continue to (*) + } + + // (*) check if some interface of this class conforms to other. 
+ info.interfaces.exists(_.isSubtypeOf(other)) + } + + /** + * Finding the least upper bound in agreement with the bytecode verifier + * Background: + * http://gallium.inria.fr/~xleroy/publi/bytecode-verification-JAR.pdf + * http://comments.gmane.org/gmane.comp.java.vm.languages/2293 + * https://issues.scala-lang.org/browse/SI-3872 + */ + def jvmWiseLUB(other: ClassBType): ClassBType = { + def isNotNullOrNothing(c: ClassBType) = !c.isNullType && !c.isNothingType + assert(isNotNullOrNothing(this) && isNotNullOrNothing(other), s"jvmWiseLub for null or nothing: $this - $other") + + val res: ClassBType = (this.isInterface, other.isInterface) match { + case (true, true) => + // exercised by test/files/run/t4761.scala + if (other.isSubtypeOf(this)) this + else if (this.isSubtypeOf(other)) other + else ObjectReference + + case (true, false) => + if (other.isSubtypeOf(this)) this else ObjectReference + + case (false, true) => + if (this.isSubtypeOf(other)) other else ObjectReference + + case _ => + // TODO @lry I don't really understand the reasoning here. + // Both this and other are classes. The code takes (transitively) all superclasses and + // finds the first common one. + // MOST LIKELY the answer can be found here, see the comments and links by Miguel: + // - https://issues.scala-lang.org/browse/SI-3872 + firstCommonSuffix(this :: this.superClassesTransitive, other :: other.superClassesTransitive) + } + + assert(isNotNullOrNothing(res), s"jvmWiseLub computed: $res") + res + } + + private def firstCommonSuffix(as: List[ClassBType], bs: List[ClassBType]): ClassBType = { + var chainA = as + var chainB = bs + var fcs: ClassBType = null + do { + if (chainB contains chainA.head) fcs = chainA.head + else if (chainA contains chainB.head) fcs = chainB.head + else { + chainA = chainA.tail + chainB = chainB.tail + } + } while (fcs == null) + fcs + } + } + + object ClassBType { + /** + * Valid flags for InnerClass attribute entry. + * See http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.7.6 + */ + private val INNER_CLASSES_FLAGS = { + asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED | + asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL | asm.Opcodes.ACC_INTERFACE | + asm.Opcodes.ACC_ABSTRACT | asm.Opcodes.ACC_SYNTHETIC | asm.Opcodes.ACC_ANNOTATION | + asm.Opcodes.ACC_ENUM + } + + // Primitive classes have no super class. A ClassBType for those is only created when + // they are actually being compiled (e.g., when compiling scala/Boolean.scala). + private val hasNoSuper = Set( + "scala/Unit", + "scala/Boolean", + "scala/Char", + "scala/Byte", + "scala/Short", + "scala/Int", + "scala/Float", + "scala/Long", + "scala/Double" + ) + + private val isInternalPhantomType = Set( + "scala/Null", + "scala/Nothing" + ) + } + + /** + * The type info for a class. Used for symboltable-independent subtype checks in the backend. + * + * @param superClass The super class, not defined for class java/lang/Object. + * @param interfaces All transitively implemented interfaces, except for those inherited + * through the superclass. + * @param flags The java flags, obtained through `javaFlags`. Used also to derive + * the flags for InnerClass entries. + * @param nestedClasses Classes nested in this class. Those need to be added to the + * InnerClass table, see the InnerClass spec summary above. + * @param nestedInfo If this describes a nested class, information for the InnerClass table. 
+ */ + final case class ClassInfo(superClass: Option[ClassBType], interfaces: List[ClassBType], flags: Int, + nestedClasses: List[ClassBType], nestedInfo: Option[NestedInfo]) + + /** + * Information required to add a class to an InnerClass table. + * The spec summary above explains what information is required for the InnerClass entry. + * + * @param enclosingClass The enclosing class, if it is also nested. When adding a class + * to the InnerClass table, enclosing nested classes are also added. + * @param outerName The outerName field in the InnerClass entry, may be None. + * @param innerName The innerName field, may be None. + * @param isStaticNestedClass True if this is a static nested class (not inner class) (*) + * + * (*) Note that the STATIC flag in ClassInfo.flags, obtained through javaFlags(classSym), is not + * correct for the InnerClass entry, see javaFlags. The static flag in the InnerClass describes + * a source-level property: if the class is in a static context (does not have an outer pointer). + * This is checked when building the NestedInfo. + */ + final case class NestedInfo(enclosingClass: ClassBType, + outerName: Option[String], + innerName: Option[String], + isStaticNestedClass: Boolean) + + /** + * This class holds the data for an entry in the InnerClass table. See the InnerClass summary + * above in this file. + * + * There's some overlap with the class NestedInfo, but it's not exactly the same and cleaner to + * keep separate. + * @param name The internal name of the class. + * @param outerName The internal name of the outer class, may be null. + * @param innerName The simple name of the inner class, may be null. + * @param flags The flags for this class in the InnerClass entry. + */ + final case class InnerClassEntry(name: String, outerName: String, innerName: String, flags: Int) + + final case class ArrayBType(componentType: BType) extends RefBType { + def dimension: Int = componentType match { + case a: ArrayBType => 1 + a.dimension + case _ => 1 + } + + def elementType: BType = componentType match { + case a: ArrayBType => a.elementType + case t => t + } + } + + final case class MethodBType(argumentTypes: List[BType], returnType: BType) extends BType + + /* Some definitions that are required for the implementation of BTypes. They are abstract because + * initializing them requires information from types / symbols, which is not accessible here in + * BTypes. + * + * They are defs (not vals) because they are implemented using vars (see comment on CoreBTypes). + */ + + /** + * Just a named pair, used in CoreBTypes.asmBoxTo/asmUnboxTo. + */ + final case class MethodNameAndType(name: String, methodType: MethodBType) + + /** + * True if the current compilation unit is of a primitive class (scala.Boolean et al). + * Used only in assertions. Abstract here because its implementation depends on global. + */ + def isCompilingPrimitive: Boolean +} + +object BTypes { + /** + * A marker for strings that represent class internal names. + * Ideally the type would be incompatible with String, for example by making it a value class. + * But that would create overhead in a Collection[InternalName]. 
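 *
 * The rejected alternative mentioned above would look roughly like the sketch below
 * (hypothetical, not part of the codebase); a value class avoids the wrapper for a single
 * value, but it still boxes when used as a type argument, e.g. in a List[InternalName]:
 * {{{
 * final case class InternalName(value: String) extends AnyVal
 * }}}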
+ */ + type InternalName = String +} \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala new file mode 100644 index 000000000000..94f9b585d96d --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -0,0 +1,381 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2014 LAMP/EPFL + * @author Martin Odersky + */ + +package scala.tools.nsc +package backend.jvm + +import scala.tools.asm +import opt.ByteCodeRepository +import scala.tools.asm.tree.ClassNode +import scala.tools.nsc.backend.jvm.opt.ByteCodeRepository.Source +import BTypes.InternalName + +/** + * This class mainly contains the method classBTypeFromSymbol, which extracts the necessary + * information from a symbol and its type to create the corresponding ClassBType. It requires + * access to the compiler (global parameter). + * + * The mixin CoreBTypes defines core BTypes that are used in the backend. Building these BTypes + * uses classBTypeFromSymbol, hence requires access to the compiler (global). + * + * BTypesFromSymbols extends BTypes because the implementation of BTypes requires access to some + * of the core btypes. They are declared in BTypes as abstract members. Note that BTypes does + * not have access to the compiler instance. + */ +class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { + import global._ + import definitions._ + + val bCodeICodeCommon: BCodeICodeCommon[global.type] = new BCodeICodeCommon(global) + val bCodeAsmCommon: BCodeAsmCommon[global.type] = new BCodeAsmCommon(global) + import bCodeAsmCommon._ + + // Why the proxy, see documentation of class [[CoreBTypes]]. + val coreBTypes = new CoreBTypesProxy[this.type](this) + import coreBTypes._ + + val byteCodeRepository = new ByteCodeRepository(global.classPath, recordPerRunCache(collection.concurrent.TrieMap.empty[InternalName, (ClassNode, Source)])) + + final def initializeCoreBTypes(): Unit = { + coreBTypes.setBTypes(new CoreBTypes[this.type](this)) + } + + def recordPerRunCache[T <: collection.generic.Clearable](cache: T): T = perRunCaches.recordCache(cache) + + // helpers that need access to global. + // TODO @lry create a separate component, they don't belong to BTypesFromSymbols + + final val strMODULE_INSTANCE_FIELD = nme.MODULE_INSTANCE_FIELD.toString + + private val primitiveCompilationUnits = Set( + "Unit.scala", + "Boolean.scala", + "Char.scala", + "Byte.scala", + "Short.scala", + "Int.scala", + "Float.scala", + "Long.scala", + "Double.scala" + ) + + /** + * True if the current compilation unit is of a primitive class (scala.Boolean et al). + * Used only in assertions. + */ + def isCompilingPrimitive = { + primitiveCompilationUnits(currentUnit.source.file.name) + } + + def isCompilingArray = { + currentUnit.source.file.name == "Array.scala" + } + + // end helpers + + /** + * The ClassBType for a class symbol `sym`. 
+ */ + final def classBTypeFromSymbol(classSym: Symbol): ClassBType = { + assert(classSym != NoSymbol, "Cannot create ClassBType from NoSymbol") + assert(classSym.isClass, s"Cannot create ClassBType from non-class symbol $classSym") + assert( + (!primitiveTypeMap.contains(classSym) || isCompilingPrimitive) && + (classSym != NothingClass && classSym != NullClass), + s"Cannot create ClassBType for special class symbol ${classSym.fullName}") + + val internalName = classSym.javaBinaryName.toString + classBTypeFromInternalName.getOrElse(internalName, { + // The new ClassBType is added to the map in its constructor, before we set its info. This + // allows initializing cyclic dependencies, see the comment on variable ClassBType._info. + setClassInfo(classSym, ClassBType(internalName)) + }) + } + + private def setClassInfo(classSym: Symbol, classBType: ClassBType): ClassBType = { + val superClassSym = if (classSym.isImplClass) ObjectClass else classSym.superClass + assert( + if (classSym == ObjectClass) + superClassSym == NoSymbol + else if (classSym.isInterface) + superClassSym == ObjectClass + else + // A ClassBType for a primitive class (scala.Boolean et al) is only created when compiling these classes. + ((superClassSym != NoSymbol) && !superClassSym.isInterface) || (isCompilingPrimitive && primitiveTypeMap.contains(classSym)), + s"Bad superClass for $classSym: $superClassSym" + ) + val superClass = if (superClassSym == NoSymbol) None + else Some(classBTypeFromSymbol(superClassSym)) + + val interfaces = implementedInterfaces(classSym).map(classBTypeFromSymbol) + + val flags = javaFlags(classSym) + + /* The InnerClass table of a class C must contain all nested classes of C, even if they are only + * declared but not otherwise referenced in C (from the bytecode or a method / field signature). + * We collect them here. + * + * Nested classes that are also referenced in C will be added to the innerClassBufferASM during + * code generation, but those duplicates will be eliminated when emitting the InnerClass + * attribute. + * + * Why do we need to collect classes into innerClassBufferASM at all? To collect references to + * nested classes, but NOT nested in C, that are used within C. + */ + val nestedClassSymbols = { + // The lambdalift phase lifts all nested classes to the enclosing class, so if we collect + // member classes right after lambdalift, we obtain all nested classes, including local and + // anonymous ones. + val nestedClasses = { + val nested = exitingPhase(currentRun.lambdaliftPhase)(memberClassesOf(classSym)) + if (isTopLevelModuleClass(classSym)) { + // For Java compatibility, member classes of top-level objects are treated as members of + // the top-level companion class, see comment below. + val members = exitingPickler(memberClassesOf(classSym)) + nested diff members + } else { + nested + } + } + + // If this is a top-level class, the member classes of the companion object are added as + // members of the class. For example: + // class C { } + // object C { + // class D + // def f = { class E } + // } + // The class D is added as a member of class C. The reason is: for Java compatibility, the + // InnerClass attribute for D has "C" (NOT the module class "C$") as the outer class of D + // (done by buildNestedInfo). See comment in BTypes. + // For consistency, the InnerClass entry for D needs to be present in C - to Java it looks + // like D is a member of C, not C$. 
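Aside: spelled out on the example from the comment above (the values are what the surrounding code is expected to produce, not compiler output):

```scala
class C
object C {
  class D             // member of the companion object: recorded as a nested class of C;
                      // its InnerClass data ends up as outerName = Some("C"), innerName = Some("D")
  def f = { class E } // local class: lifted by lambdalift, not listed as a member of C,
                      // and a local class gets outerName = None
}
```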
+ val linkedClass = exitingPickler(classSym.linkedClassOfClass) // linkedCoC does not work properly in late phases + val companionModuleMembers = { + // phase travel to exitingPickler: this makes sure that memberClassesOf only sees member classes, + // not local classes of the companion module (E in the example) that were lifted by lambdalift. + if (isTopLevelModuleClass(linkedClass)) exitingPickler(memberClassesOf(linkedClass)) + else Nil + } + + nestedClasses ++ companionModuleMembers + } + + /** + * For nested java classes, the scala compiler creates both a class and a module (and therefore + * a module class) symbol. For example, in `class A { class B {} }`, the nestedClassSymbols + * for A contain both the class B and the module class B. + * Here we get rid of the module class B, making sure that the class B is present. + */ + val nestedClassSymbolsNoJavaModuleClasses = nestedClassSymbols.filter(s => { + if (s.isJavaDefined && s.isModuleClass) { + // We could also search in nestedClassSymbols for s.linkedClassOfClass, but sometimes that + // returns NoSymbol, so it doesn't work. + val nb = nestedClassSymbols.count(mc => mc.name == s.name && mc.owner == s.owner) + assert(nb == 2, s"Java member module without member class: $s - $nestedClassSymbols") + false + } else true + }) + + val nestedClasses = nestedClassSymbolsNoJavaModuleClasses.map(classBTypeFromSymbol) + + val nestedInfo = buildNestedInfo(classSym) + + classBType.info = ClassInfo(superClass, interfaces, flags, nestedClasses, nestedInfo) + classBType + } + + private def buildNestedInfo(innerClassSym: Symbol): Option[NestedInfo] = { + assert(innerClassSym.isClass, s"Cannot build NestedInfo for non-class symbol $innerClassSym") + + val isTopLevel = innerClassSym.rawowner.isPackageClass + if (isTopLevel) None + else { + // See comment in BTypes, when is a class marked static in the InnerClass table. + val isStaticNestedClass = isOriginallyStaticOwner(innerClassSym.originalOwner) + + // After lambdalift (which is where we are), the rawowner field contains the enclosing class. + val enclosingClass = { + // (1) Example java source: class C { static class D { } } + // The Scala compiler creates a class and a module symbol for C. Because D is a static + // nested class, the symbol for D is nested in the module class C (not in the class C). + // For the InnerClass attribute, we use the class symbol C, which represents the situation + // in the source code. + + // (2) Java compatibility. See the big comment in BTypes that summarizes the InnerClass spec. + if ((innerClassSym.isJavaDefined && innerClassSym.rawowner.isModuleClass) || // (1) + (!isAnonymousOrLocalClass(innerClassSym) && isTopLevelModuleClass(innerClassSym.rawowner))) { // (2) + // phase travel for linkedCoC - does not always work in late phases + exitingPickler(innerClassSym.rawowner.linkedClassOfClass) match { + case NoSymbol => + // For top-level modules without a companion class, see doc of mirrorClassClassBType.
+ mirrorClassClassBType(exitingPickler(innerClassSym.rawowner)) + + case companionClass => + classBTypeFromSymbol(companionClass) + } + } else { + classBTypeFromSymbol(innerClassSym.rawowner) + } + } + + val outerName: Option[String] = { + if (isAnonymousOrLocalClass(innerClassSym)) None + else Some(enclosingClass.internalName) + } + + val innerName: Option[String] = { + if (innerClassSym.isAnonymousClass || innerClassSym.isAnonymousFunction) None + else Some(innerClassSym.rawname + innerClassSym.moduleSuffix) // moduleSuffix for module classes + } + + Some(NestedInfo(enclosingClass, outerName, innerName, isStaticNestedClass)) + } + } + + /** + * For top-level objects without a companion class, the compiler generates a mirror class with + * static forwarders (Java compat). There's no symbol for the mirror class, but we still need a + * ClassBType (its info.nestedClasses will hold the InnerClass entries, see comment in BTypes). + */ + def mirrorClassClassBType(moduleClassSym: Symbol): ClassBType = { + assert(isTopLevelModuleClass(moduleClassSym), s"not a top-level module class: $moduleClassSym") + val internalName = moduleClassSym.javaBinaryName.dropModule.toString + classBTypeFromInternalName.getOrElse(internalName, { + val c = ClassBType(internalName) + // class info consistent with BCodeHelpers.genMirrorClass + val nested = exitingPickler(memberClassesOf(moduleClassSym)) map classBTypeFromSymbol + c.info = ClassInfo( + superClass = Some(ObjectReference), + interfaces = Nil, + flags = asm.Opcodes.ACC_SUPER | asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_FINAL, + nestedClasses = nested, + nestedInfo = None + ) + c + }) + } + + /** + * True for module classes of package level objects. The backend will generate a mirror class for + * such objects. + */ + final def isTopLevelModuleClass(sym: Symbol): Boolean = exitingPickler { + // phase travel to pickler required for isNestedClass (looks at owner) + val r = sym.isModuleClass && !sym.isNestedClass + // The mixin phase adds the `lateMODULE` flag to trait implementation classes. Since the flag + // is late, it should not be visible here inside the time travel. We check this. + if (r) assert(!sym.isImplClass, s"isModuleClass should be false for impl class $sym") + r + } + + /** + * True for module classes of modules that are top-level or owned only by objects. Module classes + * for such objects will get a MODULE$ flag and a corresponding static initializer. + */ + final def isStaticModuleClass(sym: Symbol): Boolean = { + /* (1) Phase travel to pickler is required to exclude implementation classes; they have the + * lateMODULE flag after mixin, so isModuleClass would be true. + * (2) isStaticModuleClass is a source-level property. See comment on isOriginallyStaticOwner. + */ + exitingPickler { // (1) + sym.isModuleClass && + isOriginallyStaticOwner(sym.originalOwner) // (2) + } + } + + // legacy, to be removed when the @remote annotation gets removed + final def isRemote(s: Symbol) = (s hasAnnotation definitions.RemoteAttr) + final def hasPublicBitSet(flags: Int) = ((flags & asm.Opcodes.ACC_PUBLIC) != 0) + + /** + * Return the Java modifiers for the given symbol.
+ * Java modifiers for classes: + * - public, abstract, final, strictfp (not used) + * for interfaces: + * - the same as for classes, without 'final' + * for fields: + * - public, private (*) + * - static, final + * for methods: + * - the same as for fields, plus: + * - abstract, synchronized (not used), strictfp (not used), native (not used) + * for all: + * - deprecated + * + * (*) protected cannot be used, since inner classes 'see' protected members, + * and they would fail verification after lifted. + */ + final def javaFlags(sym: Symbol): Int = { + // constructors of module classes should be private. introduced in b06edbc, probably to prevent + // creating module instances from java. for nested modules, the constructor needs to be public + // since they are created by the outer class and stored in a field. a java client can create + // new instances via outerClassInstance.new InnerModuleClass$(). + // TODO: do this early, mark the symbol private. + val privateFlag = + sym.isPrivate || (sym.isPrimaryConstructor && isTopLevelModuleClass(sym.owner)) + + // Symbols marked in source as `final` have the FINAL flag. (In the past, the flag was also + // added to modules and module classes, not anymore since 296b706). + // Note that the presence of the `FINAL` flag on a symbol does not correspond 1:1 to emitting + // ACC_FINAL in bytecode. + // + // Top-level modules are marked ACC_FINAL in bytecode (even without the FINAL flag). Nested + // objects don't get the flag to allow overriding (under -Yoverride-objects, SI-5676). + // + // For fields, only eager val fields can receive ACC_FINAL. vars or lazy vals can't: + // Source: http://docs.oracle.com/javase/specs/jls/se7/html/jls-17.html#jls-17.5.3 + // "Another problem is that the specification allows aggressive + // optimization of final fields. Within a thread, it is permissible to + // reorder reads of a final field with those modifications of a final + // field that do not take place in the constructor." + // + // A var or lazy val which is marked final still has meaning to the + // scala compiler. The word final is heavily overloaded unfortunately; + // for us it means "not overridable". At present you can't override + // vars regardless; this may change. + // + // The logic does not check .isFinal (which checks flags for the FINAL flag, + // and includes symbols marked lateFINAL) instead inspecting rawflags so + // we can exclude lateFINAL. Such symbols are eligible for inlining, but to + // avoid breaking proxy software which depends on subclassing, we do not + // emit ACC_FINAL. + + val finalFlag = ( + (((sym.rawflags & symtab.Flags.FINAL) != 0) || isTopLevelModuleClass(sym)) + && !sym.enclClass.isInterface + && !sym.isClassConstructor + && !sym.isMutable // lazy vals and vars both + ) + + // Primitives are "abstract final" to prohibit instantiation + // without having to provide any implementations, but that is an + // illegal combination of modifiers at the bytecode level so + // suppress final if abstract if present. 
+ import asm.Opcodes._ + GenBCode.mkFlags( + if (privateFlag) ACC_PRIVATE else ACC_PUBLIC, + if (sym.isDeferred || sym.hasAbstractFlag) ACC_ABSTRACT else 0, + if (sym.isInterface) ACC_INTERFACE else 0, + if (finalFlag && !sym.hasAbstractFlag) ACC_FINAL else 0, + if (sym.isStaticMember) ACC_STATIC else 0, + if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0, + if (sym.isArtifact) ACC_SYNTHETIC else 0, + if (sym.isClass && !sym.isInterface) ACC_SUPER else 0, + if (sym.hasEnumFlag) ACC_ENUM else 0, + if (sym.isVarargsMethod) ACC_VARARGS else 0, + if (sym.hasFlag(symtab.Flags.SYNCHRONIZED)) ACC_SYNCHRONIZED else 0, + if (sym.isDeprecated) asm.Opcodes.ACC_DEPRECATED else 0 + ) + } + + def javaFieldFlags(sym: Symbol) = { + javaFlags(sym) | GenBCode.mkFlags( + if (sym hasAnnotation TransientAttr) asm.Opcodes.ACC_TRANSIENT else 0, + if (sym hasAnnotation VolatileAttr) asm.Opcodes.ACC_VOLATILE else 0, + if (sym.isMutable) 0 else asm.Opcodes.ACC_FINAL + ) + } +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala b/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala new file mode 100644 index 000000000000..03306f30aae2 --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala @@ -0,0 +1,24 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2014 LAMP/EPFL + * @author Martin Odersky + */ + +package scala.tools.nsc +package backend.jvm + +import scala.reflect.internal.util.Statistics + +object BackendStats { + import Statistics.{newTimer, newSubTimer} + val bcodeTimer = newTimer("time in backend", "jvm") + + val bcodeInitTimer = newSubTimer("bcode initialization", bcodeTimer) + val bcodeGenStat = newSubTimer("code generation", bcodeTimer) + val methodOptTimer = newSubTimer("intra-method optimizations", bcodeTimer) + val bcodeWriteTimer = newSubTimer("classfile writing", bcodeTimer) + + def timed[T](timer: Statistics.Timer)(body: => T): T = { + val start = Statistics.startTimer(timer) + try body finally Statistics.stopTimer(timer, start) + } +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala index 8e6c09213fce..1d29fdee1070 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala @@ -1,6 +1,6 @@ /* NSC -- new Scala compiler * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips + * @author Martin Odersky */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala new file mode 100644 index 000000000000..246235f395af --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala @@ -0,0 +1,293 @@ +package scala.tools.nsc +package backend.jvm + +import scala.annotation.switch + +/** + * Core BTypes and some other definitions. The initialization of these definitions requires access + * to symbols / types (global). + * + * The symbols used to initialize the ClassBTypes may change from one compiler run to the next. To + * make sure the definitions are consistent with the symbols in the current run, the + * `intializeCoreBTypes` method in BTypesFromSymbols creates a new instance of CoreBTypes in each + * compiler run. + * + * The class BTypesFromSymbols does not directly reference CoreBTypes, but CoreBTypesProxy. The + * reason is that having a `var bTypes: CoreBTypes` would not allow `import bTypes._`. 
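Aside: `BackendStats.timed` above is the usual by-name try/finally timing wrapper. A standalone analogue, using `System.nanoTime` instead of the `Statistics` timers so it runs without the compiler on the classpath:

```scala
object TimedDemo extends App {
  // Same shape as BackendStats.timed: evaluate a by-name body, report in finally.
  def timed[T](label: String)(body: => T): T = {
    val start = System.nanoTime()
    try body
    finally println(s"$label took ${(System.nanoTime() - start) / 1000} microseconds")
  }

  val sum = timed("summing")((1 to 1000000).sum)
  println(sum)
}
```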
Instead, the + * proxy class holds a `CoreBTypes` in a variable field and forwards to this instance. + * + * The definitions in `CoreBTypes` need to be lazy vals to break an initialization cycle. When + * creating a new instance to assign to the proxy, the `classBTypeFromSymbol` invoked in the + * constructor will actually go through the proxy. The lazy vals make sure the instance is assigned + * in the proxy before the fields are initialized. + * + * Note: if we did not re-create the core BTypes on each compiler run, BType.classBTypeFromInternalNameMap + * could not be a perRunCache anymore: the classes defined here need to be in that map, they are + * added when the ClassBTypes are created. The per run cache removes them, so they would be missing + * in the second run. + */ +class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes: BTFS) { + import bTypes._ + import global._ + import rootMirror.{requiredClass, getClassIfDefined} + import definitions._ + + /** + * Maps primitive types to their corresponding PrimitiveBType. The map is defined lexically above + * the first use of `classBTypeFromSymbol` because that method looks at the map. + */ + lazy val primitiveTypeMap: Map[Symbol, PrimitiveBType] = Map( + UnitClass -> UNIT, + BooleanClass -> BOOL, + CharClass -> CHAR, + ByteClass -> BYTE, + ShortClass -> SHORT, + IntClass -> INT, + LongClass -> LONG, + FloatClass -> FLOAT, + DoubleClass -> DOUBLE + ) + + lazy val BOXED_UNIT : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Void]) + lazy val BOXED_BOOLEAN : ClassBType = classBTypeFromSymbol(BoxedBooleanClass) + lazy val BOXED_BYTE : ClassBType = classBTypeFromSymbol(BoxedByteClass) + lazy val BOXED_SHORT : ClassBType = classBTypeFromSymbol(BoxedShortClass) + lazy val BOXED_CHAR : ClassBType = classBTypeFromSymbol(BoxedCharacterClass) + lazy val BOXED_INT : ClassBType = classBTypeFromSymbol(BoxedIntClass) + lazy val BOXED_LONG : ClassBType = classBTypeFromSymbol(BoxedLongClass) + lazy val BOXED_FLOAT : ClassBType = classBTypeFromSymbol(BoxedFloatClass) + lazy val BOXED_DOUBLE : ClassBType = classBTypeFromSymbol(BoxedDoubleClass) + + /** + * Map from primitive types to their boxed class type. Useful when pushing class literals onto the + * operand stack (ldc instruction taking a class literal), see genConstant. + */ + lazy val boxedClassOfPrimitive: Map[PrimitiveBType, ClassBType] = Map( + UNIT -> BOXED_UNIT, + BOOL -> BOXED_BOOLEAN, + BYTE -> BOXED_BYTE, + SHORT -> BOXED_SHORT, + CHAR -> BOXED_CHAR, + INT -> BOXED_INT, + LONG -> BOXED_LONG, + FLOAT -> BOXED_FLOAT, + DOUBLE -> BOXED_DOUBLE + ) + + lazy val boxedClasses: Set[ClassBType] = boxedClassOfPrimitive.values.toSet + + /** + * Maps the method symbol for a box method to the boxed type of the result. For example, the + * method symbol for `Byte.box()` is mapped to the ClassBType `java/lang/Byte`. + */ + lazy val boxResultType: Map[Symbol, ClassBType] = { + for ((valueClassSym, boxMethodSym) <- currentRun.runDefinitions.boxMethod) + yield boxMethodSym -> boxedClassOfPrimitive(primitiveTypeMap(valueClassSym)) + } + + /** + * Maps the method symbol for an unbox method to the primitive type of the result. + * For example, the method symbol for `Byte.unbox()`) is mapped to the PrimitiveBType BYTE. */ + lazy val unboxResultType: Map[Symbol, PrimitiveBType] = { + for ((valueClassSym, unboxMethodSym) <- currentRun.runDefinitions.unboxMethod) + yield unboxMethodSym -> primitiveTypeMap(valueClassSym) + } + + /* + * RT_NOTHING and RT_NULL exist at run-time only. 
They are the bytecode-level manifestation (in + * method signatures only) of what shows up as NothingClass resp. NullClass in Scala ASTs. + * + * Therefore, when RT_NOTHING or RT_NULL are to be emitted, a mapping is needed: the internal + * names of NothingClass and NullClass can't be emitted as-is. + * TODO @lry Once there's a 2.11.3 starr, use the commented argument list. The current starr crashes on the type literal `scala.runtime.Nothing$` + */ + lazy val RT_NOTHING : ClassBType = classBTypeFromSymbol(rootMirror.getRequiredClass("scala.runtime.Nothing$")) // (requiredClass[scala.runtime.Nothing$]) + lazy val RT_NULL : ClassBType = classBTypeFromSymbol(rootMirror.getRequiredClass("scala.runtime.Null$")) // (requiredClass[scala.runtime.Null$]) + + lazy val ObjectReference : ClassBType = classBTypeFromSymbol(ObjectClass) + lazy val objArrayReference : ArrayBType = ArrayBType(ObjectReference) + + lazy val StringReference : ClassBType = classBTypeFromSymbol(StringClass) + lazy val StringBuilderReference : ClassBType = classBTypeFromSymbol(StringBuilderClass) + lazy val ThrowableReference : ClassBType = classBTypeFromSymbol(ThrowableClass) + lazy val jlCloneableReference : ClassBType = classBTypeFromSymbol(JavaCloneableClass) // java/lang/Cloneable + lazy val jlNPEReference : ClassBType = classBTypeFromSymbol(NullPointerExceptionClass) // java/lang/NullPointerException + lazy val jioSerializableReference : ClassBType = classBTypeFromSymbol(JavaSerializableClass) // java/io/Serializable + lazy val scalaSerializableReference : ClassBType = classBTypeFromSymbol(SerializableClass) // scala/Serializable + lazy val classCastExceptionReference : ClassBType = classBTypeFromSymbol(ClassCastExceptionClass) // java/lang/ClassCastException + + lazy val srBooleanRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.BooleanRef]) + lazy val srByteRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.ByteRef]) + lazy val srCharRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.CharRef]) + lazy val srIntRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.IntRef]) + lazy val srLongRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.LongRef]) + lazy val srFloatRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.FloatRef]) + lazy val srDoubleRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.DoubleRef]) + + lazy val hashMethodSym: Symbol = getMember(ScalaRunTimeModule, nme.hash_) + + // TODO @lry avoiding going through through missingHook for every line in the REPL: https://github.com/scala/scala/commit/8d962ed4ddd310cc784121c426a2e3f56a112540 + lazy val AndroidParcelableInterface : Symbol = getClassIfDefined("android.os.Parcelable") + lazy val AndroidCreatorClass : Symbol = getClassIfDefined("android.os.Parcelable$Creator") + + lazy val BeanInfoAttr: Symbol = requiredClass[scala.beans.BeanInfo] + + /* The Object => String overload. 
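Aside: a hedged illustration of what "manifestation in method signatures only" amounts to. A method whose Scala result type is `Nothing` is expected to be emitted with `scala/runtime/Nothing$` in its descriptor, roughly:

```scala
def fail(msg: String): Nothing = throw new RuntimeException(msg)
// expected method descriptor: (Ljava/lang/String;)Lscala/runtime/Nothing$;
// the method always completes abruptly; no value of Nothing$ is ever created
```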
*/ + lazy val String_valueOf: Symbol = { + getMember(StringModule, nme.valueOf) filter (sym => sym.info.paramTypes match { + case List(pt) => pt.typeSymbol == ObjectClass + case _ => false + }) + } + + // scala.FunctionX and scala.runtim.AbstractFunctionX + lazy val FunctionReference : Vector[ClassBType] = (0 to MaxFunctionArity).map(i => classBTypeFromSymbol(FunctionClass(i)))(collection.breakOut) + lazy val AbstractFunctionReference : Vector[ClassBType] = (0 to MaxFunctionArity).map(i => classBTypeFromSymbol(AbstractFunctionClass(i)))(collection.breakOut) + lazy val AbstractFunctionArityMap : Map[ClassBType, Int] = AbstractFunctionReference.zipWithIndex.toMap + + lazy val PartialFunctionReference : ClassBType = classBTypeFromSymbol(PartialFunctionClass) + lazy val AbstractPartialFunctionReference : ClassBType = classBTypeFromSymbol(AbstractPartialFunctionClass) + + lazy val BoxesRunTime: ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.BoxesRunTime]) + + /** + * Methods in scala.runtime.BoxesRuntime + */ + lazy val asmBoxTo : Map[BType, MethodNameAndType] = Map( + BOOL -> MethodNameAndType("boxToBoolean", MethodBType(List(BOOL), BOXED_BOOLEAN)), + BYTE -> MethodNameAndType("boxToByte", MethodBType(List(BYTE), BOXED_BYTE)), + CHAR -> MethodNameAndType("boxToCharacter", MethodBType(List(CHAR), BOXED_CHAR)), + SHORT -> MethodNameAndType("boxToShort", MethodBType(List(SHORT), BOXED_SHORT)), + INT -> MethodNameAndType("boxToInteger", MethodBType(List(INT), BOXED_INT)), + LONG -> MethodNameAndType("boxToLong", MethodBType(List(LONG), BOXED_LONG)), + FLOAT -> MethodNameAndType("boxToFloat", MethodBType(List(FLOAT), BOXED_FLOAT)), + DOUBLE -> MethodNameAndType("boxToDouble", MethodBType(List(DOUBLE), BOXED_DOUBLE)) + ) + + lazy val asmUnboxTo: Map[BType, MethodNameAndType] = Map( + BOOL -> MethodNameAndType("unboxToBoolean", MethodBType(List(ObjectReference), BOOL)), + BYTE -> MethodNameAndType("unboxToByte", MethodBType(List(ObjectReference), BYTE)), + CHAR -> MethodNameAndType("unboxToChar", MethodBType(List(ObjectReference), CHAR)), + SHORT -> MethodNameAndType("unboxToShort", MethodBType(List(ObjectReference), SHORT)), + INT -> MethodNameAndType("unboxToInt", MethodBType(List(ObjectReference), INT)), + LONG -> MethodNameAndType("unboxToLong", MethodBType(List(ObjectReference), LONG)), + FLOAT -> MethodNameAndType("unboxToFloat", MethodBType(List(ObjectReference), FLOAT)), + DOUBLE -> MethodNameAndType("unboxToDouble", MethodBType(List(ObjectReference), DOUBLE)) + ) + + lazy val typeOfArrayOp: Map[Int, BType] = { + import scalaPrimitives._ + Map( + (List(ZARRAY_LENGTH, ZARRAY_GET, ZARRAY_SET) map (_ -> BOOL)) ++ + (List(BARRAY_LENGTH, BARRAY_GET, BARRAY_SET) map (_ -> BYTE)) ++ + (List(SARRAY_LENGTH, SARRAY_GET, SARRAY_SET) map (_ -> SHORT)) ++ + (List(CARRAY_LENGTH, CARRAY_GET, CARRAY_SET) map (_ -> CHAR)) ++ + (List(IARRAY_LENGTH, IARRAY_GET, IARRAY_SET) map (_ -> INT)) ++ + (List(LARRAY_LENGTH, LARRAY_GET, LARRAY_SET) map (_ -> LONG)) ++ + (List(FARRAY_LENGTH, FARRAY_GET, FARRAY_SET) map (_ -> FLOAT)) ++ + (List(DARRAY_LENGTH, DARRAY_GET, DARRAY_SET) map (_ -> DOUBLE)) ++ + (List(OARRAY_LENGTH, OARRAY_GET, OARRAY_SET) map (_ -> ObjectReference)) : _* + ) + } +} + +/** + * This trait make some core BTypes available that don't depend on a Global instance. Some core + * BTypes are required to be accessible in the BTypes trait, which does not have access to Global. 
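Aside: the `asmBoxTo` entry for `INT` corresponds to a static call to `scala.runtime.BoxesRunTime.boxToInteger`. A sketch of the instruction the backend would emit; `mv` is a placeholder for the `MethodVisitor` of the method being generated:

```scala
import scala.tools.asm.{MethodVisitor, Opcodes}

def emitBoxInt(mv: MethodVisitor): Unit = {
  mv.visitMethodInsn(
    Opcodes.INVOKESTATIC,
    "scala/runtime/BoxesRunTime",
    "boxToInteger",            // asmBoxTo(INT).name
    "(I)Ljava/lang/Integer;",  // descriptor of MethodBType(List(INT), BOXED_INT)
    false)
}
```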
+ * + * BTypes cannot refer to CoreBTypesProxy because some of its members depend on global, for example + * the type Symbol in + * def primitiveTypeMap: Map[Symbol, PrimitiveBType] + */ +trait CoreBTypesProxyGlobalIndependent[BTS <: BTypes] { + val bTypes: BTS + import bTypes._ + + def boxedClasses: Set[ClassBType] + + def RT_NOTHING : ClassBType + def RT_NULL : ClassBType + + def ObjectReference : ClassBType + def jlCloneableReference : ClassBType + def jioSerializableReference : ClassBType +} + +/** + * See comment in class [[CoreBTypes]]. + */ +final class CoreBTypesProxy[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes: BTFS) extends CoreBTypesProxyGlobalIndependent[BTFS] { + import bTypes._ + import global._ + + private[this] var _coreBTypes: CoreBTypes[bTypes.type] = _ + def setBTypes(coreBTypes: CoreBTypes[BTFS]): Unit = { + _coreBTypes = coreBTypes.asInstanceOf[CoreBTypes[bTypes.type]] + } + + def primitiveTypeMap: Map[Symbol, PrimitiveBType] = _coreBTypes.primitiveTypeMap + + def BOXED_UNIT : ClassBType = _coreBTypes.BOXED_UNIT + def BOXED_BOOLEAN : ClassBType = _coreBTypes.BOXED_BOOLEAN + def BOXED_BYTE : ClassBType = _coreBTypes.BOXED_BYTE + def BOXED_SHORT : ClassBType = _coreBTypes.BOXED_SHORT + def BOXED_CHAR : ClassBType = _coreBTypes.BOXED_CHAR + def BOXED_INT : ClassBType = _coreBTypes.BOXED_INT + def BOXED_LONG : ClassBType = _coreBTypes.BOXED_LONG + def BOXED_FLOAT : ClassBType = _coreBTypes.BOXED_FLOAT + def BOXED_DOUBLE : ClassBType = _coreBTypes.BOXED_DOUBLE + + def boxedClasses: Set[ClassBType] = _coreBTypes.boxedClasses + + def boxedClassOfPrimitive: Map[PrimitiveBType, ClassBType] = _coreBTypes.boxedClassOfPrimitive + + def boxResultType: Map[Symbol, ClassBType] = _coreBTypes.boxResultType + + def unboxResultType: Map[Symbol, PrimitiveBType] = _coreBTypes.unboxResultType + + def RT_NOTHING : ClassBType = _coreBTypes.RT_NOTHING + def RT_NULL : ClassBType = _coreBTypes.RT_NULL + + def ObjectReference : ClassBType = _coreBTypes.ObjectReference + def objArrayReference : ArrayBType = _coreBTypes.objArrayReference + + def StringReference : ClassBType = _coreBTypes.StringReference + def StringBuilderReference : ClassBType = _coreBTypes.StringBuilderReference + def ThrowableReference : ClassBType = _coreBTypes.ThrowableReference + def jlCloneableReference : ClassBType = _coreBTypes.jlCloneableReference + def jlNPEReference : ClassBType = _coreBTypes.jlNPEReference + def jioSerializableReference : ClassBType = _coreBTypes.jioSerializableReference + def scalaSerializableReference : ClassBType = _coreBTypes.scalaSerializableReference + def classCastExceptionReference : ClassBType = _coreBTypes.classCastExceptionReference + + def srBooleanRef : ClassBType = _coreBTypes.srBooleanRef + def srByteRef : ClassBType = _coreBTypes.srByteRef + def srCharRef : ClassBType = _coreBTypes.srCharRef + def srIntRef : ClassBType = _coreBTypes.srIntRef + def srLongRef : ClassBType = _coreBTypes.srLongRef + def srFloatRef : ClassBType = _coreBTypes.srFloatRef + def srDoubleRef : ClassBType = _coreBTypes.srDoubleRef + + def hashMethodSym: Symbol = _coreBTypes.hashMethodSym + + def AndroidParcelableInterface : Symbol = _coreBTypes.AndroidParcelableInterface + def AndroidCreatorClass : Symbol = _coreBTypes.AndroidCreatorClass + + def BeanInfoAttr: Symbol = _coreBTypes.BeanInfoAttr + + def String_valueOf: Symbol = _coreBTypes.String_valueOf + + def FunctionReference : Vector[ClassBType] = _coreBTypes.FunctionReference + def AbstractFunctionReference : Vector[ClassBType] = 
_coreBTypes.AbstractFunctionReference + def AbstractFunctionArityMap : Map[ClassBType, Int] = _coreBTypes.AbstractFunctionArityMap + + def PartialFunctionReference : ClassBType = _coreBTypes.PartialFunctionReference + def AbstractPartialFunctionReference : ClassBType = _coreBTypes.AbstractPartialFunctionReference + + def BoxesRunTime: ClassBType = _coreBTypes.BoxesRunTime + + def asmBoxTo : Map[BType, MethodNameAndType] = _coreBTypes.asmBoxTo + def asmUnboxTo: Map[BType, MethodNameAndType] = _coreBTypes.asmUnboxTo + + def typeOfArrayOp: Map[Int, BType] = _coreBTypes.typeOfArrayOp +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index a389816caf33..abe3bc512c71 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -20,12 +20,15 @@ import scala.annotation.tailrec * * Documentation at http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/2012Q2/GenASM.pdf */ -abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { self => +abstract class GenASM extends SubComponent with BytecodeWriters { self => import global._ import icodes._ import icodes.opcodes._ import definitions._ + val bCodeAsmCommon: BCodeAsmCommon[global.type] = new BCodeAsmCommon(global) + import bCodeAsmCommon._ + // Strangely I can't find this in the asm code // 255, but reserving 1 for "this" final val MaximumJvmParameters = 254 @@ -96,24 +99,83 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { } } + private def isJavaEntryPoint(icls: IClass) = { + val sym = icls.symbol + def fail(msg: String, pos: Position = sym.pos) = { + reporter.warning(sym.pos, + sym.name + " has a main method with parameter type Array[String], but " + sym.fullName('.') + " will not be a runnable program.\n" + + " Reason: " + msg + // TODO: make this next claim true, if possible + // by generating valid main methods as static in module classes + // not sure what the jvm allows here + // + " You can still run the program by calling it as " + sym.javaSimpleName + " instead." + ) + false + } + def failNoForwarder(msg: String) = { + fail(msg + ", which means no static forwarder can be generated.\n") + } + val possibles = if (sym.hasModuleFlag) (sym.tpe nonPrivateMember nme.main).alternatives else Nil + val hasApproximate = possibles exists { m => + m.info match { + case MethodType(p :: Nil, _) => p.tpe.typeSymbol == ArrayClass + case _ => false + } + } + // At this point it's a module with a main-looking method, so either succeed or warn that it isn't. + hasApproximate && { + // Before erasure so we can identify generic mains. + enteringErasure { + val companion = sym.linkedClassOfClass + + if (hasJavaMainMethod(companion)) + failNoForwarder("companion contains its own main method") + else if (companion.tpe.member(nme.main) != NoSymbol) + // this is only because forwarders aren't smart enough yet + failNoForwarder("companion contains its own main method (implementation restriction: no main is allowed, regardless of signature)") + else if (companion.isTrait) + failNoForwarder("companion is a trait") + // Now either succeeed, or issue some additional warnings for things which look like + // attempts to be java main methods. 
+ else (possibles exists isJavaMainMethod) || { + possibles exists { m => + m.info match { + case PolyType(_, _) => + fail("main methods cannot be generic.") + case MethodType(params, res) => + if (res.typeSymbol :: params exists (_.isAbstractType)) + fail("main methods cannot refer to type parameters or abstract types.", m.pos) + else + isJavaMainMethod(m) || fail("main method must have exact signature (Array[String])Unit", m.pos) + case tp => + fail("don't know what this is: " + tp, m.pos) + } + } + } + } + } + } + override def run() { if (settings.debug) inform("[running phase " + name + " on icode]") - if (settings.Xdce) - for ((sym, cls) <- icodes.classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym)) { + if (settings.Xdce) { + val classes = icodes.classes.keys.toList // copy to avoid mutating the map while iterating + for (sym <- classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym)) { log(s"Optimizer eliminated ${sym.fullNameString}") deadCode.elidedClosures += sym icodes.classes -= sym } + } // For predictably ordered error messages. var sortedClasses = classes.values.toList sortBy (_.symbol.fullName) // Warn when classes will overwrite one another on case-insensitive systems. for ((_, v1 :: v2 :: _) <- sortedClasses groupBy (_.symbol.javaClassName.toString.toLowerCase)) { - v1.cunit.warning(v1.symbol.pos, + reporter.warning(v1.symbol.pos, s"Class ${v1.symbol.javaClassName} differs only in case from ${v2.symbol.javaClassName}. " + "Such classes will overwrite one another on case-insensitive filesystems.") } @@ -141,7 +203,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { try emitFor(c) catch { case e: FileConflictException => - c.cunit.error(c.symbol.pos, s"error writing ${c.symbol}: ${e.getMessage}") + reporter.error(c.symbol.pos, s"error writing ${c.symbol}: ${e.getMessage}") } sortedClasses = sortedClasses.tail classes -= c.symbol // GC opportunity @@ -381,6 +443,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { case "jvm-1.5" => asm.Opcodes.V1_5 case "jvm-1.6" => asm.Opcodes.V1_6 case "jvm-1.7" => asm.Opcodes.V1_7 + case "jvm-1.8" => asm.Opcodes.V1_8 } private val majorVersion: Int = (classfileVersion & 0xFF) @@ -614,13 +677,13 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { def isDeprecated(sym: Symbol): Boolean = { sym.annotations exists (_ matches definitions.DeprecatedAttr) } - def addInnerClasses(csym: Symbol, jclass: asm.ClassVisitor) { + def addInnerClasses(csym: Symbol, jclass: asm.ClassVisitor, isMirror: Boolean = false) { /* The outer name for this inner class. Note that it returns null * when the inner class should not get an index in the constant pool. * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS. 
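Aside: a hedged illustration of the checks in `isJavaEntryPoint` a few lines up (object names are made up):

```scala
object Good {
  def main(args: Array[String]): Unit = println("ok") // exact signature: a static forwarder is emitted
}

object Generic {
  def main[T](args: Array[String]): Unit = ()          // warned: "main methods cannot be generic."
}

class Clash { def main(args: Array[String]): Unit = () }
object Clash {
  // warned: "companion contains its own main method", so no static forwarder is generated
  def main(args: Array[String]): Unit = ()
}
```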
*/ def outerName(innerSym: Symbol): String = { - if (innerSym.originalEnclosingMethod != NoSymbol) + if (isAnonymousOrLocalClass(innerSym)) null else { val outerName = javaName(innerSym.rawowner) @@ -635,10 +698,24 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { else innerSym.rawname + innerSym.moduleSuffix - // add inner classes which might not have been referenced yet - exitingErasure { - for (sym <- List(csym, csym.linkedClassOfClass); m <- sym.info.decls.map(innerClassSymbolFor) if m.isClass) - innerClassBuffer += m + innerClassBuffer ++= { + val members = exitingPickler(memberClassesOf(csym)) + // lambdalift makes all classes (also local, anonymous) members of their enclosing class + val allNested = exitingPhase(currentRun.lambdaliftPhase)(memberClassesOf(csym)) + + // for the mirror class, we take the members of the companion module class (Java compat, + // see doc in BTypes.scala). for module classes, we filter out those members. + if (isMirror) members + else if (isTopLevelModule(csym)) allNested diff members + else allNested + } + + // If this is a top-level class, add members of the companion object. + val linkedClass = exitingPickler(csym.linkedClassOfClass) // linkedCoC does not work properly in late phases + if (isTopLevelModule(linkedClass)) { + // phase travel to exitingPickler: this makes sure that memberClassesOf only sees member classes, + // not local classes that were lifted by lambdalift. + innerClassBuffer ++= exitingPickler(memberClassesOf(linkedClass)) } val allInners: List[Symbol] = innerClassBuffer.toList filterNot deadCode.elidedClosures @@ -652,7 +729,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { // sort them so inner classes succeed their enclosing class to satisfy the Eclipse Java compiler for (innerSym <- allInners sortBy (_.name.length)) { // TODO why not sortBy (_.name.toString()) ?? val flagsWithFinal: Int = mkFlags( - if (innerSym.rawowner.hasModuleFlag) asm.Opcodes.ACC_STATIC else 0, + // See comment in BTypes, when is a class marked static in the InnerClass table. 
+ if (isOriginallyStaticOwner(innerSym.originalOwner)) asm.Opcodes.ACC_STATIC else 0, javaFlags(innerSym), if(isDeprecated(innerSym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo-access flag ) & (INNER_CLASSES_FLAGS | asm.Opcodes.ACC_DEPRECATED) @@ -794,15 +872,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { for (ThrownException(exc) <- excs.distinct) yield javaName(exc) - /** Whether an annotation should be emitted as a Java annotation - * .initialize: if 'annot' is read from pickle, atp might be un-initialized - */ - private def shouldEmitAnnotation(annot: AnnotationInfo) = - annot.symbol.initialize.isJavaDefined && - annot.matches(ClassfileAnnotationClass) && - annot.args.isEmpty && - !annot.matches(DeprecatedAttr) - def getCurrentCUnit(): CompilationUnit def getGenericSignature(sym: Symbol, owner: Symbol) = self.getGenericSignature(sym, owner, getCurrentCUnit()) @@ -864,7 +933,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { for(annot <- annotations; if shouldEmitAnnotation(annot)) { val AnnotationInfo(typ, args, assocs) = annot assert(args.isEmpty, args) - val av = cw.visitAnnotation(descriptor(typ), true) + val av = cw.visitAnnotation(descriptor(typ), isRuntimeVisible(annot)) emitAssocs(av, assocs) } } @@ -873,7 +942,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { for(annot <- annotations; if shouldEmitAnnotation(annot)) { val AnnotationInfo(typ, args, assocs) = annot assert(args.isEmpty, args) - val av = mw.visitAnnotation(descriptor(typ), true) + val av = mw.visitAnnotation(descriptor(typ), isRuntimeVisible(annot)) emitAssocs(av, assocs) } } @@ -882,7 +951,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { for(annot <- annotations; if shouldEmitAnnotation(annot)) { val AnnotationInfo(typ, args, assocs) = annot assert(args.isEmpty, args) - val av = fw.visitAnnotation(descriptor(typ), true) + val av = fw.visitAnnotation(descriptor(typ), isRuntimeVisible(annot)) emitAssocs(av, assocs) } } @@ -894,7 +963,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { annot <- annots) { val AnnotationInfo(typ, args, assocs) = annot assert(args.isEmpty, args) - val pannVisitor: asm.AnnotationVisitor = jmethod.visitParameterAnnotation(idx, descriptor(typ), true) + val pannVisitor: asm.AnnotationVisitor = jmethod.visitParameterAnnotation(idx, descriptor(typ), isRuntimeVisible(annot)) emitAssocs(pannVisitor, assocs) } } @@ -975,7 +1044,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { index += jparamType.getSize() } - mirrorMethod.visitMethodInsn(asm.Opcodes.INVOKEVIRTUAL, moduleName, mirrorMethodName, javaType(m).getDescriptor) + mirrorMethod.visitMethodInsn(asm.Opcodes.INVOKEVIRTUAL, moduleName, mirrorMethodName, javaType(m).getDescriptor, false) mirrorMethod.visitInsn(jReturnType.getOpcode(asm.Opcodes.IRETURN)) mirrorMethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments @@ -1061,7 +1130,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { asm.Opcodes.INVOKEVIRTUAL, moduleName, androidFieldName.toString, - asm.Type.getMethodDescriptor(creatorType, Array.empty[asm.Type]: _*) + asm.Type.getMethodDescriptor(creatorType, Array.empty[asm.Type]: _*), + false ) // PUTSTATIC `thisName`.CREATOR; @@ -1142,43 +1212,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { def isParcelableClass = 
isAndroidParcelableClass(clasz.symbol) - def serialVUID: Option[Long] = clasz.symbol getAnnotation SerialVersionUIDAttr collect { - case AnnotationInfo(_, Literal(const) :: _, _) => const.longValue - } - - private def getSuperInterfaces(c: IClass): Array[String] = { - - // Additional interface parents based on annotations and other cues - def newParentForAttr(ann: AnnotationInfo): Symbol = ann.symbol match { - case RemoteAttr => RemoteInterfaceClass - case _ => NoSymbol - } - - /* Drop redundant interfaces (ones which are implemented by some other parent) from the immediate parents. - * This is important on Android because there is otherwise an interface explosion. - */ - def minimizeInterfaces(lstIfaces: List[Symbol]): List[Symbol] = { - var rest = lstIfaces - var leaves = List.empty[Symbol] - while(!rest.isEmpty) { - val candidate = rest.head - val nonLeaf = leaves exists { lsym => lsym isSubClass candidate } - if(!nonLeaf) { - leaves = candidate :: (leaves filterNot { lsym => candidate isSubClass lsym }) - } - rest = rest.tail - } - - leaves - } - - val ps = c.symbol.info.parents - val superInterfaces0: List[Symbol] = if(ps.isEmpty) Nil else c.symbol.mixinClasses - val superInterfaces = existingSymbols(superInterfaces0 ++ c.symbol.annotations.map(newParentForAttr)).distinct - - if(superInterfaces.isEmpty) EMPTY_STRING_ARRAY - else mkArray(minimizeInterfaces(superInterfaces) map javaName) - } + def serialVUID: Option[Long] = genBCode.serialVUID(clasz.symbol) var clasz: IClass = _ // this var must be assigned only by genClass() var jclass: asm.ClassWriter = _ // the classfile being emitted @@ -1200,7 +1234,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { val ps = c.symbol.info.parents val superClass: String = if(ps.isEmpty) JAVA_LANG_OBJECT.getInternalName else javaName(ps.head.typeSymbol) - val ifaces = getSuperInterfaces(c) + val ifaces: Array[String] = implementedInterfaces(c.symbol).map(javaName)(collection.breakOut) val thisSignature = getGenericSignature(c.symbol, c.symbol.owner) val flags = mkFlags( @@ -1219,10 +1253,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { null /* SourceDebugExtension */) } - val enclM = getEnclosingMethodAttribute() - if(enclM != null) { - val EnclMethodEntry(className, methodName, methodType) = enclM - jclass.visitOuterClass(className, methodName, methodType.getDescriptor) + enclosingMethodAttribute(clasz.symbol, javaName, javaType(_).getDescriptor) match { + case Some(EnclosingMethodEntry(className, methodName, methodDescriptor)) => + jclass.visitOuterClass(className, methodName, methodDescriptor) + case _ => () } // typestate: entering mode with valid call sequences: @@ -1283,45 +1317,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { writeIfNotTooBig("" + c.symbol.name, thisName, jclass, c.symbol) } - /** - * @param owner internal name of the enclosing class of the class. - * - * @param name the name of the method that contains the class. - - * @param methodType the method that contains the class. - */ - case class EnclMethodEntry(owner: String, name: String, methodType: asm.Type) - - /** - * @return null if the current class is not internal to a method - * - * Quoting from JVMS 4.7.7 The EnclosingMethod Attribute - * A class must have an EnclosingMethod attribute if and only if it is a local class or an anonymous class. - * A class may have no more than one EnclosingMethod attribute. 
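Aside: a hedged illustration of the JVMS 4.7.7 rule quoted above, i.e. which classes the `enclosingMethodAttribute` / `visitOuterClass` call applies to:

```scala
class Outer {
  def run(): Runnable = {
    // local class: gets an EnclosingMethod attribute naming class Outer and
    // method "run" with descriptor ()Ljava/lang/Runnable;
    class Local extends Runnable { def run(): Unit = () }
    new Local
  }
}
// a plain member class (`class Outer2 { class Inner }`) is neither local nor anonymous,
// so it gets InnerClasses entries but no EnclosingMethod attribute
```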
- * - */ - private def getEnclosingMethodAttribute(): EnclMethodEntry = { // JVMS 4.7.7 - var res: EnclMethodEntry = null - val clazz = clasz.symbol - val sym = clazz.originalEnclosingMethod - if (sym.isMethod) { - debuglog("enclosing method for %s is %s (in %s)".format(clazz, sym, sym.enclClass)) - res = EnclMethodEntry(javaName(sym.enclClass), javaName(sym), javaType(sym)) - } else if (clazz.isAnonymousClass) { - val enclClass = clazz.rawowner - assert(enclClass.isClass, enclClass) - val sym = enclClass.primaryConstructor - if (sym == NoSymbol) { - log("Ran out of room looking for an enclosing method for %s: no constructor here.".format(enclClass)) - } else { - debuglog("enclosing method for %s is %s (in %s)".format(clazz, sym, enclClass)) - res = EnclMethodEntry(javaName(enclClass), javaName(sym), javaType(sym)) - } - } - - res - } - def genField(f: IField) { debuglog("Adding field: " + f.symbol.fullName) @@ -1364,7 +1359,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { if (m.symbol.isStaticConstructor || definitions.isGetClass(m.symbol)) return if (m.params.size > MaximumJvmParameters) { - getCurrentCUnit().error(m.symbol.pos, s"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters.") + reporter.error(m.symbol.pos, s"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters.") return } @@ -1402,7 +1397,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { // TODO param names: (m.params map (p => javaName(p.sym))) - // typestate: entering mode with valid call sequences: + // typestate: entering mode with valid call sequences: (see ASM Guide, 3.2.1) // [ visitAnnotationDefault ] ( visitAnnotation | visitParameterAnnotation | visitAttribute )* emitAnnotations(jmethod, others) @@ -1447,7 +1442,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { val hasStaticBitSet = ((flags & asm.Opcodes.ACC_STATIC) != 0) genCode(m, emitVars, hasStaticBitSet) - jmethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments + // visitMaxs needs to be called according to the protocol. The arguments will be ignored + // since maximums (and stack map frames) are computed. 
See ASM Guide, Section 3.2.1, + // section "ClassWriter options" + jmethod.visitMaxs(0, 0) } jmethod.visitEnd() @@ -1523,7 +1521,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { if (isStaticModule(clasz.symbol)) { clinit.visitTypeInsn(asm.Opcodes.NEW, thisName) clinit.visitMethodInsn(asm.Opcodes.INVOKESPECIAL, - thisName, INSTANCE_CONSTRUCTOR_NAME, mdesc_arglessvoid) + thisName, INSTANCE_CONSTRUCTOR_NAME, mdesc_arglessvoid, false) } if (isParcelableClass) { legacyAddCreatorCode(clinit) } @@ -1667,16 +1665,16 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { def rem(tk: TypeKind) { emitPrimitive(remOpcodes, tk) } def invokespecial(owner: String, name: String, desc: String) { - jmethod.visitMethodInsn(Opcodes.INVOKESPECIAL, owner, name, desc) + jmethod.visitMethodInsn(Opcodes.INVOKESPECIAL, owner, name, desc, false) } def invokestatic(owner: String, name: String, desc: String) { - jmethod.visitMethodInsn(Opcodes.INVOKESTATIC, owner, name, desc) + jmethod.visitMethodInsn(Opcodes.INVOKESTATIC, owner, name, desc, false) } def invokeinterface(owner: String, name: String, desc: String) { - jmethod.visitMethodInsn(Opcodes.INVOKEINTERFACE, owner, name, desc) + jmethod.visitMethodInsn(Opcodes.INVOKEINTERFACE, owner, name, desc, true) } def invokevirtual(owner: String, name: String, desc: String) { - jmethod.visitMethodInsn(Opcodes.INVOKEVIRTUAL, owner, name, desc) + jmethod.visitMethodInsn(Opcodes.INVOKEVIRTUAL, owner, name, desc, false) } def goTo(label: asm.Label) { jmethod.visitJumpInsn(Opcodes.GOTO, label) } @@ -2806,7 +2804,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { addForwarders(isRemote(modsym), mirrorClass, mirrorName, modsym) - addInnerClasses(modsym, mirrorClass) + addInnerClasses(modsym, mirrorClass, isMirror = true) mirrorClass.visitEnd() writeIfNotTooBig("" + modsym.name, mirrorName, mirrorClass, modsym) } @@ -2926,7 +2924,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { // invoke the superclass constructor, which will do the // necessary java reflection and create Method objects. - constructor.visitMethodInsn(asm.Opcodes.INVOKESPECIAL, "scala/beans/ScalaBeanInfo", INSTANCE_CONSTRUCTOR_NAME, conJType.getDescriptor) + constructor.visitMethodInsn(asm.Opcodes.INVOKESPECIAL, "scala/beans/ScalaBeanInfo", INSTANCE_CONSTRUCTOR_NAME, conJType.getDescriptor, false) constructor.visitInsn(asm.Opcodes.RETURN) constructor.visitMaxs(0, 0) // just to follow protocol, dummy arguments @@ -2941,7 +2939,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { } // end of class JBeanInfoBuilder /** A namespace for utilities to normalize the code of an IMethod, over and beyond what IMethod.normalize() strives for. - * In particualr, IMethod.normalize() doesn't collapseJumpChains(). + * In particular, IMethod.normalize() doesn't collapseJumpChains(). * * TODO Eventually, these utilities should be moved to IMethod and reused from normalize() (there's nothing JVM-specific about them). 
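Aside: the extra boolean being threaded through the `invokespecial`/`invokestatic`/`invokeinterface`/`invokevirtual` helpers above is the ASM 5 "is the owner an interface" flag. A minimal sketch; `mv` stands for the `MethodVisitor` of the method being emitted:

```scala
import scala.tools.asm.{MethodVisitor, Opcodes}

def emitCalls(mv: MethodVisitor): Unit = {
  // owner is a class: itf = false
  mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/Object", "hashCode", "()I", false)
  // owner is an interface: itf = true
  mv.visitMethodInsn(Opcodes.INVOKEINTERFACE, "java/lang/Runnable", "run", "()V", true)
}
```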
*/ @@ -3156,7 +3154,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { } } - // remove the unusued exception handler references + // remove the unused exception handler references if (settings.debug) for (exh <- unusedExceptionHandlers) debuglog(s"eliding exception handler $exh because it does not cover any reachable blocks") m.exh = m.exh filterNot unusedExceptionHandlers @@ -3243,7 +3241,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { } if(!isValidSignature) { - unit.warning(sym.pos, + reporter.warning(sym.pos, """|compiler bug: created invalid generic signature for %s in %s |signature: %s |if this is reproducible, please report bug at https://issues.scala-lang.org/ @@ -3256,7 +3254,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { val normalizedTpe = enteringErasure(erasure.prepareSigMap(memberTpe)) val bytecodeTpe = owner.thisType.memberInfo(sym) if (!sym.isType && !sym.isConstructor && !(erasure.erasure(sym)(normalizedTpe) =:= bytecodeTpe)) { - unit.warning(sym.pos, + reporter.warning(sym.pos, """|compiler bug: created generic signature for %s in %s that does not conform to its erasure |signature: %s |original type: %s diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala index 193100474ccb..d5e95c47cff6 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala @@ -9,10 +9,12 @@ package tools.nsc package backend package jvm -import scala.collection.{ mutable, immutable } -import scala.annotation.switch +import scala.collection.mutable +import scala.reflect.internal.util.Statistics import scala.tools.asm +import scala.tools.asm.tree.ClassNode +import scala.tools.nsc.backend.jvm.opt.LocalOpt /* * Prepare in-memory representations of classfiles using the ASM Tree API, and serialize them to disk. @@ -46,6 +48,9 @@ import scala.tools.asm abstract class GenBCode extends BCodeSyncAndTry { import global._ + import bTypes._ + import coreBTypes._ + val phaseName = "jvm" override def newPhase(prev: Phase) = new BCodePhase(prev) @@ -130,7 +135,7 @@ abstract class GenBCode extends BCodeSyncAndTry { return } else { - try { visit(item) } + try { withCurrentUnit(item.cunit)(visit(item)) } catch { case ex: Throwable => ex.printStackTrace() @@ -156,7 +161,7 @@ abstract class GenBCode extends BCodeSyncAndTry { case None => caseInsensitively.put(lowercaseJavaClassName, claszSymbol) case Some(dupClassSym) => - item.cunit.warning( + reporter.warning( claszSymbol.pos, s"Class ${claszSymbol.javaClassName} differs only in case from ${dupClassSym.javaClassName}. " + "Such classes will overwrite one another on case-insensitive filesystems." 
@@ -165,7 +170,7 @@ abstract class GenBCode extends BCodeSyncAndTry { // -------------- mirror class, if needed -------------- val mirrorC = - if (isStaticModule(claszSymbol) && isTopLevelModule(claszSymbol)) { + if (isTopLevelModuleClass(claszSymbol)) { if (claszSymbol.companionClass == NoSymbol) { mirrorCodeGen.genMirrorClass(claszSymbol, cunit) } else { @@ -210,6 +215,11 @@ abstract class GenBCode extends BCodeSyncAndTry { * - converting the plain ClassNode to byte array and placing it on queue-3 */ class Worker2 { + lazy val localOpt = new LocalOpt(settings) + + def localOptimizations(classNode: ClassNode): Unit = { + BackendStats.timed(BackendStats.methodOptTimer)(localOpt.methodOptimizations(classNode)) + } def run() { while (true) { @@ -219,8 +229,10 @@ abstract class GenBCode extends BCodeSyncAndTry { return } else { - try { addToQ3(item) } - catch { + try { + localOptimizations(item.plain) + addToQ3(item) + } catch { case ex: Throwable => ex.printStackTrace() error(s"Error while emitting ${item.plain.name}\n${ex.getMessage}") @@ -243,6 +255,12 @@ abstract class GenBCode extends BCodeSyncAndTry { val plainC = SubItem3(plain.name, getByteArray(plain)) val beanC = if (bean == null) null else SubItem3(bean.name, getByteArray(bean)) + if (AsmUtils.traceSerializedClassEnabled && plain.name.contains(AsmUtils.traceSerializedClassPattern)) { + if (mirrorC != null) AsmUtils.traceClass(mirrorC.jclassBytes) + AsmUtils.traceClass(plainC.jclassBytes) + if (beanC != null) AsmUtils.traceClass(beanC.jclassBytes) + } + q3 add Item3(arrivalPos, mirrorC, plainC, beanC, outFolder) } @@ -263,10 +281,13 @@ abstract class GenBCode extends BCodeSyncAndTry { * */ override def run() { + val bcodeStart = Statistics.startTimer(BackendStats.bcodeTimer) + val initStart = Statistics.startTimer(BackendStats.bcodeInitTimer) arrivalPos = 0 // just in case - scalaPrimitives.init - initBCodeTypes() + scalaPrimitives.init() + bTypes.initializeCoreBTypes() + Statistics.stopTimer(BackendStats.bcodeInitTimer, initStart) // initBytecodeWriter invokes fullName, thus we have to run it before the typer-dependent thread is activated. bytecodeWriter = initBytecodeWriter(cleanup.getEntryPoints) @@ -278,6 +299,7 @@ abstract class GenBCode extends BCodeSyncAndTry { // closing output files. 
bytecodeWriter.close() + Statistics.stopTimer(BackendStats.bcodeTimer, bcodeStart) /* TODO Bytecode can be verified (now that all classfiles have been written to disk) * @@ -291,9 +313,6 @@ abstract class GenBCode extends BCodeSyncAndTry { * (2) if requested, check-java-signatures, over and beyond the syntactic checks in `getGenericSignature()` * */ - - // clearing maps - clearBCodeTypes() } /* @@ -306,9 +325,15 @@ abstract class GenBCode extends BCodeSyncAndTry { private def buildAndSendToDisk(needsOutFolder: Boolean) { feedPipeline1() + val genStart = Statistics.startTimer(BackendStats.bcodeGenStat) (new Worker1(needsOutFolder)).run() + Statistics.stopTimer(BackendStats.bcodeGenStat, genStart) + (new Worker2).run() + + val writeStart = Statistics.startTimer(BackendStats.bcodeWriteTimer) drainQ3() + Statistics.stopTimer(BackendStats.bcodeWriteTimer, writeStart) } @@ -379,3 +404,10 @@ abstract class GenBCode extends BCodeSyncAndTry { } // end of class BCodePhase } // end of class GenBCode + +object GenBCode { + def mkFlags(args: Int*) = args.foldLeft(0)(_ | _) + + final val PublicStatic = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC + final val PublicStaticFinal = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala deleted file mode 100644 index 01c4ff5a52b7..000000000000 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala +++ /dev/null @@ -1,83 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Jason Zaugg - */ - -package scala.tools.nsc -package backend.jvm -import scala.tools.nsc.symtab._ - -/** Code shared between the erstwhile legacy backend (aka GenJVM) - * and the new backend [[scala.tools.nsc.backend.jvm.GenASM]]. There should be - * more here, but for now I'm starting with the refactorings that are either - * straightforward to review or necessary for maintenance. - */ -trait GenJVMASM { - val global: Global - import global._ - import icodes._ - import definitions._ - - val ExcludedForwarderFlags = { - import Flags._ - // Should include DEFERRED but this breaks findMember. - ( SPECIALIZED | LIFTED | PROTECTED | STATIC | EXPANDEDNAME | BridgeAndPrivateFlags | MACRO ) - } - - protected def isJavaEntryPoint(icls: IClass) = { - val sym = icls.symbol - def fail(msg: String, pos: Position = sym.pos) = { - icls.cunit.warning(sym.pos, - sym.name + " has a main method with parameter type Array[String], but " + sym.fullName('.') + " will not be a runnable program.\n" + - " Reason: " + msg - // TODO: make this next claim true, if possible - // by generating valid main methods as static in module classes - // not sure what the jvm allows here - // + " You can still run the program by calling it as " + sym.javaSimpleName + " instead." - ) - false - } - def failNoForwarder(msg: String) = { - fail(msg + ", which means no static forwarder can be generated.\n") - } - val possibles = if (sym.hasModuleFlag) (sym.tpe nonPrivateMember nme.main).alternatives else Nil - val hasApproximate = possibles exists { m => - m.info match { - case MethodType(p :: Nil, _) => p.tpe.typeSymbol == ArrayClass - case _ => false - } - } - // At this point it's a module with a main-looking method, so either succeed or warn that it isn't. - hasApproximate && { - // Before erasure so we can identify generic mains. 
- enteringErasure { - val companion = sym.linkedClassOfClass - - if (hasJavaMainMethod(companion)) - failNoForwarder("companion contains its own main method") - else if (companion.tpe.member(nme.main) != NoSymbol) - // this is only because forwarders aren't smart enough yet - failNoForwarder("companion contains its own main method (implementation restriction: no main is allowed, regardless of signature)") - else if (companion.isTrait) - failNoForwarder("companion is a trait") - // Now either succeeed, or issue some additional warnings for things which look like - // attempts to be java main methods. - else (possibles exists isJavaMainMethod) || { - possibles exists { m => - m.info match { - case PolyType(_, _) => - fail("main methods cannot be generic.") - case MethodType(params, res) => - if (res.typeSymbol :: params exists (_.isAbstractType)) - fail("main methods cannot refer to type parameters or abstract types.", m.pos) - else - isJavaMainMethod(m) || fail("main method must have exact signature (Array[String])Unit", m.pos) - case tp => - fail("don't know what this is: " + tp, m.pos) - } - } - } - } - } - } -} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala new file mode 100644 index 000000000000..7b424d2107b3 --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala @@ -0,0 +1,112 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2014 LAMP/EPFL + * @author Martin Odersky + */ + +package scala.tools.nsc +package backend.jvm +package opt + +import scala.tools.asm +import asm.tree._ +import scala.collection.convert.decorateAsScala._ +import scala.tools.nsc.io.AbstractFile +import scala.tools.nsc.util.ClassFileLookup +import OptimizerReporting._ +import ByteCodeRepository._ +import BTypes.InternalName + +/** + * The ByteCodeRepository provides utilities to read the bytecode of classfiles from the compilation + * classpath. Parsed classes are cached in the `classes` map. + * + * @param classPath The compiler classpath where classfiles are searched and read from. + * @param classes Cache for parsed ClassNodes. Also stores the source of the bytecode: + * [[Classfile]] if read from `classPath`, [[CompilationUnit]] if the bytecode + * corresponds to a class being compiled. + */ +class ByteCodeRepository(val classPath: ClassFileLookup[AbstractFile], val classes: collection.concurrent.Map[InternalName, (ClassNode, Source)]) { + /** + * The class node and source for an internal name. If the class node is not yet available, it is + * parsed from the classfile on the compile classpath. + */ + def classNodeAndSource(internalName: InternalName): (ClassNode, Source) = { + classes.getOrElseUpdate(internalName, (parseClass(internalName), Classfile)) + } + + /** + * The class node for an internal name. If the class node is not yet available, it is parsed from + * the classfile on the compile classpath. + */ + def classNode(internalName: InternalName) = classNodeAndSource(internalName)._1 + + /** + * The field node for a field matching `name` and `descriptor`, accessed in class `classInternalName`. + * The declaration of the field may be in one of the superclasses. + * + * @return The [[FieldNode]] of the requested field and the [[InternalName]] of its declaring class. 
+ */ + def fieldNode(classInternalName: InternalName, name: String, descriptor: String): Option[(FieldNode, InternalName)] = { + val c = classNode(classInternalName) + c.fields.asScala.find(f => f.name == name && f.desc == descriptor).map((_, classInternalName)) orElse { + Option(c.superName).flatMap(n => fieldNode(n, name, descriptor)) + } + } + + /** + * The method node for a method matching `name` and `descriptor`, accessed in class `classInternalName`. + * The declaration of the method may be in one of the parents. + * + * @return The [[MethodNode]] of the requested method and the [[InternalName]] of its declaring class. + */ + def methodNode(classInternalName: InternalName, name: String, descriptor: String): Option[(MethodNode, InternalName)] = { + val c = classNode(classInternalName) + c.methods.asScala.find(m => m.name == name && m.desc == descriptor).map((_, classInternalName)) orElse { + val parents = Option(c.superName) ++ c.interfaces.asScala + // `view` to stop at the first result + parents.view.flatMap(methodNode(_, name, descriptor)).headOption + } + } + + private def parseClass(internalName: InternalName): ClassNode = { + val fullName = internalName.replace('/', '.') + classPath.findClassFile(fullName) map { classFile => + val classNode = new asm.tree.ClassNode() + val classReader = new asm.ClassReader(classFile.toByteArray) + // We don't need frames when inlining, but we want to keep the local variable table, so we + // don't use SKIP_DEBUG. + classReader.accept(classNode, asm.ClassReader.SKIP_FRAMES) + // SKIP_FRAMES leaves line number nodes. Remove them because they are not correct after + // inlining. + // TODO: we need to remove them also for classes that are not parsed from classfiles, why not simplify and do it once when inlining? + // OR: instead of skipping line numbers for inlined code, use write a SourceDebugExtension + // attribute that contains JSR-45 data that encodes debugging info. + // http://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.7.11 + // https://jcp.org/aboutJava/communityprocess/final/jsr045/index.html + removeLineNumberNodes(classNode) + classNode + } getOrElse { + inlineFailure(s"Class file for class $fullName not found.") + } + } + + private def removeLineNumberNodes(classNode: ClassNode): Unit = { + for (method <- classNode.methods.asScala) { + val iter = method.instructions.iterator() + while (iter.hasNext) iter.next() match { + case _: LineNumberNode => iter.remove() + case _ => + } + } + } +} + +object ByteCodeRepository { + /** + * The source of a ClassNode in the ByteCodeRepository. Can be either [[CompilationUnit]] if the + * class is being compiled or [[Classfile]] if the class was parsed from the compilation classpath. 
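As a usage illustration of the lookup defined earlier in this file, an inliner can resolve the bytecode of a callee through `methodNode`, which walks superclasses and interfaces. This is a hypothetical sketch; the `repo` value and the callsite instruction are assumptions, only the `methodNode` signature comes from the patch.

```scala
import scala.tools.asm.tree.{MethodInsnNode, MethodNode}
import scala.tools.nsc.backend.jvm.opt.ByteCodeRepository

// Resolve the callee of an INVOKE instruction; the declaring class may be a parent
// of callsite.owner, as documented above.
def calleeOf(repo: ByteCodeRepository, callsite: MethodInsnNode): Option[MethodNode] =
  repo.methodNode(callsite.owner, callsite.name, callsite.desc) map {
    case (methodNode, declaringClassInternalName) => methodNode
  }
```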
+ */ + sealed trait Source + object CompilationUnit extends Source + object Classfile extends Source +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala new file mode 100644 index 000000000000..6b4047c0a786 --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala @@ -0,0 +1,184 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2014 LAMP/EPFL + * @author Martin Odersky + */ + +package scala.tools.nsc +package backend.jvm +package opt + +import scala.annotation.{tailrec, switch} +import scala.collection.mutable +import scala.reflect.internal.util.Collections._ +import scala.tools.asm.Opcodes +import scala.tools.asm.tree._ +import scala.collection.convert.decorateAsScala._ + +object BytecodeUtils { + + object Goto { + def unapply(instruction: AbstractInsnNode): Option[JumpInsnNode] = { + if (instruction.getOpcode == Opcodes.GOTO) Some(instruction.asInstanceOf[JumpInsnNode]) + else None + } + } + + object JumpNonJsr { + def unapply(instruction: AbstractInsnNode): Option[JumpInsnNode] = { + if (isJumpNonJsr(instruction)) Some(instruction.asInstanceOf[JumpInsnNode]) + else None + } + } + + object ConditionalJump { + def unapply(instruction: AbstractInsnNode): Option[JumpInsnNode] = { + if (isConditionalJump(instruction)) Some(instruction.asInstanceOf[JumpInsnNode]) + else None + } + } + + object VarInstruction { + def unapply(instruction: AbstractInsnNode): Option[VarInsnNode] = { + if (isVarInstruction(instruction)) Some(instruction.asInstanceOf[VarInsnNode]) + else None + } + + } + + def isJumpNonJsr(instruction: AbstractInsnNode): Boolean = { + val op = instruction.getOpcode + // JSR is deprecated in classfile version 50, disallowed in 51. historically, it was used to implement finally. + op == Opcodes.GOTO || isConditionalJump(instruction) + } + + def isConditionalJump(instruction: AbstractInsnNode): Boolean = { + val op = instruction.getOpcode + (op >= Opcodes.IFEQ && op <= Opcodes.IF_ACMPNE) || op == Opcodes.IFNULL || op == Opcodes.IFNONNULL + } + + def isReturn(instruction: AbstractInsnNode): Boolean = { + val op = instruction.getOpcode + op >= Opcodes.IRETURN && op <= Opcodes.RETURN + } + + def isVarInstruction(instruction: AbstractInsnNode): Boolean = { + val op = instruction.getOpcode + (op >= Opcodes.ILOAD && op <= Opcodes.ALOAD) || (op >= Opcodes.ISTORE && op <= Opcodes.ASTORE) + } + + def isExecutable(instruction: AbstractInsnNode): Boolean = instruction.getOpcode >= 0 + + def nextExecutableInstruction(instruction: AbstractInsnNode, alsoKeep: AbstractInsnNode => Boolean = Set()): Option[AbstractInsnNode] = { + var result = instruction + do { result = result.getNext } + while (result != null && !isExecutable(result) && !alsoKeep(result)) + Option(result) + } + + def sameTargetExecutableInstruction(a: JumpInsnNode, b: JumpInsnNode): Boolean = { + // Compare next executable instead of the the labels. 
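The extractors defined above let instruction scans read as ordinary pattern matches. A small illustrative example, not part of the patch, that tallies conditional jumps and variable instructions in a method:

```scala
import scala.collection.convert.decorateAsScala._
import scala.tools.asm.tree.MethodNode
import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._

// Count conditional jumps and variable load/store instructions via the extractors.
def jumpAndVarCounts(method: MethodNode): (Int, Int) = {
  var jumps, vars = 0
  method.instructions.iterator().asScala foreach {
    case ConditionalJump(_) => jumps += 1
    case VarInstruction(_)  => vars  += 1
    case _                  =>
  }
  (jumps, vars)
}
```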
Identifies a, b as the same target: + // LabelNode(a) + // LabelNode(b) + // Instr + nextExecutableInstruction(a.label) == nextExecutableInstruction(b.label) + } + + def removeJumpAndAdjustStack(method: MethodNode, jump: JumpInsnNode) { + val instructions = method.instructions + val op = jump.getOpcode + if ((op >= Opcodes.IFEQ && op <= Opcodes.IFGE) || op == Opcodes.IFNULL || op == Opcodes.IFNONNULL) { + instructions.insert(jump, getPop(1)) + } else if ((op >= Opcodes.IF_ICMPEQ && op <= Opcodes.IF_ICMPLE) || op == Opcodes.IF_ACMPEQ || op == Opcodes.IF_ACMPNE) { + instructions.insert(jump, getPop(1)) + instructions.insert(jump, getPop(1)) + } else { + // we can't remove JSR: its execution does not only jump, it also adds a return address to the stack + assert(jump.getOpcode == Opcodes.GOTO) + } + instructions.remove(jump) + } + + def finalJumpTarget(source: JumpInsnNode): LabelNode = { + @tailrec def followGoto(label: LabelNode, seenLabels: Set[LabelNode]): LabelNode = nextExecutableInstruction(label) match { + case Some(Goto(dest)) => + if (seenLabels(dest.label)) dest.label + else followGoto(dest.label, seenLabels + dest.label) + + case _ => label + } + followGoto(source.label, Set(source.label)) + } + + def negateJumpOpcode(jumpOpcode: Int): Int = (jumpOpcode: @switch) match { + case Opcodes.IFEQ => Opcodes.IFNE + case Opcodes.IFNE => Opcodes.IFEQ + + case Opcodes.IFLT => Opcodes.IFGE + case Opcodes.IFGE => Opcodes.IFLT + + case Opcodes.IFGT => Opcodes.IFLE + case Opcodes.IFLE => Opcodes.IFGT + + case Opcodes.IF_ICMPEQ => Opcodes.IF_ICMPNE + case Opcodes.IF_ICMPNE => Opcodes.IF_ICMPEQ + + case Opcodes.IF_ICMPLT => Opcodes.IF_ICMPGE + case Opcodes.IF_ICMPGE => Opcodes.IF_ICMPLT + + case Opcodes.IF_ICMPGT => Opcodes.IF_ICMPLE + case Opcodes.IF_ICMPLE => Opcodes.IF_ICMPGT + + case Opcodes.IF_ACMPEQ => Opcodes.IF_ACMPNE + case Opcodes.IF_ACMPNE => Opcodes.IF_ACMPEQ + + case Opcodes.IFNULL => Opcodes.IFNONNULL + case Opcodes.IFNONNULL => Opcodes.IFNULL + } + + def getPop(size: Int): InsnNode = { + val op = if (size == 1) Opcodes.POP else Opcodes.POP2 + new InsnNode(op) + } + + def labelReferences(method: MethodNode): Map[LabelNode, Set[AnyRef]] = { + val res = mutable.Map.empty[LabelNode, Set[AnyRef]] + def add(l: LabelNode, ref: AnyRef) = if (res contains l) res(l) = res(l) + ref else res(l) = Set(ref) + + method.instructions.iterator().asScala foreach { + case jump: JumpInsnNode => add(jump.label, jump) + case line: LineNumberNode => add(line.start, line) + case switch: LookupSwitchInsnNode => switch.labels.asScala.foreach(add(_, switch)); add(switch.dflt, switch) + case switch: TableSwitchInsnNode => switch.labels.asScala.foreach(add(_, switch)); add(switch.dflt, switch) + case _ => + } + if (method.localVariables != null) { + method.localVariables.iterator().asScala.foreach(l => { add(l.start, l); add(l.end, l) }) + } + if (method.tryCatchBlocks != null) { + method.tryCatchBlocks.iterator().asScala.foreach(l => { add(l.start, l); add(l.handler, l); add(l.end, l) }) + } + + res.toMap + } + + def substituteLabel(reference: AnyRef, from: LabelNode, to: LabelNode): Unit = { + def substList(list: java.util.List[LabelNode]) = { + foreachWithIndex(list.asScala.toList) { case (l, i) => + if (l == from) list.set(i, to) + } + } + reference match { + case jump: JumpInsnNode => jump.label = to + case line: LineNumberNode => line.start = to + case switch: LookupSwitchInsnNode => substList(switch.labels); if (switch.dflt == from) switch.dflt = to + case switch: TableSwitchInsnNode => 
substList(switch.labels); if (switch.dflt == from) switch.dflt = to + case local: LocalVariableNode => + if (local.start == from) local.start = to + if (local.end == from) local.end = to + case handler: TryCatchBlockNode => + if (handler.start == from) handler.start = to + if (handler.handler == from) handler.handler = to + if (handler.end == from) handler.end = to + } + } +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala new file mode 100644 index 000000000000..87ad715e4d1e --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala @@ -0,0 +1,562 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2014 LAMP/EPFL + * @author Martin Odersky + */ + +package scala.tools.nsc +package backend.jvm +package opt + +import scala.annotation.switch +import scala.tools.asm.{Opcodes, MethodWriter, ClassWriter} +import scala.tools.asm.tree.analysis.{Analyzer, BasicValue, BasicInterpreter} +import scala.tools.asm.tree._ +import scala.collection.convert.decorateAsScala._ +import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._ +import scala.tools.nsc.settings.ScalaSettings + +/** + * Optimizations within a single method. + * + * unreachable code + * - removes instructions of basic blocks to which no branch instruction points + * + enables eliminating some exception handlers and local variable descriptors + * > eliminating them is required for correctness, as explained in `removeUnreachableCode` + * + * empty exception handlers + * - removes exception handlers whose try block is empty + * + eliminating a handler where the try block is empty and reachable will turn the catch block + * unreachable. in this case "unreachable code" is invoked recursively until reaching a fixpoint. + * > for try blocks that are unreachable, "unreachable code" removes also the instructions of the + * catch block, and the recursive invocation is not necessary. + * + * simplify jumps + * - various simplifications, see doc domments of individual optimizations + * + changing or eliminating jumps may render some code unreachable, therefore "simplify jumps" is + * executed in a loop with "unreachable code" + * + * empty local variable descriptors + * - removes entries from the local variable table where the variable is not actually used + * + enables eliminating labels that the entry points to (if they are not otherwise referenced) + * + * empty line numbers + * - eliminates line number nodes that describe no executable instructions + * + enables eliminating the label of the line number node (if it's not otherwise referenced) + * + * stale labels + * - eliminate labels that are not referenced, merge sequences of label definitions. + */ +class LocalOpt(settings: ScalaSettings) { + /** + * Remove unreachable instructions from all (non-abstract) methods and apply various other + * cleanups to the bytecode. + * + * @param clazz The class whose methods are optimized + * @return `true` if unreachable code was eliminated in some method, `false` otherwise. + */ + def methodOptimizations(clazz: ClassNode): Boolean = { + !settings.YoptNone && clazz.methods.asScala.foldLeft(false) { + case (changed, method) => methodOptimizations(method, clazz.name) || changed + } + } + + /** + * Remove unreachable code from a method. + * + * We rely on dead code elimination provided by the ASM framework, as described in the ASM User + * Guide (http://asm.ow2.org/index.html), Section 8.2.1. 
It runs a data flow analysis, which only + * computes Frame information for reachable instructions. Instructions for which no Frame data is + * available after the analysis are unreachable. + * + * Also simplifies branching instructions, removes unused local variable descriptors, empty + * exception handlers, unnecessary label declarations and empty line number nodes. + * + * Returns `true` if the bytecode of `method` was changed. + */ + private def methodOptimizations(method: MethodNode, ownerClassName: String): Boolean = { + if (method.instructions.size == 0) return false // fast path for abstract methods + + // unreachable-code also removes unused local variable nodes and empty exception handlers. + // This is required for correctness, for example: + // + // def f = { return 0; try { 1 } catch { case _ => 2 } } + // + // The result after removeUnreachableCodeImpl: + // + // TRYCATCHBLOCK L0 L1 L2 java/lang/Exception + // L4 + // ICONST_0 + // IRETURN + // L0 + // L1 + // L2 + // + // If we don't eliminate the handler, the ClassWriter emits: + // + // TRYCATCHBLOCK L0 L0 L0 java/lang/Exception + // L1 + // ICONST_0 + // IRETURN + // L0 + // + // This triggers "ClassFormatError: Illegal exception table range in class file C". Similar + // for local variables in dead blocks. Maybe that's a bug in the ASM framework. + + var recurse = true + var codeHandlersOrJumpsChanged = false + while (recurse) { + // unreachable-code, empty-handlers and simplify-jumps run until reaching a fixpoint (see doc on class LocalOpt) + val (codeRemoved, handlersRemoved, liveHandlerRemoved) = if (settings.YoptUnreachableCode) { + val (codeRemoved, liveLabels) = removeUnreachableCodeImpl(method, ownerClassName) + val removedHandlers = removeEmptyExceptionHandlers(method) + (codeRemoved, removedHandlers.nonEmpty, removedHandlers.exists(h => liveLabels(h.start))) + } else { + (false, false, false) + } + + val jumpsChanged = if (settings.YoptSimplifyJumps) simplifyJumps(method) else false + + codeHandlersOrJumpsChanged ||= (codeRemoved || handlersRemoved || jumpsChanged) + + // The doc comment of class LocalOpt explains why we recurse if jumpsChanged || liveHandlerRemoved + recurse = settings.YoptRecurseUnreachableJumps && (jumpsChanged || liveHandlerRemoved) + } + + // (*) Removing stale local variable descriptors is required for correctness of unreachable-code + val localsRemoved = + if (settings.YoptCompactLocals) compactLocalVariables(method) + else if (settings.YoptUnreachableCode) removeUnusedLocalVariableNodes(method)() // (*) + else false + + val lineNumbersRemoved = if (settings.YoptEmptyLineNumbers) removeEmptyLineNumbers(method) else false + + val labelsRemoved = if (settings.YoptEmptyLabels) removeEmptyLabelNodes(method) else false + + // assert that local variable annotations are empty (we don't emit them) - otherwise we'd have + // to eliminate those covering an empty range, similar to removeUnusedLocalVariableNodes. + def nullOrEmpty[T](l: java.util.List[T]) = l == null || l.isEmpty + assert(nullOrEmpty(method.visibleLocalVariableAnnotations), method.visibleLocalVariableAnnotations) + assert(nullOrEmpty(method.invisibleLocalVariableAnnotations), method.invisibleLocalVariableAnnotations) + + codeHandlersOrJumpsChanged || localsRemoved || lineNumbersRemoved || labelsRemoved + } + + /** + * Removes unreachable basic blocks. + * + * TODO: rewrite, don't use computeMaxLocalsMaxStack (runs a ClassWriter) / Analyzer. Too slow. 
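A test-style sketch of the unreachable-code elimination defined in the next hunk, assuming a ScalaSettings value named `settings` is in scope and the patched ASM described above is on the classpath:

```scala
import scala.tools.asm.Opcodes
import scala.tools.asm.tree.{InsnNode, MethodNode}

// Hand-built equivalent of a static `()I` method returning 0, followed by two
// unreachable instructions that the analysis should drop.
val m = new MethodNode(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC, "f", "()I", null, null)
m.instructions.add(new InsnNode(Opcodes.ICONST_0))
m.instructions.add(new InsnNode(Opcodes.IRETURN))
m.instructions.add(new InsnNode(Opcodes.ICONST_1)) // unreachable
m.instructions.add(new InsnNode(Opcodes.IRETURN))  // unreachable

val (removed, liveLabels) = new LocalOpt(settings).removeUnreachableCodeImpl(m, "C")
// expected: removed == true, and only ICONST_0; IRETURN remain in m.instructions
```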
+ */ + def removeUnreachableCodeImpl(method: MethodNode, ownerClassName: String): (Boolean, Set[LabelNode]) = { + // The data flow analysis requires the maxLocals / maxStack fields of the method to be computed. + computeMaxLocalsMaxStack(method) + val a = new Analyzer[BasicValue](new BasicInterpreter) + a.analyze(ownerClassName, method) + val frames = a.getFrames + + val initialSize = method.instructions.size + var i = 0 + var liveLabels = Set.empty[LabelNode] + val itr = method.instructions.iterator() + while (itr.hasNext) { + itr.next() match { + case l: LabelNode => + if (frames(i) != null) liveLabels += l + + case ins => + // label nodes are not removed: they might be referenced for example in a LocalVariableNode + if (frames(i) == null || ins.getOpcode == Opcodes.NOP) { + // Instruction iterators allow removing during iteration. + // Removing is O(1): instructions are doubly linked list elements. + itr.remove() + } + } + i += 1 + } + (method.instructions.size != initialSize, liveLabels) + } + + /** + * Remove exception handlers that cover empty code blocks. A block is considered empty if it + * consist only of labels, frames, line numbers, nops and gotos. + * + * There are no executable instructions that we can assume don't throw (eg ILOAD). The JVM spec + * basically says that a VirtualMachineError may be thrown at any time: + * http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-6.html#jvms-6.3 + * + * Note that no instructions are eliminated. + * + * @return the set of removed handlers + */ + def removeEmptyExceptionHandlers(method: MethodNode): Set[TryCatchBlockNode] = { + /** True if there exists code between start and end. */ + def containsExecutableCode(start: AbstractInsnNode, end: LabelNode): Boolean = { + start != end && ((start.getOpcode : @switch) match { + // FrameNode, LabelNode and LineNumberNode have opcode == -1. + case -1 | Opcodes.GOTO => containsExecutableCode(start.getNext, end) + case _ => true + }) + } + + var removedHandlers = Set.empty[TryCatchBlockNode] + val handlersIter = method.tryCatchBlocks.iterator() + while(handlersIter.hasNext) { + val handler = handlersIter.next() + if (!containsExecutableCode(handler.start, handler.end)) { + removedHandlers += handler + handlersIter.remove() + } + } + removedHandlers + } + + /** + * Remove all non-parameter entries from the local variable table which denote variables that are + * not actually read or written. + * + * Note that each entry in the local variable table has a start, end and index. Two entries with + * the same index, but distinct start / end ranges are different variables, they may have not the + * same type or name. 
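To make the remark about index reuse concrete: two locals of different types that live in disjoint scopes typically share one slot, which yields two local variable table entries with the same index but different ranges and types. A hypothetical source-level example:

```scala
// `x` and `s` usually occupy the same local variable slot in disjoint ranges, so the
// local variable table ends up with two entries for that index.
def f(b: Boolean): Int =
  if (b) { val x = 1;  x }
  else   { val s = ""; s.length }
```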
+ */ + def removeUnusedLocalVariableNodes(method: MethodNode)(fistLocalIndex: Int = parametersSize(method), renumber: Int => Int = identity): Boolean = { + def variableIsUsed(start: AbstractInsnNode, end: LabelNode, varIndex: Int): Boolean = { + start != end && (start match { + case v: VarInsnNode if v.`var` == varIndex => true + case _ => variableIsUsed(start.getNext, end, varIndex) + }) + } + + val initialNumVars = method.localVariables.size + val localsIter = method.localVariables.iterator() + while (localsIter.hasNext) { + val local = localsIter.next() + val index = local.index + // parameters and `this` (the lowest indices, starting at 0) are never removed or renumbered + if (index >= fistLocalIndex) { + if (!variableIsUsed(local.start, local.end, index)) localsIter.remove() + else if (renumber(index) != index) local.index = renumber(index) + } + } + method.localVariables.size != initialNumVars + } + + /** + * The number of local variable slots used for parameters and for the `this` reference. + */ + private def parametersSize(method: MethodNode): Int = { + // Double / long fields occupy two slots, so we sum up the sizes. Since getSize returns 0 for + // void, we have to add `max 1`. + val paramsSize = scala.tools.asm.Type.getArgumentTypes(method.desc).iterator.map(_.getSize max 1).sum + val thisSize = if ((method.access & Opcodes.ACC_STATIC) == 0) 1 else 0 + paramsSize + thisSize + } + + /** + * Compact the local variable slots used in the method's implementation. This prevents having + * unused slots for example after eliminating unreachable code. + * + * This transformation reduces the size of the frame for invoking the method. For example, if the + * method has an ISTORE instruction to the local variable 3, the maxLocals of the method is at + * least 4, even if some local variable slots below 3 are not used by any instruction. + * + * This could be improved by doing proper register allocation. + */ + def compactLocalVariables(method: MethodNode): Boolean = { + // This array is built up to map local variable indices from old to new. + val renumber = collection.mutable.ArrayBuffer.empty[Int] + + // Add the index of the local variable used by `varIns` to the `renumber` array. + def addVar(varIns: VarInsnNode): Unit = { + val index = varIns.`var` + val isWide = (varIns.getOpcode: @switch) match { + case Opcodes.LLOAD | Opcodes.DLOAD | Opcodes.LSTORE | Opcodes.DSTORE => true + case _ => false + } + + // Ensure the length of `renumber`. Unused variable indices are mapped to -1. + val minLength = if (isWide) index + 2 else index + 1 + for (i <- renumber.length until minLength) renumber += -1 + + renumber(index) = index + if (isWide) renumber(index + 1) = index + } + + // first phase: collect all used local variables. if the variable at index x is used, set + // renumber(x) = x, otherwise renumber(x) = -1. if the variable is wide (long or double), set + // renumber(x+1) = x. + + val firstLocalIndex = parametersSize(method) + for (i <- 0 until firstLocalIndex) renumber += i // parameters and `this` are always used. + method.instructions.iterator().asScala foreach { + case VarInstruction(varIns) => addVar(varIns) + case _ => + } + + // assign the next free slot to each used local variable. + // for example, rewrite (0, 1, -1, 3, -1, 5) to (0, 1, -1, 2, -1, 3). 
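The renumbering described in the comment above can be modelled as a small pure function (an illustration only, not code from the patch):

```scala
// Pack used slots densely while keeping parameter slots (below firstLocalIndex)
// fixed; -1 marks unused slots.
def pack(renumber: Array[Int], firstLocalIndex: Int): Array[Int] = {
  var next = firstLocalIndex
  renumber.zipWithIndex.map { case (slot, i) =>
    if (i < firstLocalIndex) slot
    else if (slot == -1) -1
    else { val r = next; next += 1; r }
  }
}

// pack(Array(0, 1, -1, 3, -1, 5), firstLocalIndex = 2) yields Array(0, 1, -1, 2, -1, 3)
```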
+ + var nextIndex = firstLocalIndex + for (i <- firstLocalIndex until renumber.length if renumber(i) != -1) { + renumber(i) = nextIndex + nextIndex += 1 + } + + // Update the local variable descriptors according to the renumber table, and eliminate stale entries + val removedLocalVariableDescriptors = removeUnusedLocalVariableNodes(method)(firstLocalIndex, renumber) + + if (nextIndex == renumber.length) removedLocalVariableDescriptors + else { + // update variable instructions according to the renumber table + method.maxLocals = nextIndex + method.instructions.iterator().asScala.foreach { + case VarInstruction(varIns) => + val oldIndex = varIns.`var` + if (oldIndex >= firstLocalIndex && renumber(oldIndex) != oldIndex) + varIns.`var` = renumber(varIns.`var`) + case _ => + } + true + } + } + + /** + * In order to run an Analyzer, the maxLocals / maxStack fields need to be available. The ASM + * framework only computes these values during bytecode generation. + * + * Since there's currently no better way, we run a bytecode generator on the method and extract + * the computed values. This required changes to the ASM codebase: + * - the [[MethodWriter]] class was made public + * - accessors for maxLocals / maxStack were added to the MethodWriter class + * + * We could probably make this faster (and allocate less memory) by hacking the ASM framework + * more: create a subclass of MethodWriter with a /dev/null byteVector. Another option would be + * to create a separate visitor for computing those values, duplicating the functionality from the + * MethodWriter. + */ + private def computeMaxLocalsMaxStack(method: MethodNode) { + val cw = new ClassWriter(ClassWriter.COMPUTE_MAXS) + val excs = method.exceptions.asScala.toArray + val mw = cw.visitMethod(method.access, method.name, method.desc, method.signature, excs).asInstanceOf[MethodWriter] + method.accept(mw) + method.maxLocals = mw.getMaxLocals + method.maxStack = mw.getMaxStack + } + + /** + * Removes LineNumberNodes that don't describe any executable instructions. + * + * This method expects (and asserts) that the `start` label of each LineNumberNode is the + * lexically preceding label declaration. + */ + def removeEmptyLineNumbers(method: MethodNode): Boolean = { + def isEmpty(node: AbstractInsnNode): Boolean = node.getNext match { + case null => true + case l: LineNumberNode => true + case n if n.getOpcode >= 0 => false + case n => isEmpty(n) + } + + val initialSize = method.instructions.size + val iterator = method.instructions.iterator() + var previousLabel: LabelNode = null + while (iterator.hasNext) { + iterator.next match { + case label: LabelNode => previousLabel = label + case line: LineNumberNode if isEmpty(line) => + assert(line.start == previousLabel) + iterator.remove() + case _ => + } + } + method.instructions.size != initialSize + } + + /** + * Removes unreferenced label declarations, also squashes sequences of label definitions. 
+ * + * [ops]; Label(a); Label(b); [ops]; + * => subs([ops], b, a); Label(a); subs([ops], b, a); + */ + def removeEmptyLabelNodes(method: MethodNode): Boolean = { + val references = labelReferences(method) + + val initialSize = method.instructions.size + val iterator = method.instructions.iterator() + var prev: LabelNode = null + while (iterator.hasNext) { + iterator.next match { + case label: LabelNode => + if (!references.contains(label)) iterator.remove() + else if (prev != null) { + references(label).foreach(substituteLabel(_, label, prev)) + iterator.remove() + } else prev = label + + case instruction => + if (instruction.getOpcode >= 0) prev = null + } + } + method.instructions.size != initialSize + } + + /** + * Apply various simplifications to branching instructions. + */ + def simplifyJumps(method: MethodNode): Boolean = { + var changed = false + + val allHandlers = method.tryCatchBlocks.asScala.toSet + + // A set of all exception handlers that guard the current instruction, required for simplifyGotoReturn + var activeHandlers = Set.empty[TryCatchBlockNode] + + // Instructions that need to be removed. simplifyBranchOverGoto returns an instruction to be + // removed. It cannot remove it itself because the instruction may be the successor of the current + // instruction of the iterator, which is not supported in ASM. + var instructionsToRemove = Set.empty[AbstractInsnNode] + + val iterator = method.instructions.iterator() + while (iterator.hasNext) { + val instruction = iterator.next() + + instruction match { + case l: LabelNode => + activeHandlers ++= allHandlers.filter(_.start == l) + activeHandlers = activeHandlers.filter(_.end != l) + case _ => + } + + if (instructionsToRemove(instruction)) { + iterator.remove() + instructionsToRemove -= instruction + } else if (isJumpNonJsr(instruction)) { // fast path - all of the below only treat jumps + var jumpRemoved = simplifyThenElseSameTarget(method, instruction) + + if (!jumpRemoved) { + changed = collapseJumpChains(instruction) || changed + jumpRemoved = removeJumpToSuccessor(method, instruction) + + if (!jumpRemoved) { + val staleGoto = simplifyBranchOverGoto(method, instruction) + instructionsToRemove ++= staleGoto + changed ||= staleGoto.nonEmpty + changed = simplifyGotoReturn(method, instruction, inTryBlock = activeHandlers.nonEmpty) || changed + } + } + changed ||= jumpRemoved + } + } + assert(instructionsToRemove.isEmpty, "some optimization required removing a previously traversed instruction. add `instructionsToRemove.foreach(method.instructions.remove)`") + changed + } + + /** + * Removes a conditional jump if it is followed by a GOTO to the same destination. + * + * CondJump l; [nops]; GOTO l; [...] + * POP*; [nops]; GOTO l; [...] + * + * Introduces 1 or 2 POP instructions, depending on the number of values consumed by the CondJump. + */ + private def simplifyThenElseSameTarget(method: MethodNode, instruction: AbstractInsnNode): Boolean = instruction match { + case ConditionalJump(jump) => + nextExecutableInstruction(instruction) match { + case Some(Goto(elseJump)) if sameTargetExecutableInstruction(jump, elseJump) => + removeJumpAndAdjustStack(method, jump) + true + + case _ => false + } + case _ => false + } + + /** + * Replace jumps to a sequence of GOTO instructions by a jump to the final destination. + * + * Jump l; [any ops]; l: GOTO m; [any ops]; m: GOTO n; [any ops]; n: NotGOTO; [...] + * => Jump n; [rest unchaned] + * + * If there's a loop of GOTOs, the initial jump is replaced by one of the labels in the loop. 
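The cycle case mentioned above can be modelled abstractly. The sketch below is only an analogy for `finalJumpTarget`, using a map from labels to their GOTO destinations instead of real instruction nodes:

```scala
import scala.annotation.tailrec

// Follow a chain of unconditional jumps until a non-GOTO target or a cycle is hit.
@tailrec
def follow(label: String, gotoTarget: String => Option[String], seen: Set[String]): String =
  gotoTarget(label) match {
    case Some(dest) => if (seen(dest)) dest else follow(dest, gotoTarget, seen + dest)
    case None       => label
  }

// follow("l", Map("l" -> "m", "m" -> "n").get, Set("l"))  returns "n"
// follow("l", Map("l" -> "m", "m" -> "l").get, Set("l"))  returns "l" (a label inside the loop)
```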
+ */ + private def collapseJumpChains(instruction: AbstractInsnNode): Boolean = instruction match { + case JumpNonJsr(jump) => + val target = finalJumpTarget(jump) + if (jump.label == target) false else { + jump.label = target + true + } + + case _ => false + } + + /** + * Eliminates unnecessary jump instructions + * + * Jump l; [nops]; l: [...] + * => POP*; [nops]; l: [...] + * + * Introduces 0, 1 or 2 POP instructions, depending on the number of values consumed by the Jump. + */ + private def removeJumpToSuccessor(method: MethodNode, instruction: AbstractInsnNode) = instruction match { + case JumpNonJsr(jump) if nextExecutableInstruction(jump, alsoKeep = Set(jump.label)) == Some(jump.label) => + removeJumpAndAdjustStack(method, jump) + true + case _ => false + } + + /** + * If the "else" part of a conditional branch is a simple GOTO, negates the conditional branch + * and eliminates the GOTO. + * + * CondJump l; [nops, no labels]; GOTO m; [nops]; l: [...] + * => NegatedCondJump m; [nops, no labels]; [nops]; l: [...] + * + * Note that no label definitions are allowed in the first [nops] section. Otherwise, there could + * be some other jump to the GOTO, and eliminating it would change behavior. + * + * For technical reasons, we cannot remove the GOTO here (*).Instead this method returns an Option + * containing the GOTO that needs to be eliminated. + * + * (*) The ASM instruction iterator (used in the caller [[simplifyJumps]]) has an undefined + * behavior if the successor of the current instruction is removed, which may be the case here + */ + private def simplifyBranchOverGoto(method: MethodNode, instruction: AbstractInsnNode): Option[JumpInsnNode] = instruction match { + case ConditionalJump(jump) => + // don't skip over labels, see doc comment + nextExecutableInstruction(jump, alsoKeep = _.isInstanceOf[LabelNode]) match { + case Some(Goto(goto)) => + if (nextExecutableInstruction(goto, alsoKeep = Set(jump.label)) == Some(jump.label)) { + val newJump = new JumpInsnNode(negateJumpOpcode(jump.getOpcode), goto.label) + method.instructions.set(jump, newJump) + Some(goto) + } else None + + case _ => None + } + case _ => None + } + + /** + * Inlines xRETURN and ATHROW + * + * GOTO l; [any ops]; l: xRETURN/ATHROW + * => xRETURN/ATHROW; [any ops]; l: xRETURN/ATHROW + * + * inlining is only done if the GOTO instruction is not part of a try block, otherwise the + * rewrite might change the behavior. 
For xRETURN, the reason is that return instructions may throw + * an IllegalMonitorStateException, as described here: + * http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-6.html#jvms-6.5.return + */ + private def simplifyGotoReturn(method: MethodNode, instruction: AbstractInsnNode, inTryBlock: Boolean): Boolean = !inTryBlock && (instruction match { + case Goto(jump) => + nextExecutableInstruction(jump.label) match { + case Some(target) => + if (isReturn(target) || target.getOpcode == Opcodes.ATHROW) { + method.instructions.set(jump, target.clone(null)) + true + } else false + + case _ => false + } + case _ => false + }) +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/OptimizerReporting.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/OptimizerReporting.scala new file mode 100644 index 000000000000..7002e43d98f1 --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/OptimizerReporting.scala @@ -0,0 +1,24 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2014 LAMP/EPFL + * @author Martin Odersky + */ + +package scala.tools.nsc +package backend.jvm + +import scala.tools.asm +import asm.tree._ + +/** + * Reporting utilities used in the optimizer. + */ +object OptimizerReporting { + def methodSignature(className: String, methodName: String, methodDescriptor: String): String = { + className + "::" + methodName + methodDescriptor + } + + def methodSignature(className: String, method: MethodNode): String = methodSignature(className, method.name, method.desc) + + def inlineFailure(reason: String): Nothing = MissingRequirementError.signal(reason) + def assertionError(message: String): Nothing = throw new AssertionError(message) +} diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala index c49f23852f7c..a866173a8849 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala @@ -56,11 +56,8 @@ abstract class ClosureElimination extends SubComponent { case (BOX(t1), UNBOX(t2)) if (t1 == t2) => Some(Nil) - case (LOAD_FIELD(sym, isStatic), DROP(_)) if !sym.hasAnnotation(definitions.VolatileAttr) => - if (isStatic) - Some(Nil) - else - Some(DROP(REFERENCE(definitions.ObjectClass)) :: Nil) + case (LOAD_FIELD(sym, /* isStatic */false), DROP(_)) if !sym.hasAnnotation(definitions.VolatileAttr) && inliner.isClosureClass(sym.owner) => + Some(DROP(REFERENCE(definitions.ObjectClass)) :: Nil) case _ => None } diff --git a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala index 1fadcb89207a..0e6ee76eb2a7 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala @@ -7,7 +7,6 @@ package scala package tools.nsc package backend.opt -import scala.tools.nsc.backend.icode.analysis.LubException import scala.annotation.tailrec /** @@ -19,7 +18,7 @@ import scala.annotation.tailrec * * With some more work it could be extended to * - cache stable values (final fields, modules) in locals - * - replace the copy propagation in ClosureElilmination + * - replace the copy propagation in ClosureElimination * - fold constants * - eliminate unnecessary stores and loads * - propagate knowledge gathered from conditionals for further optimization @@ -438,7 +437,7 @@ abstract class ConstantOptimization extends SubComponent { // TODO if we do 
all that we need to be careful in the // case that success and failure are the same target block // because we're using a Map and don't want one possible state to clobber the other - // alternative mayb we should just replace the conditional with a jump if both targets are the same + // alternative maybe we should just replace the conditional with a jump if both targets are the same def mightEqual = val1 mightEqual val2 def mightNotEqual = val1 mightNotEqual val2 diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala index 90c37ba0b37b..3704acb05546 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala @@ -169,9 +169,14 @@ abstract class DeadCodeElimination extends SubComponent { case RETURN(_) | JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | STORE_FIELD(_, _) | THROW(_) | LOAD_ARRAY_ITEM(_) | STORE_ARRAY_ITEM(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) | STORE_THIS(_) | - LOAD_EXCEPTION(_) | SWITCH(_, _) | MONITOR_ENTER() | MONITOR_EXIT() | CHECK_CAST(_) => + LOAD_EXCEPTION(_) | SWITCH(_, _) | MONITOR_ENTER() | MONITOR_EXIT() | CHECK_CAST(_) | CREATE_ARRAY(_, _) => moveToWorkList() + case LOAD_FIELD(sym, isStatic) if isStatic || !inliner.isClosureClass(sym.owner) => + // static load may trigger static initization. + // non-static load can throw NPE (but we know closure fields can't be accessed via a + // null reference. + moveToWorkList() case CALL_METHOD(m1, _) if isSideEffecting(m1) => moveToWorkList() @@ -193,6 +198,8 @@ abstract class DeadCodeElimination extends SubComponent { moveToWorkListIf(necessary) case LOAD_MODULE(sym) if isLoadNeeded(sym) => moveToWorkList() // SI-4859 Module initialization might side-effect. + case CALL_PRIMITIVE(Arithmetic(DIV | REM, INT | LONG) | ArrayLength(_)) => + moveToWorkList() // SI-8601 Might divide by zero case _ => () moveToWorkListIf(cond = false) } @@ -216,7 +223,7 @@ abstract class DeadCodeElimination extends SubComponent { debuglog("Marking instr: \tBB_" + bb + ": " + idx + " " + bb(idx)) val instr = bb(idx) - // adds the instrutions that define the stack values about to be consumed to the work list to + // adds the instructions that define the stack values about to be consumed to the work list to // be marked useful def addDefs() = for ((bb1, idx1) <- rdef.findDefs(bb, idx, instr.consumed) if !useful(bb1)(idx1)) { debuglog(s"\t${bb1(idx1)} is consumed by $instr") diff --git a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala index 235e954f8842..425c10d1536f 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala @@ -182,7 +182,7 @@ abstract class InlineExceptionHandlers extends SubComponent { // in other words: what's on the stack MUST conform to what's in the THROW(..)! 
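The DeadCodeElimination change earlier in this hunk (SI-8601) keeps integer division, remainder and array-length instructions alive because they can throw even when their result is unused. A minimal source-level illustration, not taken from the patch:

```scala
// If `a / b` were dropped as dead code, f(1, 0) would return 0 instead of throwing
// an ArithmeticException, silently changing program behaviour.
def f(a: Int, b: Int): Int = {
  a / b // result unused, but the potential exception is an observable effect
  0
}
```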
if (!canReplaceHandler) { - currentClass.cunit.warning(NoPosition, "Unable to inline the exception handler inside incorrect" + + reporter.warning(NoPosition, "Unable to inline the exception handler inside incorrect" + " block:\n" + bblock.iterator.mkString("\n") + "\nwith stack: " + typeInfo + " just " + "before instruction index " + index) } @@ -383,7 +383,7 @@ abstract class InlineExceptionHandlers extends SubComponent { Some((exceptionLocal, copy)) case _ => - currentClass.cunit.warning(NoPosition, "Unable to inline the exception handler due to incorrect format:\n" + + reporter.warning(NoPosition, "Unable to inline the exception handler due to incorrect format:\n" + handler.iterator.mkString("\n")) None } diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala index f6de522d09bd..8f6fc6570607 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala @@ -26,7 +26,7 @@ import scala.reflect.internal.util.NoSourceFile * where `p` is defined in a library L, and is accessed from a library C (for Client), * where C was compiled against L', an optimized version of L where the inliner made `p` public at the bytecode level. * The only such members are fields, either synthetic or isParamAccessor, and thus having a dollar sign in their name - * (the accesibility of methods and constructors isn't touched by the inliner). + * (the accessibility of methods and constructors isn't touched by the inliner). * * Thus we add one more goal to our list: * (c) Compile C (either optimized or not) against any of L or L', @@ -195,7 +195,7 @@ abstract class Inliners extends SubComponent { /** The current iclass */ private var currentIClazz: IClass = _ - private def warn(pos: Position, msg: String) = currentIClazz.cunit.inlinerWarning(pos, msg) + private def warn(pos: Position, msg: String) = currentRun.reporting.inlinerWarning(pos, msg) private def ownedName(sym: Symbol): String = exitingUncurry { val count = ( @@ -283,14 +283,14 @@ abstract class Inliners extends SubComponent { } val tfa = new analysis.MTFAGrowable() - tfa.stat = global.settings.Ystatistics.value + tfa.stat = global.settings.YstatisticsEnabled val staleOut = new mutable.ListBuffer[BasicBlock] val splicedBlocks = mutable.Set.empty[BasicBlock] val staleIn = mutable.Set.empty[BasicBlock] /** * A transformation local to the body of the IMethod received as argument. - * An linining decision consists in replacing a callsite with the body of the callee. + * An inlining decision consists in replacing a callsite with the body of the callee. * Please notice that, because `analyzeMethod()` itself may modify a method body, * the particular callee bodies that end up being inlined depend on the particular order in which methods are visited * (no topological sorting over the call-graph is attempted). diff --git a/src/compiler/scala/tools/nsc/classpath/AggregateFlatClassPath.scala b/src/compiler/scala/tools/nsc/classpath/AggregateFlatClassPath.scala new file mode 100644 index 000000000000..3f06264e3c08 --- /dev/null +++ b/src/compiler/scala/tools/nsc/classpath/AggregateFlatClassPath.scala @@ -0,0 +1,125 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. 
+ */ +package scala.tools.nsc.classpath + +import java.net.URL +import scala.annotation.tailrec +import scala.collection.mutable.ArrayBuffer +import scala.reflect.io.AbstractFile +import scala.tools.nsc.util.ClassPath +import scala.tools.nsc.util.ClassRepresentation + +/** + * A classpath unifying multiple class- and sourcepath entries. + * Flat classpath can obtain entries for classes and sources independently + * so it tries to do operations quite optimally - iterating only these collections + * which are needed in the given moment and only as far as it's necessary. + * @param aggregates classpath instances containing entries which this class processes + */ +case class AggregateFlatClassPath(aggregates: Seq[FlatClassPath]) extends FlatClassPath { + + override def findClassFile(className: String): Option[AbstractFile] = { + @tailrec + def find(aggregates: Seq[FlatClassPath]): Option[AbstractFile] = + if (aggregates.nonEmpty) { + val classFile = aggregates.head.findClassFile(className) + if (classFile.isDefined) classFile + else find(aggregates.tail) + } else None + + find(aggregates) + } + + override def findClass(className: String): Option[ClassRepresentation[AbstractFile]] = { + val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) + + @tailrec + def findEntry[T <: ClassRepClassPathEntry](aggregates: Seq[FlatClassPath], getEntries: FlatClassPath => Seq[T]): Option[T] = + if (aggregates.nonEmpty) { + val entry = getEntries(aggregates.head) + .find(_.name == simpleClassName) + if (entry.isDefined) entry + else findEntry(aggregates.tail, getEntries) + } else None + + val classEntry = findEntry(aggregates, classesGetter(pkg)) + val sourceEntry = findEntry(aggregates, sourcesGetter(pkg)) + + mergeClassesAndSources(classEntry.toList, sourceEntry.toList).headOption + } + + override def asURLs: Seq[URL] = aggregates.flatMap(_.asURLs) + + override def asClassPathStrings: Seq[String] = aggregates.map(_.asClassPathString).distinct + + override def asSourcePathString: String = ClassPath.join(aggregates map (_.asSourcePathString): _*) + + override private[nsc] def packages(inPackage: String): Seq[PackageEntry] = { + val aggregatedPackages = aggregates.flatMap(_.packages(inPackage)).distinct + aggregatedPackages + } + + override private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = + getDistinctEntries(classesGetter(inPackage)) + + override private[nsc] def sources(inPackage: String): Seq[SourceFileEntry] = + getDistinctEntries(sourcesGetter(inPackage)) + + override private[nsc] def list(inPackage: String): FlatClassPathEntries = { + val (packages, classesAndSources) = aggregates.map(_.list(inPackage)).unzip + val distinctPackages = packages.flatten.distinct + val distinctClassesAndSources = mergeClassesAndSources(classesAndSources: _*) + FlatClassPathEntries(distinctPackages, distinctClassesAndSources) + } + + /** + * Returns only one entry for each name. If there's both a source and a class entry, it + * creates an entry containing both of them. If there would be more than one class or source + * entries for the same class it always would use the first entry of each type found on a classpath. 
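A hypothetical wiring example for the aggregate classpath defined above; the directory paths are placeholders, only the types and methods come from this patch:

```scala
import java.io.File
import scala.tools.nsc.classpath.{AggregateFlatClassPath, DirectoryFlatClassPath, DirectoryFlatSourcePath, FlatClassPath}

// Combine a directory of classfiles and a directory of sources into one flat
// classpath, then query it by fully qualified class name.
val cp = AggregateFlatClassPath(Seq[FlatClassPath](
  DirectoryFlatClassPath(new File("out/classes")),
  DirectoryFlatSourcePath(new File("src/library"))
))

val binary = cp.findClassFile("scala.Option") // Some(...) if out/classes/scala/Option.class exists
val merged = cp.findClass("scala.Option")     // may combine a class entry with its source entry
```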
+ */ + private def mergeClassesAndSources(entries: Seq[ClassRepClassPathEntry]*): Seq[ClassRepClassPathEntry] = { + // based on the implementation from MergedClassPath + var count = 0 + val indices = collection.mutable.HashMap[String, Int]() + val mergedEntries = new ArrayBuffer[ClassRepClassPathEntry](1024) + + for { + partOfEntries <- entries + entry <- partOfEntries + } { + val name = entry.name + if (indices contains name) { + val index = indices(name) + val existing = mergedEntries(index) + + if (existing.binary.isEmpty && entry.binary.isDefined) + mergedEntries(index) = ClassAndSourceFilesEntry(entry.binary.get, existing.source.get) + if (existing.source.isEmpty && entry.source.isDefined) + mergedEntries(index) = ClassAndSourceFilesEntry(existing.binary.get, entry.source.get) + } + else { + indices(name) = count + mergedEntries += entry + count += 1 + } + } + mergedEntries.toIndexedSeq + } + + private def getDistinctEntries[EntryType <: ClassRepClassPathEntry](getEntries: FlatClassPath => Seq[EntryType]): Seq[EntryType] = { + val seenNames = collection.mutable.HashSet[String]() + val entriesBuffer = new ArrayBuffer[EntryType](1024) + for { + cp <- aggregates + entry <- getEntries(cp) if !seenNames.contains(entry.name) + } { + entriesBuffer += entry + seenNames += entry.name + } + entriesBuffer.toIndexedSeq + } + + private def classesGetter(pkg: String) = (cp: FlatClassPath) => cp.classes(pkg) + private def sourcesGetter(pkg: String) = (cp: FlatClassPath) => cp.sources(pkg) +} diff --git a/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala b/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala new file mode 100644 index 000000000000..9bf4e3f77908 --- /dev/null +++ b/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package scala.tools.nsc.classpath + +import scala.reflect.io.AbstractFile +import scala.tools.nsc.util.ClassPath + +/** + * A trait that contains factory methods for classpath elements of type T. + * + * The logic has been abstracted from ClassPath#ClassPathContext so it's possible + * to have common trait that supports both recursive and flat classpath representations. + * + * Therefore, we expect that T will be either ClassPath[U] or FlatClassPath. + */ +trait ClassPathFactory[T] { + + /** + * Create a new classpath based on the abstract file. + */ + def newClassPath(file: AbstractFile): T + + /** + * Creators for sub classpaths which preserve this context. 
+ */ + def sourcesInPath(path: String): List[T] + + def expandPath(path: String, expandStar: Boolean = true): List[String] = ClassPath.expandPath(path, expandStar) + + def expandDir(extdir: String): List[String] = ClassPath.expandDir(extdir) + + def contentsOfDirsInPath(path: String): List[T] = + for { + dir <- expandPath(path, expandStar = false) + name <- expandDir(dir) + entry <- Option(AbstractFile.getDirectory(name)) + } yield newClassPath(entry) + + def classesInExpandedPath(path: String): IndexedSeq[T] = + classesInPathImpl(path, expand = true).toIndexedSeq + + def classesInPath(path: String) = classesInPathImpl(path, expand = false) + + def classesInManifest(useManifestClassPath: Boolean) = + if (useManifestClassPath) ClassPath.manifests.map(url => newClassPath(AbstractFile getResources url)) + else Nil + + // Internal + protected def classesInPathImpl(path: String, expand: Boolean) = + for { + file <- expandPath(path, expand) + dir <- Option(AbstractFile.getDirectory(file)) + } yield newClassPath(dir) +} diff --git a/src/compiler/scala/tools/nsc/classpath/DirectoryFlatClassPath.scala b/src/compiler/scala/tools/nsc/classpath/DirectoryFlatClassPath.scala new file mode 100644 index 000000000000..81d2f7320f97 --- /dev/null +++ b/src/compiler/scala/tools/nsc/classpath/DirectoryFlatClassPath.scala @@ -0,0 +1,162 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package scala.tools.nsc.classpath + +import java.io.File +import java.io.FileFilter +import java.net.URL +import scala.reflect.io.AbstractFile +import scala.reflect.io.PlainFile +import scala.tools.nsc.util.ClassRepresentation +import FileUtils._ + +/** + * A trait allowing to look for classpath entries of given type in directories. + * It provides common logic for classes handling class and source files. + * It makes use of the fact that in the case of nested directories it's easy to find a file + * when we have a name of a package. 
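The directory lookup defined below relies on the package-to-path mapping provided by FileUtils.dirPath, which appears later in this patch. For example:

```scala
import scala.tools.nsc.classpath.FileUtils

// Package and class names map onto relative paths by replacing '.' with '/':
FileUtils.dirPath("scala.collection.immutable")      // "scala/collection/immutable"
FileUtils.dirPath("scala.collection.immutable.List") // "scala/collection/immutable/List"
// so DirectoryFlatClassPath expects <dir>/scala/collection/immutable/List.class
```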
+ */ +trait DirectoryFileLookup[FileEntryType <: ClassRepClassPathEntry] extends FlatClassPath { + val dir: File + assert(dir != null, "Directory file in DirectoryFileLookup cannot be null") + + override def asURLs: Seq[URL] = Seq(dir.toURI.toURL) + override def asClassPathStrings: Seq[String] = Seq(dir.getPath) + + import FlatClassPath.RootPackage + private def getDirectory(forPackage: String): Option[File] = { + if (forPackage == RootPackage) { + Some(dir) + } else { + val packageDirName = FileUtils.dirPath(forPackage) + val packageDir = new File(dir, packageDirName) + if (packageDir.exists && packageDir.isDirectory) { + Some(packageDir) + } else None + } + } + + override private[nsc] def packages(inPackage: String): Seq[PackageEntry] = { + val dirForPackage = getDirectory(inPackage) + val nestedDirs: Array[File] = dirForPackage match { + case None => Array.empty + case Some(directory) => directory.listFiles(DirectoryFileLookup.packageDirectoryFileFilter) + } + val prefix = PackageNameUtils.packagePrefix(inPackage) + val entries = nestedDirs map { file => + PackageEntryImpl(prefix + file.getName) + } + entries + } + + protected def files(inPackage: String): Seq[FileEntryType] = { + val dirForPackage = getDirectory(inPackage) + val files: Array[File] = dirForPackage match { + case None => Array.empty + case Some(directory) => directory.listFiles(fileFilter) + } + val entries = files map { file => + val wrappedFile = new scala.reflect.io.File(file) + createFileEntry(new PlainFile(wrappedFile)) + } + entries + } + + override private[nsc] def list(inPackage: String): FlatClassPathEntries = { + val dirForPackage = getDirectory(inPackage) + val files: Array[File] = dirForPackage match { + case None => Array.empty + case Some(directory) => directory.listFiles() + } + val packagePrefix = PackageNameUtils.packagePrefix(inPackage) + val packageBuf = collection.mutable.ArrayBuffer.empty[PackageEntry] + val fileBuf = collection.mutable.ArrayBuffer.empty[FileEntryType] + for (file <- files) { + if (file.isPackage) { + val pkgEntry = PackageEntryImpl(packagePrefix + file.getName) + packageBuf += pkgEntry + } else if (fileFilter.accept(file)) { + val wrappedFile = new scala.reflect.io.File(file) + val abstractFile = new PlainFile(wrappedFile) + fileBuf += createFileEntry(abstractFile) + } + } + FlatClassPathEntries(packageBuf, fileBuf) + } + + protected def createFileEntry(file: AbstractFile): FileEntryType + protected def fileFilter: FileFilter +} + +object DirectoryFileLookup { + + private[classpath] object packageDirectoryFileFilter extends FileFilter { + override def accept(pathname: File): Boolean = pathname.isPackage + } +} + +case class DirectoryFlatClassPath(dir: File) + extends DirectoryFileLookup[ClassFileEntryImpl] + with NoSourcePaths { + + override def findClass(className: String): Option[ClassRepresentation[AbstractFile]] = findClassFile(className) map ClassFileEntryImpl + + override def findClassFile(className: String): Option[AbstractFile] = { + val relativePath = FileUtils.dirPath(className) + val classFile = new File(s"$dir/$relativePath.class") + if (classFile.exists) { + val wrappedClassFile = new scala.reflect.io.File(classFile) + val abstractClassFile = new PlainFile(wrappedClassFile) + Some(abstractClassFile) + } else None + } + + override protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file) + override protected def fileFilter: FileFilter = DirectoryFlatClassPath.classFileFilter + + override private[nsc] def classes(inPackage: String): 
Seq[ClassFileEntry] = files(inPackage) +} + +object DirectoryFlatClassPath { + + private val classFileFilter = new FileFilter { + override def accept(pathname: File): Boolean = pathname.isClass + } +} + +case class DirectoryFlatSourcePath(dir: File) + extends DirectoryFileLookup[SourceFileEntryImpl] + with NoClassPaths { + + override def asSourcePathString: String = asClassPathString + + override protected def createFileEntry(file: AbstractFile): SourceFileEntryImpl = SourceFileEntryImpl(file) + override protected def fileFilter: FileFilter = DirectoryFlatSourcePath.sourceFileFilter + + override def findClass(className: String): Option[ClassRepresentation[AbstractFile]] = { + findSourceFile(className) map SourceFileEntryImpl + } + + private def findSourceFile(className: String): Option[AbstractFile] = { + val relativePath = FileUtils.dirPath(className) + val sourceFile = Stream("scala", "java") + .map(ext => new File(s"$dir/$relativePath.$ext")) + .collectFirst { case file if file.exists() => file } + + sourceFile.map { file => + val wrappedSourceFile = new scala.reflect.io.File(file) + val abstractSourceFile = new PlainFile(wrappedSourceFile) + abstractSourceFile + } + } + + override private[nsc] def sources(inPackage: String): Seq[SourceFileEntry] = files(inPackage) +} + +object DirectoryFlatSourcePath { + + private val sourceFileFilter = new FileFilter { + override def accept(pathname: File): Boolean = endsScalaOrJava(pathname.getName) + } +} diff --git a/src/compiler/scala/tools/nsc/classpath/FileUtils.scala b/src/compiler/scala/tools/nsc/classpath/FileUtils.scala new file mode 100644 index 000000000000..ee2528e15cb2 --- /dev/null +++ b/src/compiler/scala/tools/nsc/classpath/FileUtils.scala @@ -0,0 +1,68 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package scala.tools.nsc.classpath + +import java.io.{ File => JFile } +import java.net.URL +import scala.reflect.internal.FatalError +import scala.reflect.io.AbstractFile + +/** + * Common methods related to Java files and abstract files used in the context of classpath + */ +object FileUtils { + implicit class AbstractFileOps(val file: AbstractFile) extends AnyVal { + def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.name) + + def isClass: Boolean = !file.isDirectory && file.hasExtension("class") + + def isScalaOrJavaSource: Boolean = !file.isDirectory && (file.hasExtension("scala") || file.hasExtension("java")) + + // TODO do we need to check also other files using ZipMagicNumber like in scala.tools.nsc.io.Jar.isJarOrZip? 
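A brief usage sketch for DirectoryFlatSourcePath, defined just before FileUtils above; the directory is a placeholder, and the lookup tries the .scala extension before falling back to .java:

```scala
import java.io.File
import scala.tools.nsc.classpath.DirectoryFlatSourcePath

val sourcePath = DirectoryFlatSourcePath(new File("src/library"))
// Tries src/library/scala/Option.scala, then src/library/scala/Option.java.
val optionSource = sourcePath.findClass("scala.Option")
```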
+ def isJarOrZip: Boolean = file.hasExtension("jar") || file.hasExtension("zip") + + /** + * Safe method returning a sequence containing one URL representing this file, when underlying file exists, + * and returning given default value in other case + */ + def toURLs(default: => Seq[URL] = Seq.empty): Seq[URL] = if (file.file == null) default else Seq(file.toURL) + } + + implicit class FileOps(val file: JFile) extends AnyVal { + def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.getName) + + def isClass: Boolean = file.isFile && file.getName.endsWith(".class") + } + + def stripSourceExtension(fileName: String): String = { + if (endsScala(fileName)) stripClassExtension(fileName) + else if (endsJava(fileName)) stripJavaExtension(fileName) + else throw new FatalError("Unexpected source file ending: " + fileName) + } + + def dirPath(forPackage: String) = forPackage.replace('.', '/') + + def endsClass(fileName: String): Boolean = + fileName.length > 6 && fileName.substring(fileName.length - 6) == ".class" + + def endsScalaOrJava(fileName: String): Boolean = + endsScala(fileName) || endsJava(fileName) + + def endsJava(fileName: String): Boolean = + fileName.length > 5 && fileName.substring(fileName.length - 5) == ".java" + + def endsScala(fileName: String): Boolean = + fileName.length > 6 && fileName.substring(fileName.length - 6) == ".scala" + + def stripClassExtension(fileName: String): String = + fileName.substring(0, fileName.length - 6) // equivalent of fileName.length - ".class".length + + def stripJavaExtension(fileName: String): String = + fileName.substring(0, fileName.length - 5) + + // probably it should match a pattern like [a-z_]{1}[a-z0-9_]* but it cannot be changed + // because then some tests in partest don't pass + private def mayBeValidPackage(dirName: String): Boolean = + (dirName != "META-INF") && (dirName != "") && (dirName.charAt(0) != '.') +} diff --git a/src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala b/src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala new file mode 100644 index 000000000000..cb201617d2e4 --- /dev/null +++ b/src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala @@ -0,0 +1,101 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package scala.tools.nsc.classpath + +import scala.reflect.io.AbstractFile +import scala.tools.nsc.util.{ ClassFileLookup, ClassPath, ClassRepresentation } + +/** + * A base trait for the particular flat classpath representation implementations. + * + * We call this variant of a classpath representation flat because it's possible to + * query the whole classpath using just single instance extending this trait. + * + * This is an alternative design compared to scala.tools.nsc.util.ClassPath + */ +trait FlatClassPath extends ClassFileLookup[AbstractFile] { + /** Empty string represents root package */ + private[nsc] def packages(inPackage: String): Seq[PackageEntry] + private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] + private[nsc] def sources(inPackage: String): Seq[SourceFileEntry] + + /** Allows to get entries for packages and classes merged with sources possibly in one pass. 
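+ *
+ * Editorial sketch, not part of the original patch: given some `cp: FlatClassPath` inside
+ * compiler code (the method is private[nsc]), listing a package might look like
+ * {{{
+ *   val entries         = cp.list("scala.collection")
+ *   val subPackageNames = entries.packages.map(_.name)          // e.g. "scala.collection.immutable"
+ *   val classNames      = entries.classesAndSources.map(_.name) // simple names, e.g. "List"
+ * }}}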
*/ + private[nsc] def list(inPackage: String): FlatClassPathEntries + + // A default implementation which should be overridden, if we can create the more efficient + // solution for a given type of FlatClassPath + override def findClass(className: String): Option[ClassRepresentation[AbstractFile]] = { + val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) + + val foundClassFromClassFiles = classes(pkg) + .find(_.name == simpleClassName) + + def findClassInSources = sources(pkg) + .find(_.name == simpleClassName) + + foundClassFromClassFiles orElse findClassInSources + } + + override def asClassPathString: String = ClassPath.join(asClassPathStrings: _*) + def asClassPathStrings: Seq[String] +} + +object FlatClassPath { + val RootPackage = "" +} + +case class FlatClassPathEntries(packages: Seq[PackageEntry], classesAndSources: Seq[ClassRepClassPathEntry]) + +object FlatClassPathEntries { + import scala.language.implicitConversions + // to have working unzip method + implicit def entry2Tuple(entry: FlatClassPathEntries) = (entry.packages, entry.classesAndSources) +} + +sealed trait ClassRepClassPathEntry extends ClassRepresentation[AbstractFile] + +trait ClassFileEntry extends ClassRepClassPathEntry { + def file: AbstractFile +} + +trait SourceFileEntry extends ClassRepClassPathEntry { + def file: AbstractFile +} + +trait PackageEntry { + def name: String +} + +private[nsc] case class ClassFileEntryImpl(file: AbstractFile) extends ClassFileEntry { + override def name = FileUtils.stripClassExtension(file.name) // class name + + override def binary: Option[AbstractFile] = Some(file) + override def source: Option[AbstractFile] = None +} + +private[nsc] case class SourceFileEntryImpl(file: AbstractFile) extends SourceFileEntry { + override def name = FileUtils.stripSourceExtension(file.name) + + override def binary: Option[AbstractFile] = None + override def source: Option[AbstractFile] = Some(file) +} + +private[nsc] case class ClassAndSourceFilesEntry(classFile: AbstractFile, srcFile: AbstractFile) extends ClassRepClassPathEntry { + override def name = FileUtils.stripClassExtension(classFile.name) + + override def binary: Option[AbstractFile] = Some(classFile) + override def source: Option[AbstractFile] = Some(srcFile) +} + +private[nsc] case class PackageEntryImpl(name: String) extends PackageEntry + +private[nsc] trait NoSourcePaths { + def asSourcePathString: String = "" + private[nsc] def sources(inPackage: String): Seq[SourceFileEntry] = Seq.empty +} + +private[nsc] trait NoClassPaths { + def findClassFile(className: String): Option[AbstractFile] = None + private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = Seq.empty +} diff --git a/src/compiler/scala/tools/nsc/classpath/FlatClassPathFactory.scala b/src/compiler/scala/tools/nsc/classpath/FlatClassPathFactory.scala new file mode 100644 index 000000000000..7f67381d4d81 --- /dev/null +++ b/src/compiler/scala/tools/nsc/classpath/FlatClassPathFactory.scala @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package scala.tools.nsc.classpath + +import scala.tools.nsc.Settings +import scala.tools.nsc.io.AbstractFile +import scala.tools.nsc.util.ClassPath +import FileUtils.AbstractFileOps + +/** + * Provides factory methods for flat classpath. When creating classpath instances for a given path, + * it uses proper type of classpath depending on a types of particular files containing sources or classes. 
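+ *
+ * Editorial sketch, not part of the original patch; `settings` stands for an in-scope
+ * scala.tools.nsc.Settings instance and the paths are made up:
+ * {{{
+ *   val factory = new FlatClassPathFactory(settings)
+ *   val dirCp   = factory.newClassPath(AbstractFile.getDirectory("out/classes"))
+ *   val jarCp   = factory.newClassPath(AbstractFile.getDirectory("lib/some-library.jar"))
+ * }}}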
+ */ +class FlatClassPathFactory(settings: Settings) extends ClassPathFactory[FlatClassPath] { + + override def newClassPath(file: AbstractFile): FlatClassPath = + if (file.isJarOrZip) + ZipAndJarFlatClassPathFactory.create(file, settings) + else if (file.isDirectory) + new DirectoryFlatClassPath(file.file) + else + sys.error(s"Unsupported classpath element: $file") + + override def sourcesInPath(path: String): List[FlatClassPath] = + for { + file <- expandPath(path, expandStar = false) + dir <- Option(AbstractFile getDirectory file) + } yield createSourcePath(dir) + + private def createSourcePath(file: AbstractFile): FlatClassPath = + if (file.isJarOrZip) + ZipAndJarFlatSourcePathFactory.create(file, settings) + else if (file.isDirectory) + new DirectoryFlatSourcePath(file.file) + else + sys.error(s"Unsupported sourcepath element: $file") +} diff --git a/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala b/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala new file mode 100644 index 000000000000..c907d565d2c5 --- /dev/null +++ b/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package scala.tools.nsc.classpath + +import scala.tools.nsc.classpath.FlatClassPath.RootPackage + +/** + * Common methods related to package names represented as String + */ +object PackageNameUtils { + + /** + * @param fullClassName full class name with package + * @return (package, simple class name) + */ + def separatePkgAndClassNames(fullClassName: String): (String, String) = { + val lastDotIndex = fullClassName.lastIndexOf('.') + if (lastDotIndex == -1) + (RootPackage, fullClassName) + else + (fullClassName.substring(0, lastDotIndex), fullClassName.substring(lastDotIndex + 1)) + } + + def packagePrefix(inPackage: String): String = if (inPackage == RootPackage) "" else inPackage + "." +} diff --git a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala new file mode 100644 index 000000000000..84e21a3ccd4d --- /dev/null +++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala @@ -0,0 +1,180 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package scala.tools.nsc.classpath + +import java.io.File +import java.net.URL +import scala.annotation.tailrec +import scala.reflect.io.{ AbstractFile, FileZipArchive, ManifestResources } +import scala.tools.nsc.Settings +import FileUtils._ + +/** + * A trait providing an optional cache for classpath entries obtained from zip and jar files. + * It's possible to create such a cache assuming that entries in such files won't change (at + * least will be the same each time we'll load classpath during the lifetime of JVM process) + * - unlike class and source files in directories, which can be modified and recompiled. + * It allows us to e.g. reduce significantly memory used by PresentationCompilers in Scala IDE + * when there are a lot of projects having a lot of common dependencies. 
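+ *
+ * Editorial sketch, not part of the original patch; `jarFile` stands for some jar already
+ * wrapped as an AbstractFile and `settings` for an in-scope Settings instance:
+ * {{{
+ *   val cp1 = ZipAndJarFlatClassPathFactory.create(jarFile, settings)
+ *   val cp2 = ZipAndJarFlatClassPathFactory.create(jarFile, settings)
+ *   // unless -YdisableFlatCpCaching is set, cp1 and cp2 are the same cached instance
+ * }}}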
+ */ +sealed trait ZipAndJarFileLookupFactory { + + private val cache = collection.mutable.Map.empty[AbstractFile, FlatClassPath] + + def create(zipFile: AbstractFile, settings: Settings): FlatClassPath = { + if (settings.YdisableFlatCpCaching) createForZipFile(zipFile) + else createUsingCache(zipFile, settings) + } + + protected def createForZipFile(zipFile: AbstractFile): FlatClassPath + + private def createUsingCache(zipFile: AbstractFile, settings: Settings): FlatClassPath = cache.synchronized { + def newClassPathInstance = { + if (settings.verbose || settings.Ylogcp) + println(s"$zipFile is not yet in the classpath cache") + createForZipFile(zipFile) + } + cache.getOrElseUpdate(zipFile, newClassPathInstance) + } +} + +/** + * Manages creation of flat classpath for class files placed in zip and jar files. + * It should be the only way of creating them as it provides caching. + */ +object ZipAndJarFlatClassPathFactory extends ZipAndJarFileLookupFactory { + + private case class ZipArchiveFlatClassPath(zipFile: File) + extends ZipArchiveFileLookup[ClassFileEntryImpl] + with NoSourcePaths { + + override def findClassFile(className: String): Option[AbstractFile] = { + val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) + classes(pkg).find(_.name == simpleClassName).map(_.file) + } + + override private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = files(inPackage) + + override protected def createFileEntry(file: FileZipArchive#Entry): ClassFileEntryImpl = ClassFileEntryImpl(file) + override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isClass + } + + /** + * This type of classpath is closly related to the support for JSR-223. + * Its usage can be observed e.g. when running: + * jrunscript -classpath scala-compiler.jar;scala-reflect.jar;scala-library.jar -l scala + * with a particularly prepared scala-library.jar. It should have all classes listed in the manifest like e.g. this entry: + * Name: scala/Function2$mcFJD$sp.class + */ + private case class ManifestResourcesFlatClassPath(file: ManifestResources) + extends FlatClassPath + with NoSourcePaths { + + override def findClassFile(className: String): Option[AbstractFile] = { + val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) + classes(pkg).find(_.name == simpleClassName).map(_.file) + } + + override def asClassPathStrings: Seq[String] = Seq(file.path) + + override def asURLs: Seq[URL] = file.toURLs() + + import ManifestResourcesFlatClassPath.PackageFileInfo + import ManifestResourcesFlatClassPath.PackageInfo + + /** + * A cache mapping package name to abstract file for package directory and subpackages of given package. + * + * ManifestResources can iterate through the collections of entries from e.g. remote jar file. + * We can't just specify the path to the concrete directory etc. so we can't just 'jump' into + * given package, when it's needed. On the other hand we can iterate over entries to get + * AbstractFiles, iterate over entries of these files etc. + * + * Instead of traversing a tree of AbstractFiles once and caching all entries or traversing each time, + * when we need subpackages of a given package or its classes, we traverse once and cache only packages. + * Classes for given package can be then easily loaded when they are needed. 
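+ *
+ * Editorial sketch, not part of the original patch, of how the cache is consumed by
+ * `packages` and `classes` below:
+ * {{{
+ *   val subpackagesOfScala: Seq[AbstractFile] =
+ *     cachedPackages.get("scala") match {
+ *       case Some(PackageFileInfo(_, subpackages)) => subpackages
+ *       case None                                  => Seq.empty
+ *     }
+ * }}}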
+ */ + private lazy val cachedPackages: collection.mutable.HashMap[String, PackageFileInfo] = { + val packages = collection.mutable.HashMap[String, PackageFileInfo]() + + def getSubpackages(dir: AbstractFile): List[AbstractFile] = + (for (file <- dir if file.isPackage) yield file)(collection.breakOut) + + @tailrec + def traverse(packagePrefix: String, + filesForPrefix: List[AbstractFile], + subpackagesQueue: collection.mutable.Queue[PackageInfo]): Unit = filesForPrefix match { + case pkgFile :: remainingFiles => + val subpackages = getSubpackages(pkgFile) + val fullPkgName = packagePrefix + pkgFile.name + packages.put(fullPkgName, PackageFileInfo(pkgFile, subpackages)) + val newPackagePrefix = fullPkgName + "." + subpackagesQueue.enqueue(PackageInfo(newPackagePrefix, subpackages)) + traverse(packagePrefix, remainingFiles, subpackagesQueue) + case Nil if subpackagesQueue.nonEmpty => + val PackageInfo(packagePrefix, filesForPrefix) = subpackagesQueue.dequeue() + traverse(packagePrefix, filesForPrefix, subpackagesQueue) + case _ => + } + + val subpackages = getSubpackages(file) + packages.put(FlatClassPath.RootPackage, PackageFileInfo(file, subpackages)) + traverse(FlatClassPath.RootPackage, subpackages, collection.mutable.Queue()) + packages + } + + override private[nsc] def packages(inPackage: String): Seq[PackageEntry] = cachedPackages.get(inPackage) match { + case None => Seq.empty + case Some(PackageFileInfo(_, subpackages)) => + val prefix = PackageNameUtils.packagePrefix(inPackage) + subpackages.map(packageFile => PackageEntryImpl(prefix + packageFile.name)) + } + + override private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = cachedPackages.get(inPackage) match { + case None => Seq.empty + case Some(PackageFileInfo(pkg, _)) => + (for (file <- pkg if file.isClass) yield ClassFileEntryImpl(file))(collection.breakOut) + } + + override private[nsc] def list(inPackage: String): FlatClassPathEntries = FlatClassPathEntries(packages(inPackage), classes(inPackage)) + } + + private object ManifestResourcesFlatClassPath { + case class PackageFileInfo(packageFile: AbstractFile, subpackages: Seq[AbstractFile]) + case class PackageInfo(packageName: String, subpackages: List[AbstractFile]) + } + + override protected def createForZipFile(zipFile: AbstractFile): FlatClassPath = + if (zipFile.file == null) createWithoutUnderlyingFile(zipFile) + else ZipArchiveFlatClassPath(zipFile.file) + + private def createWithoutUnderlyingFile(zipFile: AbstractFile) = zipFile match { + case manifestRes: ManifestResources => + ManifestResourcesFlatClassPath(manifestRes) + case _ => + val errorMsg = s"Abstract files which don't have an underlying file and are not ManifestResources are not supported. There was $zipFile" + throw new IllegalArgumentException(errorMsg) + } +} + +/** + * Manages creation of flat classpath for source files placed in zip and jar files. + * It should be the only way of creating them as it provides caching. 
+ */ +object ZipAndJarFlatSourcePathFactory extends ZipAndJarFileLookupFactory { + + private case class ZipArchiveFlatSourcePath(zipFile: File) + extends ZipArchiveFileLookup[SourceFileEntryImpl] + with NoClassPaths { + + override def asSourcePathString: String = asClassPathString + + override private[nsc] def sources(inPackage: String): Seq[SourceFileEntry] = files(inPackage) + + override protected def createFileEntry(file: FileZipArchive#Entry): SourceFileEntryImpl = SourceFileEntryImpl(file) + override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isScalaOrJavaSource + } + + override protected def createForZipFile(zipFile: AbstractFile): FlatClassPath = ZipArchiveFlatSourcePath(zipFile.file) +} diff --git a/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala new file mode 100644 index 000000000000..1d0de577798b --- /dev/null +++ b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala @@ -0,0 +1,67 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package scala.tools.nsc.classpath + +import java.io.File +import java.net.URL +import scala.collection.Seq +import scala.reflect.io.AbstractFile +import scala.reflect.io.FileZipArchive +import FileUtils.AbstractFileOps + +/** + * A trait allowing to look for classpath entries of given type in zip and jar files. + * It provides common logic for classes handling class and source files. + * It's aware of things like e.g. META-INF directory which is correctly skipped. + */ +trait ZipArchiveFileLookup[FileEntryType <: ClassRepClassPathEntry] extends FlatClassPath { + val zipFile: File + + assert(zipFile != null, "Zip file in ZipArchiveFileLookup cannot be null") + + override def asURLs: Seq[URL] = Seq(zipFile.toURI.toURL) + override def asClassPathStrings: Seq[String] = Seq(zipFile.getPath) + + private val archive = new FileZipArchive(zipFile) + + override private[nsc] def packages(inPackage: String): Seq[PackageEntry] = { + val prefix = PackageNameUtils.packagePrefix(inPackage) + for { + dirEntry <- findDirEntry(inPackage).toSeq + entry <- dirEntry.iterator if entry.isPackage + } yield PackageEntryImpl(prefix + entry.name) + } + + protected def files(inPackage: String): Seq[FileEntryType] = + for { + dirEntry <- findDirEntry(inPackage).toSeq + entry <- dirEntry.iterator if isRequiredFileType(entry) + } yield createFileEntry(entry) + + override private[nsc] def list(inPackage: String): FlatClassPathEntries = { + val foundDirEntry = findDirEntry(inPackage) + + foundDirEntry map { dirEntry => + val pkgBuf = collection.mutable.ArrayBuffer.empty[PackageEntry] + val fileBuf = collection.mutable.ArrayBuffer.empty[FileEntryType] + val prefix = PackageNameUtils.packagePrefix(inPackage) + + for (entry <- dirEntry.iterator) { + if (entry.isPackage) + pkgBuf += PackageEntryImpl(prefix + entry.name) + else if (isRequiredFileType(entry)) + fileBuf += createFileEntry(entry) + } + FlatClassPathEntries(pkgBuf, fileBuf) + } getOrElse FlatClassPathEntries(Seq.empty, Seq.empty) + } + + private def findDirEntry(pkg: String) = { + val dirName = s"${FileUtils.dirPath(pkg)}/" + archive.allDirs.get(dirName) + } + + protected def createFileEntry(file: FileZipArchive#Entry): FileEntryType + protected def isRequiredFileType(file: AbstractFile): Boolean +} diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index a61ad392ee71..9433ddcf31c9 100644 --- 
a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -26,10 +26,10 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def freshName(prefix: String): Name = freshTermName(prefix) def freshTermName(prefix: String): TermName = unit.freshTermName(prefix) def freshTypeName(prefix: String): TypeName = unit.freshTypeName(prefix) - def deprecationWarning(off: Int, msg: String) = unit.deprecationWarning(off, msg) + def deprecationWarning(off: Int, msg: String) = currentRun.reporting.deprecationWarning(off, msg) implicit def i2p(offset : Int) : Position = Position.offset(unit.source, offset) - def warning(pos : Int, msg : String) : Unit = unit.warning(pos, msg) - def syntaxError(pos: Int, msg: String) : Unit = unit.error(pos, msg) + def warning(pos : Int, msg : String) : Unit = reporter.warning(pos, msg) + def syntaxError(pos: Int, msg: String) : Unit = reporter.error(pos, msg) } abstract class JavaParser extends ParserCommon { @@ -488,7 +488,8 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { val vparams = formalParams() if (!isVoid) rtpt = optArrayBrackets(rtpt) optThrows() - val bodyOk = !inInterface || (mods hasFlag Flags.DEFAULTMETHOD) + val isStatic = mods hasFlag Flags.STATIC + val bodyOk = !inInterface || ((mods hasFlag Flags.DEFAULTMETHOD) || isStatic) val body = if (bodyOk && in.token == LBRACE) { methodBody() @@ -507,7 +508,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { EmptyTree } } - if (inInterface) mods1 |= Flags.DEFERRED + if (inInterface && !isStatic) mods1 |= Flags.DEFERRED List { atPos(pos) { DefDef(mods1, name.toTermName, tparams, List(vparams), rtpt, body) diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala index c5401219dd9c..ac86dfd66510 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala @@ -860,9 +860,9 @@ trait JavaScanners extends ast.parser.ScannersCommon { class JavaUnitScanner(unit: CompilationUnit) extends JavaScanner { in = new JavaCharArrayReader(unit.source.content, !settings.nouescape.value, syntaxError) init() - def error (pos: Int, msg: String) = unit. 
error(pos, msg) - def incompleteInputError(pos: Int, msg: String) = unit.incompleteInputError(pos, msg) - def deprecationWarning(pos: Int, msg: String) = unit.deprecationWarning(pos, msg) + def error (pos: Int, msg: String) = reporter.error(pos, msg) + def incompleteInputError(pos: Int, msg: String) = currentRun.parsing.incompleteInputError(pos, msg) + def deprecationWarning(pos: Int, msg: String) = currentRun.reporting.deprecationWarning(pos, msg) implicit def g2p(pos: Int): Position = Position.offset(unit.source, pos) } } diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala index 12f9aeba2728..4b1805479d83 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala @@ -7,7 +7,7 @@ package scala.tools.nsc package plugins -import scala.reflect.io.{ File, Path } +import scala.reflect.io.Path import scala.tools.nsc.util.ClassPath import scala.tools.util.PathResolver.Defaults @@ -33,7 +33,7 @@ trait Plugins { global: Global => } val maybes = Plugin.loadAllFrom(paths, dirs, settings.disable.value) val (goods, errors) = maybes partition (_.isSuccess) - // Explicit parameterization of recover to suppress -Xlint warning about inferred Any + // Explicit parameterization of recover to avoid -Xlint warning about inferred Any errors foreach (_.recover[Any] { // legacy behavior ignores altogether, so at least warn devs case e: MissingPluginException => if (global.isDeveloper) warning(e.getMessage) diff --git a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala index 16d432438a8d..5e4914fa832e 100644 --- a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala @@ -30,6 +30,7 @@ abstract class AbstractReporter extends Reporter { private def isVerbose = settings.verbose.value private def noWarnings = settings.nowarnings.value private def isPromptSet = settings.prompt.value + private def isDebug = settings.debug protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean) { if (severity == INFO) { @@ -46,7 +47,7 @@ abstract class AbstractReporter extends Reporter { severity.count += 1 display(pos, msg, severity) } - else if (settings.debug) { + else if (isDebug) { severity.count += 1 display(pos, "[ suppressed ] " + msg, severity) } @@ -57,17 +58,19 @@ abstract class AbstractReporter extends Reporter { } } + /** Logs a position and returns true if it was already logged. * @note Two positions are considered identical for logging if they have the same point. 
*/ private def testAndLog(pos: Position, severity: Severity, msg: String): Boolean = pos != null && pos.isDefined && { - val fpos = pos.focus + val fpos = pos.focus val suppress = positions(fpos) match { - case ERROR => true // already error at position - case highest if highest > severity => true // already message higher than present severity - case `severity` => messages(fpos) contains msg // already issued this exact message - case _ => false // good to go + case ERROR => true // already error at position + case highest + if highest.id > severity.id => true // already message higher than present severity + case `severity` => messages(fpos) contains msg // already issued this exact message + case _ => false // good to go } suppress || { diff --git a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala index 3f210a543c10..5bf611a7b0dc 100644 --- a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala @@ -12,8 +12,7 @@ import scala.reflect.internal.util._ import StringOps._ /** - * This class implements a Reporter that displays messages on a text - * console. + * This class implements a Reporter that displays messages on a text console. */ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: PrintWriter) extends AbstractReporter { def this(settings: Settings) = this(settings, Console.in, new PrintWriter(Console.err, true)) @@ -30,7 +29,7 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr case INFO => null } - private def clabel(severity: Severity): String = { + protected def clabel(severity: Severity): String = { val label0 = label(severity) if (label0 eq null) "" else label0 + ": " } diff --git a/src/compiler/scala/tools/nsc/reporters/Reporter.scala b/src/compiler/scala/tools/nsc/reporters/Reporter.scala index 68362c066d80..3d688efae1bc 100644 --- a/src/compiler/scala/tools/nsc/reporters/Reporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/Reporter.scala @@ -8,76 +8,50 @@ package reporters import scala.reflect.internal.util._ -/** - * This interface provides methods to issue information, warning and - * error messages. +/** Report information, warnings and errors. + * + * This describes the internal interface for issuing information, warnings and errors. + * The only abstract method in this class must be info0. + * + * TODO: Move external clients (sbt/ide/partest) to reflect.internal.Reporter, + * and remove this class. */ -abstract class Reporter { - protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit - - object severity extends Enumeration - class Severity(val id: Int) extends severity.Value { - var count: Int = 0 - } - val INFO = new Severity(0) { - override def toString: String = "INFO" - } - val WARNING = new Severity(1) { - override def toString: String = "WARNING" - } - val ERROR = new Severity(2) { - override def toString: String = "ERROR" - } - - /** Whether very long lines can be truncated. This exists so important - * debugging information (like printing the classpath) is not rendered - * invisible due to the max message length. 
- */ - private var _truncationOK: Boolean = true - def truncationOK = _truncationOK - def withoutTruncating[T](body: => T): T = { - val saved = _truncationOK - _truncationOK = false - try body - finally _truncationOK = saved - } - - private var incompleteHandler: (Position, String) => Unit = null - def incompleteHandled = incompleteHandler != null - def withIncompleteHandler[T](handler: (Position, String) => Unit)(thunk: => T) = { - val saved = incompleteHandler - incompleteHandler = handler - try thunk - finally incompleteHandler = saved - } - - var cancelled = false - def hasErrors = ERROR.count > 0 || cancelled - def hasWarnings = WARNING.count > 0 +abstract class Reporter extends scala.reflect.internal.Reporter { + /** Informational messages. If `!force`, they may be suppressed. */ + final def info(pos: Position, msg: String, force: Boolean): Unit = info0(pos, msg, INFO, force) /** For sending a message which should not be labeled as a warning/error, * but also shouldn't require -verbose to be visible. */ - def echo(msg: String): Unit = info(NoPosition, msg, force = true) - def echo(pos: Position, msg: String): Unit = info(pos, msg, force = true) + def echo(msg: String): Unit = info(NoPosition, msg, force = true) - /** Informational messages, suppressed unless -verbose or force=true. */ - def info(pos: Position, msg: String, force: Boolean): Unit = info0(pos, msg, INFO, force) + // overridden by sbt, IDE -- should not be in the reporting interface + // (IDE receives comments from ScaladocAnalyzer using this hook method) + // TODO: IDE should override a hook method in the parser instead + def comment(pos: Position, msg: String): Unit = {} - /** Warnings and errors. */ - def warning(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, WARNING, force = false)) - def error(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, ERROR, force = false)) - def incompleteInputError(pos: Position, msg: String): Unit = { - if (incompleteHandled) incompleteHandler(pos, msg) - else error(pos, msg) - } + // used by sbt (via unit.cancel) to cancel a compile (see hasErrors) + // TODO: figure out how sbt uses this, come up with a separate interface for controlling the build + var cancelled: Boolean = false - def comment(pos: Position, msg: String) { } - def flush() { } - def reset() { - INFO.count = 0 - ERROR.count = 0 - WARNING.count = 0 - cancelled = false + override def hasErrors: Boolean = super.hasErrors || cancelled + + override def reset(): Unit = { + super.reset() + cancelled = false } + + // the below is copy/pasted from ReporterImpl for now + // partest expects this inner class + // TODO: rework partest to use the scala.reflect.internal interface, + // remove duplication here, and consolidate reflect.internal.{ReporterImpl & ReporterImpl} + class Severity(val id: Int)(name: String) { var count: Int = 0 ; override def toString = name} + object INFO extends Severity(0)("INFO") + object WARNING extends Severity(1)("WARNING") + // reason for copy/paste: this is used by partest (must be a val, not an object) + // TODO: use count(ERROR) in scala.tools.partest.nest.DirectCompiler#errorCount, rather than ERROR.count + lazy val ERROR = new Severity(2)("ERROR") + + def count(severity: Severity): Int = severity.count + def resetCount(severity: Severity): Unit = severity.count = 0 } diff --git a/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala b/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala index 04c5bdf82438..24a61cb17158 100644 --- 
a/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala @@ -10,8 +10,7 @@ import scala.collection.mutable import scala.reflect.internal.util.Position /** - * This class implements a Reporter that displays messages on a text - * console. + * This class implements a Reporter that stores its reports in the set `infos`. */ class StoreReporter extends Reporter { case class Info(pos: Position, msg: String, severity: Severity) { diff --git a/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala index 8b897b83b2ae..6b339b2a6da4 100644 --- a/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala @@ -7,19 +7,24 @@ package scala package tools.nsc package settings +import scala.language.higherKinds + trait AbsScalaSettings { self: AbsSettings => + type MultiChoiceEnumeration <: Enumeration + type Setting <: AbsSetting - type BooleanSetting <: Setting { type T = Boolean } - type ChoiceSetting <: Setting { type T = String } - type IntSetting <: Setting { type T = Int } - type MultiStringSetting <: Setting { type T = List[String] } - type PathSetting <: Setting { type T = String } - type PhasesSetting <: Setting { type T = List[String] } - type StringSetting <: Setting { type T = String } - type PrefixSetting <: Setting { type T = List[String] } + type BooleanSetting <: Setting { type T = Boolean } + type ChoiceSetting <: Setting { type T = String } + type IntSetting <: Setting { type T = Int } + type MultiStringSetting <: Setting { type T = List[String] } + type MultiChoiceSetting[E <: MultiChoiceEnumeration] <: Setting { type T <: E#ValueSet } + type PathSetting <: Setting { type T = String } + type PhasesSetting <: Setting { type T = List[String] } + type StringSetting <: Setting { type T = String } + type PrefixSetting <: Setting { type T = List[String] } type OutputDirs type OutputSetting <: Setting @@ -28,6 +33,7 @@ trait AbsScalaSettings { def ChoiceSetting(name: String, helpArg: String, descr: String, choices: List[String], default: String): ChoiceSetting def IntSetting(name: String, descr: String, default: Int, range: Option[(Int, Int)], parser: String => Option[Int]): IntSetting def MultiStringSetting(name: String, helpArg: String, descr: String): MultiStringSetting + def MultiChoiceSetting[E <: MultiChoiceEnumeration](name: String, helpArg: String, descr: String, domain: E, default: Option[List[String]]): MultiChoiceSetting[E] def OutputSetting(outputDirs: OutputDirs, default: String): OutputSetting def PathSetting(name: String, descr: String, default: String): PathSetting def PhasesSetting(name: String, descr: String, default: String): PhasesSetting diff --git a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala index 4727e6d86737..060a24d8d498 100644 --- a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala @@ -35,7 +35,11 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings { case s: AbsSettings => this.userSetSettings == s.userSetSettings case _ => false } - override def toString() = "Settings {\n%s}\n" format (userSetSettings map (" " + _ + "\n")).mkString + override def toString() = { + val uss = userSetSettings + val indent = if (uss.nonEmpty) " " * 2 else "" + uss.mkString(f"Settings {%n$indent", f"%n$indent", f"%n}%n") + } def 
toConciseString = userSetSettings.mkString("(", " ", ")") def checkDependencies = diff --git a/src/compiler/scala/tools/nsc/settings/FscSettings.scala b/src/compiler/scala/tools/nsc/settings/FscSettings.scala index 8c2b510bfd91..fffbb4333f62 100644 --- a/src/compiler/scala/tools/nsc/settings/FscSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/FscSettings.scala @@ -22,13 +22,15 @@ class FscSettings(error: String => Unit) extends Settings(error) { val reset = BooleanSetting("-reset", "Reset compile server caches") val shutdown = BooleanSetting("-shutdown", "Shutdown compile server") val server = StringSetting ("-server", "hostname:portnumber", "Specify compile server socket", "") + val port = IntSetting ("-port", "Search and start compile server in given port only", + 0, Some((0, Int.MaxValue)), (_: String) => None) val preferIPv4 = BooleanSetting("-ipv4", "Use IPv4 rather than IPv6 for the server socket") val idleMins = IntSetting ("-max-idle", "Set idle timeout in minutes for fsc (use 0 for no timeout)", 30, Some((0, Int.MaxValue)), (_: String) => None) // For improved help output, separating fsc options from the others. def fscSpecific = Set[Settings#Setting]( - currentDir, reset, shutdown, server, preferIPv4, idleMins + currentDir, reset, shutdown, server, port, preferIPv4, idleMins ) val isFscSpecific: String => Boolean = fscSpecific map (_.name) diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala index 35902541284b..b4987e124038 100644 --- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala @@ -125,14 +125,26 @@ class MutableSettings(val errorFn: String => Unit) case Some(cmd) => setter(cmd)(args) } - // if arg is of form -Xfoo:bar,baz,quux - def parseColonArg(s: String): Option[List[String]] = { - val (p, args) = StringOps.splitWhere(s, _ == ':', doDropIndex = true) getOrElse (return None) - - // any non-Nil return value means failure and we return s unmodified - tryToSetIfExists(p, (args split ",").toList, (s: Setting) => s.tryToSetColon _) + // -Xfoo: clears Clearables + def clearIfExists(cmd: String): Option[List[String]] = lookupSetting(cmd) match { + case Some(c: Clearable) => c.clear() ; Some(Nil) + case Some(s) => s.errorAndValue(s"Missing argument to $cmd", None) + case None => None } + // if arg is of form -Xfoo:bar,baz,quux + // the entire arg is consumed, so return None for failure + // any non-Nil return value means failure and we return s unmodified + def parseColonArg(s: String): Option[List[String]] = + if (s endsWith ":") { + clearIfExists(s.init) + } else { + for { + (p, args) <- StringOps.splitWhere(s, _ == ':', doDropIndex = true) + rest <- tryToSetIfExists(p, (args split ",").toList, (s: Setting) => s.tryToSetColon _) + } yield rest + } + // if arg is of form -Xfoo or -Xfoo bar (name = "-Xfoo") def parseNormalArg(p: String, args: List[String]): Option[List[String]] = tryToSetIfExists(p, args, (s: Setting) => s.tryToSet _) @@ -209,12 +221,16 @@ class MutableSettings(val errorFn: String => Unit) def BooleanSetting(name: String, descr: String) = add(new BooleanSetting(name, descr)) def ChoiceSetting(name: String, helpArg: String, descr: String, choices: List[String], default: String) = add(new ChoiceSetting(name, helpArg, descr, choices, default)) - def IntSetting(name: String, descr: String, default: Int, range: Option[(Int, Int)], parser: String => Option[Int]) = add(new IntSetting(name, descr, 
default, range, parser)) + def IntSetting(name: String, descr: String, default: Int, range: Option[(Int, Int)], parser: String => Option[Int]) = + add(new IntSetting(name, descr, default, range, parser)) def MultiStringSetting(name: String, arg: String, descr: String) = add(new MultiStringSetting(name, arg, descr)) + def MultiChoiceSetting[E <: MultiChoiceEnumeration](name: String, helpArg: String, descr: String, domain: E, default: Option[List[String]] = None) = + add(new MultiChoiceSetting[E](name, helpArg, descr, domain, default)) def OutputSetting(outputDirs: OutputDirs, default: String) = add(new OutputSetting(outputDirs, default)) def PhasesSetting(name: String, descr: String, default: String = "") = add(new PhasesSetting(name, descr, default)) def StringSetting(name: String, arg: String, descr: String, default: String) = add(new StringSetting(name, arg, descr, default)) - def ScalaVersionSetting(name: String, arg: String, descr: String, default: ScalaVersion) = add(new ScalaVersionSetting(name, arg, descr, default)) + def ScalaVersionSetting(name: String, arg: String, descr: String, initial: ScalaVersion, default: Option[ScalaVersion] = None) = + add(new ScalaVersionSetting(name, arg, descr, initial, default)) def PathSetting(name: String, descr: String, default: String): PathSetting = { val prepend = StringSetting(name + "/p", "", "", "").internalOnly() val append = StringSetting(name + "/a", "", "", "").internalOnly() @@ -363,7 +379,7 @@ class MutableSettings(val errorFn: String => Unit) def withDeprecationMessage(msg: String): this.type = { _deprecationMessage = Some(msg) ; this } } - /** A setting represented by an integer */ + /** A setting represented by an integer. */ class IntSetting private[nsc]( name: String, descr: String, @@ -439,7 +455,7 @@ class MutableSettings(val errorFn: String => Unit) value = s.equalsIgnoreCase("true") } override def tryToSetColon(args: List[String]) = args match { - case Nil => tryToSet(Nil) + case Nil => tryToSet(Nil) case List(x) => if (x.equalsIgnoreCase("true")) { value = true @@ -447,7 +463,8 @@ class MutableSettings(val errorFn: String => Unit) } else if (x.equalsIgnoreCase("false")) { value = false Some(Nil) - } else errorAndValue("'" + x + "' is not a valid choice for '" + name + "'", None) + } else errorAndValue(s"'$x' is not a valid choice for '$name'", None) + case _ => errorAndValue(s"'$name' accepts only one boolean value", None) } } @@ -490,28 +507,35 @@ class MutableSettings(val errorFn: String => Unit) withHelpSyntax(name + " <" + arg + ">") } - /** A setting represented by a Scala version, (`default` unless set) */ + /** A setting represented by a Scala version. + * The `initial` value is used if the setting is not specified. + * The `default` value is used if the option is specified without argument (e.g., `-Xmigration`). + */ class ScalaVersionSetting private[nsc]( name: String, val arg: String, descr: String, - default: ScalaVersion) + initial: ScalaVersion, + default: Option[ScalaVersion]) extends Setting(name, descr) { type T = ScalaVersion - protected var v: T = NoScalaVersion + protected var v: T = initial + // This method is invoked if there are no colonated args. In this case the default value is + // used. No arguments are consumed. 
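+ // Editorial illustration, not part of the original patch, of how `initial` and `default`
+ // play out for the -Xmigration setting, which is defined later in this patch with
+ // initial = NoScalaVersion and default = Some(AnyScalaVersion):
+ //   (flag absent)      => value stays at `initial` (NoScalaVersion)
+ //   -Xmigration        => value becomes the `default` (AnyScalaVersion)
+ //   -Xmigration:2.10   => value becomes ScalaVersion("2.10")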
override def tryToSet(args: List[String]) = { - value = default + default match { + case Some(d) => value = d + case None => errorFn(s"$name requires an argument, the syntax is $helpSyntax") + } Some(args) } override def tryToSetColon(args: List[String]) = args match { - case Nil => value = default; Some(Nil) - case x :: xs => value = ScalaVersion(x, errorFn) ; Some(xs) + case x :: xs => value = ScalaVersion(x, errorFn); Some(xs) + case nil => Some(nil) } - override def tryToSetFromPropertyValue(s: String) = tryToSet(List(s)) - def unparse: List[String] = if (value == NoScalaVersion) Nil else List(s"${name}:${value.unparse}") withHelpSyntax(s"${name}:<${arg}>") @@ -528,6 +552,7 @@ class MutableSettings(val errorFn: String => Unit) def prepend(s: String) = prependPath.value = join(s, prependPath.value) def append(s: String) = appendPath.value = join(appendPath.value, s) + override def isDefault = super.isDefault && prependPath.isDefault && appendPath.isDefault override def value = join( prependPath.value, super.value, @@ -548,8 +573,198 @@ class MutableSettings(val errorFn: String => Unit) } } + /** + * Each [[MultiChoiceSetting]] takes a MultiChoiceEnumeration as domain. The enumeration may + * use the Choice class to define values, or simply use the default `Value` constructor: + * + * object SettingDomain extends MultiChoiceEnumeration { val arg1, arg2 = Value } + * + * Or + * + * object SettingDomain extends MultiChoiceEnumeration { + * val arg1 = Choice("arg1", "help") + * val arg2 = Choice("arg2", "help") + * } + * + * Choices with a non-empty `expandsTo` enable other options. Note that expanding choices are + * not present in the multiChoiceSetting.value set, only their expansion. + */ + abstract class MultiChoiceEnumeration extends Enumeration { + case class Choice(name: String, help: String = "", expandsTo: List[Choice] = Nil) extends Val(name) + } + + /** + * A Setting that collects string-valued settings from an enumerated domain. + * - These choices can be turned on or off: "-option:on,-off" + * - If an option is set both on and off, then the option is on + * - The choice "_" enables all choices that have not been explicitly disabled + * + * Arguments can be provided in colonated or non-colonated mode, i.e. "-option a b" or + * "-option:a,b". Note that arguments starting with a "-" can only be provided in colonated mode, + * otherwise they are interpreted as a new option. + * + * In non-colonated mode, the setting stops consuming arguments at the first non-choice, + * i.e. "-option a b c" only consumes "a" and "b" if "c" is not a valid choice. + * + * @param name command-line setting name, eg "-Xlint" + * @param helpArg help description for the kind of arguments it takes, eg "warning" + * @param descr description of the setting + * @param domain enumeration of choices implementing MultiChoice, or the string value is + * taken for the name + * @param default If Some(args), the default options if none are provided. If None, an + * error is printed if there are no arguments. 
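+ *
+ * Editorial sketch, not part of the original patch, using the -Yopt setting defined later
+ * in this patch:
+ * {{{
+ *   -Yopt:l:method                   // enable all intra-method optimizations
+ *   -Yopt:l:method,-simplify-jumps   // same, but explicitly disable simplify-jumps
+ *   -Yopt:_                          // enable every choice not explicitly disabled
+ *   -Yopt:help                       // list the available choices
+ * }}}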
+ */ + class MultiChoiceSetting[E <: MultiChoiceEnumeration] private[nsc]( + name: String, + helpArg: String, + descr: String, + val domain: E, + val default: Option[List[String]] + ) extends Setting(name, s"$descr: `_' for all, `$name:help' to list") with Clearable { + + withHelpSyntax(s"$name:<_,$helpArg,-$helpArg>") + + object ChoiceOrVal { + def unapply(a: domain.Value): Option[(String, String, List[domain.Choice])] = a match { + case c: domain.Choice => Some((c.name, c.help, c.expandsTo)) + case v: domain.Value => Some((v.toString, "", Nil)) + } + } + + type T = domain.ValueSet + protected var v: T = domain.ValueSet.empty + + // Explicitly enabled or disabled. Yeas may contain expanding options, nays may not. + private var yeas = domain.ValueSet.empty + private var nays = domain.ValueSet.empty + + // Asked for help + private var sawHelp = false + // Wildcard _ encountered + private var sawAll = false + + private def badChoice(s: String) = errorFn(s"'$s' is not a valid choice for '$name'") + private def isChoice(s: String) = (s == "_") || (choices contains pos(s)) + + private def pos(s: String) = s stripPrefix "-" + private def isPos(s: String) = !(s startsWith "-") + + override val choices: List[String] = domain.values.toList map { + case ChoiceOrVal(name, _, _) => name + } + + def descriptions: List[String] = domain.values.toList map { + case ChoiceOrVal(_, "", x :: xs) => "Enables the options "+ (x :: xs).map(_.name).mkString(", ") + case ChoiceOrVal(_, descr, _) => descr + case _ => "" + } + + /** (Re)compute from current yeas, nays, wildcard status. */ + def compute() = { + def simple(v: domain.Value) = v match { + case ChoiceOrVal(_, _, Nil) => true + case _ => false + } + + /** + * Expand an expanding option, if necessary recursively. Expanding options are not included in + * the result (consistent with "_", which is not in `value` either). + * + * Note: by precondition, options in nays are not expanding, they can only be leaves. + */ + def expand(vs: domain.ValueSet): domain.ValueSet = vs flatMap { + case c @ ChoiceOrVal(_, _, Nil) => domain.ValueSet(c) + case ChoiceOrVal(_, _, others) => expand(domain.ValueSet(others: _*)) + } + + // yeas from _ or expansions are weak: an explicit nay will disable them + val weakYeas = if (sawAll) domain.values filter simple else expand(yeas filterNot simple) + value = (yeas filter simple) | (weakYeas &~ nays) + } + + /** Add a named choice to the multichoice value. */ + def add(arg: String) = arg match { + case _ if !isChoice(arg) => + badChoice(arg) + case "_" => + sawAll = true + compute() + case _ if isPos(arg) => + yeas += domain withName arg + compute() + case _ => + val choice = domain withName pos(arg) + choice match { + case ChoiceOrVal(_, _, _ :: _) => errorFn(s"'${pos(arg)}' cannot be negated, it enables other arguments") + case _ => + } + nays += choice + compute() + } + + def tryToSet(args: List[String]) = tryToSetArgs(args, halting = true) + override def tryToSetColon(args: List[String]) = tryToSetArgs(args, halting = false) + override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(',').toList) // used from ide + + /** Try to set args, handling "help" and default. + * The "halting" parameter means args were "-option a b c -else" so halt + * on "-else" or other non-choice. Otherwise, args were "-option:a,b,c,d", + * so process all and report non-choices as errors. 
+ * @param args args to process + * @param halting stop on non-arg + */ + private def tryToSetArgs(args: List[String], halting: Boolean) = { + val added = collection.mutable.ListBuffer.empty[String] + + def tryArg(arg: String) = arg match { + case "help" => sawHelp = true + case s if isChoice(s) => added += s // this case also adds "_" + case s => badChoice(s) + } + def loop(args: List[String]): List[String] = args match { + case arg :: _ if halting && (!isPos(arg) || !isChoice(arg)) => args + case arg :: rest => tryArg(arg) ; loop(rest) + case Nil => Nil + } + val rest = loop(args) + + // if no arg consumed, use defaults or error; otherwise, add what they added + if (rest.size == args.size) default match { + case Some(defaults) => defaults foreach add + case None => errorFn(s"'$name' requires an option. See '$name:help'.") + } else { + added foreach add + } + + Some(rest) + } + + def contains(choice: domain.Value): Boolean = value contains choice + + def isHelping: Boolean = sawHelp + + def help: String = { + val choiceLength = choices.map(_.length).max + 1 + val formatStr = s" %-${choiceLength}s %s" + choices.zipAll(descriptions, "", "").map { + case (arg, descr) => formatStr.format(arg, descr) + } mkString (f"$descr%n", f"%n", "") + } + + def clear(): Unit = { + v = domain.ValueSet.empty + yeas = domain.ValueSet.empty + nays = domain.ValueSet.empty + sawAll = false + sawHelp = false + } + def unparse: List[String] = value.toList map (s => s"$name:$s") + def contains(s: String) = domain.values.find(_.toString == s).exists(value.contains) + } + /** A setting that accumulates all strings supplied to it, - * until it encounters one starting with a '-'. */ + * until it encounters one starting with a '-'. + */ class MultiStringSetting private[nsc]( name: String, val arg: String, @@ -557,18 +772,23 @@ class MutableSettings(val errorFn: String => Unit) extends Setting(name, descr) with Clearable { type T = List[String] protected var v: T = Nil - def appendToValue(str: String) { value ++= List(str) } - - def tryToSet(args: List[String]) = { - val (strings, rest) = args span (x => !x.startsWith("-")) - strings foreach appendToValue + def appendToValue(str: String) = value ++= List(str) - Some(rest) + // try to set. 
halting means halt at first non-arg + protected def tryToSetArgs(args: List[String], halting: Boolean) = { + def loop(args: List[String]): List[String] = args match { + case arg :: rest => if (halting && (arg startsWith "-")) args else { appendToValue(arg) ; loop(rest) } + case Nil => Nil + } + Some(loop(args)) } - override def tryToSetColon(args: List[String]) = tryToSet(args) + def tryToSet(args: List[String]) = tryToSetArgs(args, halting = true) + override def tryToSetColon(args: List[String]) = tryToSetArgs(args, halting = false) override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(',').toList) // used from ide - def clear(): Unit = (v = Nil) + + def clear(): Unit = (v = Nil) def unparse: List[String] = value map (name + ":" + _) + def contains(s: String) = value contains s withHelpSyntax(name + ":<" + arg + ">") } @@ -587,10 +807,8 @@ class MutableSettings(val errorFn: String => Unit) protected var v: T = default def indexOfChoice: Int = choices indexOf value - private def usageErrorMessage = { - "Usage: %s:<%s>\n where <%s> choices are %s (default: %s)\n".format( - name, helpArg, helpArg, choices mkString ", ", default) - } + private def usageErrorMessage = f"Usage: $name:<$helpArg>%n where <$helpArg> choices are ${choices mkString ", "} (default: $default)%n" + def tryToSet(args: List[String]) = errorAndValue(usageErrorMessage, None) override def tryToSetColon(args: List[String]) = args match { diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index a643a0861435..a5b722612d76 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -42,10 +42,13 @@ trait ScalaSettings extends AbsScalaSettings def optimiseSettings = List[BooleanSetting](inline, inlineHandlers, Xcloselim, Xdce, YconstOptimization) /** If any of these settings is enabled, the compiler should print a message and exit. */ - def infoSettings = List[Setting](help, Xhelp, Yhelp, showPlugins, showPhases, genPhaseGraph) + def infoSettings = List[Setting](version, help, Xhelp, Yhelp, showPlugins, showPhases, genPhaseGraph) + + /** Any -multichoice:help? Nicer if any option could report that it had help to offer. */ + private def multihelp = allSettings exists { case s: MultiChoiceSetting[_] => s.isHelping case _ => false } /** Is an info setting set? */ - def isInfo = infoSettings exists (_.isSetByUser) + def isInfo = (infoSettings exists (_.isSetByUser)) || multihelp /** Disable a setting */ def disable(s: Setting) = allSettings -= s @@ -62,14 +65,35 @@ trait ScalaSettings extends AbsScalaSettings /*val argfiles = */ BooleanSetting ("@", "A text file containing compiler arguments (options and source files)") val classpath = PathSetting ("-classpath", "Specify where to find user class files.", defaultClasspath) withAbbreviation "-cp" val d = OutputSetting (outputDirs, ".") - val nospecialization = BooleanSetting ("-no-specialization", "Ignore @specialize annotations.") - val language = MultiStringSetting("-language", "feature", "Enable one or more language features.") + val nospecialization = BooleanSetting ("-no-specialization", "Ignore @specialize annotations.") + + // Would be nice to build this dynamically from scala.languageFeature. + // The two requirements: delay error checking until you have symbols, and let compiler command build option-specific help. 
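+ // Editorial examples, not part of the original patch, of the resulting command-line syntax:
+ //   -language:postfixOps,reflectiveCalls   enables just these two features
+ //   -language:_                            enables all of the features defined below
+ //   -language:help                         lists the available choices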
+ object languageFeatures extends MultiChoiceEnumeration { + val dynamics = Choice("dynamics", "Allow direct or indirect subclasses of scala.Dynamic") + val postfixOps = Choice("postfixOps", "Allow postfix operator notation, such as `1 to 10 toList'") + val reflectiveCalls = Choice("reflectiveCalls", "Allow reflective access to members of structural types") + val implicitConversions = Choice("implicitConversions", "Allow definition of implicit functions called views") + val higherKinds = Choice("higherKinds", "Allow higher-kinded types") + val existentials = Choice("existentials", "Existential types (besides wildcard types) can be written and inferred") + val macros = Choice("experimental.macros", "Allow macro defintion (besides implementation and application)") + } + val language = { + val description = "Enable or disable language features" + MultiChoiceSetting( + name = "-language", + helpArg = "feature", + descr = description, + domain = languageFeatures + ) + } /* * The previous "-source" option is intended to be used mainly * though this helper. */ - lazy val isScala211: Boolean = (source.value >= ScalaVersion("2.11.0")) + def isScala211: Boolean = source.value >= ScalaVersion("2.11.0") + def isScala212: Boolean = source.value >= ScalaVersion("2.12.0") /** * -X "Advanced" settings @@ -88,7 +112,7 @@ trait ScalaSettings extends AbsScalaSettings val logFreeTerms = BooleanSetting ("-Xlog-free-terms", "Print a message when reification creates a free term.") val logFreeTypes = BooleanSetting ("-Xlog-free-types", "Print a message when reification resorts to generating a free type.") val maxClassfileName = IntSetting ("-Xmax-classfile-name", "Maximum filename length for generated classes", 255, Some((72, 255)), _ => None) - val Xmigration = ScalaVersionSetting ("-Xmigration", "version", "Warn about constructs whose behavior may have changed since version.", AnyScalaVersion) + val Xmigration = ScalaVersionSetting ("-Xmigration", "version", "Warn about constructs whose behavior may have changed since version.", initial = NoScalaVersion, default = Some(AnyScalaVersion)) val nouescape = BooleanSetting ("-Xno-uescape", "Disable handling of \\u unicode escapes.") val Xnojline = BooleanSetting ("-Xnojline", "Do not use JLine for editing.") val Xverify = BooleanSetting ("-Xverify", "Verify generic signatures in generated bytecode (asm backend only.)") @@ -110,7 +134,7 @@ trait ScalaSettings extends AbsScalaSettings val showPhases = BooleanSetting ("-Xshow-phases", "Print a synopsis of compiler phases.") val sourceReader = StringSetting ("-Xsource-reader", "classname", "Specify a custom method for reading source files.", "") val strictInference = BooleanSetting ("-Xstrict-inference", "Don't infer known-unsound types") - val source = ScalaVersionSetting ("-Xsource", "version", "Treat compiler input as Scala source for the specified version, see SI-8126.", ScalaVersion("2.11")) withPostSetHook ( _ => isScala211) + val source = ScalaVersionSetting ("-Xsource", "version", "Treat compiler input as Scala source for the specified version, see SI-8126.", initial = ScalaVersion("2.11")) val XnoPatmatAnalysis = BooleanSetting ("-Xno-patmat-analysis", "Don't perform exhaustivity/unreachability analysis. 
Also, ignore @switch annotation.") val XfullLubs = BooleanSetting ("-Xfull-lubs", "Retains pre 2.10 behavior of less aggressive truncation of least upper bounds.") @@ -144,7 +168,7 @@ trait ScalaSettings extends AbsScalaSettings val termConflict = ChoiceSetting ("-Yresolve-term-conflict", "strategy", "Resolve term conflicts", List("package", "object", "error"), "error") val inline = BooleanSetting ("-Yinline", "Perform inlining when possible.") val inlineHandlers = BooleanSetting ("-Yinline-handlers", "Perform exception handler inlining when possible.") - val YinlinerWarnings= BooleanSetting ("-Yinline-warnings", "Emit inlining warnings. (Normally surpressed due to high volume)") + val YinlinerWarnings= BooleanSetting ("-Yinline-warnings", "Emit inlining warnings. (Normally suppressed due to high volume)") val Xlinearizer = ChoiceSetting ("-Ylinearizer", "which", "Linearizer to use", List("normal", "dfs", "rpo", "dump"), "rpo") val log = PhasesSetting ("-Ylog", "Log operations during") val Ylogcp = BooleanSetting ("-Ylog-classpath", "Output information about what classpath is being applied.") @@ -165,7 +189,6 @@ trait ScalaSettings extends AbsScalaSettings val Ygenjavap = StringSetting ("-Ygen-javap", "dir", "Generate a parallel output directory of .javap files.", "") val Ygenasmp = StringSetting ("-Ygen-asmp", "dir", "Generate a parallel output directory of .asmp files (ie ASM Textifier output).", "") val Ydumpclasses = StringSetting ("-Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "") - val Ystatistics = BooleanSetting ("-Ystatistics", "Print compiler statistics.") andThen (scala.reflect.internal.util.Statistics.enabled = _) val stopAfter = PhasesSetting ("-Ystop-after", "Stop after") withAbbreviation ("-stop") // backward compat val stopBefore = PhasesSetting ("-Ystop-before", "Stop before") val Yrangepos = BooleanSetting ("-Yrangepos", "Use range positions for syntax trees.") @@ -176,10 +199,12 @@ trait ScalaSettings extends AbsScalaSettings val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup") val Yreplclassbased = BooleanSetting ("-Yrepl-class-based", "Use classes to wrap REPL snippets instead of objects") val Yreploutdir = StringSetting ("-Yrepl-outdir", "path", "Write repl-generated classfiles to given output directory (use \"\" to generate a temporary dir)" , "") - val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overriden methods.") + val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overridden methods.") val etaExpandKeepsStar = BooleanSetting ("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.").withDeprecationMessage(removalIn212) val inferByName = BooleanSetting ("-Yinfer-by-name", "Allow inference of by-name types. This is a temporary option to ease transition. 
See SI-7899.").withDeprecationMessage(removalIn212) - val Yinvalidate = StringSetting ("-Yinvalidate", "classpath-entry", "Invalidate classpath entry before run", "") + val YclasspathImpl = ChoiceSetting ("-YclasspathImpl", "implementation", "Choose classpath scanning method.", List(ClassPathRepresentationType.Recursive, ClassPathRepresentationType.Flat), ClassPathRepresentationType.Recursive) + val YdisableFlatCpCaching = BooleanSetting ("-YdisableFlatCpCaching", "Do not cache flat classpath representation of classpath elements from jars across compiler instances.") + val YvirtClasses = false // too embryonic to even expose as a -Y //BooleanSetting ("-Yvirtual-classes", "Support virtual classes") val YdisableUnreachablePrevention = BooleanSetting("-Ydisable-unreachable-prevention", "Disable the prevention of unreachable blocks in code generation.") val YnoLoadImplClass = BooleanSetting ("-Yno-load-impl-class", "Do not load $class.class files.") @@ -188,8 +213,59 @@ trait ScalaSettings extends AbsScalaSettings // the current standard is "inline" but we are moving towards "method" val Ydelambdafy = ChoiceSetting ("-Ydelambdafy", "strategy", "Strategy used for translating lambdas into JVM code.", List("inline", "method"), "inline") + object YoptChoices extends MultiChoiceEnumeration { + val unreachableCode = Choice("unreachable-code", "Eliminate unreachable code, exception handlers protecting no instructions, debug information of eliminated variables.") + val simplifyJumps = Choice("simplify-jumps", "Simplify branching instructions, eliminate unnecessary ones.") + val recurseUnreachableJumps = Choice("recurse-unreachable-jumps", "Recursively apply unreachable-code and simplify-jumps (if enabled) until reaching a fixpoint.") + val emptyLineNumbers = Choice("empty-line-numbers", "Eliminate unnecessary line number information.") + val emptyLabels = Choice("empty-labels", "Eliminate and collapse redundant labels in the bytecode.") + val compactLocals = Choice("compact-locals", "Eliminate empty slots in the sequence of local variables.") + + val lNone = Choice("l:none", "Don't enable any optimizations.") + + private val defaultChoices = List(unreachableCode) + val lDefault = Choice("l:default", "Enable default optimizations: "+ defaultChoices.mkString(","), expandsTo = defaultChoices) + + private val methodChoices = List(unreachableCode, simplifyJumps, recurseUnreachableJumps, emptyLineNumbers, emptyLabels, compactLocals) + val lMethod = Choice("l:method", "Enable intra-method optimizations: "+ methodChoices.mkString(","), expandsTo = methodChoices) + + private val projectChoices = List(lMethod) + val lProject = Choice("l:project", "Enable cross-method optimizations within the current project: "+ projectChoices.mkString(","), expandsTo = projectChoices) + + private val classpathChoices = List(lProject) + val lClasspath = Choice("l:classpath", "Enable cross-method optimizations across the entire classpath: "+ classpathChoices.mkString(","), expandsTo = classpathChoices) + } + + val Yopt = MultiChoiceSetting( + name = "-Yopt", + helpArg = "optimization", + descr = "Enable optimizations", + domain = YoptChoices) + + def YoptNone = Yopt.isSetByUser && Yopt.value.isEmpty + def YoptUnreachableCode = !Yopt.isSetByUser || Yopt.contains(YoptChoices.unreachableCode) + def YoptSimplifyJumps = Yopt.contains(YoptChoices.simplifyJumps) + def YoptRecurseUnreachableJumps = Yopt.contains(YoptChoices.recurseUnreachableJumps) + def YoptEmptyLineNumbers = Yopt.contains(YoptChoices.emptyLineNumbers) + def 
YoptEmptyLabels = Yopt.contains(YoptChoices.emptyLabels) + def YoptCompactLocals = Yopt.contains(YoptChoices.compactLocals) + private def removalIn212 = "This flag is scheduled for removal in 2.12. If you have a case where you need this flag then please report a bug." + object YstatisticsPhases extends MultiChoiceEnumeration { val parser, typer, patmat, erasure, cleanup, jvm = Value } + val Ystatistics = { + val description = "Print compiler statistics for specific phases" + MultiChoiceSetting( + name = "-Ystatistics", + helpArg = "phase", + descr = description, + domain = YstatisticsPhases, + default = Some(List("_")) + ) withPostSetHook { _ => scala.reflect.internal.util.Statistics.enabled = true } + } + + def YstatisticsEnabled = Ystatistics.value.nonEmpty + /** Area-specific debug output. */ val Ydocdebug = BooleanSetting("-Ydoc-debug", "Trace all scaladoc activity.") @@ -201,6 +277,8 @@ trait ScalaSettings extends AbsScalaSettings val Yreifydebug = BooleanSetting("-Yreify-debug", "Trace reification.") val Ytyperdebug = BooleanSetting("-Ytyper-debug", "Trace all type assignments.") val Ypatmatdebug = BooleanSetting("-Ypatmat-debug", "Trace pattern matching translation.") + val YpatmatExhaustdepth = IntSetting("-Ypatmat-exhaust-depth", "off", 20, Some((10, Int.MaxValue)), + str => Some(if(str.equalsIgnoreCase("off")) Int.MaxValue else str.toInt)) val Yquasiquotedebug = BooleanSetting("-Yquasiquote-debug", "Trace quasiquote-related activities.") // TODO 2.12 Remove @@ -255,3 +333,8 @@ trait ScalaSettings extends AbsScalaSettings val Discard = "discard" } } + +object ClassPathRepresentationType { + val Flat = "flat" + val Recursive = "recursive" +} diff --git a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala index 4f45043c5eaa..43bdad588259 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala @@ -34,7 +34,7 @@ case object NoScalaVersion extends ScalaVersion { * to segregate builds */ case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBuild) extends ScalaVersion { - def unparse = s"${major}.${minor}.${rev}.${build.unparse}" + def unparse = s"${major}.${minor}.${rev}${build.unparse}" def compare(that: ScalaVersion): Int = that match { case SpecificScalaVersion(thatMajor, thatMinor, thatRev, thatBuild) => diff --git a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala index 37dfafb01c06..d42c0dd730d8 100644 --- a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala @@ -39,7 +39,7 @@ trait StandardScalaSettings { val optimise: BooleanSetting // depends on post hook which mutates other settings val print = BooleanSetting ("-print", "Print program with Scala-specific features removed.") val target = ChoiceSetting ("-target", "target", "Target platform for object files. 
All JVM 1.5 targets are deprecated.", - List("jvm-1.5", "jvm-1.6", "jvm-1.7"), "jvm-1.6") + List("jvm-1.5", "jvm-1.6", "jvm-1.7", "jvm-1.8"), "jvm-1.6") val unchecked = BooleanSetting ("-unchecked", "Enable additional warnings where generated code depends on assumptions.") val uniqid = BooleanSetting ("-uniqid", "Uniquely tag all identifiers in debugging output.") val usejavacp = BooleanSetting ("-usejavacp", "Utilize the java.class.path in classpath resolution.") diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala index 1509ad13b852..d174dc86c7a8 100644 --- a/src/compiler/scala/tools/nsc/settings/Warnings.scala +++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala @@ -17,41 +17,96 @@ trait Warnings { // Warning semantics. val fatalWarnings = BooleanSetting("-Xfatal-warnings", "Fail the compilation if there are any warnings.") - // These warnings are all so noisy as to be useless in their - // present form, but have the potential to offer useful info. - protected def allWarnings = lintWarnings ++ List( - warnDeadCode, - warnValueDiscard, - warnNumericWiden - ) - // These warnings should be pretty quiet unless you're doing - // something inadvisable. - protected def lintWarnings = List( - warnInaccessible, - warnNullaryOverride, - warnNullaryUnit, - warnAdaptedArgs, - warnInferAny - // warnUnused SI-7712, SI-7707 warnUnused not quite ready for prime-time - // warnUnusedImport currently considered too noisy for general use - ) + // Non-lint warnings - private lazy val warnSelectNullable = BooleanSetting("-Xcheck-null", "This option is obsolete and does nothing.") + val warnDeadCode = BooleanSetting("-Ywarn-dead-code", "Warn when dead code is identified.") + val warnValueDiscard = BooleanSetting("-Ywarn-value-discard", "Warn when non-Unit expression results are unused.") + val warnNumericWiden = BooleanSetting("-Ywarn-numeric-widen", "Warn when numerics are widened.") + // SI-7712, SI-7707 warnUnused not quite ready for prime-time + val warnUnused = BooleanSetting("-Ywarn-unused", "Warn when local and private vals, vars, defs, and types are unused.") + // currently considered too noisy for general use + val warnUnusedImport = BooleanSetting("-Ywarn-unused-import", "Warn when imports are unused.") + + // Experimental lint warnings that are turned off, but which could be turned on programmatically. + // These warnings are said to blind those who dare enable them. + // They are not activated by -Xlint and can't be enabled on the command line. + val warnValueOverrides = { // Currently turned off as experimental. Created using constructor (new BS), so not available on the command line.
+ val flag = new BooleanSetting("value-overrides", "Generated value class method overrides an implementation") + flag.value = false + flag + } + + // Lint warnings + + object LintWarnings extends MultiChoiceEnumeration { + class LintWarning(name: String, help: String, val yAliased: Boolean) extends Choice(name, help) + def LintWarning(name: String, help: String, yAliased: Boolean = false) = new LintWarning(name, help, yAliased) + + val AdaptedArgs = LintWarning("adapted-args", "Warn if an argument list is modified to match the receiver.", true) + val NullaryUnit = LintWarning("nullary-unit", "Warn when nullary methods return Unit.", true) + val Inaccessible = LintWarning("inaccessible", "Warn about inaccessible types in method signatures.", true) + val NullaryOverride = LintWarning("nullary-override", "Warn when non-nullary `def f()' overrides nullary `def f'.", true) + val InferAny = LintWarning("infer-any", "Warn when a type argument is inferred to be `Any`.", true) + val MissingInterpolator = LintWarning("missing-interpolator", "A string literal appears to be missing an interpolator id.") + val DocDetached = LintWarning("doc-detached", "A ScalaDoc comment appears to be detached from its element.") + val PrivateShadow = LintWarning("private-shadow", "A private field (or class parameter) shadows a superclass field.") + val TypeParameterShadow = LintWarning("type-parameter-shadow", "A local type parameter shadows a type already in scope.") + val PolyImplicitOverload = LintWarning("poly-implicit-overload", "Parameterized overloaded implicit methods are not visible as view bounds.") + val OptionImplicit = LintWarning("option-implicit", "Option.apply used implicit view.") + val DelayedInitSelect = LintWarning("delayedinit-select", "Selecting member of DelayedInit") + val ByNameRightAssociative = LintWarning("by-name-right-associative", "By-name parameter of right associative operator.") + val PackageObjectClasses = LintWarning("package-object-classes", "Class or object defined in package object.") + val UnsoundMatch = LintWarning("unsound-match", "Pattern match may not be typesafe.") + + def allLintWarnings = values.toSeq.asInstanceOf[Seq[LintWarning]] + } + import LintWarnings._ - // Individual warnings. - val warnAdaptedArgs = BooleanSetting ("-Ywarn-adapted-args", "Warn if an argument list is modified to match the receiver.") - val warnDeadCode = BooleanSetting ("-Ywarn-dead-code", "Warn when dead code is identified.") - val warnValueDiscard = BooleanSetting ("-Ywarn-value-discard", "Warn when non-Unit expression results are unused.") - val warnNumericWiden = BooleanSetting ("-Ywarn-numeric-widen", "Warn when numerics are widened.") - val warnNullaryUnit = BooleanSetting ("-Ywarn-nullary-unit", "Warn when nullary methods return Unit.") - val warnInaccessible = BooleanSetting ("-Ywarn-inaccessible", "Warn about inaccessible types in method signatures.") - val warnNullaryOverride = BooleanSetting ("-Ywarn-nullary-override", "Warn when non-nullary overrides nullary, e.g. `def foo()` over `def foo`.") - val warnInferAny = BooleanSetting ("-Ywarn-infer-any", "Warn when a type argument is inferred to be `Any`.") - val warnUnused = BooleanSetting ("-Ywarn-unused", "Warn when local and private vals, vars, defs, and types are are unused") - val warnUnusedImport = BooleanSetting ("-Ywarn-unused-import", "Warn when imports are unused") - - // Warning groups. 
- val lint = BooleanSetting("-Xlint", "Enable recommended additional warnings.") enablingIfNotSetByUser lintWarnings + def warnAdaptedArgs = lint contains AdaptedArgs + def warnNullaryUnit = lint contains NullaryUnit + def warnInaccessible = lint contains Inaccessible + def warnNullaryOverride = lint contains NullaryOverride + def warnInferAny = lint contains InferAny + def warnMissingInterpolator = lint contains MissingInterpolator + def warnDocDetached = lint contains DocDetached + def warnPrivateShadow = lint contains PrivateShadow + def warnTypeParameterShadow = lint contains TypeParameterShadow + def warnPolyImplicitOverload = lint contains PolyImplicitOverload + def warnOptionImplicit = lint contains OptionImplicit + def warnDelayedInit = lint contains DelayedInitSelect + def warnByNameRightAssociative = lint contains ByNameRightAssociative + def warnPackageObjectClasses = lint contains PackageObjectClasses + def warnUnsoundMatch = lint contains UnsoundMatch + + // Lint warnings that are currently -Y, but deprecated in that usage + @deprecated("Use warnAdaptedArgs", since="2.11.2") + def YwarnAdaptedArgs = warnAdaptedArgs + @deprecated("Use warnNullaryUnit", since="2.11.2") + def YwarnNullaryUnit = warnNullaryUnit + @deprecated("Use warnInaccessible", since="2.11.2") + def YwarnInaccessible = warnInaccessible + @deprecated("Use warnNullaryOverride", since="2.11.2") + def YwarnNullaryOverride = warnNullaryOverride + @deprecated("Use warnInferAny", since="2.11.2") + def YwarnInferAny = warnInferAny + + // The Xlint warning group. + val lint = MultiChoiceSetting( + name = "-Xlint", + helpArg = "warning", + descr = "Enable or disable specific warnings", + domain = LintWarnings, + default = Some(List("_"))) + + allLintWarnings foreach { + case w if w.yAliased => + BooleanSetting(s"-Ywarn-${w.name}", {w.help}) withPostSetHook { s => + lint.add(if (s) w.name else s"-${w.name}") + } // withDeprecationMessage s"Enable -Xlint:${c._1}" + case _ => + } + + private lazy val warnSelectNullable = BooleanSetting("-Xcheck-null", "This option is obsolete and does nothing.") // Backward compatibility. @deprecated("Use fatalWarnings", "2.11.0") def Xwarnfatal = fatalWarnings // used by sbt diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index 8b739958ff0a..8fd2ea45e464 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -6,13 +6,15 @@ package scala.tools.nsc package symtab +import classfile.ClassfileParser import java.io.IOException import scala.compat.Platform.currentTime -import scala.tools.nsc.util.{ ClassPath } -import classfile.ClassfileParser import scala.reflect.internal.MissingRequirementError import scala.reflect.internal.util.Statistics import scala.reflect.io.{ AbstractFile, NoAbstractFile } +import scala.tools.nsc.classpath.FlatClassPath +import scala.tools.nsc.settings.ClassPathRepresentationType +import scala.tools.nsc.util.{ ClassPath, ClassRepresentation } /** This class ... * @@ -86,8 +88,7 @@ abstract class SymbolLoaders { // require yjp.jar at runtime. See SI-2089. 
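// [Editor's sketch, not part of the patch] A toy model of the aliasing scheme above: each
// deprecated -Ywarn-<name> boolean forwards into the -Xlint multichoice through its post-set
// hook, adding the choice name when enabled and a "-"-prefixed name to subtract it when
// disabled, mirroring `lint.add(if (s) w.name else s"-${w.name}")`. The real MultiChoiceSetting
// also handles the "_" wildcard and expanding choices, which this sketch omits.
object LintAliasSketch {
  final class MultiChoice {
    private var enabled = Set.empty[String]
    def add(arg: String): Unit =
      if (arg.startsWith("-")) enabled -= arg.tail else enabled += arg
    def contains(name: String): Boolean = enabled(name)
  }

  def main(args: Array[String]): Unit = {
    val lint = new MultiChoice
    lint.add("adapted-args")  // -Ywarn-adapted-args set to true
    lint.add("-adapted-args") // the same alias later set to false subtracts the choice
    println(lint contains "adapted-args") // false: the last write wins
  }
}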
if (settings.termConflict.isDefault) throw new TypeError( - root+" contains object and package with same name: "+ - name+"\none of them needs to be removed from classpath" + s"$root contains object and package with same name: $name\none of them needs to be removed from classpath" ) else if (settings.termConflict.value == "package") { warning( @@ -154,7 +155,7 @@ abstract class SymbolLoaders { /** Initialize toplevel class and module symbols in `owner` from class path representation `classRep` */ - def initializeFromClassPath(owner: Symbol, classRep: ClassPath[AbstractFile]#ClassRep) { + def initializeFromClassPath(owner: Symbol, classRep: ClassRepresentation[AbstractFile]) { ((classRep.binary, classRep.source) : @unchecked) match { case (Some(bin), Some(src)) if platform.needCompile(bin, src) && !binaryOnly(owner, classRep.name) => @@ -169,7 +170,7 @@ abstract class SymbolLoaders { } /** Create a new loader from a binary classfile. - * This is intented as a hook allowing to support loading symbols from + * This is intended as a hook allowing to support loading symbols from * files other than .class files. */ protected def newClassLoader(bin: AbstractFile): SymbolLoader = @@ -240,24 +241,68 @@ abstract class SymbolLoaders { } } + private def phaseBeforeRefchecks: Phase = { + var resPhase = phase + while (resPhase.refChecked) resPhase = resPhase.prev + resPhase + } + /** * Load contents of a package */ class PackageLoader(classpath: ClassPath[AbstractFile]) extends SymbolLoader with FlagAgnosticCompleter { - protected def description = "package loader "+ classpath.name + protected def description = s"package loader ${classpath.name}" protected def doComplete(root: Symbol) { assert(root.isPackageClass, root) - root.setInfo(new PackageClassInfoType(newScope, root)) + // Time travel to a phase before refchecks avoids an initialization issue. `openPackageModule` + // creates a module symbol and invokes `companionModule` while the `infos` field is + // still null. This calls `isModuleNotMethod`, which forces the `info` if run after refchecks.
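// [Editor's sketch, not part of the patch] The phaseBeforeRefchecks helper above simply walks
// the prev chain until it reaches the last phase that is not yet "refchecked". A minimal
// stand-in model, with Phase as a toy class rather than the compiler's:
object PhaseWalkSketch {
  final case class Phase(name: String, refChecked: Boolean, prev: Phase /* null = no earlier phase */)

  def phaseBefore(start: Phase)(flagged: Phase => Boolean): Phase = {
    var res = start
    while (flagged(res)) res = res.prev
    res
  }

  def main(args: Array[String]): Unit = {
    val parser    = Phase("parser",    refChecked = false, prev = null)
    val typer     = Phase("typer",     refChecked = false, prev = parser)
    val refchecks = Phase("refchecks", refChecked = true,  prev = typer)
    val erasure   = Phase("erasure",   refChecked = true,  prev = refchecks)
    println(phaseBefore(erasure)(_.refChecked).name) // prints "typer"
  }
}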
+ enteringPhase(phaseBeforeRefchecks) { + root.setInfo(new PackageClassInfoType(newScope, root)) + + if (!root.isRoot) { + for (classRep <- classpath.classes) { + initializeFromClassPath(root, classRep) + } + } + if (!root.isEmptyPackageClass) { + for (pkg <- classpath.packages) { + enterPackage(root, pkg.name, new PackageLoader(pkg)) + } - if (!root.isRoot) { - for (classRep <- classpath.classes if platform.doLoad(classRep)) { - initializeFromClassPath(root, classRep) + openPackageModule(root) } } + } + } + + /** + * Loads contents of a package + */ + class PackageLoaderUsingFlatClassPath(packageName: String, classPath: FlatClassPath) extends SymbolLoader with FlagAgnosticCompleter { + protected def description = { + val shownPackageName = if (packageName == FlatClassPath.RootPackage) "" else packageName + s"package loader $shownPackageName" + } + + protected def doComplete(root: Symbol) { + assert(root.isPackageClass, root) + root.setInfo(new PackageClassInfoType(newScope, root)) + + val classPathEntries = classPath.list(packageName) + + if (!root.isRoot) + for (entry <- classPathEntries.classesAndSources) initializeFromClassPath(root, entry) if (!root.isEmptyPackageClass) { - for (pkg <- classpath.packages) { - enterPackage(root, pkg.name, new PackageLoader(pkg)) + for (pkg <- classPathEntries.packages) { + val fullName = pkg.name + + val name = + if (packageName == FlatClassPath.RootPackage) fullName + else fullName.substring(packageName.length + 1) + val packageLoader = new PackageLoaderUsingFlatClassPath(fullName, classPath) + enterPackage(root, name, packageLoader) } openPackageModule(root) @@ -282,15 +327,26 @@ abstract class SymbolLoaders { * */ private type SymbolLoadersRefined = SymbolLoaders { val symbolTable: classfileParser.symbolTable.type } + val loaders = SymbolLoaders.this.asInstanceOf[SymbolLoadersRefined] - val classPath = platform.classPath + + override def classFileLookup: util.ClassFileLookup[AbstractFile] = settings.YclasspathImpl.value match { + case ClassPathRepresentationType.Recursive => platform.classPath + case ClassPathRepresentationType.Flat => platform.flatClassPath + } } protected def description = "class file "+ classfile.toString protected def doComplete(root: Symbol) { val start = if (Statistics.canEnable) Statistics.startTimer(classReadNanos) else null - classfileParser.parse(classfile, root) + + // Running the classfile parser after refchecks can lead to "illegal class file dependency" + // errors. More concretely, the classfile parser calls "sym.companionModule", which calls + // "isModuleNotMethod" on the companion. After refchecks, this method forces the info, which + // may run the classfile parser. This produces the error. 
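// [Editor's sketch, not part of the patch] The flat classpath lists packages by their full
// names, so PackageLoaderUsingFlatClassPath in the hunk above strips the enclosing package
// prefix plus the '.' to obtain the simple name it enters. This sketch assumes the root
// package is the empty string, standing in for FlatClassPath.RootPackage.
object FlatPackageNameSketch {
  val RootPackage = ""

  def simpleName(enclosing: String, fullName: String): String =
    if (enclosing == RootPackage) fullName
    else fullName.substring(enclosing.length + 1)

  def main(args: Array[String]): Unit = {
    println(simpleName(RootPackage, "scala"))                           // scala
    println(simpleName("scala", "scala.collection"))                    // collection
    println(simpleName("scala.collection", "scala.collection.mutable")) // mutable
  }
}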
+ enteringPhase(phaseBeforeRefchecks)(classfileParser.parse(classfile, root)) + if (root.associatedFile eq NoAbstractFile) { root match { // In fact, the ModuleSymbol forwards its setter to the module class diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index ea600bc586d0..4d08be3c24b5 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -16,8 +16,7 @@ import scala.annotation.switch import scala.reflect.internal.{ JavaAccFlags } import scala.reflect.internal.pickling.{PickleBuffer, ByteCodecs} import scala.tools.nsc.io.AbstractFile - -import util.ClassPath +import scala.tools.nsc.util.ClassFileLookup /** This abstract class implements a class file parser. * @@ -43,8 +42,8 @@ abstract class ClassfileParser { */ protected def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol - /** The compiler classpath. */ - def classPath: ClassPath[AbstractFile] + /** The class file lookup used by the compiler. */ + def classFileLookup: ClassFileLookup[AbstractFile] import definitions._ import scala.reflect.internal.ClassfileConstants._ @@ -352,13 +351,17 @@ abstract class ClassfileParser { } private def loadClassSymbol(name: Name): Symbol = { - val file = classPath findSourceFile ("" +name) getOrElse { + val file = classFileLookup findClassFile name.toString getOrElse { // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects // that are not in their correct place (see bug for details) - if (!settings.isScaladoc) - warning(s"Class $name not found - continuing with a stub.") - return NoSymbol.newClass(name.toTypeName) + + // TODO More consistency with use of stub symbols in `Unpickler` + // - better owner than `NoSymbol` + // - remove eager warning + val msg = s"Class $name not found - continuing with a stub."
+ if (!settings.isScaladoc) warning(msg) + return NoSymbol.newStubSymbol(name.toTypeName, msg) } val completer = new loaders.ClassfileLoader(file) var owner: Symbol = rootMirror.RootClass @@ -584,7 +587,7 @@ abstract class ClassfileParser { info = MethodType(newParams, clazz.tpe) } - // Note: the info may be overrwritten later with a generic signature + // Note: the info may be overwritten later with a generic signature // parsed from SignatureATTR sym setInfo info propagatePackageBoundary(jflags, sym) @@ -765,7 +768,7 @@ abstract class ClassfileParser { classTParams = tparams val parents = new ListBuffer[Type]() while (index < end) { - parents += sig2type(tparams, skiptvs = false) // here the variance doesnt'matter + parents += sig2type(tparams, skiptvs = false) // here the variance doesn't matter } ClassInfoType(parents.toList, instanceScope, sym) } @@ -1043,8 +1046,8 @@ abstract class ClassfileParser { for (entry <- innerClasses.entries) { // create a new class member for immediate inner classes if (entry.outerName == currentClass) { - val file = classPath.findSourceFile(entry.externalName.toString) getOrElse { - throw new AssertionError(entry.externalName) + val file = classFileLookup.findClassFile(entry.externalName.toString) getOrElse { + throw new AssertionError(s"Class file for ${entry.externalName} not found") } enterClassAndModule(entry, file) } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala index 6ca2205881d7..bd1fa4e70746 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala @@ -130,7 +130,7 @@ abstract class ICodeReader extends ClassfileParser { log("ICodeReader reading " + cls) val name = cls.javaClassName - classPath.findSourceFile(name) match { + classFileLookup.findClassFile(name) match { case Some(classFile) => parse(classFile, cls) case _ => MissingRequirementError.notFound("Could not find bytecode for " + cls) } @@ -780,32 +780,40 @@ abstract class ICodeReader extends ClassfileParser { bb = otherBlock // Console.println("\t> entering bb: " + bb) } - instr match { - case LJUMP(target) => - otherBlock = blocks(target) - bb.emitOnly(JUMP(otherBlock)) - case LCJUMP(success, failure, cond, kind) => - otherBlock = blocks(success) - val failBlock = blocks(failure) - bb.emitOnly(CJUMP(otherBlock, failBlock, cond, kind)) + if (bb.closed) { + // the basic block is closed, i.e. the previous instruction was a jump, return or throw, + // but the next instruction is not a jump target. this means that the next instruction is + // dead code. we can therefore advance until the next jump target. 
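// [Editor's sketch, not part of the patch] A standalone model of the dead-code skipping above:
// once a basic block has been closed by a jump/return/throw, the following instructions are
// dead until the next jump target, so the reader logs and drops them instead of emitting into
// a closed block. The instruction encoding and jump-target set below are illustrative stand-ins.
object DeadCodeSkipSketch {
  sealed trait Instr
  final case class Op(name: String)  extends Instr
  final case class Jump(target: Int) extends Instr

  def liveInstructions(code: List[(Int, Instr)], jumpTargets: Set[Int]): List[(Int, Instr)] = {
    val out = List.newBuilder[(Int, Instr)]
    var closed = false
    for ((pc, instr) <- code) {
      if (jumpTargets(pc)) closed = false // a jump target opens a fresh, reachable block
      if (!closed) out += ((pc, instr))   // otherwise the instruction is dead and skipped
      instr match {
        case Jump(_) => closed = true     // block closed; what follows is dead until a target
        case _       =>
      }
    }
    out.result()
  }

  def main(args: Array[String]): Unit = {
    val code = List(0 -> Op("load"), 1 -> Jump(4), 2 -> Op("dead"), 3 -> Op("dead"), 4 -> Op("ret"))
    println(liveInstructions(code, jumpTargets = Set(4)).map(_._1)) // List(0, 1, 4)
  }
}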
+ debuglog(s"ICode reader skipping dead instruction $instr in classfile $instanceCode") + } else { + instr match { + case LJUMP(target) => + otherBlock = blocks(target) + bb.emitOnly(JUMP(otherBlock)) + + case LCJUMP(success, failure, cond, kind) => + otherBlock = blocks(success) + val failBlock = blocks(failure) + bb.emitOnly(CJUMP(otherBlock, failBlock, cond, kind)) - case LCZJUMP(success, failure, cond, kind) => - otherBlock = blocks(success) - val failBlock = blocks(failure) - bb.emitOnly(CZJUMP(otherBlock, failBlock, cond, kind)) + case LCZJUMP(success, failure, cond, kind) => + otherBlock = blocks(success) + val failBlock = blocks(failure) + bb.emitOnly(CZJUMP(otherBlock, failBlock, cond, kind)) - case LSWITCH(tags, targets) => - bb.emitOnly(SWITCH(tags, targets map blocks)) + case LSWITCH(tags, targets) => + bb.emitOnly(SWITCH(tags, targets map blocks)) - case RETURN(_) => - bb emitOnly instr + case RETURN(_) => + bb emitOnly instr - case THROW(clasz) => - bb emitOnly instr + case THROW(clasz) => + bb emitOnly instr - case _ => - bb emit instr + case _ => + bb emit instr + } } } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 592c5497b552..25e13a131485 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -69,7 +69,7 @@ abstract class Pickler extends SubComponent { // OPT: do this only as a recovery after fatal error. Checking in advance was expensive. if (t.isErroneous) { if (settings.debug) e.printStackTrace() - unit.error(t.pos, "erroneous or inaccessible type") + reporter.error(t.pos, "erroneous or inaccessible type") return } } @@ -186,7 +186,16 @@ abstract class Pickler extends SubComponent { val (locals, globals) = sym.children partition (_.isLocalClass) val children = if (locals.isEmpty) globals - else globals + sym.newClassWithInfo(tpnme.LOCAL_CHILD, List(sym.tpe), EmptyScope, pos = sym.pos) + else { + // The LOCAL_CHILD was introduced in 12a2b3b to fix Aladdin bug 1055. When a sealed + // class/trait has local subclasses, a single class symbol is added + // as pickled child (instead of a reference to the anonymous class; that was done + // initially, but seems not to work, as the bug shows). + // Adding the LOCAL_CHILD is necessary to retain exhaustivity warnings under separate + // compilation. See test neg/aladdin1055. 
+ val parents = (if (sym.isTrait) List(definitions.ObjectTpe) else Nil) ::: List(sym.tpe) + globals + sym.newClassWithInfo(tpnme.LOCAL_CHILD, parents, EmptyScope, pos = sym.pos) + } putChildren(sym, children.toList sortBy (_.sealedSortName)) } diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala index 2b7c6cca2ca2..f786ffb8f3c7 100644 --- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala +++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala @@ -8,6 +8,7 @@ package transform import symtab._ import Flags._ +import scala.tools.nsc.util.ClassPath abstract class AddInterfaces extends InfoTransform { self: Erasure => import global._ // the global environment @@ -67,25 +68,30 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure => val implName = tpnme.implClassName(iface.name) val implFlags = (iface.flags & ~(INTERFACE | lateINTERFACE)) | IMPLCLASS - val impl0 = ( + val impl0 = { if (!inClass) NoSymbol - else iface.owner.info.decl(implName) match { - case NoSymbol => NoSymbol - case implSym => - // Unlink a pre-existing symbol only if the implementation class is - // visible on the compilation classpath. In general this is true under - // -optimise and not otherwise, but the classpath can use arbitrary - // logic so the classpath must be queried. - if (classPath.context.isValidName(implName + ".class")) { - iface.owner.info.decls unlink implSym - NoSymbol - } - else { - log(s"not unlinking $iface's existing implClass ${implSym.name} because it is not on the classpath.") - implSym - } + else { + val typeInfo = iface.owner.info + typeInfo.decl(implName) match { + case NoSymbol => NoSymbol + case implSym => + // Unlink a pre-existing symbol only if the implementation class is + // visible on the compilation classpath. In general this is true under + // -optimise and not otherwise, but the classpath can use arbitrary + // logic so the classpath must be queried. + // TODO this is not taken into account by flat classpath yet + classPath match { + case cp: ClassPath[_] if !cp.context.isValidName(implName + ".class") => + log(s"not unlinking $iface's existing implClass ${implSym.name} because it is not on the classpath.") + implSym + case _ => + typeInfo.decls unlink implSym + NoSymbol + } + } } - ) + } + val impl = impl0 orElse { val impl = iface.owner.newImplClass(implName, iface.pos, implFlags) if (iface.thisSym != iface) { @@ -345,6 +351,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure => while (owner != sym && owner != impl) owner = owner.owner; if (owner == impl) This(impl) setPos tree.pos else tree + //TODO what about this commented out code? /* !!! 
case Super(qual, mix) => val mix1 = mix diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index f14fce5de972..c29826551bdd 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -76,7 +76,7 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { val qual0 = ad.qual val params = ad.args if (settings.logReflectiveCalls) - unit.echo(ad.pos, "method invocation uses reflection") + reporter.echo(ad.pos, "method invocation uses reflection") val typedPos = typedWithPos(ad.pos) _ @@ -360,13 +360,13 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { assert(params.length == mparams.length, ((params, mparams))) (mparams, resType) case tpe @ OverloadedType(pre, alts) => - unit.warning(ad.pos, s"Overloaded type reached the backend! This is a bug in scalac.\n Symbol: ${ad.symbol}\n Overloads: $tpe\n Arguments: " + ad.args.map(_.tpe)) + reporter.warning(ad.pos, s"Overloaded type reached the backend! This is a bug in scalac.\n Symbol: ${ad.symbol}\n Overloads: $tpe\n Arguments: " + ad.args.map(_.tpe)) alts filter (_.paramss.flatten.size == params.length) map (_.tpe) match { case mt @ MethodType(mparams, resType) :: Nil => - unit.warning(NoPosition, "Only one overload has the right arity, proceeding with overload " + mt) + reporter.warning(NoPosition, "Only one overload has the right arity, proceeding with overload " + mt) (mparams, resType) case _ => - unit.error(ad.pos, "Cannot resolve overload.") + reporter.error(ad.pos, "Cannot resolve overload.") (Nil, NoType) } } @@ -520,7 +520,9 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { * And, finally, be advised - Scala's Symbol literal (scala.Symbol) and the Symbol class of the compiler * have little in common. */ - case Apply(fn, (arg @ Literal(Constant(symname: String))) :: Nil) if fn.symbol == Symbol_apply => + case Apply(fn @ Select(qual, _), (arg @ Literal(Constant(symname: String))) :: Nil) + if treeInfo.isQualifierSafeToElide(qual) && fn.symbol == Symbol_apply && !currentClass.isTrait => + def transformApply = { // add the symbol name to a map if it's not there already val rhs = gen.mkMethodCall(Symbol_apply, arg :: Nil) diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index 391bce5abbf6..362cbde04fc8 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -54,7 +54,7 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL { def check(tree: Tree) = { for (t <- tree) t match { case t: RefTree if uninitializedVals(t.symbol.accessedOrSelf) && t.qualifier.symbol == clazz => - unit.warning(t.pos, s"Reference to uninitialized ${t.symbol.accessedOrSelf}") + reporter.warning(t.pos, s"Reference to uninitialized ${t.symbol.accessedOrSelf}") case _ => } } @@ -535,7 +535,7 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL { * whether `sym` denotes a param-accessor (ie a field) that fulfills all of: * (a) has stationary value, ie the same value provided via the corresponding ctor-arg; and * (b) isn't subject to specialization. We might be processing statements for: - * (b.1) the constructur in the generic (super-)class; or + * (b.1) the constructor in the generic (super-)class; or * (b.2) the constructor in the specialized (sub-)class. 
* (c) isn't part of a DelayedInit subclass. */ @@ -685,7 +685,7 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL { // mangling before we introduce more of it. val conflict = clazz.info.nonPrivateMember(acc.name) filter (s => s.isGetter && !s.isOuterField && s.enclClass.isTrait) if (conflict ne NoSymbol) - unit.error(acc.pos, "parameter '%s' requires field but conflicts with %s".format(acc.name, conflict.fullLocationString)) + reporter.error(acc.pos, "parameter '%s' requires field but conflicts with %s".format(acc.name, conflict.fullLocationString)) copyParam(acc, parameter(acc)) } diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala index 1468680fe0a8..d2c511a2d1fd 100644 --- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala +++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala @@ -9,7 +9,7 @@ import scala.reflect.internal.Symbols import scala.collection.mutable.LinkedHashMap /** - * This transformer is responisble for turning lambdas into anonymous classes. + * This transformer is responsible for turning lambdas into anonymous classes. * The main assumption it makes is that a lambda {args => body} has been turned into * {args => liftedBody()} where lifted body is a top level method that implements the body of the lambda. * Currently Uncurry is responsible for that transformation. @@ -17,7 +17,7 @@ import scala.collection.mutable.LinkedHashMap * From a lambda, Delambdafy will create * 1) a static forwarder at the top level of the class that contained the lambda * 2) a new top level class that - a) has fields and a constructor taking the captured environment (including possbily the "this" + a) has fields and a constructor taking the captured environment (including possibly the "this" * reference) * b) an apply method that calls the static forwarder * c) if needed a bridge method for the apply method @@ -30,13 +30,21 @@ import scala.collection.mutable.LinkedHashMap abstract class Delambdafy extends Transform with TypingTransformers with ast.TreeDSL with TypeAdaptingTransformer { import global._ import definitions._ - import CODE._ val analyzer: global.analyzer.type = global.analyzer /** the following two members override abstract members in Transform */ val phaseName: String = "delambdafy" + override def newPhase(prev: scala.tools.nsc.Phase): StdPhase = { + if (settings.Ydelambdafy.value == "method") new Phase(prev) + else new SkipPhase(prev) + } + + class SkipPhase(prev: scala.tools.nsc.Phase) extends StdPhase(prev) { + def apply(unit: global.CompilationUnit): Unit = () + } + protected def newTransformer(unit: CompilationUnit): Transformer = new DelambdafyTransformer(unit) @@ -91,7 +99,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre super.transform(newExpr) // when we encounter a template (basically the thing that holds body of a class/trait) - // we need to updated it to include newly created accesor methods after transforming it + // we need to update it to include newly created accessor methods after transforming it case Template(_, _, _) => try { // during this call accessorMethods will be populated from the Function case @@ -132,7 +140,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre } val params = ((optionSymbol(thisProxy) map {proxy:Symbol => ValDef(proxy)}) ++ (target.paramss.flatten map ValDef.apply)).toList - val methSym = oldClass.newMethod(unit.freshTermName(nme.accessor.toString()),
target.pos, FINAL | BRIDGE | SYNTHETIC | PROTECTED | STATIC) + val methSym = oldClass.newMethod(unit.freshTermName(nme.accessor.toString() + "$"), target.pos, FINAL | BRIDGE | SYNTHETIC | PROTECTED | STATIC) val paramSyms = params map {param => methSym.newSyntheticValueParam(param.symbol.tpe, param.name) } @@ -236,55 +244,59 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre // - make `anonClass.isAnonymousClass` true. // - use `newAnonymousClassSymbol` or push the required variations into a similar factory method // - reinstate the assertion in `Erasure.resolveAnonymousBridgeClash` - val suffix = "$lambda$" + ( + val suffix = nme.DELAMBDAFY_LAMBDA_CLASS_NAME + "$" + ( if (funOwner.isPrimaryConstructor) "" - else "$" + funOwner.name + else "$" + funOwner.name + "$" ) - val name = unit.freshTypeName(s"${oldClass.name.decode}$suffix") + val oldClassPart = oldClass.name.decode + // make sure the class name doesn't contain $anon, otherwise isAnonymousClass/Function may be true + val name = unit.freshTypeName(s"$oldClassPart$suffix".replace("$anon", "$nestedInAnon")) - val anonClass = pkg newClassSymbol(name, originalFunction.pos, FINAL | SYNTHETIC) addAnnotation SerialVersionUIDAnnotation - anonClass setInfo ClassInfoType(parents, newScope, anonClass) + val lambdaClass = pkg newClassSymbol(name, originalFunction.pos, FINAL | SYNTHETIC) addAnnotation SerialVersionUIDAnnotation + lambdaClass setInfo ClassInfoType(parents, newScope, lambdaClass) + assert(!lambdaClass.isAnonymousClass && !lambdaClass.isAnonymousFunction, "anonymous class name: "+ lambdaClass.name) + assert(lambdaClass.isDelambdafyFunction, "not lambda class name: " + lambdaClass.name) val captureProxies2 = new LinkedHashMap[Symbol, TermSymbol] captures foreach {capture => - val sym = anonClass.newVariable(capture.name.toTermName, capture.pos, SYNTHETIC) + val sym = lambdaClass.newVariable(unit.freshTermName(capture.name.toString + "$"), capture.pos, SYNTHETIC) sym setInfo capture.info captureProxies2 += ((capture, sym)) } - // the Optional proxy that will hold a reference to the 'this' - // object used by the lambda, if any. NoSymbol if there is no this proxy - val thisProxy = { - val target = targetMethod(originalFunction) - if (thisReferringMethods contains target) { - val sym = anonClass.newVariable(nme.FAKE_LOCAL_THIS, originalFunction.pos, SYNTHETIC) - sym.info = oldClass.tpe - sym - } else NoSymbol - } + // the Optional proxy that will hold a reference to the 'this' + // object used by the lambda, if any. 
NoSymbol if there is no this proxy + val thisProxy = { + val target = targetMethod(originalFunction) + if (thisReferringMethods contains target) { + val sym = lambdaClass.newVariable(nme.FAKE_LOCAL_THIS, originalFunction.pos, SYNTHETIC) + sym.info = oldClass.tpe + sym + } else NoSymbol + } - val decapturify = new DeCapturifyTransformer(captureProxies2, unit, oldClass, anonClass, originalFunction.symbol.pos, thisProxy) + val decapturify = new DeCapturifyTransformer(captureProxies2, unit, oldClass, lambdaClass, originalFunction.symbol.pos, thisProxy) - val accessorMethod = createAccessorMethod(thisProxy, originalFunction) + val accessorMethod = createAccessorMethod(thisProxy, originalFunction) - val decapturedFunction = decapturify.transform(originalFunction).asInstanceOf[Function] + val decapturedFunction = decapturify.transform(originalFunction).asInstanceOf[Function] - val members = (optionSymbol(thisProxy).toList ++ (captureProxies2 map (_._2))) map {member => - anonClass.info.decls enter member - ValDef(member, gen.mkZero(member.tpe)) setPos decapturedFunction.pos - } + val members = (optionSymbol(thisProxy).toList ++ (captureProxies2 map (_._2))) map {member => + lambdaClass.info.decls enter member + ValDef(member, gen.mkZero(member.tpe)) setPos decapturedFunction.pos + } - // constructor - val constr = createConstructor(anonClass, members) + // constructor + val constr = createConstructor(lambdaClass, members) - // apply method with same arguments and return type as original lambda. - val applyMethodDef = createApplyMethod(anonClass, decapturedFunction, accessorMethod, thisProxy) + // apply method with same arguments and return type as original lambda. + val applyMethodDef = createApplyMethod(lambdaClass, decapturedFunction, accessorMethod, thisProxy) - val bridgeMethod = createBridgeMethod(anonClass, originalFunction, applyMethodDef) + val bridgeMethod = createBridgeMethod(lambdaClass, originalFunction, applyMethodDef) - def fulldef(sym: Symbol) = - if (sym == NoSymbol) sym.toString - else s"$sym: ${sym.tpe} in ${sym.owner}" + def fulldef(sym: Symbol) = + if (sym == NoSymbol) sym.toString + else s"$sym: ${sym.tpe} in ${sym.owner}" bridgeMethod foreach (bm => // TODO SI-6260 maybe just create the apply method with the signature (Object => Object) in all cases @@ -296,7 +308,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre val body = members ++ List(constr, applyMethodDef) ++ bridgeMethod // TODO if member fields are private this complains that they're not accessible - (localTyper.typedPos(decapturedFunction.pos)(ClassDef(anonClass, body)).asInstanceOf[ClassDef], thisProxy, accessorMethod) + (localTyper.typedPos(decapturedFunction.pos)(ClassDef(lambdaClass, body)).asInstanceOf[ClassDef], thisProxy, accessorMethod) } val (anonymousClassDef, thisProxy, accessorMethod) = makeAnonymousClass @@ -422,7 +434,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre } /** - * Get the symbol of the target lifted lambad body method from a function. I.e. if + * Get the symbol of the target lifted lambda body method from a function. I.e. 
if * the function is {args => anonfun(args)} then this method returns anonfun's symbol */ private def targetMethod(fun: Function): Symbol = fun match { diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index bd2f6f00186c..5c72bb32588b 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -98,7 +98,7 @@ abstract class Erasure extends AddInterfaces val len = sig.length val copy: Array[Char] = sig.toCharArray var changed = false - while (i < sig.length) { + while (i < len) { val ch = copy(i) if (ch == '.' && last != '>') { copy(i) = '$' @@ -185,6 +185,25 @@ abstract class Erasure extends AddInterfaces private def isErasedValueType(tpe: Type) = tpe.isInstanceOf[ErasedValueType] + /* Drop redundant types (ones which are implemented by some other parent) from the immediate parents. + * This is important on Android because there is otherwise an interface explosion. + */ + def minimizeParents(parents: List[Type]): List[Type] = { + var rest = parents + var leaves = collection.mutable.ListBuffer.empty[Type] + while(rest.nonEmpty) { + val candidate = rest.head + val nonLeaf = leaves exists { t => t.typeSymbol isSubClass candidate.typeSymbol } + if(!nonLeaf) { + leaves = leaves filterNot { t => candidate.typeSymbol isSubClass t.typeSymbol } + leaves += candidate + } + rest = rest.tail + } + leaves.toList + } + + /** The Java signature of type 'info', for symbol sym. The symbol is used to give the right return * type for constructors. */ @@ -192,16 +211,24 @@ abstract class Erasure extends AddInterfaces val isTraitSignature = sym0.enclClass.isTrait def superSig(parents: List[Type]) = { - val ps = ( - if (isTraitSignature) { + def isInterfaceOrTrait(sym: Symbol) = sym.isInterface || sym.isTrait + + // a signature should always start with a class + def ensureClassAsFirstParent(tps: List[Type]) = tps match { + case Nil => ObjectTpe :: Nil + case head :: tail if isInterfaceOrTrait(head.typeSymbol) => ObjectTpe :: tps + case _ => tps + } + + val minParents = minimizeParents(parents) + val validParents = + if (isTraitSignature) // java is unthrilled about seeing interfaces inherit from classes - val ok = parents filter (p => p.typeSymbol.isTrait || p.typeSymbol.isInterface) - // traits should always list Object. - if (ok.isEmpty || ok.head.typeSymbol != ObjectClass) ObjectTpe :: ok - else ok - } - else parents - ) + minParents filter (p => isInterfaceOrTrait(p.typeSymbol)) + else minParents + + val ps = ensureClassAsFirstParent(validParents) + (ps map boxedSig).mkString } def boxedSig(tp: Type) = jsig(tp, primitiveOK = false) @@ -403,14 +430,13 @@ abstract class Erasure extends AddInterfaces * a name clash. The present method guards against these name clashes. 
* * @param member The original member - * @param other The overidden symbol for which the bridge was generated + * @param other The overridden symbol for which the bridge was generated * @param bridge The bridge */ def checkBridgeOverrides(member: Symbol, other: Symbol, bridge: Symbol): Seq[(Position, String)] = { def fulldef(sym: Symbol) = if (sym == NoSymbol) sym.toString else s"$sym: ${sym.tpe} in ${sym.owner}" - var noclash = true val clashErrors = mutable.Buffer[(Position, String)]() def clashError(what: String) = { val pos = if (member.owner == root) member.pos else root.pos @@ -468,8 +494,12 @@ abstract class Erasure extends AddInterfaces if (!bridgeNeeded) return - val newFlags = (member.flags | BRIDGE | ARTIFACT) & ~(ACCESSOR | DEFERRED | LAZY | lateDEFERRED) - val bridge = other.cloneSymbolImpl(root, newFlags) setPos root.pos + var newFlags = (member.flags | BRIDGE | ARTIFACT) & ~(ACCESSOR | DEFERRED | LAZY | lateDEFERRED) + // If `member` is a ModuleSymbol, the bridge should not also be a ModuleSymbol. Otherwise we + // end up with two module symbols with the same name in the same scope, which is surprising + // when implementing later phases. + if (member.isModule) newFlags = (newFlags | METHOD) & ~(MODULE | lateMETHOD | STABLE) + val bridge = other.cloneSymbolImpl(root, newFlags) setPos root.pos debuglog("generating bridge from %s (%s): %s to %s: %s".format( other, flagsToString(newFlags), @@ -488,7 +518,7 @@ abstract class Erasure extends AddInterfaces || (checkBridgeOverrides(member, other, bridge) match { case Nil => true case es if member.owner.isAnonymousClass => resolveAnonymousBridgeClash(member, bridge); true - case es => for ((pos, msg) <- es) unit.error(pos, msg); false + case es => for ((pos, msg) <- es) reporter.error(pos, msg); false }) ) @@ -600,7 +630,7 @@ abstract class Erasure extends AddInterfaces if (tree.symbol == NoSymbol) { tree } else if (name == nme.CONSTRUCTOR) { - if (tree.symbol.owner == AnyValClass) tree.symbol = ObjectClass.info.decl(nme.CONSTRUCTOR) + if (tree.symbol.owner == AnyValClass) tree.symbol = ObjectClass.primaryConstructor tree } else if (tree.symbol == Any_asInstanceOf) adaptMember(atPos(tree.pos)(Select(qual, Object_asInstanceOf))) @@ -724,7 +754,7 @@ abstract class Erasure extends AddInterfaces ) val when = if (exitingRefchecks(lowType matches highType)) "" else " after erasure: " + exitingPostErasure(highType) - unit.error(pos, + reporter.error(pos, s"""|$what: |${exitingRefchecks(highString)} and |${exitingRefchecks(lowString)} @@ -865,7 +895,7 @@ abstract class Erasure extends AddInterfaces fn match { case TypeApply(sel @ Select(qual, name), List(targ)) => if (qual.tpe != null && isPrimitiveValueClass(qual.tpe.typeSymbol) && targ.tpe != null && targ.tpe <:< AnyRefTpe) - unit.error(sel.pos, "isInstanceOf cannot test if value types are references.") + reporter.error(sel.pos, "isInstanceOf cannot test if value types are references.") def mkIsInstanceOf(q: () => Tree)(tp: Type): Tree = Apply( @@ -952,7 +982,7 @@ abstract class Erasure extends AddInterfaces case nme.length => nme.array_length case nme.update => nme.array_update case nme.clone_ => nme.array_clone - case _ => unit.error(tree.pos, "Unexpected array member, no translation exists.") ; nme.NO_NAME + case _ => reporter.error(tree.pos, "Unexpected array member, no translation exists.") ; nme.NO_NAME } gen.mkRuntimeCall(arrayMethodName, qual :: args) } @@ -1050,20 +1080,18 @@ abstract class Erasure extends AddInterfaces } } - def isAccessible(sym: Symbol) = 
localTyper.context.isAccessible(sym, sym.owner.thisType) - if (!isAccessible(owner) && qual.tpe != null) { + def isJvmAccessible(sym: Symbol) = (sym.isClass && !sym.isJavaDefined) || localTyper.context.isAccessible(sym, sym.owner.thisType) + if (!isJvmAccessible(owner) && qual.tpe != null) { qual match { case Super(_, _) => - // Insert a cast here at your peril -- see SI-5162. Bail out if the target method is defined in - // Java, otherwise, we'd get an IllegalAccessError at runtime. If the target method is defined in - // Scala, however, we should have access. - if (owner.isJavaDefined) unit.error(tree.pos, s"Unable to access ${tree.symbol.fullLocationString} with a super reference.") + // Insert a cast here at your peril -- see SI-5162. + reporter.error(tree.pos, s"Unable to access ${tree.symbol.fullLocationString} with a super reference.") tree case _ => // Todo: Figure out how qual.tpe could be null in the check above (it does appear in build where SwingWorker.this // has a null type). val qualSym = qual.tpe.widen.typeSymbol - if (isAccessible(qualSym) && !qualSym.isPackageClass && !qualSym.isPackageObjectClass) { + if (isJvmAccessible(qualSym) && !qualSym.isPackageClass && !qualSym.isPackageObjectClass) { // insert cast to prevent illegal access error (see #4283) // util.trace("insert erasure cast ") (*/ treeCopy.Select(tree, gen.mkAttributedCast(qual, qual.tpe.widen), name) //) @@ -1125,7 +1153,7 @@ abstract class Erasure extends AddInterfaces } } - /** The main transform function: Pretransfom the tree, and then + /** The main transform function: Pretransform the tree, and then * re-type it at phase erasure.next. */ override def transform(tree: Tree): Tree = { @@ -1135,7 +1163,7 @@ abstract class Erasure extends AddInterfaces val tree2 = mixinTransformer.transform(tree1) // debuglog("tree after addinterfaces: \n" + tree2) - newTyper(rootContext(unit, tree, erasedTypes = true)).typed(tree2) + newTyper(rootContextPostTyper(unit, tree)).typed(tree2) } } } diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala index 0447e23e9eed..f3cab8184c89 100644 --- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala +++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala @@ -441,13 +441,15 @@ abstract class ExplicitOuter extends InfoTransform else atPos(tree.pos)(outerPath(outerValue, currentClass.outerClass, sym)) // (5) case Select(qual, name) => - // make not private symbol acessed from inner classes, as well as + // make not private symbol accessed from inner classes, as well as // symbols accessed from @inline methods // // See SI-6552 for an example of why `sym.owner.enclMethod hasAnnotation ScalaInlineClass` // is not suitable; if we make a method-local class non-private, it mangles outer pointer names. - if (currentClass != sym.owner || - (closestEnclMethod(currentOwner) hasAnnotation ScalaInlineClass)) + def enclMethodIsInline = closestEnclMethod(currentOwner) hasAnnotation ScalaInlineClass + // SI-8710 The extension method condition reflects our knowledge that a call to `new Meter(12).privateMethod` + // will later be rewritten (in erasure) to `Meter.privateMethod$extension(12)`.
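// [Editor's sketch, not part of the patch] The SI-8710 comment above uses a value class called
// Meter; spelled out, the exemption relies on erasure rewriting calls to value-class members
// into static $extension calls on the companion, so a private member never needs to be made
// non-private for an inner-class access path. The rewritten names in the comments below are
// produced by the compiler, not written by hand.
class Meter(val underlying: Double) extends AnyVal {
  private def privateMethod: Double = underlying * 2
  def double: Double = privateMethod // erased to static Meter.privateMethod$extension-style calls
}

object MeterDemo {
  def main(args: Array[String]): Unit =
    println(new Meter(12).double) // 24.0; no Meter instance survives erasure here
}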
+ if ((currentClass != sym.owner || enclMethodIsInline) && !sym.isMethodWithExtension) sym.makeNotPrivate(sym.owner) val qsym = qual.tpe.widen.typeSymbol @@ -481,7 +483,7 @@ abstract class ExplicitOuter extends InfoTransform // since we can't fix SI-4440 properly (we must drop the outer accessors of final classes when there's no immediate reference to them in sight) // at least don't crash... this duplicates maybeOmittable from constructors (acc.owner.isEffectivelyFinal && !acc.isOverridingSymbol)) { - unit.uncheckedWarning(tree.pos, "The outer reference in this type test cannot be checked at run time.") + currentRun.reporting.uncheckedWarning(tree.pos, "The outer reference in this type test cannot be checked at run time.") transform(TRUE) // urgh... drop condition if there's no accessor (or if it may disappear after constructors) } else { // println("(base, acc)= "+(base, acc)) diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala index 2235a93ca442..6349fc3fb948 100644 --- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala +++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala @@ -127,7 +127,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { def checkNonCyclic(pos: Position, seen: Set[Symbol], clazz: Symbol): Unit = if (seen contains clazz) - unit.error(pos, "value class may not unbox to itself") + reporter.error(pos, "value class may not unbox to itself") else { val unboxed = definitions.underlyingOfValueClass(clazz).typeSymbol if (unboxed.isDerivedValueClass) checkNonCyclic(pos, seen + clazz, unboxed) @@ -208,7 +208,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { def makeExtensionMethodSymbol = { val extensionName = extensionNames(origMeth).head.toTermName val extensionMeth = ( - companion.moduleClass.newMethod(extensionName, tree.pos.focus, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL) + companion.moduleClass.newMethod(extensionName, tree.pos.focus, origMeth.flags & ~OVERRIDE & ~PROTECTED & ~PRIVATE & ~LOCAL | FINAL) setAnnotations origMeth.annotations ) origMeth.removeAnnotation(TailrecClass) // it's on the extension method, now. diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala index c3fbfae322ff..6149e40fa7f7 100644 --- a/src/compiler/scala/tools/nsc/transform/Flatten.scala +++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala @@ -76,8 +76,20 @@ abstract class Flatten extends InfoTransform { for (sym <- decls) { if (sym.isTerm && !sym.isStaticModule) { decls1 enter sym - if (sym.isModule) + if (sym.isModule) { + // In theory, we could assert(sym.isMethod), because nested, non-static modules are + // transformed to methods (lateMETHOD flag added in RefChecks). But this requires + // forcing sym.info (see comment on isModuleNotMethod), which forces stub symbols + // too eagerly (SI-8907). + + // Note that module classes are not entered into the 'decls' of the ClassInfoType + // of the outer class, only the module symbols are. So the current loop does + // not visit module classes. Therefore we set the LIFTED flag here for module + // classes. + // TODO: should we also set the LIFTED flag for static, nested module classes? 
+ // currently they don't get the flag, even though they are lifted to the package sym.moduleClass setFlag LIFTED + } } else if (sym.isClass) liftSymbol(sym) } diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala index e38c034f4dc2..fa0c1f797bc3 100644 --- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala +++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala @@ -339,7 +339,7 @@ abstract class LambdaLift extends InfoTransform { if (clazz.isStaticOwner) clazz.fullLocationString else s"the unconstructed `this` of ${clazz.fullLocationString}" val msg = s"Implementation restriction: access of ${sym.fullLocationString} from ${currentClass.fullLocationString}, would require illegal premature access to $what" - currentUnit.error(curTree.pos, msg) + reporter.error(curTree.pos, msg) } val qual = if (clazz == currentClass) gen.mkAttributedThis(clazz) @@ -402,7 +402,7 @@ abstract class LambdaLift extends InfoTransform { } /* SI-6231: Something like this will be necessary to eliminate the implementation - * restiction from paramGetter above: + * restriction from paramGetter above: * We need to pass getters to the interface of an implementation class. private def fixTraitGetters(lifted: List[Tree]): List[Tree] = for (stat <- lifted) yield stat match { @@ -449,6 +449,8 @@ abstract class LambdaLift extends InfoTransform { if (sym.isClass) sym.owner = sym.owner.toInterface if (sym.isMethod) sym setFlag LIFTED liftedDefs(sym.owner) ::= tree + // TODO: this modifies the ClassInfotype of the enclosing class, which is associated with another phase (explicitouter). + // This breaks type history: in a phase travel to before lambda lift, the ClassInfoType will contain lifted classes. sym.owner.info.decls enterUnique sym debuglog("lifted: " + sym + " from " + oldOwner + " to " + sym.owner) EmptyTree diff --git a/src/compiler/scala/tools/nsc/transform/LazyVals.scala b/src/compiler/scala/tools/nsc/transform/LazyVals.scala index b71d14a04f5a..38671ebaaee4 100644 --- a/src/compiler/scala/tools/nsc/transform/LazyVals.scala +++ b/src/compiler/scala/tools/nsc/transform/LazyVals.scala @@ -192,13 +192,15 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD def mkSlowPathDef(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree], stats: List[Tree], retVal: Tree): Tree = { + // Q: is there a reason to first set owner to `clazz` (by using clazz.newMethod), and then + // changing it to lzyVal.owner very soon after? Could we just do lzyVal.owner.newMethod? 
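// Illustrative sketch, not part of this patch: roughly what mkSlowPathDef and the synchronized
// check built below amount to at source level for a single lazy val. `LazyHolder`, `init`,
// `x$value` and `bitmap$0` are illustrative names, not the exact symbols the compiler creates.
class LazyHolder(init: () => Int) {
  @volatile private[this] var bitmap$0: Boolean = false   // "already computed" flag
  private[this] var x$value: Int = _                      // storage for the lazy val
  private def x$lzycompute(): Int = {                     // the "slow compute path" method
    this.synchronized {
      if (!bitmap$0) { x$value = init(); bitmap$0 = true }
    }
    x$value
  }
  def x: Int = if (bitmap$0) x$value else x$lzycompute()  // fast path checks the bitmap first
}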
val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name.toTermName), lzyVal.pos, STABLE | PRIVATE) defSym setInfo MethodType(List(), lzyVal.tpe.resultType) defSym.owner = lzyVal.owner debuglog(s"crete slow compute path $defSym with owner ${defSym.owner} for lazy val $lzyVal") if (bitmaps.contains(lzyVal)) bitmaps(lzyVal).map(_.owner = defSym) - val rhs: Tree = (gen.mkSynchronizedCheck(clazz, cond, syncBody, stats)).changeOwner(currentOwner -> defSym) + val rhs: Tree = gen.mkSynchronizedCheck(clazz, cond, syncBody, stats).changeOwner(currentOwner -> defSym) DefDef(defSym, addBitmapDefs(lzyVal, BLOCK(rhs, retVal))) } diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index 673bc04bd947..792787558378 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -336,7 +336,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { rebindSuper(clazz, mixinMember.alias, mixinClass) match { case NoSymbol => - unit.error(clazz.pos, "Member %s of mixin %s is missing a concrete super implementation.".format( + reporter.error(clazz.pos, "Member %s of mixin %s is missing a concrete super implementation.".format( mixinMember.alias, mixinClass)) case alias1 => superAccessor.asInstanceOf[TermSymbol] setAlias alias1 @@ -391,7 +391,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { else { sourceModule setPos sym.pos if (sourceModule.flags != MODULE) { - log("!!! Directly setting sourceModule flags from %s to MODULE".format(sourceModule.flagString)) + log(s"!!! Directly setting sourceModule flags for $sourceModule from ${sourceModule.flagString} to MODULE") sourceModule.flags = MODULE } } @@ -1004,24 +1004,56 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { buildBitmapOffsets() var stats1 = addCheckedGetters(clazz, stats) - def accessedReference(sym: Symbol) = sym.tpe match { - case MethodType(Nil, ConstantType(c)) => Literal(c) - case _ => - // if it is a mixed-in lazy value, complete the accessor - if (sym.isLazy && sym.isGetter) { - val isUnit = sym.tpe.resultType.typeSymbol == UnitClass - val initCall = Apply(staticRef(initializer(sym)), gen.mkAttributedThis(clazz) :: Nil) - val selection = Select(This(clazz), sym.accessed) - val init = if (isUnit) initCall else atPos(sym.pos)(Assign(selection, initCall)) - val returns = if (isUnit) UNIT else selection - - mkLazyDef(clazz, sym, List(init), returns, fieldOffset(sym)) - } - else sym.getter(sym.owner).tpe.resultType.typeSymbol match { - case UnitClass => UNIT - case _ => Select(This(clazz), sym.accessed) - } + def getterBody(getter: Symbol) = { + assert(getter.isGetter) + val readValue = getter.tpe match { + // A field "final val f = const" in a trait generates a getter with a ConstantType. + case MethodType(Nil, ConstantType(c)) => + Literal(c) + case _ => + // if it is a mixed-in lazy value, complete the accessor + if (getter.isLazy) { + val isUnit = isUnitGetter(getter) + val initCall = Apply(staticRef(initializer(getter)), gen.mkAttributedThis(clazz) :: Nil) + val selection = fieldAccess(getter) + val init = if (isUnit) initCall else atPos(getter.pos)(Assign(selection, initCall)) + val returns = if (isUnit) UNIT else selection + mkLazyDef(clazz, getter, List(init), returns, fieldOffset(getter)) + } + // For a field of type Unit in a trait, no actual field is generated when being mixed in. 
+ else if (isUnitGetter(getter)) UNIT + else fieldAccess(getter) + } + if (!needsInitFlag(getter)) readValue + else mkCheckedAccessor(clazz, readValue, fieldOffset(getter), getter.pos, getter) + } + + def setterBody(setter: Symbol) = { + val getter = setter.getterIn(clazz) + + // A trait with a field of type Unit creates a trait setter (invoked by the + // implementation class constructor), like for any other trait field. + // However, no actual field is created in the class that mixes in the trait. + // Therefore the setter does nothing (except setting the -Xcheckinit flag). + + val setInitFlag = + if (!needsInitFlag(getter)) Nil + else List(mkSetFlag(clazz, fieldOffset(getter), getter, bitmapKind(getter))) + + val fieldInitializer = + if (isUnitGetter(getter)) Nil + else List(Assign(fieldAccess(setter), Ident(setter.firstParam))) + + (fieldInitializer ::: setInitFlag) match { + case Nil => UNIT + // If there's only one statement, the Block factory does not actually create a Block. + case stats => Block(stats: _*) + } } + + def isUnitGetter(getter: Symbol) = getter.tpe.resultType.typeSymbol == UnitClass + def fieldAccess(accessor: Symbol) = Select(This(clazz), accessor.accessed) + def isOverriddenSetter(sym: Symbol) = nme.isTraitSetterName(sym.name) && { val other = sym.nextOverriddenSymbol @@ -1036,27 +1068,17 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { } // if class is not a trait add accessor definitions else if (!clazz.isTrait) { - // This needs to be a def to avoid sharing trees - def accessedRef = accessedReference(sym) if (isConcreteAccessor(sym)) { // add accessor definitions addDefDef(sym, { if (sym.isSetter) { + // If this is a setter of a mixed-in field which is overridden by another mixin, + // the trait setter of the overridden one does not need to do anything - the + // trait setter of the overriding field will initialize the field. if (isOverriddenSetter(sym)) UNIT - else accessedRef match { - case ref @ Literal(_) => ref - case ref => - val init = Assign(ref, Ident(sym.firstParam)) - val getter = sym.getter(clazz) - - if (!needsInitFlag(getter)) init - else Block(init, mkSetFlag(clazz, fieldOffset(getter), getter, bitmapKind(getter)), UNIT) - } + else setterBody(sym) } - else if (needsInitFlag(sym)) - mkCheckedAccessor(clazz, accessedRef, fieldOffset(sym), sym.pos, sym) - else - accessedRef + else getterBody(sym) }) } else if (sym.isModule && !(sym hasFlag LIFTED | BRIDGE)) { diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala index bbd11efa7e03..e4082eb3769f 100644 --- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala +++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala @@ -6,7 +6,6 @@ package scala.tools.nsc package transform -import symtab.Flags._ import scala.reflect.internal.SymbolPairs /** A class that yields a kind of iterator (`Cursor`), @@ -36,7 +35,7 @@ abstract class OverridingPairs extends SymbolPairs { */ override protected def matches(lo: Symbol, high: Symbol) = lo.isType || ( (lo.owner != high.owner) // don't try to form pairs from overloaded members - && !high.isPrivate // private or private[this] members never are overriden + && !high.isPrivate // private or private[this] members never are overridden && !exclude(lo) // this admits private, as one can't have a private member that matches a less-private member. && relatively.matches(lo, high) ) // TODO we don't call exclude(high), should we? 
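// Illustrative sketch, not part of this patch: the main cases that getterBody/setterBody in
// Mixin.scala above distinguish, shown on a hypothetical trait. `T`, `C` and the members below
// are made up for illustration only.
trait T {
  final val tag = 42        // ConstantType getter: the mixed-in getter simply returns Literal(42)
  var unitField: Unit = ()  // Unit-typed trait field: no field is emitted in the mixing class,
                            // the getter returns () and the trait setter is effectively a no-op
  var count: Int = 0        // ordinary field: getter reads the field, setter assigns its argument
}
class C extends T           // C receives the synthesized accessor bodies described above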
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 02e55241b314..1691b01e3efd 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -538,6 +538,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { bytecodeClazz.info val sClass = clazz.owner.newClass(clazzName, clazz.pos, (clazz.flags | SPECIALIZED) & ~CASE) + sClass.setAnnotations(clazz.annotations) // SI-8574 important that the subclass picks up @SerialVersionUID, @strictfp, etc. def cloneInSpecializedClass(member: Symbol, flagFn: Long => Long, newName: Name = null) = member.cloneSymbol(sClass, flagFn(member.flags | SPECIALIZED), newName) @@ -860,11 +861,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { debuglog("%s expands to %s in %s".format(sym, specMember.name.decode, pp(env))) info(specMember) = NormalizedMember(sym) newOverload(sym, specMember, env) - // if this is a class, we insert the normalized member in scope, - // if this is a method, there's no attached scope for it (EmptyScope) - val decls = owner.info.decls - if (decls != EmptyScope) - decls.enter(specMember) specMember } } @@ -898,7 +894,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } val specMember = subst(outerEnv)(specializedOverload(owner, sym, spec)) - owner.info.decls.enter(specMember) typeEnv(specMember) = typeEnv(sym) ++ outerEnv ++ spec wasSpecializedForTypeVars(specMember) ++= spec collect { case (s, tp) if s.tpe == tp => s } @@ -1295,7 +1290,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * // even in the specialized variant, the local X class * // doesn't extend Parent$mcI$sp, since its symbol has * // been created after specialization and was not seen - * // by specialzation's info transformer. + * // by specialization's info transformer. * ... * } * } @@ -1503,20 +1498,13 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { val residualTargs = symbol.info.typeParams zip baseTargs collect { case (tvar, targ) if !env.contains(tvar) || !isPrimitiveValueClass(env(tvar).typeSymbol) => targ } - // See SI-5583. Don't know why it happens now if it didn't before. 
- if (specMember.info.typeParams.isEmpty && residualTargs.nonEmpty) { - devWarning("Type args to be applied, but symbol says no parameters: " + ((specMember.defString, residualTargs))) - baseTree - } - else { - ifDebug(assert(residualTargs.length == specMember.info.typeParams.length, - "residual: %s, tparams: %s, env: %s".format(residualTargs, specMember.info.typeParams, env)) - ) + ifDebug(assert(residualTargs.length == specMember.info.typeParams.length, + "residual: %s, tparams: %s, env: %s".format(residualTargs, specMember.info.typeParams, env)) + ) - val tree1 = gen.mkTypeApply(specTree, residualTargs) - debuglog("rewrote " + tree + " to " + tree1) - localTyper.typedOperator(atPos(tree.pos)(tree1)) // being polymorphic, it must be a method - } + val tree1 = gen.mkTypeApply(specTree, residualTargs) + debuglog("rewrote " + tree + " to " + tree1) + localTyper.typedOperator(atPos(tree.pos)(tree1)) // being polymorphic, it must be a method } curTree = tree diff --git a/src/compiler/scala/tools/nsc/transform/Statics.scala b/src/compiler/scala/tools/nsc/transform/Statics.scala index e2508b8d08df..4673be6de73a 100644 --- a/src/compiler/scala/tools/nsc/transform/Statics.scala +++ b/src/compiler/scala/tools/nsc/transform/Statics.scala @@ -1,9 +1,6 @@ package scala.tools.nsc package transform -import symtab._ -import Flags._ - import collection.mutable.Buffer abstract class Statics extends Transform with ast.TreeDSL { diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala index 714f189ead05..16ea3ea90f14 100644 --- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala +++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala @@ -96,7 +96,7 @@ abstract class TailCalls extends Transform { val failReason = failReasons(ctx) val failPos = failPositions(ctx) - unit.error(failPos, s"could not optimize @tailrec annotated $method: $failReason") + reporter.error(failPos, s"could not optimize @tailrec annotated $method: $failReason") } /** Has the label been accessed? Then its symbol is in this set. 
*/ @@ -129,6 +129,13 @@ abstract class TailCalls extends Transform { } override def toString = s"${method.name} tparams=$tparams tailPos=$tailPos label=$label label info=${label.info}" + final def noTailContext() = clonedTailContext(false) + final def yesTailContext() = clonedTailContext(true) + protected def clonedTailContext(tailPos: Boolean): TailContext = this match { + case _ if this.tailPos == tailPos => this + case clone: ClonedTailContext => clone.that.clonedTailContext(tailPos) + case _ => new ClonedTailContext(this, tailPos) + } } object EmptyTailContext extends TailContext { @@ -174,7 +181,7 @@ abstract class TailCalls extends Transform { } def containsRecursiveCall(t: Tree) = t exists isRecursiveCall } - class ClonedTailContext(that: TailContext, override val tailPos: Boolean) extends TailContext { + class ClonedTailContext(val that: TailContext, override val tailPos: Boolean) extends TailContext { def method = that.method def tparams = that.tparams def methodPos = that.methodPos @@ -183,9 +190,6 @@ abstract class TailCalls extends Transform { } private var ctx: TailContext = EmptyTailContext - private def noTailContext() = new ClonedTailContext(ctx, tailPos = false) - private def yesTailContext() = new ClonedTailContext(ctx, tailPos = true) - override def transformUnit(unit: CompilationUnit): Unit = { try { @@ -206,16 +210,16 @@ abstract class TailCalls extends Transform { finally this.ctx = saved } - def yesTailTransform(tree: Tree): Tree = transform(tree, yesTailContext()) - def noTailTransform(tree: Tree): Tree = transform(tree, noTailContext()) + def yesTailTransform(tree: Tree): Tree = transform(tree, ctx.yesTailContext()) + def noTailTransform(tree: Tree): Tree = transform(tree, ctx.noTailContext()) def noTailTransforms(trees: List[Tree]) = { - val nctx = noTailContext() - trees map (t => transform(t, nctx)) + val nctx = ctx.noTailContext() + trees mapConserve (t => transform(t, nctx)) } override def transform(tree: Tree): Tree = { /* A possibly polymorphic apply to be considered for tail call transformation. */ - def rewriteApply(target: Tree, fun: Tree, targs: List[Tree], args: List[Tree]) = { + def rewriteApply(target: Tree, fun: Tree, targs: List[Tree], args: List[Tree], mustTransformArgs: Boolean = true) = { val receiver: Tree = fun match { case Select(qual, _) => qual case _ => EmptyTree @@ -223,7 +227,7 @@ abstract class TailCalls extends Transform { def receiverIsSame = ctx.enclosingType.widen =:= receiver.tpe.widen def receiverIsSuper = ctx.enclosingType.widen <:< receiver.tpe.widen def isRecursiveCall = (ctx.method eq fun.symbol) && ctx.tailPos - def transformArgs = noTailTransforms(args) + def transformArgs = if (mustTransformArgs) noTailTransforms(args) else args def matchesTypeArgs = ctx.tparams sameElements (targs map (_.tpe.typeSymbol)) /* Records failure reason in Context for reporting. 
@@ -265,17 +269,21 @@ abstract class TailCalls extends Transform { !(sym.hasAccessorFlag || sym.isConstructor) } + // intentionally shadowing imports from definitions for performance + val runDefinitions = currentRun.runDefinitions + import runDefinitions.{Boolean_or, Boolean_and} + tree match { case ValDef(_, _, _, _) => if (tree.symbol.isLazy && tree.symbol.hasAnnotation(TailrecClass)) - unit.error(tree.pos, "lazy vals are not tailcall transformed") + reporter.error(tree.pos, "lazy vals are not tailcall transformed") super.transform(tree) case dd @ DefDef(_, name, _, vparamss0, _, rhs0) if isEligible(dd) => val newCtx = new DefDefTailContext(dd) if (newCtx.isMandatory && !(newCtx containsRecursiveCall rhs0)) - unit.error(tree.pos, "@tailrec annotated method contains no recursive calls") + reporter.error(tree.pos, "@tailrec annotated method contains no recursive calls") debuglog(s"Considering $name for tailcalls, with labels in tailpos: ${newCtx.tailLabels}") val newRHS = transform(rhs0, newCtx) @@ -312,8 +320,13 @@ abstract class TailCalls extends Transform { // the assumption is once we encounter a case, the remainder of the block will consist of cases // the prologue may be empty, usually it is the valdef that stores the scrut val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef]) + val transformedPrologue = noTailTransforms(prologue) + val transformedCases = transformTrees(cases) + val transformedStats = + if ((prologue eq transformedPrologue) && (cases eq transformedCases)) stats // allow reuse of `tree` if the subtransform was an identity + else transformedPrologue ++ transformedCases treeCopy.Block(tree, - noTailTransforms(prologue) ++ transformTrees(cases), + transformedStats, transform(expr) ) @@ -328,11 +341,14 @@ abstract class TailCalls extends Transform { ) case CaseDef(pat, guard, body) => + // CaseDefs are already translated and guards were moved into the body. + // If this was not the case, guards would have to be transformed here as well. 
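// Illustrative sketch, not part of this patch: which sub-trees the yes/noTailTransform calls in
// this transform treat as tail positions, on a made-up @tailrec method.
object TailPositionsSketch {
  @annotation.tailrec
  def count(xs: List[Int], acc: Int): Int =
    if (xs.isEmpty)            // the condition is NOT a tail position (noTailTransform)
      acc                      // each branch of the `if` IS a tail position (transform)
    else
      count(xs.tail, acc + 1)  // self-call in tail position: rewritten into a jump to the label
}
// A call appearing in an argument list, e.g. `1 + count(xs, 0)`, is likewise not in tail
// position, which is why rewriteApply runs noTailTransforms over the arguments.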
+ assert(guard.isEmpty) deriveCaseDef(tree)(transform) case If(cond, thenp, elsep) => treeCopy.If(tree, - cond, + noTailTransform(cond), transform(thenp), transform(elsep) ) @@ -363,7 +379,7 @@ abstract class TailCalls extends Transform { rewriteApply(tapply, fun, targs, vargs) case Apply(fun, args) if fun.symbol == Boolean_or || fun.symbol == Boolean_and => - treeCopy.Apply(tree, fun, transformTrees(args)) + treeCopy.Apply(tree, noTailTransform(fun), transformTrees(args)) // this is to detect tailcalls in translated matches // it's a one-argument call to a label that is in a tailposition and that looks like label(x) {x} @@ -377,7 +393,7 @@ abstract class TailCalls extends Transform { if (res ne arg) treeCopy.Apply(tree, fun, res :: Nil) else - rewriteApply(fun, fun, Nil, args) + rewriteApply(fun, fun, Nil, args, mustTransformArgs = false) case Apply(fun, args) => rewriteApply(fun, fun, Nil, args) @@ -418,6 +434,10 @@ abstract class TailCalls extends Transform { def traverseNoTail(tree: Tree) = traverse(tree, maybeTailNew = false) def traverseTreesNoTail(trees: List[Tree]) = trees foreach traverseNoTail + // intentionally shadowing imports from definitions for performance + private val runDefinitions = currentRun.runDefinitions + import runDefinitions.{Boolean_or, Boolean_and} + override def traverse(tree: Tree) = tree match { // we're looking for label(x){x} in tail position, since that means `a` is in tail position in a call `label(a)` case LabelDef(_, List(arg), body@Ident(_)) if arg.symbol == body.symbol => diff --git a/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala index f83b6f857e9f..3b233063866c 100644 --- a/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala +++ b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala @@ -1,7 +1,6 @@ package scala.tools.nsc package transform -import scala.reflect.internal._ import scala.tools.nsc.ast.TreeDSL import scala.tools.nsc.Global diff --git a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala index 3feadcd9b2e6..dc3313e2e43d 100644 --- a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala +++ b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala @@ -17,9 +17,9 @@ trait TypingTransformers { abstract class TypingTransformer(unit: CompilationUnit) extends Transformer { var localTyper: analyzer.Typer = if (phase.erasedTypes) - erasure.newTyper(erasure.rootContext(unit, EmptyTree, erasedTypes = true)).asInstanceOf[analyzer.Typer] - else - analyzer.newTyper(analyzer.rootContext(unit, EmptyTree, true)) + erasure.newTyper(erasure.rootContextPostTyper(unit, EmptyTree)).asInstanceOf[analyzer.Typer] + else // TODO: AM: should some phases use a regular rootContext instead of a post-typer one?? 
+ analyzer.newTyper(analyzer.rootContextPostTyper(unit, EmptyTree)) protected var curTree: Tree = _ override final def atOwner[A](owner: Symbol)(trans: => A): A = atOwner(curTree, owner)(trans) diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 8a7d30235f15..3544dc9966d9 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -69,7 +69,6 @@ abstract class UnCurry extends InfoTransform private val byNameArgs = mutable.HashSet[Tree]() private val noApply = mutable.HashSet[Tree]() private val newMembers = mutable.Map[Symbol, mutable.Buffer[Tree]]() - private val repeatedParams = mutable.Map[Symbol, List[ValDef]]() /** Add a new synthetic member for `currentOwner` */ private def addNewMember(t: Tree): Unit = @@ -93,7 +92,7 @@ abstract class UnCurry extends InfoTransform override def transform(tree: Tree): Tree = ( try postTransform(mainTransform(tree)) catch { case ex: TypeError => - unit.error(ex.pos, ex.msg) + reporter.error(ex.pos, ex.msg) debugStack(ex) EmptyTree } @@ -174,7 +173,7 @@ abstract class UnCurry extends InfoTransform cdef <- catches if catchesThrowable(cdef) && !isSyntheticCase(cdef) } { - unit.warning(body.pos, "catch block may intercept non-local return from " + meth) + reporter.warning(body.pos, "catch block may intercept non-local return from " + meth) } Block(List(keyDef), tryCatch) @@ -221,7 +220,9 @@ abstract class UnCurry extends InfoTransform def mkMethod(owner: Symbol, name: TermName, additionalFlags: FlagSet = NoFlags): DefDef = gen.mkMethodFromFunction(localTyper)(fun, owner, name, additionalFlags) - if (inlineFunctionExpansion) { + val canUseDelamdafyMethod = (inConstructorFlag == 0) // Avoiding synthesizing code prone to SI-6666, SI-8363 by using old-style lambda translation + + if (inlineFunctionExpansion || !canUseDelamdafyMethod) { val parents = addSerializable(abstractFunctionForFunctionType(fun.tpe)) val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation SerialVersionUIDAnnotation anonClass setInfo ClassInfoType(parents, newScope, anonClass) @@ -426,7 +427,7 @@ abstract class UnCurry extends InfoTransform treeCopy.ValDef(p, p.mods, p.name, p.tpt, EmptyTree) }) - if (dd.symbol hasAnnotation VarargsClass) saveRepeatedParams(dd) + if (dd.symbol hasAnnotation VarargsClass) validateVarargs(dd) withNeedLift(needLift = false) { if (dd.symbol.isClassConstructor) { @@ -458,7 +459,7 @@ abstract class UnCurry extends InfoTransform case UnApply(fn, args) => val fn1 = transform(fn) val args1 = fn.symbol.name match { - case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, patmat.alignPatterns(tree).expectedTypes) + case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, patmat.alignPatterns(global.typer.context, tree).expectedTypes) case _ => args } treeCopy.UnApply(tree, fn1, args1) @@ -697,19 +698,12 @@ abstract class UnCurry extends InfoTransform } } - - /* Analyzes repeated params if method is annotated as `varargs`. - * If the repeated params exist, it saves them into the `repeatedParams` map, - * which is used later. 
- */ - private def saveRepeatedParams(dd: DefDef): Unit = + private def validateVarargs(dd: DefDef): Unit = if (dd.symbol.isConstructor) - unit.error(dd.symbol.pos, "A constructor cannot be annotated with a `varargs` annotation.") - else treeInfo.repeatedParams(dd) match { - case Nil => - unit.error(dd.symbol.pos, "A method without repeated parameters cannot be annotated with the `varargs` annotation.") - case reps => - repeatedParams(dd.symbol) = reps + reporter.error(dd.symbol.pos, "A constructor cannot be annotated with a `varargs` annotation.") + else { + val hasRepeated = mexists(dd.symbol.paramss)(sym => definitions.isRepeatedParamType(sym.tpe)) + if (!hasRepeated) reporter.error(dd.symbol.pos, "A method without repeated parameters cannot be annotated with the `varargs` annotation.") } /* Called during post transform, after the method argument lists have been flattened. @@ -717,7 +711,7 @@ abstract class UnCurry extends InfoTransform * varargs forwarder. */ private def addJavaVarargsForwarders(dd: DefDef, flatdd: DefDef): DefDef = { - if (!dd.symbol.hasAnnotation(VarargsClass) || !repeatedParams.contains(dd.symbol)) + if (!dd.symbol.hasAnnotation(VarargsClass) || !enteringUncurry(mexists(dd.symbol.paramss)(sym => definitions.isRepeatedParamType(sym.tpe)))) return flatdd def toArrayType(tp: Type): Type = { @@ -733,19 +727,18 @@ abstract class UnCurry extends InfoTransform ) } - val reps = repeatedParams(dd.symbol) - val rpsymbols = reps.map(_.symbol).toSet val theTyper = typer.atOwner(dd, currentClass) - val flatparams = flatdd.vparamss.head + val flatparams = flatdd.symbol.paramss.head + val isRepeated = enteringUncurry(dd.symbol.info.paramss.flatten.map(sym => definitions.isRepeatedParamType(sym.tpe))) // create the type - val forwformals = flatparams map { - case p if rpsymbols(p.symbol) => toArrayType(p.symbol.tpe) - case p => p.symbol.tpe + val forwformals = map2(flatparams, isRepeated) { + case (p, true) => toArrayType(p.tpe) + case (p, false)=> p.tpe } val forwresult = dd.symbol.tpe_*.finalResultType val forwformsyms = map2(forwformals, flatparams)((tp, oldparam) => - currentClass.newValueParameter(oldparam.name, oldparam.symbol.pos).setInfo(tp) + currentClass.newValueParameter(oldparam.name.toTermName, oldparam.pos).setInfo(tp) ) def mono = MethodType(forwformsyms, forwresult) val forwtype = dd.symbol.tpe match { @@ -759,13 +752,13 @@ abstract class UnCurry extends InfoTransform // create the tree val forwtree = theTyper.typedPos(dd.pos) { - val locals = map2(forwParams, flatparams) { - case (_, fp) if !rpsymbols(fp.symbol) => null - case (argsym, fp) => + val locals = map3(forwParams, flatparams, isRepeated) { + case (_, fp, false) => null + case (argsym, fp, true) => Block(Nil, gen.mkCast( gen.mkWrapArray(Ident(argsym), elementType(ArrayClass, argsym.tpe)), - seqType(elementType(SeqClass, fp.symbol.tpe)) + seqType(elementType(SeqClass, fp.tpe)) ) ) } @@ -780,7 +773,7 @@ abstract class UnCurry extends InfoTransform // check if the method with that name and those arguments already exists in the template currentClass.info.member(forwsym.name).alternatives.find(s => s != forwsym && s.tpe.matches(forwsym.tpe)) match { - case Some(s) => unit.error(dd.symbol.pos, + case Some(s) => reporter.error(dd.symbol.pos, "A method with a varargs annotation produces a forwarder method with the same signature " + s.tpe + " as an existing method.") case None => diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala index 
e0bc478fadb0..0b53dc37defc 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala @@ -10,15 +10,14 @@ package tools.nsc.transform.patmat import scala.language.postfixOps import scala.collection.mutable import scala.reflect.internal.util.Statistics -import scala.reflect.internal.util.Position import scala.reflect.internal.util.HashSet trait Logic extends Debugging { import PatternMatchingStats._ private def max(xs: Seq[Int]) = if (xs isEmpty) 0 else xs max - private def alignedColumns(cols: Seq[AnyRef]): Seq[String] = { - def toString(x: AnyRef) = if (x eq null) "" else x.toString + private def alignedColumns(cols: Seq[Any]): Seq[String] = { + def toString(x: Any) = if (x == null) "" else x.toString if (cols.isEmpty || cols.tails.isEmpty) cols map toString else { val colLens = cols map (c => toString(c).length) @@ -33,7 +32,7 @@ trait Logic extends Debugging { } } - def alignAcrossRows(xss: List[List[AnyRef]], sep: String, lineSep: String = "\n"): String = { + def alignAcrossRows(xss: List[List[Any]], sep: String, lineSep: String = "\n"): String = { val maxLen = max(xss map (_.length)) val padded = xss map (xs => xs ++ List.fill(maxLen - xs.length)(null)) padded.transpose.map(alignedColumns).transpose map (_.mkString(sep)) mkString(lineSep) @@ -47,7 +46,7 @@ trait Logic extends Debugging { type Tree class Prop - case class Eq(p: Var, q: Const) extends Prop + final case class Eq(p: Var, q: Const) extends Prop type Const @@ -72,6 +71,8 @@ trait Logic extends Debugging { def unapply(v: Var): Some[Tree] } + def reportWarning(message: String): Unit + // resets hash consing -- only supposed to be called by TreeMakersToProps def prepareNewAnalysis(): Unit @@ -104,43 +105,157 @@ trait Logic extends Debugging { // would be nice to statically check whether a prop is equational or pure, // but that requires typing relations like And(x: Tx, y: Ty) : (if(Tx == PureProp && Ty == PureProp) PureProp else Prop) - case class And(a: Prop, b: Prop) extends Prop - case class Or(a: Prop, b: Prop) extends Prop - case class Not(a: Prop) extends Prop + final case class And(ops: Set[Prop]) extends Prop + object And { + def apply(ops: Prop*) = new And(ops.toSet) + } + + final case class Or(ops: Set[Prop]) extends Prop + object Or { + def apply(ops: Prop*) = new Or(ops.toSet) + } + + final case class Not(a: Prop) extends Prop case object True extends Prop case object False extends Prop // symbols are propositions - abstract case class Sym(variable: Var, const: Const) extends Prop { + final class Sym private[PropositionalLogic] (val variable: Var, val const: Const) extends Prop { + + override def equals(other: scala.Any): Boolean = other match { + case that: Sym => this.variable == that.variable && + this.const == that.const + case _ => false + } + + override def hashCode(): Int = { + variable.hashCode * 41 + const.hashCode + } + private val id: Int = Sym.nextSymId - override def toString = variable +"="+ const +"#"+ id + override def toString = s"$variable=$const#$id" } - class UniqueSym(variable: Var, const: Const) extends Sym(variable, const) + object Sym { private val uniques: HashSet[Sym] = new HashSet("uniques", 512) def apply(variable: Var, const: Const): Sym = { - val newSym = new UniqueSym(variable, const) + val newSym = new Sym(variable, const) (uniques findEntryOrUpdate newSym) } - private def nextSymId = {_symId += 1; _symId}; private var _symId = 0 + def nextSymId = {_symId += 1; _symId}; private var _symId = 0 implicit val 
SymOrdering: Ordering[Sym] = Ordering.by(_.id) } - def /\(props: Iterable[Prop]) = if (props.isEmpty) True else props.reduceLeft(And(_, _)) - def \/(props: Iterable[Prop]) = if (props.isEmpty) False else props.reduceLeft(Or(_, _)) + def /\(props: Iterable[Prop]) = if (props.isEmpty) True else And(props.toSeq: _*) + def \/(props: Iterable[Prop]) = if (props.isEmpty) False else Or(props.toSeq: _*) + + /** + * Simplifies propositional formula according to the following rules: + * - eliminate double negation (avoids unnecessary Tseitin variables) + * - flatten trees of same connectives (avoids unnecessary Tseitin variables) + * - removes constants and connectives that are in fact constant because of their operands + * - eliminates duplicate operands + * - convert formula into NNF: all sub-expressions have a positive polarity + * which makes them amenable for the subsequent Plaisted transformation + * and increases chances to figure out that the formula is already in CNF + * + * Complexity: DFS over formula tree + * + * See http://www.decision-procedures.org/slides/propositional_logic-2x3.pdf + */ + def simplify(f: Prop): Prop = { + + // limit size to avoid blow up + def hasImpureAtom(ops: Seq[Prop]): Boolean = ops.size < 10 && + ops.combinations(2).exists { + case Seq(a, Not(b)) if a == b => true + case Seq(Not(a), b) if a == b => true + case _ => false + } + + // push negation inside formula + def negationNormalFormNot(p: Prop): Prop = p match { + case And(ops) => Or(ops.map(negationNormalFormNot)) // De'Morgan + case Or(ops) => And(ops.map(negationNormalFormNot)) // De'Morgan + case Not(p) => negationNormalForm(p) + case True => False + case False => True + case s: Sym => Not(s) + } + + def negationNormalForm(p: Prop): Prop = p match { + case And(ops) => And(ops.map(negationNormalForm)) + case Or(ops) => Or(ops.map(negationNormalForm)) + case Not(negated) => negationNormalFormNot(negated) + case True + | False + | (_: Sym) => p + } + + def simplifyProp(p: Prop): Prop = p match { + case And(fv) => + // recurse for nested And (pulls all Ands up) + val ops = fv.map(simplifyProp) - True // ignore `True` + + // build up Set in order to remove duplicates + val opsFlattened = ops.flatMap { + case And(fv) => fv + case f => Set(f) + }.toSeq + + if (hasImpureAtom(opsFlattened) || opsFlattened.contains(False)) { + False + } else { + opsFlattened match { + case Seq() => True + case Seq(f) => f + case ops => And(ops: _*) + } + } + case Or(fv) => + // recurse for nested Or (pulls all Ors up) + val ops = fv.map(simplifyProp) - False // ignore `False` + + val opsFlattened = ops.flatMap { + case Or(fv) => fv + case f => Set(f) + }.toSeq + + if (hasImpureAtom(opsFlattened) || opsFlattened.contains(True)) { + True + } else { + opsFlattened match { + case Seq() => False + case Seq(f) => f + case ops => Or(ops: _*) + } + } + case Not(Not(a)) => + simplify(a) + case Not(p) => + Not(simplify(p)) + case p => + p + } + + val nnf = negationNormalForm(f) + simplifyProp(nnf) + } trait PropTraverser { def apply(x: Prop): Unit = x match { - case And(a, b) => apply(a); apply(b) - case Or(a, b) => apply(a); apply(b) + case And(ops) => ops foreach apply + case Or(ops) => ops foreach apply case Not(a) => apply(a) case Eq(a, b) => applyVar(a); applyConst(b) + case s: Sym => applySymbol(s) case _ => } def applyVar(x: Var): Unit = {} def applyConst(x: Const): Unit = {} + def applySymbol(x: Sym): Unit = {} } def gatherVariables(p: Prop): Set[Var] = { @@ -151,29 +266,27 @@ trait Logic extends Debugging { vars.toSet } + def 
gatherSymbols(p: Prop): Set[Sym] = { + val syms = new mutable.HashSet[Sym]() + (new PropTraverser { + override def applySymbol(s: Sym) = syms += s + })(p) + syms.toSet + } + trait PropMap { def apply(x: Prop): Prop = x match { // TODO: mapConserve - case And(a, b) => And(apply(a), apply(b)) - case Or(a, b) => Or(apply(a), apply(b)) + case And(ops) => And(ops map apply) + case Or(ops) => Or(ops map apply) case Not(a) => Not(apply(a)) case p => p } } - // to govern how much time we spend analyzing matches for unreachability/exhaustivity - object AnalysisBudget { - private val budgetProp = scala.sys.Prop[Int]("scalac.patmat.analysisBudget") - private val budgetOff = "off" - val max: Int = { - val DefaultBudget = 256 - budgetProp.option.getOrElse(if (budgetProp.get.equalsIgnoreCase("off")) Integer.MAX_VALUE else DefaultBudget) - } - - abstract class Exception(val advice: String) extends RuntimeException("CNF budget exceeded") - - object exceeded extends Exception( - s"(The analysis required more space than allowed. Please try with scalac -D${budgetProp.key}=${AnalysisBudget.max*2} or -D${budgetProp.key}=${budgetOff}.)") - + // TODO: remove since deprecated + val budgetProp = scala.sys.Prop[String]("scalac.patmat.analysisBudget") + if (budgetProp.isSet) { + reportWarning(s"Please remove -D${budgetProp.key}, it is ignored.") } // convert finite domain propositional logic with subtyping to pure boolean propositional logic @@ -194,10 +307,10 @@ trait Logic extends Debugging { // TODO: for V1 representing x1 and V2 standing for x1.head, encode that // V1 = Nil implies -(V2 = Ci) for all Ci in V2's domain (i.e., it is unassignable) // may throw an AnalysisBudget.Exception - def removeVarEq(props: List[Prop], modelNull: Boolean = false): (Formula, List[Formula]) = { + def removeVarEq(props: List[Prop], modelNull: Boolean = false): (Prop, List[Prop]) = { val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaVarEq) else null - val vars = new scala.collection.mutable.HashSet[Var] + val vars = new mutable.HashSet[Var] object gatherEqualities extends PropTraverser { override def apply(p: Prop) = p match { @@ -218,10 +331,10 @@ trait Logic extends Debugging { props foreach gatherEqualities.apply if (modelNull) vars foreach (_.registerNull()) - val pure = props map (p => eqFreePropToSolvable(rewriteEqualsToProp(p))) + val pure = props map (p => rewriteEqualsToProp(p)) - val eqAxioms = formulaBuilder - @inline def addAxiom(p: Prop) = addFormula(eqAxioms, eqFreePropToSolvable(p)) + val eqAxioms = mutable.ArrayBuffer[Prop]() + @inline def addAxiom(p: Prop) = eqAxioms += p debug.patmat("removeVarEq vars: "+ vars) vars.foreach { v => @@ -247,49 +360,32 @@ trait Logic extends Debugging { } } - debug.patmat("eqAxioms:\n"+ cnfString(toFormula(eqAxioms))) - debug.patmat("pure:"+ pure.map(p => cnfString(p)).mkString("\n")) + debug.patmat(s"eqAxioms:\n${eqAxioms.mkString("\n")}") + debug.patmat(s"pure:${pure.mkString("\n")}") if (Statistics.canEnable) Statistics.stopTimer(patmatAnaVarEq, start) - (toFormula(eqAxioms), pure) + (And(eqAxioms: _*), pure) } + type Solvable - // an interface that should be suitable for feeding a SAT solver when the time comes - type Formula - type FormulaBuilder - - // creates an empty formula builder to which more formulae can be added - def formulaBuilder: FormulaBuilder - - // val f = formulaBuilder; addFormula(f, f1); ... 
addFormula(f, fN) - // toFormula(f) == andFormula(f1, andFormula(..., fN)) - def addFormula(buff: FormulaBuilder, f: Formula): Unit - def toFormula(buff: FormulaBuilder): Formula - - // the conjunction of formulae `a` and `b` - def andFormula(a: Formula, b: Formula): Formula - - // equivalent formula to `a`, but simplified in a lightweight way (drop duplicate clauses) - def simplifyFormula(a: Formula): Formula - - // may throw an AnalysisBudget.Exception - def propToSolvable(p: Prop): Formula = { - val (eqAxioms, pure :: Nil) = removeVarEq(List(p), modelNull = false) - andFormula(eqAxioms, pure) + def propToSolvable(p: Prop): Solvable = { + val (eqAxiom, pure :: Nil) = removeVarEq(List(p), modelNull = false) + eqFreePropToSolvable(And(eqAxiom, pure)) } - // may throw an AnalysisBudget.Exception - def eqFreePropToSolvable(p: Prop): Formula - def cnfString(f: Formula): String + def eqFreePropToSolvable(f: Prop): Solvable - type Model = collection.immutable.SortedMap[Sym, Boolean] + type Model = Map[Sym, Boolean] val EmptyModel: Model val NoModel: Model - def findModelFor(f: Formula): Model - def findAllModelsFor(f: Formula): List[Model] + final case class Solution(model: Model, unassigned: List[Sym]) + + def findModelFor(solvable: Solvable): Model + + def findAllModelsFor(solvable: Solvable): List[Solution] } } @@ -498,7 +594,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { } - import global.{ConstantType, Constant, SingletonType, Literal, Ident, singleType} + import global.{ConstantType, Constant, EmptyScope, SingletonType, Literal, Ident, refinedType, singleType, TypeBounds, NoSymbol} import global.definitions._ @@ -531,23 +627,30 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { private val trees = mutable.HashSet.empty[Tree] // hashconsing trees (modulo value-equality) - private[TreesAndTypesDomain] def uniqueTpForTree(t: Tree): Type = - // a new type for every unstable symbol -- only stable value are uniqued - // technically, an unreachable value may change between cases - // thus, the failure of a case that matches on a mutable value does not exclude the next case succeeding - // (and thuuuuus, the latter case must be considered reachable) - if (!t.symbol.isStable) t.tpe.narrow + private[TreesAndTypesDomain] def uniqueTpForTree(t: Tree): Type = { + def freshExistentialSubtype(tp: Type): Type = { + // SI-8611 tp.narrow is tempting, but unsuitable. See `testRefinedTypeSI8611` for an explanation. + NoSymbol.freshExistential("").setInfo(TypeBounds.upper(tp)).tpe + } + + if (!t.symbol.isStable) { + // Create a fresh type for each unstable value, since we can never correlate it to another value. + // For example `case X => case X =>` should not complain about the second case being unreachable, + // if X is mutable. + freshExistentialSubtype(t.tpe) + } else trees find (a => a.correspondsStructure(t)(sameValue)) match { case Some(orig) => - debug.patmat("unique tp for tree: "+ ((orig, orig.tpe))) + debug.patmat("unique tp for tree: " + ((orig, orig.tpe))) orig.tpe case _ => // duplicate, don't mutate old tree (TODO: use a map tree -> type instead?) 
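// Illustrative, self-contained sketch, not part of this patch: the negation-normal-form step that
// `negationNormalForm` in Logic.scala above performs, mirrored on a toy ADT. TSym/TAnd/TOr/TNot/
// TTrue/TFalse are stand-ins for the compiler's Prop hierarchy, used for illustration only.
object NnfSketch {
  sealed trait P
  case class TSym(name: String) extends P
  case class TAnd(ops: Set[P])  extends P
  case class TOr(ops: Set[P])   extends P
  case class TNot(p: P)         extends P
  case object TTrue             extends P
  case object TFalse            extends P

  def nnf(p: P): P = p match {
    case TAnd(ops)   => TAnd(ops map nnf)
    case TOr(ops)    => TOr(ops map nnf)
    case TNot(inner) => nnfNot(inner)      // push the negation inwards
    case other       => other
  }
  private def nnfNot(p: P): P = p match {  // De Morgan plus double-negation elimination
    case TAnd(ops)   => TOr(ops map nnfNot)
    case TOr(ops)    => TAnd(ops map nnfNot)
    case TNot(inner) => nnf(inner)
    case TTrue       => TFalse
    case TFalse      => TTrue
    case s: TSym     => TNot(s)
  }
  // nnf(TNot(TAnd(Set(TSym("a"), TNot(TSym("b")))))) == TOr(Set(TNot(TSym("a")), TSym("b")))
}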
- val treeWithNarrowedType = t.duplicate setType t.tpe.narrow + val treeWithNarrowedType = t.duplicate setType freshExistentialSubtype(t.tpe) debug.patmat("uniqued: "+ ((t, t.tpe, treeWithNarrowedType.tpe))) trees += treeWithNarrowedType treeWithNarrowedType.tpe } + } } sealed abstract class Const { diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala index 2893cbdf450e..34ebbc74630a 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala @@ -6,10 +6,11 @@ package scala.tools.nsc.transform.patmat +import scala.annotation.tailrec +import scala.collection.immutable.{IndexedSeq, Iterable} import scala.language.postfixOps import scala.collection.mutable import scala.reflect.internal.util.Statistics -import scala.reflect.internal.util.Position trait TreeAndTypeAnalysis extends Debugging { import global._ @@ -50,8 +51,8 @@ trait TreeAndTypeAnalysis extends Debugging { // but the annotation didn't bubble up... // This is a pretty poor approximation. def unsoundAssumptionUsed = binder.name != nme.WILDCARD && !(pt <:< pat.tpe) - if (settings.lint && unsoundAssumptionUsed) - global.currentUnit.warning(pat.pos, + if (settings.warnUnsoundMatch && unsoundAssumptionUsed) + reporter.warning(pat.pos, sm"""The value matched by $pat is bound to ${binder.name}, which may be used under the |unsound assumption that it has type ${pat.tpe}, whereas we can only safely |count on it having type $pt, as the pattern is matched using `==` (see SI-1503).""") @@ -104,11 +105,18 @@ trait TreeAndTypeAnalysis extends Debugging { // TODO case _ if tp.isTupleType => // recurse into component types case modSym: ModuleClassSymbol => Some(List(tp)) + case sym: RefinementClassSymbol => + val parentSubtypes: List[Option[List[Type]]] = tp.parents.map(parent => enumerateSubtypes(parent)) + if (parentSubtypes exists (_.isDefined)) + // If any of the parents is enumerable, then the refinement type is enumerable. + Some( + // We must only include subtypes of the parents that conform to `tp`. + // See neg/virtpatmat_exhaust_compound.scala for an example. + parentSubtypes flatMap (_.getOrElse(Nil)) filter (_ <:< tp) + ) + else None // make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte - case sym if !sym.isSealed || isPrimitiveValueClass(sym) => - debug.patmat("enum unsealed "+ ((tp, sym, sym.isSealed, isPrimitiveValueClass(sym)))) - None - case sym => + case sym if sym.isSealed => val subclasses = debug.patmatResult(s"enum $sym sealed, subclasses")( // symbols which are both sealed and abstract need not be covered themselves, because // all of their children must be and they cannot otherwise be created. @@ -136,6 +144,9 @@ trait TreeAndTypeAnalysis extends Debugging { else None } }) + case sym => + debug.patmat("enum unsealed "+ ((tp, sym, sym.isSealed, isPrimitiveValueClass(sym)))) + None } // approximate a type to the static type that is fully checkable at run time, @@ -149,20 +160,20 @@ trait TreeAndTypeAnalysis extends Debugging { object typeArgsToWildcardsExceptArray extends TypeMap { // SI-6771 dealias would be enough today, but future proofing with the dealiasWiden. 
// See neg/t6771b.scala for elaboration - def apply(tp: Type): Type = tp.dealiasWiden match { + def apply(tp: Type): Type = tp.dealias match { case TypeRef(pre, sym, args) if args.nonEmpty && (sym ne ArrayClass) => TypeRef(pre, sym, args map (_ => WildcardType)) case _ => mapOver(tp) } } - debug.patmatResult(s"checkableType($tp)")(typeArgsToWildcardsExceptArray(tp)) + val result = typeArgsToWildcardsExceptArray(tp) + debug.patmatResult(s"checkableType($tp)")(result) } // a type is "uncheckable" (for exhaustivity) if we don't statically know its subtypes (i.e., it's unsealed) // we consider tuple types with at least one component of a checkable type as a checkable type def uncheckableType(tp: Type): Boolean = { - def tupleComponents(tp: Type) = tp.normalize.typeArgs val checkable = ( (isTupleType(tp) && tupleComponents(tp).exists(tp => !uncheckableType(tp))) || enumerateSubtypes(tp).nonEmpty) @@ -257,7 +268,7 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT // the type of the binder passed to the first invocation // determines the type of the tree that'll be returned for that binder as of then final def binderToUniqueTree(b: Symbol) = - unique(accumSubst(normalize(CODE.REF(b))), b.tpe) + unique(accumSubst(normalize(gen.mkAttributedStableRef(b))), b.tpe) // note that the sequencing of operations is important: must visit in same order as match execution // binderToUniqueTree uses the type of the first symbol that was encountered as the type for all future binders @@ -354,7 +365,7 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT def handleUnknown(tm: TreeMaker) = handler(tm) } - // used for CSE -- rewrite all unknowns to False (the most conserative option) + // used for CSE -- rewrite all unknowns to False (the most conservative option) object conservative extends TreeMakerToProp { def handleUnknown(tm: TreeMaker) = False } @@ -387,8 +398,8 @@ trait MatchAnalysis extends MatchApproximation { import global.definitions._ trait MatchAnalyzer extends MatchApproximator { - def uncheckedWarning(pos: Position, msg: String) = global.currentUnit.uncheckedWarning(pos, msg) - def warn(pos: Position, ex: AnalysisBudget.Exception, kind: String) = uncheckedWarning(pos, s"Cannot check match for $kind.\n${ex.advice}") + def uncheckedWarning(pos: Position, msg: String) = currentRun.reporting.uncheckedWarning(pos, msg) + def reportWarning(message: String) = global.reporter.warning(typer.context.tree.pos, message) // TODO: model dependencies between variables: if V1 corresponds to (x: List[_]) and V2 is (x.hd), V2 cannot be assigned when V1 = null or V1 = Nil // right now hackily implement this by pruning counter-examples @@ -418,49 +429,44 @@ trait MatchAnalysis extends MatchApproximation { val propsCasesOk = approximate(True) map caseWithoutBodyToProp val propsCasesFail = approximate(False) map (t => Not(caseWithoutBodyToProp(t))) - try { - val (eqAxiomsFail, symbolicCasesFail) = removeVarEq(propsCasesFail, modelNull = true) - val (eqAxiomsOk, symbolicCasesOk) = removeVarEq(propsCasesOk, modelNull = true) - val eqAxioms = simplifyFormula(andFormula(eqAxiomsOk, eqAxiomsFail)) // I'm pretty sure eqAxiomsOk == eqAxiomsFail, but not 100% sure. 
- - val prefix = formulaBuilder - addFormula(prefix, eqAxioms) - - var prefixRest = symbolicCasesFail - var current = symbolicCasesOk - var reachable = true - var caseIndex = 0 - - debug.patmat("reachability, vars:\n"+ ((propsCasesFail flatMap gatherVariables).distinct map (_.describe) mkString ("\n"))) - debug.patmat("equality axioms:\n"+ cnfString(eqAxiomsOk)) - - // invariant (prefixRest.length == current.length) && (prefix.reverse ++ prefixRest == symbolicCasesFail) - // termination: prefixRest.length decreases by 1 - while (prefixRest.nonEmpty && reachable) { - val prefHead = prefixRest.head - caseIndex += 1 - prefixRest = prefixRest.tail - if (prefixRest.isEmpty) reachable = true - else { - addFormula(prefix, prefHead) - current = current.tail - val model = findModelFor(andFormula(current.head, toFormula(prefix))) + val (eqAxiomsFail, symbolicCasesFail) = removeVarEq(propsCasesFail, modelNull = true) + val (eqAxiomsOk, symbolicCasesOk) = removeVarEq(propsCasesOk, modelNull = true) + val eqAxioms = simplify(And(eqAxiomsOk, eqAxiomsFail)) // I'm pretty sure eqAxiomsOk == eqAxiomsFail, but not 100% sure. - // debug.patmat("trying to reach:\n"+ cnfString(current.head) +"\nunder prefix:\n"+ cnfString(prefix)) - // if (NoModel ne model) debug.patmat("reached: "+ modelString(model)) + val prefix = mutable.ArrayBuffer[Prop]() + prefix += eqAxioms - reachable = NoModel ne model - } - } + var prefixRest = symbolicCasesFail + var current = symbolicCasesOk + var reachable = true + var caseIndex = 0 + + debug.patmat("reachability, vars:\n" + ((propsCasesFail flatMap gatherVariables).distinct map (_.describe) mkString ("\n"))) + debug.patmat(s"equality axioms:\n$eqAxiomsOk") + + // invariant (prefixRest.length == current.length) && (prefix.reverse ++ prefixRest == symbolicCasesFail) + // termination: prefixRest.length decreases by 1 + while (prefixRest.nonEmpty && reachable) { + val prefHead = prefixRest.head + caseIndex += 1 + prefixRest = prefixRest.tail + if (prefixRest.isEmpty) reachable = true + else { + prefix += prefHead + current = current.tail + val and = And((current.head +: prefix): _*) + val model = findModelFor(eqFreePropToSolvable(and)) - if (Statistics.canEnable) Statistics.stopTimer(patmatAnaReach, start) + // debug.patmat("trying to reach:\n"+ cnfString(current.head) +"\nunder prefix:\n"+ cnfString(prefix)) + // if (NoModel ne model) debug.patmat("reached: "+ modelString(model)) - if (reachable) None else Some(caseIndex) - } catch { - case ex: AnalysisBudget.Exception => - warn(prevBinder.pos, ex, "unreachability") - None // CNF budget exceeded + reachable = NoModel ne model + } } + + if (Statistics.canEnable) Statistics.stopTimer(patmatAnaReach, start) + + if (reachable) None else Some(caseIndex) } // exhaustivity @@ -508,22 +514,25 @@ trait MatchAnalysis extends MatchApproximation { // debug.patmat("\nvars:\n"+ (vars map (_.describe) mkString ("\n"))) // debug.patmat("\nmatchFails as CNF:\n"+ cnfString(propToSolvable(matchFails))) - try { - // find the models (under which the match fails) - val matchFailModels = findAllModelsFor(propToSolvable(matchFails)) + // find the models (under which the match fails) + val matchFailModels = findAllModelsFor(propToSolvable(matchFails)) - val scrutVar = Var(prevBinderTree) - val counterExamples = matchFailModels.map(modelToCounterExample(scrutVar)) + val scrutVar = Var(prevBinderTree) + val counterExamples = { + matchFailModels.flatMap { + model => + val varAssignments = expandModel(model) + 
varAssignments.flatMap(modelToCounterExample(scrutVar) _) + } + } - val pruned = CounterExample.prune(counterExamples).map(_.toString).sorted + // sorting before pruning is important here in order to + // keep neg/t7020.scala stable + // since e.g. List(_, _) would cover List(1, _) + val pruned = CounterExample.prune(counterExamples.sortBy(_.toString)).map(_.toString) - if (Statistics.canEnable) Statistics.stopTimer(patmatAnaExhaust, start) - pruned - } catch { - case ex : AnalysisBudget.Exception => - warn(prevBinder.pos, ex, "exhaustivity") - Nil // CNF budget exceeded - } + if (Statistics.canEnable) Statistics.stopTimer(patmatAnaExhaust, start) + pruned } } @@ -588,6 +597,8 @@ trait MatchAnalysis extends MatchApproximation { case object WildcardExample extends CounterExample { override def toString = "_" } case object NoExample extends CounterExample { override def toString = "??" } + // returns a mapping from variable to + // equal and notEqual symbols def modelToVarAssignment(model: Model): Map[Var, (Seq[Const], Seq[Const])] = model.toSeq.groupBy{f => f match {case (sym, value) => sym.variable} }.mapValues{ xs => val (trues, falses) = xs.partition(_._2) @@ -601,20 +612,110 @@ trait MatchAnalysis extends MatchApproximation { v +"(="+ v.path +": "+ v.staticTpCheckable +") "+ assignment }.mkString("\n") - // return constructor call when the model is a true counter example - // (the variables don't take into account type information derived from other variables, - // so, naively, you might try to construct a counter example like _ :: Nil(_ :: _, _ :: _), - // since we didn't realize the tail of the outer cons was a Nil) - def modelToCounterExample(scrutVar: Var)(model: Model): CounterExample = { + /** + * The models we get from the DPLL solver need to be mapped back to counter examples. + * However there's no precalculated mapping model -> counter example. Even worse, + * not every valid model corresponds to a valid counter example. + * The reason is that restricting the valid models further would for example require + * a quadratic number of additional clauses. So to keep the optimistic case fast + * (i.e., all cases are covered in a pattern match), the infeasible counter examples + * are filtered later. + * + * The DPLL procedure keeps the literals that do not contribute to the solution + * unassigned, e.g., for `(a \/ b)` + * only {a = true} or {b = true} is required and the other variable can have any value. + * + * This function does a smart expansion of the model and avoids models that + * have conflicting mappings. + * + * For example for in case of the given set of symbols (taken from `t7020.scala`): + * "V2=2#16" + * "V2=6#19" + * "V2=5#18" + * "V2=4#17" + * "V2=7#20" + * + * One possibility would be to group the symbols by domain but + * this would only work for equality tests and would not be compatible + * with type tests. + * Another observation leads to a much simpler algorithm: + * Only one of these symbols can be set to true, + * since `V2` can at most be equal to one of {2,6,5,4,7}. + */ + def expandModel(solution: Solution): List[Map[Var, (Seq[Const], Seq[Const])]] = { + + val model = solution.model + // x1 = ... // x1.hd = ... // x1.tl = ... // x1.hd.hd = ... // ... 
val varAssignment = modelToVarAssignment(model) + debug.patmat("var assignment for model " + model + ":\n" + varAssignmentString(varAssignment)) + + // group symbols that assign values to the same variables (i.e., symbols are mutually exclusive) + // (thus the groups are sets of disjoint assignments to variables) + val groupedByVar: Map[Var, List[Sym]] = solution.unassigned.groupBy(_.variable) + + val expanded = for { + (variable, syms) <- groupedByVar.toList + } yield { + + val (equal, notEqual) = varAssignment.getOrElse(variable, Nil -> Nil) + + def addVarAssignment(equalTo: List[Const], notEqualTo: List[Const]) = { + Map(variable ->(equal ++ equalTo, notEqual ++ notEqualTo)) + } + + // this assignment is needed in case that + // there exists already an assign + val allNotEqual = addVarAssignment(Nil, syms.map(_.const)) + + // this assignment is conflicting on purpose: + // a list counter example could contain wildcards: e.g. `List(_,_)` + val allEqual = addVarAssignment(syms.map(_.const), Nil) + + if(equal.isEmpty) { + val oneHot = for { + s <- syms + } yield { + addVarAssignment(List(s.const), syms.filterNot(_ == s).map(_.const)) + } + allEqual :: allNotEqual :: oneHot + } else { + allEqual :: allNotEqual :: Nil + } + } + + if (expanded.isEmpty) { + List(varAssignment) + } else { + // we need the cartesian product here, + // since we want to report all missing cases + // (i.e., combinations) + val cartesianProd = expanded.reduceLeft((xs, ys) => + for {map1 <- xs + map2 <- ys} yield { + map1 ++ map2 + }) - debug.patmat("var assignment for model "+ model +":\n"+ varAssignmentString(varAssignment)) + // add expanded variables + // note that we can just use `++` + // since the Maps have disjoint keySets + for { + m <- cartesianProd + } yield { + varAssignment ++ m + } + } + } + // return constructor call when the model is a true counter example + // (the variables don't take into account type information derived from other variables, + // so, naively, you might try to construct a counter example like _ :: Nil(_ :: _, _ :: _), + // since we didn't realize the tail of the outer cons was a Nil) + def modelToCounterExample(scrutVar: Var)(varAssignment: Map[Var, (Seq[Const], Seq[Const])]): Option[CounterExample] = { // chop a path into a list of symbols def chop(path: Tree): List[Symbol] = path match { case Ident(_) => List(path.symbol) @@ -663,6 +764,7 @@ trait MatchAnalysis extends MatchApproximation { private val fields: mutable.Map[Symbol, VariableAssignment] = mutable.HashMap.empty // need to prune since the model now incorporates all super types of a constant (needed for reachability) private lazy val uniqueEqualTo = equalTo filterNot (subsumed => equalTo.exists(better => (better ne subsumed) && instanceOfTpImplies(better.tp, subsumed.tp))) + private lazy val inSameDomain = uniqueEqualTo forall (const => variable.domainSyms.exists(_.exists(_.const.tp =:= const.tp))) private lazy val prunedEqualTo = uniqueEqualTo filterNot (subsumed => variable.staticTpCheckable <:< subsumed.tp) private lazy val ctor = (prunedEqualTo match { case List(TypeConst(tp)) => tp case _ => variable.staticTpCheckable }).typeSymbol.primaryConstructor private lazy val ctorParams = if (ctor.paramss.isEmpty) Nil else ctor.paramss.head @@ -683,13 +785,13 @@ trait MatchAnalysis extends MatchApproximation { // NoExample if the constructor call is ill-typed // (thus statically impossible -- can we incorporate this into the formula?) 
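// Illustrative sketch, not part of this patch: the cartesian-product step used by expandModel
// above, on made-up per-variable expansions (plain Maps of strings stand in for Var/Const).
object ExpandSketch {
  val expanded: List[List[Map[String, String]]] = List(
    List(Map("V1" -> "Nil"), Map("V1" -> "_ :: _")),  // possible assignments for V1
    List(Map("V2" -> "1"),   Map("V2" -> "not 1"))    // possible assignments for V2
  )
  // same shape as the reduceLeft/for-yield above: every combination of one choice per variable
  val cartesianProd: List[Map[String, String]] =
    expanded.reduceLeft((xs, ys) => for { m1 <- xs; m2 <- ys } yield m1 ++ m2)
  // => 4 maps, one per missing-case combination that can then be reported as a counter-example
}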
// beBrief is used to suppress negative information nested in tuples -- it tends to get too noisy - def toCounterExample(beBrief: Boolean = false): CounterExample = - if (!allFieldAssignmentsLegal) NoExample + def toCounterExample(beBrief: Boolean = false): Option[CounterExample] = + if (!allFieldAssignmentsLegal) Some(NoExample) else { debug.patmat("describing "+ ((variable, equalTo, notEqualTo, fields, cls, allFieldAssignmentsLegal))) val res = prunedEqualTo match { // a definite assignment to a value - case List(eq: ValueConst) if fields.isEmpty => ValueExample(eq) + case List(eq: ValueConst) if fields.isEmpty => Some(ValueExample(eq)) // constructor call // or we did not gather any information about equality but we have information about the fields @@ -702,30 +804,50 @@ trait MatchAnalysis extends MatchApproximation { // figure out the constructor arguments from the field assignment val argLen = (caseFieldAccs.length min ctorParams.length) - (0 until argLen).map(i => fields.get(caseFieldAccs(i)).map(_.toCounterExample(brevity)) getOrElse WildcardExample).toList + val examples = (0 until argLen).map(i => fields.get(caseFieldAccs(i)).map(_.toCounterExample(brevity)) getOrElse Some(WildcardExample)).toList + sequence(examples) } cls match { - case ConsClass => ListExample(args()) - case _ if isTupleSymbol(cls) => TupleExample(args(brevity = true)) - case _ => ConstructorExample(cls, args()) + case ConsClass => + args().map { + case List(NoExample, l: ListExample) => + // special case for neg/t7020.scala: + // if we find a counter example `??::*` we report `*::*` instead + // since the `??` originates from uniqueEqualTo containing several instanced of the same type + List(WildcardExample, l) + case args => args + }.map(ListExample) + case _ if isTupleSymbol(cls) => args(brevity = true).map(TupleExample) + case _ if cls.isSealed && cls.isAbstractClass => + // don't report sealed abstract classes, since + // 1) they can't be instantiated + // 2) we are already reporting any missing subclass (since we know the full domain) + // (see patmatexhaust.scala) + None + case _ => args().map(ConstructorExample(cls, _)) } // a definite assignment to a type - case List(eq) if fields.isEmpty => TypeExample(eq) + case List(eq) if fields.isEmpty => Some(TypeExample(eq)) // negative information case Nil if nonTrivialNonEqualTo.nonEmpty => // negation tends to get pretty verbose - if (beBrief) WildcardExample + if (beBrief) Some(WildcardExample) else { val eqTo = equalTo.headOption getOrElse TypeConst(variable.staticTpCheckable) - NegativeExample(eqTo, nonTrivialNonEqualTo) + Some(NegativeExample(eqTo, nonTrivialNonEqualTo)) } + // if uniqueEqualTo contains more than one symbol of the same domain + // then we can safely ignore these counter examples since we will eventually encounter + // both counter examples separately + case _ if inSameDomain => None + // not a valid counter-example, possibly since we have a definite type but there was a field mismatch // TODO: improve reasoning -- in the mean time, a false negative is better than an annoying false positive - case _ => NoExample + case _ => Some(NoExample) } debug.patmatResult("described as")(res) } @@ -741,12 +863,12 @@ trait MatchAnalysis extends MatchApproximation { } def analyzeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, suppression: Suppression): Unit = { - if (!suppression.unreachable) { + if (!suppression.suppressUnreachable) { unreachableCase(prevBinder, cases, pt) foreach { caseIndex => 
reportUnreachable(cases(caseIndex).last.pos) } } - if (!suppression.exhaustive) { + if (!suppression.suppressExhaustive) { val counterExamples = exhaustive(prevBinder, cases, pt) if (counterExamples.nonEmpty) reportMissingCases(prevBinder.pos, counterExamples) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala index 8ff782415931..b3aef8a20eaf 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala @@ -46,16 +46,16 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { val cond = test.prop def simplify(c: Prop): Set[Prop] = c match { - case And(a, b) => simplify(a) ++ simplify(b) - case Or(_, _) => Set(False) // TODO: make more precise - case Not(Eq(Var(_), NullConst)) => Set(True) // not worth remembering + case And(ops) => ops.toSet flatMap simplify + case Or(ops) => Set(False) // TODO: make more precise + case Not(Eq(Var(_), NullConst)) => Set(True) // not worth remembering case _ => Set(c) } val conds = simplify(cond) if (conds(False)) false // stop when we encounter a definite "no" or a "not sure" else { - val nonTrivial = conds filterNot (_ == True) + val nonTrivial = conds - True if (nonTrivial nonEmpty) { tested ++= nonTrivial @@ -442,7 +442,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { val distinctAlts = distinctBy(switchableAlts)(extractConst) if (distinctAlts.size < switchableAlts.size) { val duplicated = switchableAlts.groupBy(extractConst).flatMap(_._2.drop(1).take(1)) // report the first duplicated - global.currentUnit.warning(pos, s"Pattern contains duplicate alternatives: ${duplicated.mkString(", ")}") + reporter.warning(pos, s"Pattern contains duplicate alternatives: ${duplicated.mkString(", ")}") } CaseDef(Alternative(distinctAlts), guard, body) } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala index 699e98f9636c..6302e34ac98b 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala @@ -81,14 +81,14 @@ trait MatchTranslation { object SymbolAndTypeBound { def unapply(tree: Tree): Option[(Symbol, Type)] = tree match { - case SymbolBound(sym, SymbolAndTypeBound(_, tpe)) => Some(sym -> tpe) - case TypeBound(tpe) => Some(binder -> tpe) - case _ => None + case SymbolBound(sym, TypeBound(tpe)) => Some(sym -> tpe) + case TypeBound(tpe) => Some(binder -> tpe) + case _ => None } } object TypeBound { - def unapply(tree: Tree): Option[Type] = unbind(tree) match { + def unapply(tree: Tree): Option[Type] = tree match { case Typed(Ident(_), _) if tree.tpe != null => Some(tree.tpe) case _ => None } @@ -154,7 +154,7 @@ trait MatchTranslation { case SymbolBound(sym, expr) => bindingStep(sym, expr) case Literal(Constant(_)) | Ident(_) | Select(_, _) | This(_) => equalityTestStep() case Alternative(alts) => alternativesStep(alts) - case _ => context.unit.error(pos, unsupportedPatternMsg) ; noStep() + case _ => reporter.error(pos, unsupportedPatternMsg) ; noStep() } def translate(): List[TreeMaker] = nextStep() merge (_.translate()) @@ -208,7 +208,7 @@ trait MatchTranslation { case _ => (cases, None) } - checkMatchVariablePatterns(nonSyntheticCases) + if (!settings.XnoPatmatAnalysis) checkMatchVariablePatterns(nonSyntheticCases) // we don't 
transform after uncurry // (that would require more sophistication when generating trees, @@ -248,7 +248,10 @@ trait MatchTranslation { if (caseDefs forall treeInfo.isCatchCase) caseDefs else { val swatches = { // switch-catches - val bindersAndCases = caseDefs map { caseDef => + // SI-7459 must duplicate here as we haven't commited to switch emission, and just figuring out + // if we can ends up mutating `caseDefs` down in the use of `substituteSymbols` in + // `TypedSubstitution#Substitution`. That is called indirectly by `emitTypeSwitch`. + val bindersAndCases = caseDefs.map(_.duplicate) map { caseDef => // generate a fresh symbol for each case, hoping we'll end up emitting a type-switch (we don't have a global scrut there) // if we fail to emit a fine-grained switch, have to do translateCase again with a single scrutSym (TODO: uniformize substitution on treemakers so we can avoid this) val caseScrutSym = freshSym(pos, pureType(ThrowableTpe)) @@ -377,8 +380,8 @@ trait MatchTranslation { object ExtractorCall { // TODO: check unargs == args def apply(tree: Tree): ExtractorCall = tree match { - case UnApply(unfun, args) => new ExtractorCallRegular(alignPatterns(tree), unfun, args) // extractor - case Apply(fun, args) => new ExtractorCallProd(alignPatterns(tree), fun, args) // case class + case UnApply(unfun, args) => new ExtractorCallRegular(alignPatterns(context, tree), unfun, args) // extractor + case Apply(fun, args) => new ExtractorCallProd(alignPatterns(context, tree), fun, args) // case class } } @@ -518,7 +521,7 @@ trait MatchTranslation { // reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component override protected def tupleSel(binder: Symbol)(i: Int): Tree = { val accessors = binder.caseFieldAccessors - if (accessors isDefinedAt (i-1)) REF(binder) DOT accessors(i-1) + if (accessors isDefinedAt (i-1)) gen.mkAttributedStableRef(binder) DOT accessors(i-1) else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN } } @@ -544,10 +547,17 @@ trait MatchTranslation { // wrong when isSeq, and resultInMonad should always be correct since it comes // directly from the extractor's result type val binder = freshSym(pos, pureType(resultInMonad)) + val potentiallyMutableBinders: Set[Symbol] = + if (extractorApply.tpe.typeSymbol.isNonBottomSubClass(OptionClass) && !aligner.isSeq) + Set.empty + else + // Ensures we capture unstable bound variables eagerly. These can arise under name based patmat or by indexing into mutable Seqs. See run t9003.scala + subPatBinders.toSet ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)( subPatBinders, subPatRefs(binder), + potentiallyMutableBinders, aligner.isBool, checkedLength, patBinderOrCasted, @@ -573,7 +583,7 @@ trait MatchTranslation { // duplicated with the extractor Unapplied case Apply(x, List(i @ Ident(nme.SELECTOR_DUMMY))) => treeCopy.Apply(t, x, binderRef(i.pos) :: Nil) - // SI-7868 Account for numeric widening, e.g. .toInt + // SI-7868 Account for numeric widening, e.g. 
.toInt case Apply(x, List(i @ (sel @ Select(Ident(nme.SELECTOR_DUMMY), name)))) => treeCopy.Apply(t, x, treeCopy.Select(sel, binderRef(i.pos), name) :: Nil) case _ => diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala index 5d8a9fecefef..b703b5bc6dc3 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala @@ -21,9 +21,10 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { import global._ import definitions._ - final case class Suppression(exhaustive: Boolean, unreachable: Boolean) + final case class Suppression(suppressExhaustive: Boolean, suppressUnreachable: Boolean) object Suppression { val NoSuppression = Suppression(false, false) + val FullSuppression = Suppression(true, true) } /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// @@ -166,8 +167,17 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { val usedBinders = new mutable.HashSet[Symbol]() // all potentially stored subpat binders val potentiallyStoredBinders = stored.unzip._1.toSet + def ref(sym: Symbol) = + if (potentiallyStoredBinders(sym)) usedBinders += sym // compute intersection of all symbols in the tree `in` and all potentially stored subpat binders - in.foreach(t => if (potentiallyStoredBinders(t.symbol)) usedBinders += t.symbol) + in.foreach { + case tt: TypeTree => + tt.tpe foreach { // SI-7459 e.g. case Prod(t) => new t.u.Foo + case SingleType(_, sym) => ref(sym) + case _ => + } + case t => ref(t.symbol) + } if (usedBinders.isEmpty) in else { @@ -192,13 +202,14 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { case class ExtractorTreeMaker(extractor: Tree, extraCond: Option[Tree], nextBinder: Symbol)( val subPatBinders: List[Symbol], val subPatRefs: List[Tree], + val potentiallyMutableBinders: Set[Symbol], extractorReturnsBoolean: Boolean, val checkedLength: Option[Int], val prevBinder: Symbol, val ignoredSubPatBinders: Set[Symbol] ) extends FunTreeMaker with PreserveSubPatBinders { - def extraStoredBinders: Set[Symbol] = Set() + def extraStoredBinders: Set[Symbol] = potentiallyMutableBinders debug.patmat(s""" |ExtractorTreeMaker($extractor, $extraCond, $nextBinder) { @@ -516,7 +527,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { def removeSubstOnly(makers: List[TreeMaker]) = makers filterNot (_.isInstanceOf[SubstOnlyTreeMaker]) // a foldLeft to accumulate the localSubstitution left-to-right - // it drops SubstOnly tree makers, since their only goal in life is to propagate substitutions to the next tree maker, which is fullfilled by propagateSubstitution + // it drops SubstOnly tree makers, since their only goal in life is to propagate substitutions to the next tree maker, which is fulfilled by propagateSubstitution def propagateSubstitution(treeMakers: List[TreeMaker], initial: Substitution): List[TreeMaker] = { var accumSubst: Substitution = initial treeMakers foreach { maker => @@ -541,7 +552,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { debug.patmat("combining cases: "+ (casesNoSubstOnly.map(_.mkString(" >> ")).mkString("{", "\n", "}"))) val (suppression, requireSwitch): (Suppression, Boolean) = - if (settings.XnoPatmatAnalysis) (Suppression.NoSuppression, false) + if (settings.XnoPatmatAnalysis) (Suppression.FullSuppression, false) else 
scrut match { case Typed(tree, tpt) => val suppressExhaustive = tpt.tpe hasAnnotation UncheckedClass @@ -550,15 +561,31 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { case _ => false } val suppression = Suppression(suppressExhaustive, supressUnreachable) + val hasSwitchAnnotation = treeInfo.isSwitchAnnotation(tpt.tpe) // matches with two or fewer cases need not apply for switchiness (if-then-else will do) - val requireSwitch = treeInfo.isSwitchAnnotation(tpt.tpe) && casesNoSubstOnly.lengthCompare(2) > 0 + // `case 1 | 2` is considered as two cases. + def exceedsTwoCasesOrAlts = { + // avoids traversing the entire list if there are more than 3 elements + def lengthMax3[T](l: List[T]): Int = l match { + case a :: b :: c :: _ => 3 + case cases => + cases.map({ + case AlternativesTreeMaker(_, alts, _) :: _ => lengthMax3(alts) + case c => 1 + }).sum + } + lengthMax3(casesNoSubstOnly) > 2 + } + val requireSwitch = hasSwitchAnnotation && exceedsTwoCasesOrAlts + if (hasSwitchAnnotation && !requireSwitch) + reporter.warning(scrut.pos, "matches with two cases or fewer are emitted using if-then-else instead of switch") (suppression, requireSwitch) case _ => (Suppression.NoSuppression, false) } - emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt, matchFailGenOverride, suppression.exhaustive).getOrElse{ - if (requireSwitch) typer.context.unit.warning(scrut.pos, "could not emit switch for @switch annotated match") + emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt, matchFailGenOverride, unchecked = suppression.suppressExhaustive).getOrElse{ + if (requireSwitch) reporter.warning(scrut.pos, "could not emit switch for @switch annotated match") if (casesNoSubstOnly nonEmpty) { // before optimizing, check casesNoSubstOnly for presence of a default case, diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala index a7d7680db1c3..9e9372f7096e 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala @@ -67,7 +67,7 @@ trait MatchWarnings { val cdef = it.next() // If a default case has been seen, then every succeeding case is unreachable. if (vpat != null) - context.unit./*error*/warning(cdef.body.pos, "unreachable code due to " + vpat + addendum(cdef.pat)) + reporter.warning(cdef.body.pos, "unreachable code due to " + vpat + addendum(cdef.pat)) // TODO: make configurable whether this is an error // If this is a default case and more cases follow, warn about this one so // we have a reason to mention its pattern variable name and any corresponding // symbol in scope. 
Errors will follow from the remaining cases, at least @@ -78,7 +78,7 @@ trait MatchWarnings { case _ => "" } vpat = s"variable pattern$vpatName on line ${cdef.pat.pos.line}" - context.unit.warning(cdef.pos, s"patterns after a variable pattern cannot match (SLS 8.1.1)" + addendum(cdef.pat)) + reporter.warning(cdef.pos, s"patterns after a variable pattern cannot match (SLS 8.1.1)" + addendum(cdef.pat)) } } } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala index f6c960d089ee..d35aad964d59 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala @@ -12,7 +12,7 @@ import scala.language.postfixOps import scala.tools.nsc.transform.TypingTransformers import scala.tools.nsc.transform.Transform import scala.reflect.internal.util.Statistics -import scala.reflect.internal.Types +import scala.reflect.internal.{Mode, Types} import scala.reflect.internal.util.Position /** Translate pattern matching. @@ -65,7 +65,7 @@ trait PatternMatching extends Transform } catch { case x: (Types#TypeError) => // TODO: this should never happen; error should've been reported during type checking - unit.error(tree.pos, "error during expansion of this match (this is a scalac bug).\nThe underlying error was: "+ x.msg) + reporter.error(tree.pos, "error during expansion of this match (this is a scalac bug).\nThe underlying error was: "+ x.msg) translated } case Try(block, catches, finalizer) => @@ -175,13 +175,13 @@ trait Interface extends ast.TreeDSL { val matchOwner = typer.context.owner def pureType(tp: Type): Type = tp - def reportUnreachable(pos: Position) = typer.context.unit.warning(pos, "unreachable code") + def reportUnreachable(pos: Position) = reporter.warning(pos, "unreachable code") def reportMissingCases(pos: Position, counterExamples: List[String]) = { val ceString = if (counterExamples.tail.isEmpty) "input: " + counterExamples.head else "inputs: " + counterExamples.mkString(", ") - typer.context.unit.warning(pos, "match may not be exhaustive.\nIt would fail on the following "+ ceString) + reporter.warning(pos, "match may not be exhaustive.\nIt would fail on the following "+ ceString) } } @@ -198,33 +198,57 @@ trait Interface extends ast.TreeDSL { } class Substitution(val from: List[Symbol], val to: List[Tree]) { - import global.{Transformer, Ident, NoType} + import global.{Transformer, Ident, NoType, TypeTree, SingleType} // We must explicitly type the trees that we replace inside some other tree, since the latter may already have been typed, // and will thus not be retyped. This means we might end up with untyped subtrees inside bigger, typed trees. def apply(tree: Tree): Tree = { // according to -Ystatistics 10% of translateMatch's time is spent in this method... 
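(For reference, the message assembled by reportMissingCases above surfaces to users roughly as follows; the counter example text comes from CounterExample.toString, here for a hypothetical non-exhaustive match.)

    def len(xs: List[Int]) = xs match {
      case _ :: _ => 1
    }

    // reported at the position of the match:
    // warning: match may not be exhaustive.
    // It would fail on the following input: Nil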
// since about half of the typedSubst's end up being no-ops, the check below shaves off 5% of the time spent in typedSubst - if (!tree.exists { case i@Ident(_) => from contains i.symbol case _ => false}) tree - else (new Transformer { + val toIdents = to.forall(_.isInstanceOf[Ident]) + val containsSym = tree.exists { + case i@Ident(_) => from contains i.symbol + case tt: TypeTree => tt.tpe.exists { + case SingleType(_, sym) => + (from contains sym) && { + if (!toIdents) global.devWarning(s"Unexpected substitution of non-Ident into TypeTree `$tt`, subst= $this") + true + } + case _ => false + } + case _ => false + } + val toSyms = to.map(_.symbol) + object substIdentsForTrees extends Transformer { private def typedIfOrigTyped(to: Tree, origTp: Type): Tree = if (origTp == null || origTp == NoType) to // important: only type when actually substing and when original tree was typed // (don't need to use origTp as the expected type, though, and can't always do this anyway due to unknown type params stemming from polymorphic extractors) else typer.typed(to) + def typedStable(t: Tree) = typer.typed(t.shallowDuplicate, Mode.MonoQualifierModes | Mode.TYPEPATmode) + lazy val toTypes: List[Type] = to map (tree => typedStable(tree).tpe) + override def transform(tree: Tree): Tree = { def subst(from: List[Symbol], to: List[Tree]): Tree = if (from.isEmpty) tree - else if (tree.symbol == from.head) typedIfOrigTyped(to.head.shallowDuplicate.setPos(tree.pos), tree.tpe) + else if (tree.symbol == from.head) typedIfOrigTyped(typedStable(to.head).setPos(tree.pos), tree.tpe) else subst(from.tail, to.tail) - tree match { + val tree1 = tree match { case Ident(_) => subst(from, to) case _ => super.transform(tree) } + tree1.modifyType(_.substituteTypes(from, toTypes)) } - }).transform(tree) + } + if (containsSym) { + if (to.forall(_.isInstanceOf[Ident])) + tree.duplicate.substituteSymbols(from, to.map(_.symbol)) // SI-7459 catches `case t => new t.Foo` + else + substIdentsForTrees.transform(tree) + } + else tree } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala b/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala index d10eff1d8d19..8924394b727a 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala @@ -18,6 +18,7 @@ trait ScalacPatternExpanders { import global._ import definitions._ import treeInfo._ + import analyzer._ type PatternAligned = ScalacPatternExpander#Aligned @@ -72,9 +73,7 @@ trait ScalacPatternExpanders { * Unfortunately the MethodType does not carry the information of whether * it was unapplySeq, so we have to funnel that information in separately. 
*/ - def unapplyMethodTypes(method: Type, isSeq: Boolean): Extractor = { - val whole = firstParamType(method) - val result = method.finalResultType + def unapplyMethodTypes(whole: Type, result: Type, isSeq: Boolean): Extractor = { val expanded = ( if (result =:= BooleanTpe) Nil else typeOfMemberNamedGet(result) match { @@ -94,7 +93,7 @@ trait ScalacPatternExpanders { def tupleExtractor(extractor: Extractor): Extractor = extractor.copy(fixed = tupleType(extractor.fixed) :: Nil) - private def validateAligned(tree: Tree, aligned: Aligned): Aligned = { + private def validateAligned(context: Context, tree: Tree, aligned: Aligned): Aligned = { import aligned._ def owner = tree.symbol.owner @@ -103,8 +102,8 @@ trait ScalacPatternExpanders { def offerString = if (extractor.isErroneous) "" else s" offering $offering" def arityExpected = ( if (extractor.hasSeq) "at least " else "" ) + productArity - def err(msg: String) = currentUnit.error(tree.pos, msg) - def warn(msg: String) = currentUnit.warning(tree.pos, msg) + def err(msg: String) = context.error(tree.pos, msg) + def warn(msg: String) = context.warning(tree.pos, msg) def arityError(what: String) = err(s"$what patterns for $owner$offerString: expected $arityExpected, found $totalArity") if (isStar && !isSeq) @@ -117,17 +116,17 @@ trait ScalacPatternExpanders { aligned } - def apply(sel: Tree, args: List[Tree]): Aligned = { + def apply(context: Context, sel: Tree, args: List[Tree]): Aligned = { val fn = sel match { case Unapplied(fn) => fn case _ => sel } val patterns = newPatterns(args) - val isSeq = sel.symbol.name == nme.unapplySeq val isUnapply = sel.symbol.name == nme.unapply + val extractor = sel.symbol.name match { - case nme.unapply => unapplyMethodTypes(fn.tpe, isSeq = false) - case nme.unapplySeq => unapplyMethodTypes(fn.tpe, isSeq = true) + case nme.unapply => unapplyMethodTypes(firstParamType(fn.tpe), sel.tpe, isSeq = false) + case nme.unapplySeq => unapplyMethodTypes(firstParamType(fn.tpe), sel.tpe, isSeq = true) case _ => applyMethodTypes(fn.tpe) } @@ -139,16 +138,18 @@ trait ScalacPatternExpanders { def acceptMessage = if (extractor.isErroneous) "" else s" to hold ${extractor.offeringString}" val requiresTupling = isUnapply && patterns.totalArity == 1 && productArity > 1 - if (requiresTupling && effectivePatternArity(args) == 1) - currentUnit.deprecationWarning(sel.pos, s"${sel.symbol.owner} expects $productArity patterns$acceptMessage but crushing into $productArity-tuple to fit single pattern (SI-6675)") + if (requiresTupling && effectivePatternArity(args) == 1) { + val sym = sel.symbol.owner + currentRun.reporting.deprecationWarning(sel.pos, sym, s"${sym} expects $productArity patterns$acceptMessage but crushing into $productArity-tuple to fit single pattern (SI-6675)") + } val normalizedExtractor = if (requiresTupling) tupleExtractor(extractor) else extractor - validateAligned(fn, Aligned(patterns, normalizedExtractor)) + validateAligned(context, fn, Aligned(patterns, normalizedExtractor)) } - def apply(tree: Tree): Aligned = tree match { - case Apply(fn, args) => apply(fn, args) - case UnApply(fn, args) => apply(fn, args) + def apply(context: Context, tree: Tree): Aligned = tree match { + case Apply(fn, args) => apply(context, fn, args) + case UnApply(fn, args) => apply(context, fn, args) } } } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala index 1902606d860e..27217f0dc205 100644 --- 
a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala @@ -6,234 +6,419 @@ package scala.tools.nsc.transform.patmat -import scala.collection.mutable +import scala.collection.mutable.ArrayBuffer import scala.reflect.internal.util.Statistics import scala.language.postfixOps +import scala.collection.mutable import scala.reflect.internal.util.Collections._ -// naive CNF translation and simple DPLL solver +// a literal is a (possibly negated) variable +class Lit(val v: Int) extends AnyVal { + def unary_- : Lit = Lit(-v) + + def variable: Int = Math.abs(v) + + def positive = v >= 0 + + override def toString(): String = s"Lit#$v" +} + +object Lit { + def apply(v: Int): Lit = new Lit(v) + + implicit val LitOrdering: Ordering[Lit] = Ordering.by(_.v) +} + +/** Solve pattern matcher exhaustivity problem via DPLL. + */ trait Solving extends Logic { + import PatternMatchingStats._ + trait CNF extends PropositionalLogic { - import scala.collection.mutable.ArrayBuffer - type FormulaBuilder = ArrayBuffer[Clause] - def formulaBuilder = ArrayBuffer[Clause]() - def formulaBuilderSized(init: Int) = new ArrayBuffer[Clause](init) - def addFormula(buff: FormulaBuilder, f: Formula): Unit = buff ++= f - def toFormula(buff: FormulaBuilder): Formula = buff - // CNF: a formula is a conjunction of clauses - type Formula = FormulaBuilder - def formula(c: Clause*): Formula = ArrayBuffer(c: _*) + type Clause = Set[Lit] - type Clause = collection.Set[Lit] // a clause is a disjunction of distinct literals - def clause(l: Lit*): Clause = ( - // neg/t7020.scala changes output 1% of the time, the non-determinism is quelled with this linked set - mutable.LinkedHashSet(l: _*) - ) - - type Lit - def Lit(sym: Sym, pos: Boolean = true): Lit - - def andFormula(a: Formula, b: Formula): Formula = a ++ b - def simplifyFormula(a: Formula): Formula = a.distinct - - private def merge(a: Clause, b: Clause) = a ++ b - - // throws an AnalysisBudget.Exception when the prop results in a CNF that's too big - // TODO: be smarter/more efficient about this (http://lara.epfl.ch/w/sav09:tseitin_s_encoding) - def eqFreePropToSolvable(p: Prop): Formula = { - def negationNormalFormNot(p: Prop, budget: Int): Prop = - if (budget <= 0) throw AnalysisBudget.exceeded - else p match { - case And(a, b) => Or(negationNormalFormNot(a, budget - 1), negationNormalFormNot(b, budget - 1)) - case Or(a, b) => And(negationNormalFormNot(a, budget - 1), negationNormalFormNot(b, budget - 1)) - case Not(p) => negationNormalForm(p, budget - 1) - case True => False - case False => True - case s: Sym => Not(s) + def clause(l: Lit*): Clause = l.toSet + + /** Conjunctive normal form (of a Boolean formula). + * A formula in this form is amenable to a SAT solver + * (i.e., solver that decides satisfiability of a formula). 
+ */ + type Cnf = Array[Clause] + + class SymbolMapping(symbols: Set[Sym]) { + val variableForSymbol: Map[Sym, Int] = { + symbols.zipWithIndex.map { + case (sym, i) => sym -> (i + 1) + }.toMap + } + + val symForVar: Map[Int, Sym] = variableForSymbol.map(_.swap) + + val relevantVars: Set[Int] = symForVar.keySet.map(math.abs) + + def lit(sym: Sym): Lit = Lit(variableForSymbol(sym)) + + def size = symbols.size + } + + case class Solvable(cnf: Cnf, symbolMapping: SymbolMapping) + + trait CnfBuilder { + private[this] val buff = ArrayBuffer[Clause]() + + var literalCount: Int + + /** + * @return new Tseitin variable + */ + def newLiteral(): Lit = { + literalCount += 1 + Lit(literalCount) + } + + lazy val constTrue: Lit = { + val constTrue = newLiteral() + addClauseProcessed(clause(constTrue)) + constTrue + } + + def constFalse: Lit = -constTrue + + def isConst(l: Lit): Boolean = l == constTrue || l == constFalse + + def addClauseProcessed(clause: Clause) { + if (clause.nonEmpty) { + buff += clause } + } + + def buildCnf: Array[Clause] = buff.toArray + + } - def negationNormalForm(p: Prop, budget: Int = AnalysisBudget.max): Prop = - if (budget <= 0) throw AnalysisBudget.exceeded - else p match { - case And(a, b) => And(negationNormalForm(a, budget - 1), negationNormalForm(b, budget - 1)) - case Or(a, b) => Or(negationNormalForm(a, budget - 1), negationNormalForm(b, budget - 1)) - case Not(negated) => negationNormalFormNot(negated, budget - 1) - case True - | False - | (_ : Sym) => p + /** Plaisted transformation: used for conversion of a + * propositional formula into conjunctive normal form (CNF) + * (input format for SAT solver). + * A simple conversion into CNF via Shannon expansion would + * also be possible but its worst-case complexity is exponential + * (in the number of variables) and thus even simple problems + * could become intractable. + * The Plaisted transformation results in an _equisatisfiable_ + * CNF-formula (it generates auxiliary variables) + * but runs with linear complexity. + * The commonly known Tseitin transformation uses bi-implication, + * whereas the Plaisted transformation uses implication only, thus + * the resulting CNF formula has (on average) only half of the clauses + * of a Tseitin transformation. + * The Plaisted transformation uses the polarities of sub-expressions + * to figure out which part of the bi-implication can be omitted. + * However, if all sub-expressions have positive polarity + * (e.g., after transformation into negation normal form) + * then the conversion is rather simple and the pseudo-normalization + * via NNF increases the chances that only one side of the bi-implication + * is needed. 
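(A concrete instance of this encoding, with o1 and o2 standing for fresh auxiliary literals obtained from newLiteral(): for the negation-normal-form input (a /\ b) \/ c, the builder below emits only the left-to-right implications as clauses.)

    clause(-o1, a)        // o1 -> a
    clause(-o1, b)        // o1 -> b
    clause(-o2, o1, c)    // o2 -> (o1 \/ c)
    clause(o2)            // the top-level formula itself must hold

This clause set is equisatisfiable with the input; a full Tseitin encoding would additionally emit the reverse implications (e.g. clause(-a, -b, o1)), which is the roughly factor-of-two difference in clause count mentioned above.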
+ */ + class TransformToCnf(symbolMapping: SymbolMapping) extends CnfBuilder { + + // new literals start after formula symbols + var literalCount: Int = symbolMapping.size + + def convertSym(sym: Sym): Lit = symbolMapping.lit(sym) + + def apply(p: Prop): Solvable = { + + def convert(p: Prop): Lit = { + p match { + case And(fv) => + and(fv.map(convert)) + case Or(fv) => + or(fv.map(convert)) + case Not(a) => + not(convert(a)) + case sym: Sym => + convertSym(sym) + case True => + constTrue + case False => + constFalse + case _: Eq => + throw new MatchError(p) + } } - val TrueF = formula() - val FalseF = formula(clause()) - def lit(s: Sym) = formula(clause(Lit(s))) - def negLit(s: Sym) = formula(clause(Lit(s, pos = false))) - - def conjunctiveNormalForm(p: Prop, budget: Int = AnalysisBudget.max): Formula = { - def distribute(a: Formula, b: Formula, budget: Int): Formula = - if (budget <= 0) throw AnalysisBudget.exceeded - else - (a, b) match { - // true \/ _ = true - // _ \/ true = true - case (trueA, trueB) if trueA.size == 0 || trueB.size == 0 => TrueF - // lit \/ lit - case (a, b) if a.size == 1 && b.size == 1 => formula(merge(a(0), b(0))) - // (c1 /\ ... /\ cn) \/ d = ((c1 \/ d) /\ ... /\ (cn \/ d)) - // d \/ (c1 /\ ... /\ cn) = ((d \/ c1) /\ ... /\ (d \/ cn)) - case (cs, ds) => - val (big, small) = if (cs.size > ds.size) (cs, ds) else (ds, cs) - big flatMap (c => distribute(formula(c), small, budget - (big.size*small.size))) - } + def and(bv: Set[Lit]): Lit = { + if (bv.isEmpty) { + // this case can actually happen because `removeVarEq` could add no constraints + constTrue + } else if (bv.size == 1) { + bv.head + } else if (bv.contains(constFalse)) { + constFalse + } else { + // op1 /\ op2 /\ ... /\ opx <==> + // (o -> op1) /\ (o -> op2) ... (o -> opx) /\ (!op1 \/ !op2 \/... \/ !opx \/ o) + // (!o \/ op1) /\ (!o \/ op2) ... (!o \/ opx) /\ (!op1 \/ !op2 \/... \/ !opx \/ o) + val new_bv = bv - constTrue // ignore `True` + val o = newLiteral() // auxiliary Tseitin variable + new_bv.map(op => addClauseProcessed(clause(op, -o))) + o + } + } - if (budget <= 0) throw AnalysisBudget.exceeded - - p match { - case True => TrueF - case False => FalseF - case s: Sym => lit(s) - case Not(s: Sym) => negLit(s) - case And(a, b) => - val cnfA = conjunctiveNormalForm(a, budget - 1) - val cnfB = conjunctiveNormalForm(b, budget - cnfA.size) - cnfA ++ cnfB - case Or(a, b) => - val cnfA = conjunctiveNormalForm(a) - val cnfB = conjunctiveNormalForm(b) - distribute(cnfA, cnfB, budget - (cnfA.size + cnfB.size)) + def or(bv: Set[Lit]): Lit = { + if (bv.isEmpty) { + constFalse + } else if (bv.size == 1) { + bv.head + } else if (bv.contains(constTrue)) { + constTrue + } else { + // op1 \/ op2 \/ ... \/ opx <==> + // (op1 -> o) /\ (op2 -> o) ... (opx -> o) /\ (op1 \/ op2 \/... \/ opx \/ !o) + // (!op1 \/ o) /\ (!op2 \/ o) ... (!opx \/ o) /\ (op1 \/ op2 \/... \/ opx \/ !o) + val new_bv = bv - constFalse // ignore `False` + val o = newLiteral() // auxiliary Tseitin variable + addClauseProcessed(new_bv + (-o)) + o + } } + + // no need for auxiliary variable + def not(a: Lit): Lit = -a + + // add intermediate variable since we want the formula to be SAT! 
+ addClauseProcessed(clause(convert(p))) + + Solvable(buildCnf, symbolMapping) } + } - val start = if (Statistics.canEnable) Statistics.startTimer(patmatCNF) else null - val res = conjunctiveNormalForm(negationNormalForm(p)) + class AlreadyInCNF(symbolMapping: SymbolMapping) { - if (Statistics.canEnable) Statistics.stopTimer(patmatCNF, start) + object ToLiteral { + def unapply(f: Prop): Option[Lit] = f match { + case Not(ToLiteral(lit)) => Some(-lit) + case sym: Sym => Some(symbolMapping.lit(sym)) + case _ => None + } + } - // - if (Statistics.canEnable) patmatCNFSizes(res.size).value += 1 + object ToDisjunction { + def unapply(f: Prop): Option[Array[Clause]] = f match { + case Or(fv) => + val cl = fv.foldLeft(Option(clause())) { + case (Some(clause), ToLiteral(lit)) => + Some(clause + lit) + case (_, _) => + None + } + cl.map(Array(_)) + case True => Some(Array()) // empty, no clauses needed + case False => Some(Array(clause())) // empty clause can't be satisfied + case ToLiteral(lit) => Some(Array(clause(lit))) + case _ => None + } + } -// debug.patmat("cnf for\n"+ p +"\nis:\n"+cnfString(res)) - res + /** + * Checks if propositional formula is already in CNF + */ + object ToCnf { + def unapply(f: Prop): Option[Solvable] = f match { + case ToDisjunction(clauses) => Some(Solvable(clauses, symbolMapping) ) + case And(fv) => + val clauses = fv.foldLeft(Option(mutable.ArrayBuffer[Clause]())) { + case (Some(cnf), ToDisjunction(clauses)) => + Some(cnf ++= clauses) + case (_, _) => + None + } + clauses.map(c => Solvable(c.toArray, symbolMapping)) + case _ => None + } + } + } + + def eqFreePropToSolvable(p: Prop): Solvable = { + + // collect all variables since after simplification / CNF conversion + // they could have been removed from the formula + val symbolMapping = new SymbolMapping(gatherSymbols(p)) + + val simplified = simplify(p) + val cnfExtractor = new AlreadyInCNF(symbolMapping) + simplified match { + case cnfExtractor.ToCnf(solvable) => + // this is needed because t6942 would generate too many clauses with Tseitin + // already in CNF, just add clauses + solvable + case p => + new TransformToCnf(symbolMapping).apply(p) + } } } // simple solver using DPLL trait Solver extends CNF { - // a literal is a (possibly negated) variable - def Lit(sym: Sym, pos: Boolean = true) = new Lit(sym, pos) - class Lit(val sym: Sym, val pos: Boolean) { - override def toString = if (!pos) "-"+ sym.toString else sym.toString - override def equals(o: Any) = o match { - case o: Lit => (o.sym eq sym) && (o.pos == pos) - case _ => false - } - override def hashCode = sym.hashCode + pos.hashCode + import scala.collection.mutable.ArrayBuffer - def unary_- = Lit(sym, !pos) + def cnfString(f: Array[Clause]): String = { + val lits: Array[List[String]] = f map (_.map(_.toString).toList) + val xss: List[List[String]] = lits toList + val aligned: String = alignAcrossRows(xss, "\\/", " /\\\n") + aligned } - def cnfString(f: Formula) = alignAcrossRows(f map (_.toList) toList, "\\/", " /\\\n") - // adapted from http://lara.epfl.ch/w/sav10:simple_sat_solver (original by Hossein Hojjat) - val EmptyModel = collection.immutable.SortedMap.empty[Sym, Boolean] + + // empty set of clauses is trivially satisfied + val EmptyModel = Map.empty[Sym, Boolean] + + // no model: originates from the encounter of an empty clause, i.e., + // happens if all variables have been assigned in a way that makes the corresponding literals false + // thus there is no possibility to satisfy that clause, so the whole formula is UNSAT val NoModel: Model 
= null + // this model contains the auxiliary variables as well + type TseitinModel = Set[Lit] + val EmptyTseitinModel = Set.empty[Lit] + val NoTseitinModel: TseitinModel = null + // returns all solutions, if any (TODO: better infinite recursion backstop -- detect fixpoint??) - def findAllModelsFor(f: Formula): List[Model] = { - val vars: Set[Sym] = f.flatMap(_ collect {case l: Lit => l.sym}).toSet + def findAllModelsFor(solvable: Solvable): List[Solution] = { + debug.patmat("find all models for\n"+ cnfString(solvable.cnf)) + + // we must take all vars from non simplified formula + // otherwise if we get `T` as formula, we don't expand the variables + // that are not in the formula... + val relevantVars: Set[Int] = solvable.symbolMapping.relevantVars + // debug.patmat("vars "+ vars) // the negation of a model -(S1=True/False /\ ... /\ SN=True/False) = clause(S1=False/True, ...., SN=False/True) - def negateModel(m: Model) = clause(m.toSeq.map{ case (sym, pos) => Lit(sym, !pos) } : _*) + // (i.e. the blocking clause - used for ALL-SAT) + def negateModel(m: TseitinModel) = { + // filter out auxiliary Tseitin variables + val relevantLits = m.filter(l => relevantVars.contains(l.variable)) + relevantLits.map(lit => -lit) + } - def findAllModels(f: Formula, models: List[Model], recursionDepthAllowed: Int = 10): List[Model]= - if (recursionDepthAllowed == 0) models - else { - debug.patmat("find all models for\n"+ cnfString(f)) - val model = findModelFor(f) + final case class TseitinSolution(model: TseitinModel, unassigned: List[Int]) { + def projectToSolution(symForVar: Map[Int, Sym]) = Solution(projectToModel(model, symForVar), unassigned map symForVar) + } + def findAllModels(clauses: Array[Clause], + models: List[TseitinSolution], + recursionDepthAllowed: Int = global.settings.YpatmatExhaustdepth.value): List[TseitinSolution]= + if (recursionDepthAllowed == 0) { + val maxDPLLdepth = global.settings.YpatmatExhaustdepth.value + reportWarning("(Exhaustivity analysis reached max recursion depth, not all missing cases are reported. 
" + + s"Please try with scalac -Ypatmat-exhaust-depth ${maxDPLLdepth * 2} or -Ypatmat-exhaust-depth off.)") + models + } else { + debug.patmat("find all models for\n" + cnfString(clauses)) + val model = findTseitinModelFor(clauses) // if we found a solution, conjunct the formula with the model's negation and recurse - if (model ne NoModel) { - val unassigned = (vars -- model.keySet).toList + if (model ne NoTseitinModel) { + // note that we should not expand the auxiliary variables (from Tseitin transformation) + // since they are existentially quantified in the final solution + val unassigned: List[Int] = (relevantVars -- model.map(lit => lit.variable)).toList debug.patmat("unassigned "+ unassigned +" in "+ model) - def force(lit: Lit) = { - val model = withLit(findModelFor(dropUnit(f, lit)), lit) - if (model ne NoModel) List(model) - else Nil - } - val forced = unassigned flatMap { s => - force(Lit(s, pos = true)) ++ force(Lit(s, pos = false)) - } - debug.patmat("forced "+ forced) + + val solution = TseitinSolution(model, unassigned) val negated = negateModel(model) - findAllModels(f :+ negated, model :: (forced ++ models), recursionDepthAllowed - 1) + findAllModels(clauses :+ negated, solution :: models, recursionDepthAllowed - 1) } else models } - findAllModels(f, Nil) + val tseitinSolutions = findAllModels(solvable.cnf, Nil) + tseitinSolutions.map(_.projectToSolution(solvable.symbolMapping.symForVar)) } - private def withLit(res: Model, l: Lit): Model = if (res eq NoModel) NoModel else res + (l.sym -> l.pos) - private def dropUnit(f: Formula, unitLit: Lit): Formula = { + private def withLit(res: TseitinModel, l: Lit): TseitinModel = { + if (res eq NoTseitinModel) NoTseitinModel else res + l + } + + /** Drop trivially true clauses, simplify others by dropping negation of `unitLit`. + * + * Disjunctions that contain the literal we're making true in the returned model are trivially true. 
+ * Clauses can be simplified by dropping the negation of the literal we're making true + * (since False \/ X == X) + */ + private def dropUnit(clauses: Array[Clause], unitLit: Lit): Array[Clause] = { val negated = -unitLit - // drop entire clauses that are trivially true - // (i.e., disjunctions that contain the literal we're making true in the returned model), - // and simplify clauses by dropping the negation of the literal we're making true - // (since False \/ X == X) - val dropped = formulaBuilderSized(f.size) - for { - clause <- f - if !(clause contains unitLit) - } dropped += (clause - negated) - dropped + val simplified = new ArrayBuffer[Clause](clauses.size) + clauses foreach { + case trivial if trivial contains unitLit => // drop + case clause => simplified += clause - negated + } + simplified.toArray + } + + def findModelFor(solvable: Solvable): Model = { + projectToModel(findTseitinModelFor(solvable.cnf), solvable.symbolMapping.symForVar) } - def findModelFor(f: Formula): Model = { - @inline def orElse(a: Model, b: => Model) = if (a ne NoModel) a else b + def findTseitinModelFor(clauses: Array[Clause]): TseitinModel = { + @inline def orElse(a: TseitinModel, b: => TseitinModel) = if (a ne NoTseitinModel) a else b - debug.patmat("DPLL\n"+ cnfString(f)) + debug.patmat(s"DPLL\n${cnfString(clauses)}") val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaDPLL) else null - val satisfiableWithModel: Model = - if (f isEmpty) EmptyModel - else if(f exists (_.isEmpty)) NoModel - else f.find(_.size == 1) match { + val satisfiableWithModel: TseitinModel = + if (clauses isEmpty) EmptyTseitinModel + else if (clauses exists (_.isEmpty)) NoTseitinModel + else clauses.find(_.size == 1) match { case Some(unitClause) => val unitLit = unitClause.head - // debug.patmat("unit: "+ unitLit) - withLit(findModelFor(dropUnit(f, unitLit)), unitLit) + withLit(findTseitinModelFor(dropUnit(clauses, unitLit)), unitLit) case _ => // partition symbols according to whether they appear in positive and/or negative literals - // SI-7020 Linked- for deterministic counter examples. 
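(A small standalone rendering of dropUnit for illustration: a positive Int plays the role of a literal and its negation is the negative Int, rather than the compiler's Lit.)

    // Drop clauses satisfied by unitLit; elsewhere remove its now-useless negation.
    def dropUnit(clauses: Array[Set[Int]], unitLit: Int): Array[Set[Int]] =
      clauses.collect { case c if !c.contains(unitLit) => c - (-unitLit) }

    // Making 1 true: Set(1) and Set(1, 3) are already satisfied and dropped,
    // Set(-1, 2) shrinks to Set(2), Set(-2, -3) is untouched:
    // dropUnit(Array(Set(1), Set(-1, 2), Set(1, 3), Set(-2, -3)), 1)
    //   yields Array(Set(2), Set(-2, -3))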
- val pos = new mutable.LinkedHashSet[Sym]() - val neg = new mutable.LinkedHashSet[Sym]() - mforeach(f)(lit => if (lit.pos) pos += lit.sym else neg += lit.sym) + val pos = new mutable.HashSet[Int]() + val neg = new mutable.HashSet[Int]() + mforeach(clauses)(lit => if (lit.positive) pos += lit.variable else neg += lit.variable) // appearing in both positive and negative - val impures: mutable.LinkedHashSet[Sym] = pos intersect neg + val impures = pos intersect neg // appearing only in either positive/negative positions - val pures: mutable.LinkedHashSet[Sym] = (pos ++ neg) -- impures + val pures = (pos ++ neg) -- impures if (pures nonEmpty) { - val pureSym = pures.head + val pureVar = pures.head // turn it back into a literal // (since equality on literals is in terms of equality // of the underlying symbol and its positivity, simply construct a new Lit) - val pureLit = Lit(pureSym, pos(pureSym)) + val pureLit = Lit(if (neg(pureVar)) -pureVar else pureVar) // debug.patmat("pure: "+ pureLit +" pures: "+ pures +" impures: "+ impures) - val simplified = f.filterNot(_.contains(pureLit)) - withLit(findModelFor(simplified), pureLit) + val simplified = clauses.filterNot(_.contains(pureLit)) + withLit(findTseitinModelFor(simplified), pureLit) } else { - val split = f.head.head + val split = clauses.head.head // debug.patmat("split: "+ split) - orElse(findModelFor(f :+ clause(split)), findModelFor(f :+ clause(-split))) + orElse(findTseitinModelFor(clauses :+ clause(split)), findTseitinModelFor(clauses :+ clause(-split))) } } if (Statistics.canEnable) Statistics.stopTimer(patmatAnaDPLL, start) satisfiableWithModel } + + private def projectToModel(model: TseitinModel, symForVar: Map[Int, Sym]): Model = + if (model == NoTseitinModel) NoModel + else if (model == EmptyTseitinModel) EmptyModel + else { + val mappedModels = model.toList collect { + case lit if symForVar isDefinedAt lit.variable => (symForVar(lit.variable), lit.positive) + } + if (mappedModels.isEmpty) { + // could get an empty model if mappedModels is a constant like `True` + EmptyModel + } else { + mappedModels.toMap + } + } } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala index 1e544e54f632..2f4d2283476a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala @@ -77,12 +77,13 @@ trait Adaptations { val msg = "Adaptation of argument list by inserting () has been deprecated: " + ( if (isLeakyTarget) "leaky (Object-receiving) target makes this especially dangerous." 
else "this is unlikely to be what you want.") - context.unit.deprecationWarning(t.pos, adaptWarningMessage(msg)) + context.deprecationWarning(t.pos, t.symbol, adaptWarningMessage(msg)) } } else if (settings.warnAdaptedArgs) context.warning(t.pos, adaptWarningMessage(s"Adapting argument list by creating a ${args.size}-tuple: this may not be what you want.")) - !settings.noAdaptedArgs || !(args.isEmpty && settings.future) + // return `true` if the adaptation should be kept + !(settings.noAdaptedArgs || (args.isEmpty && settings.future)) } } } diff --git a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala index fa6e5399eb9d..5a70d4c524c0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala +++ b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala @@ -189,6 +189,16 @@ trait AnalyzerPlugins { self: Analyzer => */ def pluginsTypedMacroBody(typer: Typer, ddef: DefDef): Option[Tree] = None + /** + * Figures out whether the given macro definition is blackbox or whitebox. + * + * Default implementation provided in `self.standardIsBlackbox` loads the macro impl binding + * and fetches boxity from the "isBlackbox" field of the macro signature. + * + * $nonCumulativeReturnValueDoc. + */ + def pluginsIsBlackbox(macroDef: Symbol): Option[Boolean] = None + /** * Expands an application of a def macro (i.e. of a symbol that has the MACRO flag set), * possibly using the current typer mode and the provided prototype. @@ -375,6 +385,14 @@ trait AnalyzerPlugins { self: Analyzer => def custom(plugin: MacroPlugin) = plugin.pluginsTypedMacroBody(typer, ddef) }) + /** @see MacroPlugin.pluginsIsBlackbox */ + def pluginsIsBlackbox(macroDef: Symbol): Boolean = invoke(new NonCumulativeOp[Boolean] { + def position = macroDef.pos + def description = "compute boxity for this macro definition" + def default = standardIsBlackbox(macroDef) + def custom(plugin: MacroPlugin) = plugin.pluginsIsBlackbox(macroDef) + }) + /** @see MacroPlugin.pluginsMacroExpand */ def pluginsMacroExpand(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Tree = invoke(new NonCumulativeOp[Tree] { def position = expandee.pos diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala index 13884404b3be..fc632e0d0d6d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala @@ -10,13 +10,29 @@ import Checkability._ import scala.language.postfixOps /** On pattern matcher checkability: + * + * The spec says that case _: List[Int] should be always issue + * an unchecked warning: + * + * > Types which are not of one of the forms described above are + * > also accepted as type patterns. However, such type patterns + * > will be translated to their erasure (§3.7). The Scala compiler + * > will issue an “unchecked” warning for these patterns to flag + * > the possible loss of type-safety. + * + * But the implementation goes a little further to omit warnings + * based on the static type of the scrutinee. As a trivial example: + * + * def foo(s: Seq[Int]) = s match { case _: List[Int] => } + * + * need not issue this warning. 
 * * Consider a pattern match of this form: (x: X) match { case _: P => } * * There are four possibilities to consider: * [P1] X will always conform to P * [P2] x will never conform to P - * [P3] X <: P if some runtime test is true + * [P3] X will conform to P if some runtime test is true * [P4] X cannot be checked against P * * The first two cases correspond to those when there is enough * @@ -28,6 +44,11 @@ import scala.language.postfixOps * which is essentially the intersection of X and |P|, where |P| is * the erasure of P. If XR <: P, then no warning is emitted. * + * We evaluate "X will conform to P" by checking `X <: P_wild`, where + * P_wild is the result of substituting wildcard types in place of + * pattern type variables. This is intentionally stricter than + * (X matchesPattern P), see SI-8597 for motivating test cases. + * * Examples of how this info is put to use: * sealed trait A[T] ; class B[T] extends A[T] * def f(x: B[Int]) = x match { case _: A[Int] if true => } @@ -100,7 +121,7 @@ trait Checkable { private def typeArgsInTopLevelType(tp: Type): List[Type] = { val tps = tp match { case RefinedType(parents, _) => parents flatMap typeArgsInTopLevelType - case TypeRef(_, ArrayClass, arg :: Nil) => typeArgsInTopLevelType(arg) + case TypeRef(_, ArrayClass, arg :: Nil) => if (arg.typeSymbol.isAbstractType) arg :: Nil else typeArgsInTopLevelType(arg) case TypeRef(pre, sym, args) => typeArgsInTopLevelType(pre) ++ args case ExistentialType(tparams, underlying) => tparams.map(_.tpe) ++ typeArgsInTopLevelType(underlying) case _ => Nil @@ -108,14 +129,31 @@ trait Checkable { tps filterNot isUnwarnableTypeArg } + private def scrutConformsToPatternType(scrut: Type, pattTp: Type): Boolean = { + def typeVarToWildcard(tp: Type) = { + // The need for typeSymbolDirect is demonstrated in neg/t8597b.scala + if (tp.typeSymbolDirect.isPatternTypeVariable) WildcardType else tp + } + val pattTpWild = pattTp.map(typeVarToWildcard) + scrut <:< pattTpWild + } + private class CheckabilityChecker(val X: Type, val P: Type) { def Xsym = X.typeSymbol def Psym = P.typeSymbol - def XR = if (Xsym == AnyClass) classExistentialType(Psym) else propagateKnownTypes(X, Psym) + def PErased = { + P match { + case erasure.GenericArray(n, core) => existentialAbstraction(core.typeSymbol :: Nil, P) + case _ => existentialAbstraction(Psym.typeParams, Psym.tpe_*) + } + } + def XR = if (Xsym == AnyClass) PErased else propagateKnownTypes(X, Psym) + + // sadly the spec says (new java.lang.Boolean(true)).isInstanceOf[scala.Boolean] - def P1 = X matchesPattern P + def P1 = scrutConformsToPatternType(X, P) def P2 = !Psym.isPrimitiveValueClass && isNeverSubType(X, P) - def P3 = isNonRefinementClassType(P) && (XR matchesPattern P) + def P3 = isNonRefinementClassType(P) && scrutConformsToPatternType(XR, P) def P4 = !(P1 || P2 || P3) def summaryString = f""" @@ -275,7 +313,7 @@ trait Checkable { ; // Matching on types like case _: AnyRef { def bippy: Int } => doesn't work -- yet. 
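(Approximate user-level illustrations of the four cases; the exact diagnostic text depends on the types involved.)

    object CheckabilityCases {
      def p1(xs: List[Int])  = xs match { case _: Seq[Int]  => } // P1: always conforms, nothing to warn about
      def p2(o: Option[Int]) = o match  { case _: List[Int] => } // P2: can never conform ("fruitless type test")
      def p3(a: Any)         = a match  { case _: String    => } // P3: settled by a runtime test, checkable
      def p4(a: Any)         = a match  { case _: List[Int] => } // P4: Int is erased ("unchecked since it is eliminated by erasure")
    }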
case RefinedType(_, decls) if !decls.isEmpty => - getContext.unit.warning(tree.pos, s"a pattern match on a refinement type is unchecked") + reporter.warning(tree.pos, s"a pattern match on a refinement type is unchecked") case RefinedType(parents, _) => parents foreach (p => checkCheckable(tree, p, X, inPattern, canRemedy)) case _ => @@ -285,14 +323,14 @@ trait Checkable { if (checker.neverMatches) { val addendum = if (checker.neverSubClass) "" else " (but still might match its erasure)" - getContext.unit.warning(tree.pos, s"fruitless type test: a value of type $X cannot also be a $PString$addendum") + reporter.warning(tree.pos, s"fruitless type test: a value of type $X cannot also be a $PString$addendum") } else if (checker.isUncheckable) { val msg = ( if (checker.uncheckableType =:= P) s"abstract type $where$PString" else s"${checker.uncheckableMessage} in type $where$PString" ) - getContext.unit.warning(tree.pos, s"$msg is unchecked since it is eliminated by erasure") + reporter.warning(tree.pos, s"$msg is unchecked since it is eliminated by erasure") } } } diff --git a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala index 56ed0ee16ca7..2f4771e9d47e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala @@ -75,7 +75,7 @@ abstract class ConstantFolder { case nme.AND => Constant(x.booleanValue & y.booleanValue) case nme.EQ => Constant(x.booleanValue == y.booleanValue) case nme.NE => Constant(x.booleanValue != y.booleanValue) - case _ => null + case _ => null } private def foldSubrangeOp(op: Name, x: Constant, y: Constant): Constant = op match { case nme.OR => Constant(x.intValue | y.intValue) @@ -95,14 +95,20 @@ abstract class ConstantFolder { case nme.MUL => Constant(x.intValue * y.intValue) case nme.DIV => Constant(x.intValue / y.intValue) case nme.MOD => Constant(x.intValue % y.intValue) - case _ => null + case _ => null } private def foldLongOp(op: Name, x: Constant, y: Constant): Constant = op match { case nme.OR => Constant(x.longValue | y.longValue) case nme.XOR => Constant(x.longValue ^ y.longValue) case nme.AND => Constant(x.longValue & y.longValue) - case nme.LSL => Constant(x.longValue << y.longValue) + case nme.LSL if x.tag <= IntTag + => Constant(x.intValue << y.longValue) + case nme.LSL => Constant(x.longValue << y.longValue) + case nme.LSR if x.tag <= IntTag + => Constant(x.intValue >>> y.longValue) case nme.LSR => Constant(x.longValue >>> y.longValue) + case nme.ASR if x.tag <= IntTag + => Constant(x.intValue >> y.longValue) case nme.ASR => Constant(x.longValue >> y.longValue) case nme.EQ => Constant(x.longValue == y.longValue) case nme.NE => Constant(x.longValue != y.longValue) @@ -115,7 +121,7 @@ abstract class ConstantFolder { case nme.MUL => Constant(x.longValue * y.longValue) case nme.DIV => Constant(x.longValue / y.longValue) case nme.MOD => Constant(x.longValue % y.longValue) - case _ => null + case _ => null } private def foldFloatOp(op: Name, x: Constant, y: Constant): Constant = op match { case nme.EQ => Constant(x.floatValue == y.floatValue) @@ -129,7 +135,7 @@ abstract class ConstantFolder { case nme.MUL => Constant(x.floatValue * y.floatValue) case nme.DIV => Constant(x.floatValue / y.floatValue) case nme.MOD => Constant(x.floatValue % y.floatValue) - case _ => null + case _ => null } private def foldDoubleOp(op: Name, x: Constant, y: Constant): Constant = op match { case nme.EQ => 
Constant(x.doubleValue == y.doubleValue) @@ -143,7 +149,7 @@ abstract class ConstantFolder { case nme.MUL => Constant(x.doubleValue * y.doubleValue) case nme.DIV => Constant(x.doubleValue / y.doubleValue) case nme.MOD => Constant(x.doubleValue % y.doubleValue) - case _ => null + case _ => null } private def foldBinop(op: Name, x: Constant, y: Constant): Constant = { @@ -162,7 +168,7 @@ abstract class ConstantFolder { case _ => null } catch { - case ex: ArithmeticException => null + case _: ArithmeticException => null } } } diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index d439bb56038a..5c36bd9d284d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -27,6 +27,16 @@ trait ContextErrors { override def toString() = "[Type error at:" + errPos + "] " + errMsg } + abstract class AbsAmbiguousTypeError extends AbsTypeError + + case class AmbiguousTypeError(errPos: Position, errMsg: String) + extends AbsAmbiguousTypeError + + case class AmbiguousImplicitTypeError(underlyingTree: Tree, errMsg: String) + extends AbsAmbiguousTypeError { + def errPos = underlyingTree.pos + } + sealed abstract class TreeTypeError extends AbsTypeError { def underlyingTree: Tree def errPos = underlyingTree.pos @@ -38,9 +48,6 @@ trait ContextErrors { case class AccessTypeError(underlyingTree: Tree, errMsg: String) extends TreeTypeError - case class AmbiguousTypeError(errPos: Position, errMsg: String) - extends AbsTypeError - case class SymbolTypeError(underlyingSym: Symbol, errMsg: String) extends AbsTypeError { @@ -66,7 +73,7 @@ trait ContextErrors { // 2) provide the type of the implicit parameter for which we got diverging expansion // (pt at the point of divergence gives less information to the user) // Note: it is safe to delay error message generation in this case - // becasue we don't modify implicits' infos. + // because we don't modify implicits' infos. case class DivergentImplicitTypeError(underlyingTree: Tree, pt0: Type, sym: Symbol) extends TreeTypeError { def errMsg: String = errMsgForPt(pt0) @@ -75,8 +82,6 @@ trait ContextErrors { s"diverging implicit expansion for type ${pt}\nstarting with ${sym.fullLocationString}" } - case class AmbiguousImplicitTypeError(underlyingTree: Tree, errMsg: String) - extends TreeTypeError case class PosAndMsgTypeError(errPos: Position, errMsg: String) extends AbsTypeError @@ -90,10 +95,6 @@ trait ContextErrors { issueTypeError(SymbolTypeError(sym, msg)) } - def issueAmbiguousTypeError(pre: Type, sym1: Symbol, sym2: Symbol, err: AmbiguousTypeError)(implicit context: Context) { - context.issueAmbiguousError(pre, sym1, sym2, err) - } - def issueTypeError(err: AbsTypeError)(implicit context: Context) { context.issue(err) } def typeErrorMsg(found: Type, req: Type) = "type mismatch" + foundReqMsg(found, req) @@ -123,6 +124,36 @@ trait ContextErrors { import ErrorUtils._ + private def MacroIncompatibleEngineError(friendlyMessage: String, internalMessage: String) = { + def debugDiagnostic = s"(internal diagnostic: $internalMessage)" + val message = if (macroDebugLite || macroDebugVerbose) s"$friendlyMessage $debugDiagnostic" else friendlyMessage + // TODO: clean this up! (This is a more explicit version of what the code use to do, to reveal the issue.) 
+ throw new TypeError(analyzer.lastTreeToTyper.pos, message) + } + + def MacroCantExpand210xMacrosError(internalMessage: String) = + MacroIncompatibleEngineError("can't expand macros compiled by previous versions of Scala", internalMessage) + + def MacroCantExpandIncompatibleMacrosError(internalMessage: String) = + MacroIncompatibleEngineError("macro cannot be expanded, because it was compiled by an incompatible macro engine", internalMessage) + + def NoImplicitFoundError(tree: Tree, param: Symbol)(implicit context: Context): Unit = { + def errMsg = { + val paramName = param.name + val paramTp = param.tpe + def evOrParam = ( + if (paramName startsWith nme.EVIDENCE_PARAM_PREFIX) + "evidence parameter of type" + else + s"parameter $paramName:") + paramTp.typeSymbolDirect match { + case ImplicitNotFoundMsg(msg) => msg.format(paramName, paramTp) + case _ => s"could not find implicit value for $evOrParam $paramTp" + } + } + issueNormalTypeError(tree, errMsg) + } + trait TyperContextErrors { self: Typer => @@ -141,24 +172,6 @@ trait ContextErrors { setError(tree) } - def NoImplicitFoundError(tree: Tree, param: Symbol) = { - def errMsg = { - val paramName = param.name - val paramTp = param.tpe - def evOrParam = ( - if (paramName startsWith nme.EVIDENCE_PARAM_PREFIX) - "evidence parameter of type" - else - s"parameter $paramName:" - ) - paramTp.typeSymbolDirect match { - case ImplicitNotFoundMsg(msg) => msg.format(paramName, paramTp) - case _ => s"could not find implicit value for $evOrParam $paramTp" - } - } - issueNormalTypeError(tree, errMsg) - } - def AdaptTypeError(tree: Tree, found: Type, req: Type) = { // SI-3971 unwrapping to the outermost Apply helps prevent confusion with the // error message point. @@ -359,6 +372,14 @@ trait ContextErrors { //setError(sel) } + def SelectWithUnderlyingError(sel: Tree, err: AbsTypeError) = { + // if there's no position, this is likely the result of a MissingRequirementError + // use the position of the selection we failed to type check to report the original message + if (err.errPos == NoPosition) issueNormalTypeError(sel, err.errMsg) + else issueTypeError(err) + setError(sel) + } + //typedNew def IsAbstractError(tree: Tree, sym: Symbol) = { issueNormalTypeError(tree, sym + " is abstract; cannot be instantiated") @@ -725,17 +746,6 @@ trait ContextErrors { NormalTypeError(expandee, "too many argument lists for " + fun) } - private def MacroIncompatibleEngineError(friendlyMessage: String, internalMessage: String) = { - def debugDiagnostic = s"(internal diagnostic: $internalMessage)" - val message = if (macroDebugLite || macroDebugVerbose) s"$friendlyMessage $debugDiagnostic" else friendlyMessage - issueNormalTypeError(lastTreeToTyper, message) - } - - def MacroCantExpand210xMacrosError(internalMessage: String) = - MacroIncompatibleEngineError("can't expand macros compiled by previous versions of Scala", internalMessage) - - def MacroCantExpandIncompatibleMacrosError(internalMessage: String) = - MacroIncompatibleEngineError("macro cannot be expanded, because it was compiled by an incompatible macro engine", internalMessage) case object MacroExpansionException extends Exception with scala.util.control.ControlThrowable @@ -875,20 +885,31 @@ trait ContextErrors { val WrongNumber, NoParams, ArgsDoNotConform = Value } - private def ambiguousErrorMsgPos(pos: Position, pre: Type, sym1: Symbol, sym2: Symbol, rest: String) = - if (sym1.hasDefault && sym2.hasDefault && sym1.enclClass == sym2.enclClass) { - val methodName = nme.defaultGetterToMethod(sym1.name) - 
(sym1.enclClass.pos, - "in "+ sym1.enclClass +", multiple overloaded alternatives of " + methodName + - " define default arguments") - } else { - (pos, - ("ambiguous reference to overloaded definition,\n" + - "both " + sym1 + sym1.locationString + " of type " + pre.memberType(sym1) + - "\nand " + sym2 + sym2.locationString + " of type " + pre.memberType(sym2) + - "\nmatch " + rest) - ) - } + private def issueAmbiguousTypeErrorUnlessErroneous(pos: Position, pre: Type, sym1: Symbol, sym2: Symbol, rest: String): Unit = { + // To avoid stack overflows (SI-8890), we MUST (at least) report when either `validTargets` OR `ambiguousSuppressed` + // More details: + // If `!context.ambiguousErrors`, `reporter.issueAmbiguousError` (which `context.issueAmbiguousError` forwards to) + // buffers ambiguous errors. In this case, to avoid looping, we must issue even if `!validTargets`. (TODO: why?) + // When not buffering (and thus reporting to the user), we shouldn't issue unless `validTargets`, + // otherwise we report two different errors that trace back to the same root cause, + // and unless `validTargets`, we don't know for sure the ambiguity is real anyway. + val validTargets = !(pre.isErroneous || sym1.isErroneous || sym2.isErroneous) + val ambiguousBuffered = !context.ambiguousErrors + if (validTargets || ambiguousBuffered) + context.issueAmbiguousError( + if (sym1.hasDefault && sym2.hasDefault && sym1.enclClass == sym2.enclClass) { + val methodName = nme.defaultGetterToMethod(sym1.name) + AmbiguousTypeError(sym1.enclClass.pos, + s"in ${sym1.enclClass}, multiple overloaded alternatives of $methodName define default arguments") + + } else { + AmbiguousTypeError(pos, + "ambiguous reference to overloaded definition,\n" + + s"both ${sym1.fullLocationString} of type ${pre.memberType(sym1)}\n" + + s"and ${sym2.fullLocationString} of type ${pre.memberType(sym2)}\n" + + s"match $rest") + }) + } def AccessError(tree: Tree, sym: Symbol, ctx: Context, explanation: String): AbsTypeError = AccessError(tree, sym, ctx.enclClass.owner.thisType, ctx.enclClass.owner, explanation) @@ -944,8 +965,7 @@ trait ContextErrors { val msg0 = "argument types " + argtpes.mkString("(", ",", ")") + (if (pt == WildcardType) "" else " and expected result type " + pt) - val (pos, msg) = ambiguousErrorMsgPos(tree.pos, pre, best, firstCompeting, msg0) - issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(pos, msg)) + issueAmbiguousTypeErrorUnlessErroneous(tree.pos, pre, best, firstCompeting, msg0) setErrorOnLastTry(lastTry, tree) } else setError(tree) // do not even try further attempts because they should all fail // even if this is not the last attempt (because of the SO's possibility on the horizon) @@ -958,8 +978,7 @@ trait ContextErrors { } def AmbiguousExprAlternativeError(tree: Tree, pre: Type, best: Symbol, firstCompeting: Symbol, pt: Type, lastTry: Boolean) = { - val (pos, msg) = ambiguousErrorMsgPos(tree.pos, pre, best, firstCompeting, "expected type " + pt) - issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(pos, msg)) + issueAmbiguousTypeErrorUnlessErroneous(tree.pos, pre, best, firstCompeting, "expected type " + pt) setErrorOnLastTry(lastTry, tree) } diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 133e80788be3..b3e207b334c8 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -9,6 +9,7 @@ package typechecker import 
scala.collection.{ immutable, mutable } import scala.annotation.tailrec import scala.reflect.internal.util.shortClassOfInstance +import scala.tools.nsc.reporters.Reporter /** * @author Martin Odersky @@ -66,7 +67,7 @@ trait Contexts { self: Analyzer => def isMask(s: ImportSelector) = s.name != nme.WILDCARD && s.rename == nme.WILDCARD imp.tree.selectors filterNot (s => isMask(s) || used(s)) foreach { sel => - unit.warning(imp posOf sel, "Unused import") + reporter.warning(imp posOf sel, "Unused import") } } allUsedSelectors --= imps @@ -98,12 +99,12 @@ trait Contexts { self: Analyzer => } - def rootContext(unit: CompilationUnit, tree: Tree = EmptyTree, erasedTypes: Boolean = false): Context = { + def rootContext(unit: CompilationUnit, tree: Tree = EmptyTree, throwing: Boolean = false, checking: Boolean = false): Context = { val rootImportsContext = (startContext /: rootImports(unit))((c, sym) => c.make(gen.mkWildcardImport(sym))) // there must be a scala.xml package when xml literals were parsed in this unit if (unit.hasXml && ScalaXmlPackage == NoSymbol) - unit.error(unit.firstXmlPos, "To compile XML syntax, the scala.xml package must be on the classpath.\nPlease see https://github.com/scala/scala/wiki/Scala-2.11#xml.") + reporter.error(unit.firstXmlPos, "To compile XML syntax, the scala.xml package must be on the classpath.\nPlease see https://github.com/scala/scala-xml for details.") // scala-xml needs `scala.xml.TopScope` to be in scope globally as `$scope` // We detect `scala-xml` by looking for `scala.xml.TopScope` and @@ -113,18 +114,21 @@ trait Contexts { self: Analyzer => else rootImportsContext.make(gen.mkImport(ScalaXmlPackage, nme.TopScope, nme.dollarScope)) val c = contextWithXML.make(tree, unit = unit) - if (erasedTypes) c.setThrowErrors() else c.setReportErrors() - c(EnrichmentEnabled | ImplicitsEnabled) = !erasedTypes + + c.initRootContext(throwing, checking) c } + def rootContextPostTyper(unit: CompilationUnit, tree: Tree = EmptyTree): Context = + rootContext(unit, tree, throwing = true) + def resetContexts() { startContext.enclosingContextChain foreach { context => context.tree match { case Import(qual, _) => qual setType singleType(qual.symbol.owner.thisType, qual.symbol) case _ => } - context.reportBuffer.clearAll() + context.reporter.clearAll() } } @@ -141,7 +145,7 @@ trait Contexts { self: Analyzer => * - A variety of bits that track the current error reporting policy (more on this later); * whether or not implicits/macros are enabled, whether we are in a self or super call or * in a constructor suffix. These are represented as bits in the mask `contextMode`. - * - Some odds and ends: undetermined type pararameters of the current line of type inference; + * - Some odds and ends: undetermined type parameters of the current line of type inference; * contextual augmentation for error messages, tracking of the nesting depth. * * And behaviour: @@ -150,19 +154,19 @@ trait Contexts { self: Analyzer => * to buffer these for use in 'silent' type checking, when some recovery might be possible. * - `Context` is something of a Zipper for the tree were are typechecking: it `enclosingContextChain` * is the path back to the root. This is exactly what we need to resolve names (`lookupSymbol`) - * and to collect in-scope implicit defintions (`implicitss`) + * and to collect in-scope implicit definitions (`implicitss`) * Supporting these are `imports`, which represents all `Import` trees in in the enclosing context chain. 
- * - In a similar vein, we can assess accessiblity (`isAccessible`.) + * - In a similar vein, we can assess accessibility (`isAccessible`.) * * More on error buffering: * When are type errors recoverable? In quite a few places, it turns out. Some examples: * trying to type an application with/without the expected type, or with/without implicit views * enabled. This is usually mediated by `Typer.silent`, `Inferencer#tryTwice`. * - * Intially, starting from the `typer` phase, the contexts either buffer or report errors; + * Initially, starting from the `typer` phase, the contexts either buffer or report errors; * afterwards errors are thrown. This is configured in `rootContext`. Additionally, more * fine grained control is needed based on the kind of error; ambiguity errors are often - * suppressed during exploraratory typing, such as determining whether `a == b` in an argument + * suppressed during exploratory typing, such as determining whether `a == b` in an argument * position is an assignment or a named argument, when `Infererencer#isApplicableSafe` type checks * applications with and without an expected type, or whtn `Typer#tryTypedApply` tries to fit arguments to * a function type with/without implicit views. @@ -178,7 +182,8 @@ trait Contexts { self: Analyzer => * @param _outer The next outer context. */ class Context private[typechecker](val tree: Tree, val owner: Symbol, val scope: Scope, - val unit: CompilationUnit, _outer: Context) { + val unit: CompilationUnit, _outer: Context, + private[this] var _reporter: ContextReporter = new ThrowingReporter) { private def outerIsNoContext = _outer eq null final def outer: Context = if (outerIsNoContext) NoContext else _outer @@ -254,8 +259,6 @@ trait Contexts { self: Analyzer => def macrosEnabled = this(MacrosEnabled) def enrichmentEnabled_=(value: Boolean) = this(EnrichmentEnabled) = value def enrichmentEnabled = this(EnrichmentEnabled) - def checking_=(value: Boolean) = this(Checking) = value - def checking = this(Checking) def retyping_=(value: Boolean) = this(ReTyping) = value def retyping = this(ReTyping) def inSecondTry = this(SecondTry) @@ -265,8 +268,9 @@ trait Contexts { self: Analyzer => def defaultModeForTyped: Mode = if (inTypeConstructorAllowed) Mode.NOmode else Mode.EXPRmode - /** These messages are printed when issuing an error */ - var diagnostic: List[String] = Nil + /** To enrich error messages involving default arguments. + When extending the notion, group diagnostics in an object. */ + var diagUsedDefaults: Boolean = false /** Saved type bounds for type parameters which are narrowed in a GADT. */ var savedTypeBounds: List[(Symbol, Type)] = List() @@ -310,7 +314,7 @@ trait Contexts { self: Analyzer => */ def savingUndeterminedTypeParams[A](reportAmbiguous: Boolean = ambiguousErrors)(body: => A): A = { withMode() { - this(AmbiguousErrors) = reportAmbiguous + setAmbiguousErrors(reportAmbiguous) val saved = extractUndetparams() try body finally undetparams = saved @@ -321,54 +325,59 @@ trait Contexts { self: Analyzer => // Error reporting policies and buffer. 
// - private var _reportBuffer: ReportBuffer = new ReportBuffer - /** A buffer for errors and warnings, used with `this.bufferErrors == true` */ - def reportBuffer = _reportBuffer - /** Discard the current report buffer, and replace with an empty one */ - def useFreshReportBuffer() = _reportBuffer = new ReportBuffer - /** Discard the current report buffer, and replace with `other` */ - def restoreReportBuffer(other: ReportBuffer) = _reportBuffer = other - - /** The first error, if any, in the report buffer */ - def firstError: Option[AbsTypeError] = reportBuffer.firstError - def errors: Seq[AbsTypeError] = reportBuffer.errors - /** Does the report buffer contain any errors? */ - def hasErrors = reportBuffer.hasErrors - - def reportErrors = this(ReportErrors) - def bufferErrors = this(BufferErrors) + // the reporter for this context + def reporter: ContextReporter = _reporter + + // if set, errors will not be reporter/thrown + def bufferErrors = reporter.isBuffering + def reportErrors = !(bufferErrors || reporter.isThrowing) + + // whether to *report* (which is separate from buffering/throwing) ambiguity errors def ambiguousErrors = this(AmbiguousErrors) - def throwErrors = contextMode.inNone(ReportErrors | BufferErrors) - - def setReportErrors(): Unit = set(enable = ReportErrors | AmbiguousErrors, disable = BufferErrors) - def setBufferErrors(): Unit = set(enable = BufferErrors, disable = ReportErrors | AmbiguousErrors) - def setThrowErrors(): Unit = this(ReportErrors | AmbiguousErrors | BufferErrors) = false - def setAmbiguousErrors(report: Boolean): Unit = this(AmbiguousErrors) = report - - /** Append the given errors to the report buffer */ - def updateBuffer(errors: Traversable[AbsTypeError]) = reportBuffer ++= errors - /** Clear all errors from the report buffer */ - def flushBuffer() { reportBuffer.clearAllErrors() } - /** Return and clear all errors from the report buffer */ - def flushAndReturnBuffer(): immutable.Seq[AbsTypeError] = { - val current = reportBuffer.errors - reportBuffer.clearAllErrors() - current - } - /** Issue and clear all warnings from the report buffer */ - def flushAndIssueWarnings() { - reportBuffer.warnings foreach { - case (pos, msg) => unit.warning(pos, msg) + private def setAmbiguousErrors(report: Boolean): Unit = this(AmbiguousErrors) = report + + /** + * Try inference twice: once without views and once with views, + * unless views are already disabled. + */ + abstract class TryTwice { + def tryOnce(isLastTry: Boolean): Unit + + final def apply(): Unit = { + val doLastTry = + // do first try if implicits are enabled + if (implicitsEnabled) { + // We create a new BufferingReporter to + // distinguish errors that occurred before entering tryTwice + // and our first attempt in 'withImplicitsDisabled'. If the + // first attempt fails, we try with implicits on + // and the original reporter. + // immediate reporting of ambiguous errors is suppressed, so that they are buffered + inSilentMode { + try { + set(disable = ImplicitsEnabled | EnrichmentEnabled) // restored by inSilentMode + tryOnce(false) + reporter.hasErrors + } catch { + case ex: CyclicReference => throw ex + case ex: TypeError => true // recoverable cyclic references? 
+ } + } + } else true + + // do last try if try with implicits enabled failed + // (or if it was not attempted because they were disabled) + if (doLastTry) + tryOnce(true) } - reportBuffer.clearAllWarnings() } // // Temporary mode adjustment // - @inline def withMode[T](enabled: ContextMode = NOmode, disabled: ContextMode = NOmode)(op: => T): T = { + @inline final def withMode[T](enabled: ContextMode = NOmode, disabled: ContextMode = NOmode)(op: => T): T = { val saved = contextMode set(enabled, disabled) try op @@ -402,12 +411,18 @@ trait Contexts { self: Analyzer => // See comment on FormerNonStickyModes. @inline final def withOnlyStickyModes[T](op: => T): T = withMode(disabled = FormerNonStickyModes)(op) - /** @return true if the `expr` evaluates to true within a silent Context that incurs no errors */ + // inliner note: this has to be a simple method for inlining to work -- moved the `&& !reporter.hasErrors` out @inline final def inSilentMode(expr: => Boolean): Boolean = { - withMode() { // withMode with no arguments to restore the mode mutated by `setBufferErrors`. - setBufferErrors() - try expr && !hasErrors - finally reportBuffer.clearAll() + val savedContextMode = contextMode + val savedReporter = reporter + + setAmbiguousErrors(false) + _reporter = new BufferingReporter + + try expr + finally { + contextMode = savedContextMode + _reporter = savedReporter } } @@ -423,7 +438,8 @@ trait Contexts { self: Analyzer => * `Context#imports`. */ def make(tree: Tree = tree, owner: Symbol = owner, - scope: Scope = scope, unit: CompilationUnit = unit): Context = { + scope: Scope = scope, unit: CompilationUnit = unit, + reporter: ContextReporter = this.reporter): Context = { val isTemplateOrPackage = tree match { case _: Template | _: PackageDef => true case _ => false @@ -433,8 +449,10 @@ trait Contexts { self: Analyzer => case _ => false } val isImport = tree match { - case _: Import => true - case _ => false + // The guard is for SI-8403. It prevents adding imports again in the context created by + // `Namer#createInnerNamer` + case _: Import if tree != this.tree => true + case _ => false } val sameOwner = owner == this.owner val prefixInChild = @@ -444,16 +462,15 @@ trait Contexts { self: Analyzer => // The blank canvas val c = if (isImport) - new Context(tree, owner, scope, unit, this) with ImportContext + new Context(tree, owner, scope, unit, this, reporter) with ImportContext else - new Context(tree, owner, scope, unit, this) + new Context(tree, owner, scope, unit, this, reporter) // Fields that are directly propagated c.variance = variance - c.diagnostic = diagnostic + c.diagUsedDefaults = diagUsedDefaults c.openImplicits = openImplicits c.contextMode = contextMode // note: ConstructorSuffix, a bit within `mode`, is conditionally overwritten below. 
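The rewritten inSilentMode above follows a save-and-restore discipline: remember the current mode and reporter, install a fresh BufferingReporter, and put everything back in a finally block so speculative errors cannot leak; TryTwice layers its first, implicits-disabled attempt on top of the same helper. A simplified, self-contained model of that discipline (the Reporter trait and Ctx holder here are illustrative stand-ins, not the compiler's types):

    object SilentModeSketch {
      trait Reporter { def error(msg: String): Unit; def hasErrors: Boolean }

      final class Immediate extends Reporter {
        private var seen = false
        def error(msg: String): Unit = { seen = true; Console.err.println(msg) }
        def hasErrors: Boolean = seen
      }

      final class Buffering extends Reporter {
        val buffer = scala.collection.mutable.ListBuffer.empty[String]
        def error(msg: String): Unit = buffer += msg // buffered, never printed
        def hasErrors: Boolean = buffer.nonEmpty
      }

      final class Ctx(var reporter: Reporter)

      /** Run `body` against a fresh buffering reporter and report whether it stayed error-free.
        * The previous reporter is restored in `finally`, so speculative errors never escape. */
      def inSilentMode(ctx: Ctx)(body: => Unit): Boolean = {
        val saved  = ctx.reporter
        ctx.reporter = new Buffering
        try { body; !ctx.reporter.hasErrors }
        finally ctx.reporter = saved
      }

      def main(args: Array[String]): Unit = {
        val ctx = new Ctx(new Immediate)
        val ok  = inSilentMode(ctx) { ctx.reporter.error("speculative failure") }
        assert(!ok && !ctx.reporter.hasErrors) // the failure was observed but never reported
      }
    }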
- c._reportBuffer = reportBuffer // Fields that may take on a different value in the child c.prefix = prefixInChild @@ -463,27 +480,46 @@ trait Contexts { self: Analyzer => // SI-8245 `isLazy` need to skip lazy getters to ensure `return` binds to the right place c.enclMethod = if (isDefDef && !owner.isLazy) c else enclMethod + if (tree != outer.tree) + c(TypeConstructorAllowed) = false + registerContext(c.asInstanceOf[analyzer.Context]) debuglog("[context] ++ " + c.unit + " / " + tree.summaryString) c } + /** Use reporter (possibly buffered) for errors/warnings and enable implicit conversion **/ + def initRootContext(throwing: Boolean = false, checking: Boolean = false): Unit = { + _reporter = + if (checking) new CheckingReporter + else if (throwing) new ThrowingReporter + else new ImmediateReporter + + setAmbiguousErrors(!throwing) + this(EnrichmentEnabled | ImplicitsEnabled) = !throwing + } + def make(tree: Tree, owner: Symbol, scope: Scope): Context = // TODO SI-7345 Moving this optimization into the main overload of `make` causes all tests to fail. - // even if it is extened to check that `unit == this.unit`. Why is this? + // even if it is extended to check that `unit == this.unit`. Why is this? if (tree == this.tree && owner == this.owner && scope == this.scope) this else make(tree, owner, scope, unit) /** Make a child context that represents a new nested scope */ - def makeNewScope(tree: Tree, owner: Symbol): Context = - make(tree, owner, newNestedScope(scope)) + def makeNewScope(tree: Tree, owner: Symbol, reporter: ContextReporter = this.reporter): Context = + make(tree, owner, newNestedScope(scope), reporter = reporter) /** Make a child context that buffers errors and warnings into a fresh report buffer. */ def makeSilent(reportAmbiguousErrors: Boolean = ambiguousErrors, newtree: Tree = tree): Context = { - val c = make(newtree) - c.setBufferErrors() + // A fresh buffer so as not to leak errors/warnings into `this`. + val c = make(newtree, reporter = new BufferingReporter) c.setAmbiguousErrors(reportAmbiguousErrors) - c._reportBuffer = new ReportBuffer // A fresh buffer so as not to leak errors/warnings into `this`. + c + } + + def makeNonSilent(newtree: Tree): Context = { + val c = make(newtree, reporter = reporter.makeImmediate) + c.setAmbiguousErrors(true) c } @@ -506,7 +542,9 @@ trait Contexts { self: Analyzer => */ def makeConstructorContext = { val baseContext = enclClass.outer.nextEnclosing(!_.tree.isInstanceOf[Template]) - val argContext = baseContext.makeNewScope(tree, owner) + // must propagate reporter! 
+ // (caught by neg/t3649 when refactoring reporting to be specified only by this.reporter and not also by this.contextMode) + val argContext = baseContext.makeNewScope(tree, owner, reporter = this.reporter) argContext.contextMode = contextMode argContext.inSelfSuperCall = true def enterElems(c: Context) { @@ -531,63 +569,25 @@ trait Contexts { self: Analyzer => // Error and warning issuance // - private def addDiagString(msg: String) = { - val ds = - if (diagnostic.isEmpty) "" - else diagnostic.mkString("\n","\n", "") - if (msg endsWith ds) msg else msg + ds - } - - private def unitError(pos: Position, msg: String): Unit = - if (checking) onTreeCheckerError(pos, msg) else unit.error(pos, msg) - - @inline private def issueCommon(err: AbsTypeError)(pf: PartialFunction[AbsTypeError, Unit]) { - if (settings.Yissuedebug) { - log("issue error: " + err.errMsg) - (new Exception).printStackTrace() - } - if (pf isDefinedAt err) pf(err) - else if (bufferErrors) { reportBuffer += err } - else throw new TypeError(err.errPos, err.errMsg) - } - /** Issue/buffer/throw the given type error according to the current mode for error reporting. */ - def issue(err: AbsTypeError) { - issueCommon(err) { case _ if reportErrors => - unitError(err.errPos, addDiagString(err.errMsg)) - } - } - + private[typechecker] def issue(err: AbsTypeError) = reporter.issue(err)(this) /** Issue/buffer/throw the given implicit ambiguity error according to the current mode for error reporting. */ - def issueAmbiguousError(pre: Type, sym1: Symbol, sym2: Symbol, err: AbsTypeError) { - issueCommon(err) { case _ if ambiguousErrors => - if (!pre.isErroneous && !sym1.isErroneous && !sym2.isErroneous) - unitError(err.errPos, err.errMsg) - } - } + private[typechecker] def issueAmbiguousError(err: AbsAmbiguousTypeError) = reporter.issueAmbiguousError(err)(this) + /** Issue/throw the given error message according to the current mode for error reporting. */ + def error(pos: Position, msg: String) = reporter.error(pos, msg) + /** Issue/throw the given error message according to the current mode for error reporting. */ + def warning(pos: Position, msg: String) = reporter.warning(pos, msg) + def echo(pos: Position, msg: String) = reporter.echo(pos, msg) - /** Issue/buffer/throw the given implicit ambiguity error according to the current mode for error reporting. */ - def issueAmbiguousError(err: AbsTypeError) { - issueCommon(err) { case _ if ambiguousErrors => unitError(err.errPos, addDiagString(err.errMsg)) } - } - /** Issue/throw the given `err` according to the current mode for error reporting. */ - def error(pos: Position, err: Throwable) = - if (reportErrors) unitError(pos, addDiagString(err.getMessage())) - else throw err + def deprecationWarning(pos: Position, sym: Symbol, msg: String): Unit = + currentRun.reporting.deprecationWarning(pos, sym, msg) + def deprecationWarning(pos: Position, sym: Symbol): Unit = + currentRun.reporting.deprecationWarning(pos, sym) // TODO: allow this to escalate to an error, and implicit search will ignore deprecated implicits - /** Issue/throw the given error message according to the current mode for error reporting. 
*/ - def error(pos: Position, msg: String) = { - val msg1 = addDiagString(msg) - if (reportErrors) unitError(pos, msg1) - else throw new TypeError(pos, msg1) - } + def featureWarning(pos: Position, featureName: String, featureDesc: String, featureTrait: Symbol, construct: => String = "", required: Boolean): Unit = + currentRun.reporting.featureWarning(pos, featureName, featureDesc, featureTrait, construct, required) - /** Issue/throw the given error message according to the current mode for error reporting. */ - def warning(pos: Position, msg: String, force: Boolean = false) { - if (reportErrors || force) unit.warning(pos, msg) - else if (bufferErrors) reportBuffer += (pos -> msg) - } // nextOuter determines which context is searched next for implicits // (after `this`, which contributes `newImplicits` below.) In @@ -799,7 +799,7 @@ trait Contexts { self: Analyzer => isAccessible(sym, pre) && !(imported && { val e = scope.lookupEntry(name) - (e ne null) && (e.owner == scope) + (e ne null) && (e.owner == scope) && (!settings.isScala212 || e.sym.exists) }) private def collectImplicits(syms: Scope, pre: Type, imported: Boolean = false): List[ImplicitInfo] = @@ -809,10 +809,11 @@ trait Contexts { self: Analyzer => private def collectImplicitImports(imp: ImportInfo): List[ImplicitInfo] = { val qual = imp.qual + val qualSym = qual.tpe.typeSymbol val pre = - if (qual.tpe.typeSymbol.isPackageClass) + if (qualSym.isPackageClass) // SI-6225 important if the imported symbol is inherited by the the package object. - singleType(qual.tpe, qual.tpe member nme.PACKAGE) + qualSym.packageObject.typeOfThis else qual.tpe def collect(sels: List[ImportSelector]): List[ImplicitInfo] = sels match { @@ -885,7 +886,8 @@ trait Contexts { self: Analyzer => Some(collectImplicitImports(imports.head)) } else if (owner.isPackageClass) { // the corresponding package object may contain implicit members. - Some(collectImplicits(owner.tpe.implicitMembers, owner.tpe)) + val pre = owner.packageObject.typeOfThis + Some(collectImplicits(pre.implicitMembers, pre)) } else Some(Nil) } @@ -955,52 +957,11 @@ trait Contexts { self: Analyzer => private def importedAccessibleSymbol(imp: ImportInfo, name: Name, requireExplicit: Boolean): Symbol = imp.importedSymbol(name, requireExplicit) filter (s => isAccessible(s, imp.qual.tpe, superAccess = false)) - /** Is `sym` defined in package object of package `pkg`? - * Since sym may be defined in some parent of the package object, - * we cannot inspect its owner only; we have to go through the - * info of the package object. However to avoid cycles we'll check - * what other ways we can before pushing that way. + /** Must `sym` defined in package object of package `pkg`, if + * it selected from a prefix with `pkg` as its type symbol? 
*/ - def isInPackageObject(sym: Symbol, pkg: Symbol): Boolean = { - def uninitialized(what: String) = { - log(s"Cannot look for $sym in package object of $pkg; $what is not initialized.") - false - } - def pkgClass = if (pkg.isTerm) pkg.moduleClass else pkg - def matchesInfo = ( - // need to be careful here to not get a cyclic reference during bootstrap - if (pkg.isInitialized) { - val module = pkg.info member nme.PACKAGEkw - if (module.isInitialized) - module.info.member(sym.name).alternatives contains sym - else - uninitialized("" + module) - } - else uninitialized("" + pkg) - ) - def inPackageObject(sym: Symbol) = ( - // To be in the package object, one of these must be true: - // 1) sym.owner is a package object class, and sym.owner.owner is the package class for `pkg` - // 2) sym.owner is inherited by the correct package object class - // We try to establish 1) by inspecting the owners directly, and then we try - // to rule out 2), and only if both those fail do we resort to looking in the info. - !sym.hasPackageFlag && sym.owner.exists && ( - if (sym.owner.isPackageObjectClass) - sym.owner.owner == pkgClass - else - !sym.owner.isPackageClass && matchesInfo - ) - ) - - // An overloaded symbol might not have the expected owner! - // The alternatives must be inspected directly. - pkgClass.isPackageClass && ( - if (sym.isOverloaded) - sym.alternatives forall (isInPackageObject(_, pkg)) - else - inPackageObject(sym) - ) - } + def isInPackageObject(sym: Symbol, pkg: Symbol): Boolean = + pkg.isPackage && sym.owner != pkg def isNameInScope(name: Name) = lookupSymbol(name, _ => true).isSuccess @@ -1224,61 +1185,178 @@ trait Contexts { self: Analyzer => override final def toString = super.toString + " with " + s"ImportContext { $impInfo; outer.owner = ${outer.owner} }" } - /** A buffer for warnings and errors that are accumulated during speculative type checking. */ - final class ReportBuffer { + /** A reporter for use during type checking. It has multiple modes for handling errors. + * + * The default (immediate mode) is to send the error to the global reporter. + * When switched into buffering mode via makeBuffering, errors and warnings are buffered and not be reported + * (there's a special case for ambiguity errors for some reason: those are force to the reporter when context.ambiguousErrors, + * or else they are buffered -- TODO: can we simplify this?) + * + * When using the type checker after typers, an error results in a TypeError being thrown. TODO: get rid of this mode. + * + * To handle nested contexts, reporters share buffers. TODO: only buffer in BufferingReporter, emit immediately in ImmediateReporter + */ + abstract class ContextReporter(private[this] var _errorBuffer: mutable.LinkedHashSet[AbsTypeError] = null, private[this] var _warningBuffer: mutable.LinkedHashSet[(Position, String)] = null) extends Reporter { type Error = AbsTypeError type Warning = (Position, String) - private def newBuffer[A] = mutable.LinkedHashSet.empty[A] // Important to use LinkedHS for stable results. + def issue(err: AbsTypeError)(implicit context: Context): Unit = handleError(err.errPos, addDiagString(err.errMsg)) - // [JZ] Contexts, pre- the SI-7345 refactor, avoided allocating the buffers until needed. This - // is replicated here out of conservatism. 
- private var _errorBuffer: mutable.LinkedHashSet[Error] = _ - private def errorBuffer = {if (_errorBuffer == null) _errorBuffer = newBuffer; _errorBuffer} - def errors: immutable.Seq[Error] = errorBuffer.toVector + protected def handleError(pos: Position, msg: String): Unit + protected def handleSuppressedAmbiguous(err: AbsAmbiguousTypeError): Unit = () + protected def handleWarning(pos: Position, msg: String): Unit = reporter.warning(pos, msg) - private var _warningBuffer: mutable.LinkedHashSet[Warning] = _ - private def warningBuffer = {if (_warningBuffer == null) _warningBuffer = newBuffer; _warningBuffer} - def warnings: immutable.Seq[Warning] = warningBuffer.toVector + def makeImmediate: ContextReporter = this + def makeBuffering: ContextReporter = this + def isBuffering: Boolean = false + def isThrowing: Boolean = false - def +=(error: AbsTypeError): this.type = { - errorBuffer += error - this - } - def ++=(errors: Traversable[AbsTypeError]): this.type = { - errorBuffer ++= errors - this - } - def +=(warning: Warning): this.type = { - warningBuffer += warning - this + /** Emit an ambiguous error according to context.ambiguousErrors + * + * - when true, use global.reporter regardless of whether we're buffering (TODO: can we change this?) + * - else, let this context reporter decide + */ + final def issueAmbiguousError(err: AbsAmbiguousTypeError)(implicit context: Context): Unit = + if (context.ambiguousErrors) reporter.error(err.errPos, addDiagString(err.errMsg)) // force reporting... see TODO above + else handleSuppressedAmbiguous(err) + + @inline final def withFreshErrorBuffer[T](expr: => T): T = { + val previousBuffer = _errorBuffer + _errorBuffer = newBuffer + val res = expr // expr will read _errorBuffer + _errorBuffer = previousBuffer + res } - def clearAll(): this.type = { - clearAllErrors(); clearAllWarnings(); + @inline final def propagatingErrorsTo[T](target: ContextReporter)(expr: => T): T = { + val res = expr // TODO: make sure we're okay skipping the try/finally overhead + if ((this ne target) && hasErrors) { // `this eq target` in e.g., test/files/neg/divergent-implicit.scala + // assert(target.errorBuffer ne _errorBuffer) + target ++= errors + // TODO: is clearAllErrors necessary? (no tests failed when dropping it) + // NOTE: even though `this ne target`, it may still be that `target.errorBuffer eq _errorBuffer`, + // so don't clear the buffer, but null out the reference so that a new one will be created when necessary (should be never??) + // (we should refactor error buffering to avoid mutation on shared buffers) + clearAllErrors() + } + res } - def clearAllErrors(): this.type = { - errorBuffer.clear() - this - } - def clearErrors(removeF: PartialFunction[AbsTypeError, Boolean]): this.type = { - errorBuffer.retain(!PartialFunction.cond(_)(removeF)) - this + protected final def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = + severity match { + case ERROR => handleError(pos, msg) + case WARNING => handleWarning(pos, msg) + case INFO => reporter.echo(pos, msg) + } + + final override def hasErrors = super.hasErrors || errorBuffer.nonEmpty + + // TODO: everything below should be pushed down to BufferingReporter (related to buffering) + // Implicit relies on this most heavily, but there you know reporter.isInstanceOf[BufferingReporter] + // can we encode this statically? 
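propagatingErrorsTo above is the hand-off at the end of a buffered attempt: if the nested reporter is a different object and it accumulated errors, those errors are appended to the enclosing reporter's buffer. A minimal model of that hand-off, with plain strings standing in for AbsTypeError (here the source buffer is simply cleared, whereas the code above nulls the possibly shared reference):

    import scala.collection.mutable

    object PropagateErrorsSketch {
      final class ErrorBuffer {
        val errors = mutable.LinkedHashSet.empty[String] // insertion-ordered and deduplicated, as above
        def hasErrors: Boolean = errors.nonEmpty
      }

      /** Run `expr`, then hand any errors buffered in `source` up to `target`,
        * unless the two are in fact the same buffer object. */
      def propagatingErrorsTo[T](source: ErrorBuffer, target: ErrorBuffer)(expr: => T): T = {
        val res = expr
        if ((source ne target) && source.hasErrors) {
          target.errors ++= source.errors
          source.errors.clear()
        }
        res
      }

      def main(args: Array[String]): Unit = {
        val outer = new ErrorBuffer
        val inner = new ErrorBuffer
        propagatingErrorsTo(inner, outer) { inner.errors += "ambiguous implicit values" }
        assert(outer.errors.contains("ambiguous implicit values") && !inner.hasErrors)
      }
    }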
+ + // have to pass in context because multiple contexts may share the same ReportBuffer + def reportFirstDivergentError(fun: Tree, param: Symbol, paramTp: Type)(implicit context: Context): Unit = + errors.collectFirst { + case dte: DivergentImplicitTypeError => dte + } match { + case Some(divergent) => + // DivergentImplicit error has higher priority than "no implicit found" + // no need to issue the problem again if we are still in silent mode + if (context.reportErrors) { + context.issue(divergent.withPt(paramTp)) + errorBuffer.retain { + case dte: DivergentImplicitTypeError => false + case _ => true + } + } + case _ => + NoImplicitFoundError(fun, param)(context) + } + + def retainDivergentErrorsExcept(saved: DivergentImplicitTypeError) = + errorBuffer.retain { + case err: DivergentImplicitTypeError => err ne saved + case _ => false + } + + def propagateImplicitTypeErrorsTo(target: ContextReporter) = { + errors foreach { + case err@(_: DivergentImplicitTypeError | _: AmbiguousImplicitTypeError) => + target.errorBuffer += err + case _ => + } + // debuglog("propagateImplicitTypeErrorsTo: " + errors) } - def retainErrors(leaveF: PartialFunction[AbsTypeError, Boolean]): this.type = { - errorBuffer.retain(PartialFunction.cond(_)(leaveF)) - this + + protected def addDiagString(msg: String)(implicit context: Context): String = { + val diagUsedDefaultsMsg = "Error occurred in an application involving default arguments." + if (context.diagUsedDefaults && !(msg endsWith diagUsedDefaultsMsg)) msg + "\n" + diagUsedDefaultsMsg + else msg } - def clearAllWarnings(): this.type = { - warningBuffer.clear() - this + + final def emitWarnings() = if (_warningBuffer != null) { + _warningBuffer foreach { + case (pos, msg) => reporter.warning(pos, msg) + } + _warningBuffer = null } - def hasErrors = errorBuffer.nonEmpty - def firstError = errorBuffer.headOption + // [JZ] Contexts, pre- the SI-7345 refactor, avoided allocating the buffers until needed. This + // is replicated here out of conservatism. + private def newBuffer[A] = mutable.LinkedHashSet.empty[A] // Important to use LinkedHS for stable results. 
+ final protected def errorBuffer = { if (_errorBuffer == null) _errorBuffer = newBuffer; _errorBuffer } + final protected def warningBuffer = { if (_warningBuffer == null) _warningBuffer = newBuffer; _warningBuffer } + + final def errors: immutable.Seq[Error] = errorBuffer.toVector + final def warnings: immutable.Seq[Warning] = warningBuffer.toVector + final def firstError: Option[AbsTypeError] = errorBuffer.headOption + + // TODO: remove ++= and clearAll* entirely in favor of more high-level combinators like withFreshErrorBuffer + final private[typechecker] def ++=(errors: Traversable[AbsTypeError]): Unit = errorBuffer ++= errors + + // null references to buffers instead of clearing them, + // as the buffers may be shared between different reporters + final def clearAll(): Unit = { _errorBuffer = null; _warningBuffer = null } + final def clearAllErrors(): Unit = { _errorBuffer = null } + } + + private[typechecker] class ImmediateReporter(_errorBuffer: mutable.LinkedHashSet[AbsTypeError] = null, _warningBuffer: mutable.LinkedHashSet[(Position, String)] = null) extends ContextReporter(_errorBuffer, _warningBuffer) { + override def makeBuffering: ContextReporter = new BufferingReporter(errorBuffer, warningBuffer) + protected def handleError(pos: Position, msg: String): Unit = reporter.error(pos, msg) + } + + + private[typechecker] class BufferingReporter(_errorBuffer: mutable.LinkedHashSet[AbsTypeError] = null, _warningBuffer: mutable.LinkedHashSet[(Position, String)] = null) extends ContextReporter(_errorBuffer, _warningBuffer) { + override def isBuffering = true + + override def issue(err: AbsTypeError)(implicit context: Context): Unit = errorBuffer += err + + // this used to throw new TypeError(pos, msg) -- buffering lets us report more errors (test/files/neg/macro-basic-mamdmi) + // the old throwing behavior was relied on by diagnostics in manifestOfType + protected def handleError(pos: Position, msg: String): Unit = errorBuffer += TypeErrorWrapper(new TypeError(pos, msg)) + override protected def handleSuppressedAmbiguous(err: AbsAmbiguousTypeError): Unit = errorBuffer += err + override protected def handleWarning(pos: Position, msg: String): Unit = warningBuffer += ((pos, msg)) + + // TODO: emit all buffered errors, warnings + override def makeImmediate: ContextReporter = new ImmediateReporter(errorBuffer, warningBuffer) + } + + /** Used after typer (specialization relies on TypeError being thrown, among other post-typer phases). + * + * TODO: get rid of it, use ImmediateReporter and a check for reporter.hasErrors where necessary + */ + private[typechecker] class ThrowingReporter extends ContextReporter { + override def isThrowing = true + protected def handleError(pos: Position, msg: String): Unit = throw new TypeError(pos, msg) } + /** Used during a run of [[scala.tools.nsc.typechecker.TreeCheckers]]? */ + private[typechecker] class CheckingReporter extends ContextReporter { + protected def handleError(pos: Position, msg: String): Unit = onTreeCheckerError(pos, msg) + } + + class ImportInfo(val tree: Import, val depth: Int) { def pos = tree.pos def posOf(sel: ImportSelector) = tree.pos withPoint sel.namePos @@ -1371,8 +1449,6 @@ object ContextMode { def apply(bits: Int): ContextMode = new ContextMode(bits) final val NOmode: ContextMode = 0 - final val ReportErrors: ContextMode = 1 << 0 - final val BufferErrors: ContextMode = 1 << 1 final val AmbiguousErrors: ContextMode = 1 << 2 /** Are we in a secondary constructor after the this constructor call? 
*/ @@ -1395,8 +1471,6 @@ object ContextMode { /** To selectively allow enrichment in patterns, where other kinds of implicit conversions are not allowed */ final val EnrichmentEnabled: ContextMode = 1 << 8 - /** Are we in a run of [[scala.tools.nsc.typechecker.TreeCheckers]]? */ - final val Checking: ContextMode = 1 << 9 /** Are we retypechecking arguments independently from the function applied to them? See `Typer.tryTypedApply` * TODO - iron out distinction/overlap with SecondTry. @@ -1433,17 +1507,14 @@ object ContextMode { PatternAlternative | StarPatterns | SuperInit | SecondTry | ReturnExpr | TypeConstructorAllowed ) - final val DefaultMode: ContextMode = MacrosEnabled + final val DefaultMode: ContextMode = MacrosEnabled private val contextModeNameMap = Map( - ReportErrors -> "ReportErrors", - BufferErrors -> "BufferErrors", AmbiguousErrors -> "AmbiguousErrors", ConstructorSuffix -> "ConstructorSuffix", SelfSuperCall -> "SelfSuperCall", ImplicitsEnabled -> "ImplicitsEnabled", MacrosEnabled -> "MacrosEnabled", - Checking -> "Checking", ReTyping -> "ReTyping", PatternAlternative -> "PatternAlternative", StarPatterns -> "StarPatterns", diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index c8ac3622e290..4435ed0b600c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -3,7 +3,7 @@ * @author Martin Odersky */ -//todo: rewrite or disllow new T where T is a mixin (currently: not a member of T) +//todo: rewrite or disallow new T where T is a mixin (currently: not a member of T) //todo: use inherited type info also for vars and values //todo: disallow C#D in superclass //todo: treat :::= correctly @@ -71,18 +71,15 @@ trait Implicits { typingStack.printTyping(tree, "typing implicit: %s %s".format(tree, context.undetparamsString)) val implicitSearchContext = context.makeImplicit(reportAmbiguous) val result = new ImplicitSearch(tree, pt, isView, implicitSearchContext, pos).bestImplicit - if (result.isFailure && saveAmbiguousDivergent && implicitSearchContext.hasErrors) { - context.updateBuffer(implicitSearchContext.reportBuffer.errors.collect { - case dte: DivergentImplicitTypeError => dte - case ate: AmbiguousImplicitTypeError => ate - }) - debuglog("update buffer: " + implicitSearchContext.reportBuffer.errors) - } + + if (result.isFailure && saveAmbiguousDivergent && implicitSearchContext.reporter.hasErrors) + implicitSearchContext.reporter.propagateImplicitTypeErrorsTo(context.reporter) + // SI-7944 undetermined type parameters that result from inference within typedImplicit land in // `implicitSearchContext.undetparams`, *not* in `context.undetparams` // Here, we copy them up to parent context (analogously to the way the errors are copied above), // and then filter out any which *were* inferred and are part of the substitutor in the implicit search result. 
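The ImmediateReporter/BufferingReporter/ThrowingReporter/CheckingReporter flavours introduced above replace the old ReportErrors/BufferErrors/Checking mode bits with a strategy object: the reporting policy now lives in which ContextReporter a context carries, and only handleError differs between the strategies. A compact model of that shape (simplified positions and error types, not the compiler's classes):

    object ReporterStrategySketch {
      final case class Pos(line: Int)
      final class TypeError(val pos: Pos, val msg: String) extends RuntimeException(msg)

      // One abstract hook; each strategy decides what "report an error" means.
      sealed abstract class ContextReporter {
        val buffered = scala.collection.mutable.LinkedHashSet.empty[(Pos, String)]
        protected def handleError(pos: Pos, msg: String): Unit
        final def error(pos: Pos, msg: String): Unit = handleError(pos, msg)
        def isBuffering: Boolean = false
        def hasErrors: Boolean = buffered.nonEmpty
      }
      final class Immediate extends ContextReporter {
        protected def handleError(pos: Pos, msg: String): Unit = Console.err.println(s"${pos.line}: $msg")
      }
      final class Buffering extends ContextReporter {
        override def isBuffering = true
        protected def handleError(pos: Pos, msg: String): Unit = buffered += ((pos, msg))
      }
      final class Throwing extends ContextReporter {
        protected def handleError(pos: Pos, msg: String): Unit = throw new TypeError(pos, msg)
      }

      def main(args: Array[String]): Unit = {
        val b = new Buffering
        b.error(Pos(1), "diverging implicit expansion")
        assert(b.isBuffering && b.hasErrors)          // buffered: nothing printed or thrown
        try { new Throwing().error(Pos(2), "boom"); assert(false) }
        catch { case _: TypeError => () }             // post-typer style: the error is thrown
      }
    }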
- context.undetparams = ((context.undetparams ++ implicitSearchContext.undetparams) filterNot result.subst.from.contains).distinct + context.undetparams = ((context.undetparams ++ result.undetparams) filterNot result.subst.from.contains).distinct if (Statistics.canEnable) Statistics.stopTimer(implicitNanos, start) if (Statistics.canEnable) Statistics.stopCounter(rawTypeImpl, rawTypeStart) @@ -99,7 +96,7 @@ trait Implicits { def wrapper(inference: => SearchResult) = wrapper1(inference) val result = wrapper(inferImplicit(tree, pt, reportAmbiguous = true, isView = isView, context = context, saveAmbiguousDivergent = !silent, pos = pos)) if (result.isFailure && !silent) { - val err = context.firstError + val err = context.reporter.firstError val errPos = err.map(_.errPos).getOrElse(pos) val errMsg = err.map(_.errMsg).getOrElse("implicit search has failed. to find out the reason, turn on -Xlog-implicits") onError(errPos, errMsg) @@ -162,8 +159,9 @@ trait Implicits { * @param tree The tree representing the implicit * @param subst A substituter that represents the undetermined type parameters * that were instantiated by the winning implicit. + * @param undetparams undetermined type parameters */ - class SearchResult(val tree: Tree, val subst: TreeTypeSubstituter) { + class SearchResult(val tree: Tree, val subst: TreeTypeSubstituter, val undetparams: List[Symbol]) { override def toString = "SearchResult(%s, %s)".format(tree, if (subst.isEmpty) "" else subst) @@ -173,16 +171,16 @@ trait Implicits { final def isSuccess = !isFailure } - lazy val SearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter) { + lazy val SearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter, Nil) { override def isFailure = true } - lazy val DivergentSearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter) { + lazy val DivergentSearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter, Nil) { override def isFailure = true override def isDivergent = true } - lazy val AmbiguousSearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter) { + lazy val AmbiguousSearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter, Nil) { override def isFailure = true override def isAmbiguousFailure = true } @@ -634,7 +632,7 @@ trait Implicits { } case _ => fallback } - context.firstError match { // using match rather than foreach to avoid non local return. + context.reporter.firstError match { // using match rather than foreach to avoid non local return. 
case Some(err) => log("implicit adapt failed: " + err.errMsg) return fail(err.errMsg) @@ -657,8 +655,8 @@ trait Implicits { } } - if (context.hasErrors) - fail("hasMatchingSymbol reported error: " + context.firstError.get.errMsg) + if (context.reporter.hasErrors) + fail("hasMatchingSymbol reported error: " + context.reporter.firstError.get.errMsg) else if (itree3.isErroneous) fail("error typechecking implicit candidate") else if (isLocalToCallsite && !hasMatchingSymbol(itree2)) @@ -676,7 +674,7 @@ trait Implicits { // #2421: check that we correctly instantiated type parameters outside of the implicit tree: checkBounds(itree3, NoPrefix, NoSymbol, undetParams, targs, "inferred ") - context.firstError match { + context.reporter.firstError match { case Some(err) => return fail("type parameters weren't correctly instantiated outside of the implicit tree: " + err.errMsg) case None => @@ -715,11 +713,11 @@ trait Implicits { case t => t } - context.firstError match { + context.reporter.firstError match { case Some(err) => fail("typing TypeApply reported errors for the implicit tree: " + err.errMsg) case None => - val result = new SearchResult(unsuppressMacroExpansion(itree3), subst) + val result = new SearchResult(unsuppressMacroExpansion(itree3), subst, context.undetparams) if (Statistics.canEnable) Statistics.incCounter(foundImplicits) typingLog("success", s"inferred value of type $ptInstantiated is $result") result @@ -832,19 +830,39 @@ trait Implicits { * so that if there is a best candidate it can still be selected. */ object DivergentImplicitRecovery { - // symbol of the implicit that caused the divergence. - // Initially null, will be saved on first diverging expansion. - private var implicitSym: Symbol = _ - private var countdown: Int = 1 - - def sym: Symbol = implicitSym - def apply(search: SearchResult, i: ImplicitInfo): SearchResult = - if (search.isDivergent && countdown > 0) { - countdown -= 1 - implicitSym = i.sym - log(s"discarding divergent implicit $implicitSym during implicit search") + private var divergentError: Option[DivergentImplicitTypeError] = None + + private def saveDivergent(err: DivergentImplicitTypeError) { + if (divergentError.isEmpty) divergentError = Some(err) + } + + def issueSavedDivergentError() { + divergentError foreach (err => context.issue(err)) + } + + def apply(search: SearchResult, i: ImplicitInfo, errors: Seq[AbsTypeError]): SearchResult = { + // A divergent error from a nested implicit search will be found in `errors`. Stash that + // aside to be re-issued if this implicit search fails. + errors.collectFirst { case err: DivergentImplicitTypeError => err } foreach saveDivergent + + if (search.isDivergent && divergentError.isEmpty) { + // Divergence triggered by `i` at this level of the implicit serach. We haven't + // seen divergence so far, we won't issue this error just yet, and instead temporarily + // treat `i` as a failed candidate. 
+ saveDivergent(DivergentImplicitTypeError(tree, pt, i.sym)) + log(s"discarding divergent implicit ${i.sym} during implicit search") SearchFailure - } else search + } else { + if (search.isFailure) { + // Discard the divergentError we saved (if any), as well as all errors that are not of type DivergentImplicitTypeError + // We don't want errors that occur while checking the implicit info + // to influence the check of further infos, but we should retain divergent implicit errors + // (except for the one we already squirreled away) + context.reporter.retainDivergentErrorsExcept(divergentError.getOrElse(null)) + } + search + } + } } /** Sorted list of eligible implicits. @@ -868,31 +886,33 @@ trait Implicits { * - if it matches, forget about all others it improves upon */ @tailrec private def rankImplicits(pending: Infos, acc: Infos): Infos = pending match { - case Nil => acc - case i :: is => - DivergentImplicitRecovery(typedImplicit(i, ptChecked = true, isLocalToCallsite), i) match { - case sr if sr.isDivergent => - Nil - case sr if sr.isFailure => - // We don't want errors that occur during checking implicit info - // to influence the check of further infos. - context.reportBuffer.retainErrors { - case err: DivergentImplicitTypeError => true + case Nil => acc + case firstPending :: otherPending => + def firstPendingImproves(alt: ImplicitInfo) = + firstPending == alt || ( + try improves(firstPending, alt) + catch { + case e: CyclicReference => + debugwarn(s"Discarding $firstPending during implicit search due to cyclic reference.") + true } - rankImplicits(is, acc) - case newBest => - best = newBest - val newPending = undoLog undo { - is filterNot (alt => alt == i || { - try improves(i, alt) - catch { - case e: CyclicReference => - debugwarn(s"Discarding $i during implicit search due to cyclic reference") - true - } - }) + ) + + val typedFirstPending = typedImplicit(firstPending, ptChecked = true, isLocalToCallsite) + + // Pass the errors to `DivergentImplicitRecovery` so that it can note + // the first `DivergentImplicitTypeError` that is being propagated + // from a nested implicit search; this one will be + // re-issued if this level of the search fails. + DivergentImplicitRecovery(typedFirstPending, firstPending, context.reporter.errors) match { + case sr if sr.isDivergent => Nil + case sr if sr.isFailure => rankImplicits(otherPending, acc) + case newBest => + best = newBest // firstPending is our new best, since we already pruned last time around: + val pendingImprovingBest = undoLog undo { + otherPending filterNot firstPendingImproves } - rankImplicits(newPending, i :: acc) + rankImplicits(pendingImprovingBest, firstPending :: acc) } } @@ -920,12 +940,9 @@ trait Implicits { } if (best.isFailure) { - /* If there is no winner, and we witnessed and caught divergence, - * now we can throw it for the error message. - */ - if (DivergentImplicitRecovery.sym != null) { - DivergingImplicitExpansionError(tree, pt, DivergentImplicitRecovery.sym)(context) - } + // If there is no winner, and we witnessed and recorded a divergence error, + // our recovery attempt has failed, so we must now issue it. 
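The reworked DivergentImplicitRecovery above remembers only the first divergence it sees, downgrades that candidate to an ordinary failure so the search can continue, gives up on a second divergence, and replays the saved error only when the whole search ends without a winner. A small standalone sketch of that policy (Candidate, Outcome and DivergenceError are illustrative names, not compiler types):

    object DivergenceRecoverySketch {
      final case class Candidate(name: String)
      final case class DivergenceError(candidate: Candidate)

      sealed trait Outcome
      case object Succeeded extends Outcome
      case object Failed    extends Outcome
      case object Diverged  extends Outcome

      /** Try candidates in order; the first diverging candidate is treated as a plain failure,
        * but its error is kept so it can be surfaced if nothing else succeeds. */
      def search(candidates: List[Candidate])(tryOne: Candidate => Outcome): Either[Option[DivergenceError], Candidate] = {
        var saved: Option[DivergenceError] = None
        @annotation.tailrec
        def loop(rest: List[Candidate]): Option[Candidate] = rest match {
          case Nil => None
          case c :: cs =>
            tryOne(c) match {
              case Succeeded => Some(c)
              case Failed    => loop(cs)
              case Diverged if saved.isEmpty =>
                saved = Some(DivergenceError(c)) // remember only the first divergence, treat `c` as failed
                loop(cs)
              case Diverged => None              // a second divergence ends this search
            }
        }
        loop(candidates).toRight(saved) // no winner: surface the saved divergence, if any
      }

      def main(args: Array[String]): Unit = {
        val cs = List(Candidate("a"), Candidate("b"), Candidate("c"))
        // "a" diverges but "c" succeeds: the divergence is swallowed and "c" wins.
        assert(search(cs)(c => if (c.name == "a") Diverged else if (c.name == "c") Succeeded else Failed) == Right(Candidate("c")))
        // nothing succeeds: the first recorded divergence is what gets reported.
        assert(search(cs)(c => if (c.name == "b") Failed else Diverged) == Left(Some(DivergenceError(Candidate("a")))))
      }
    }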
+ DivergentImplicitRecovery.issueSavedDivergentError() if (invalidImplicits.nonEmpty) setAddendum(pos, () => @@ -995,15 +1012,12 @@ trait Implicits { } case None => if (pre.isStable && !pre.typeSymbol.isExistentiallyBound) { - val companion = companionSymbolOf(sym, context) - companion.moduleClass match { - case mc: ModuleClassSymbol => - val infos = - for (im <- mc.implicitMembers.toList) yield new ImplicitInfo(im.name, singleType(pre, companion), im) - if (infos.nonEmpty) - infoMap += (sym -> infos) - case _ => - } + val pre1 = + if (sym.isPackageClass) sym.packageObject.typeOfThis + else singleType(pre, companionSymbolOf(sym, context)) + val infos = pre1.implicitMembers.iterator.map(mem => new ImplicitInfo(mem.name, pre1, mem)).toList + if (infos.nonEmpty) + infoMap += (sym -> infos) } val bts = tp.baseTypeSeq var i = 1 @@ -1124,9 +1138,9 @@ trait Implicits { try { val tree1 = typedPos(pos.focus)(arg) - context.firstError match { + context.reporter.firstError match { case Some(err) => processMacroExpansionError(err.errPos, err.errMsg) - case None => new SearchResult(tree1, EmptyTreeTypeSubstituter) + case None => new SearchResult(tree1, EmptyTreeTypeSubstituter, Nil) } } catch { case ex: TypeError => @@ -1196,7 +1210,7 @@ trait Implicits { def findSubManifest(tp: Type) = findManifest(tp, if (full) FullManifestClass else OptManifestClass) def mot(tp0: Type, from: List[Symbol], to: List[Type]): SearchResult = { implicit def wrapResult(tree: Tree): SearchResult = - if (tree == EmptyTree) SearchFailure else new SearchResult(tree, if (from.isEmpty) EmptyTreeTypeSubstituter else new TreeTypeSubstituter(from, to)) + if (tree == EmptyTree) SearchFailure else new SearchResult(tree, if (from.isEmpty) EmptyTreeTypeSubstituter else new TreeTypeSubstituter(from, to), Nil) val tp1 = tp0.dealias tp1 match { @@ -1256,19 +1270,20 @@ trait Implicits { if (tagInScope.isEmpty) mot(tp, Nil, Nil) else { if (ReflectRuntimeUniverse == NoSymbol) { - // todo. write a test for this - context.error(pos, + // TODO: write a test for this (the next error message is already checked by neg/interop_typetags_without_classtags_arenot_manifests.scala) + // TODO: this was using context.error, and implicit search always runs in silent mode, thus it was actually throwing a TypeError + // with the new strategy-based reporting, a BufferingReporter buffers instead of throwing + // it would be good to rework this logic to fit into the regular context.error mechanism + throw new TypeError(pos, sm"""to create a manifest here, it is necessary to interoperate with the type tag `$tagInScope` in scope. |however typetag -> manifest conversion requires Scala reflection, which is not present on the classpath. |to proceed put scala-reflect.jar on your compilation classpath and recompile.""") - return SearchFailure } if (resolveClassTag(pos, tp, allowMaterialization = true) == EmptyTree) { - context.error(pos, + throw new TypeError(pos, sm"""to create a manifest here, it is necessary to interoperate with the type tag `$tagInScope` in scope. |however typetag -> manifest conversion requires a class tag for the corresponding type to be present. |to proceed add a class tag to the type `$tp` (e.g. 
by introducing a context bound) and recompile.""") - return SearchFailure } val cm = typed(Ident(ReflectRuntimeCurrentMirror)) val internal = gen.mkAttributedSelect(gen.mkAttributedRef(ReflectRuntimeUniverse), UniverseInternal) @@ -1284,7 +1299,7 @@ trait Implicits { } def wrapResult(tree: Tree): SearchResult = - if (tree == EmptyTree) SearchFailure else new SearchResult(tree, EmptyTreeTypeSubstituter) + if (tree == EmptyTree) SearchFailure else new SearchResult(atPos(pos.focus)(tree), EmptyTreeTypeSubstituter, Nil) /** Materializes implicits of predefined types (currently, manifests and tags). * Will be replaced by implicit macros once we fix them. @@ -1324,52 +1339,66 @@ trait Implicits { * If all fails return SearchFailure */ def bestImplicit: SearchResult = { - val failstart = if (Statistics.canEnable) Statistics.startTimer(inscopeFailNanos) else null - val succstart = if (Statistics.canEnable) Statistics.startTimer(inscopeSucceedNanos) else null + val stats = Statistics.canEnable + val failstart = if (stats) Statistics.startTimer(inscopeFailNanos) else null + val succstart = if (stats) Statistics.startTimer(inscopeSucceedNanos) else null var result = searchImplicit(context.implicitss, isLocalToCallsite = true) - if (result.isFailure) { - if (Statistics.canEnable) Statistics.stopTimer(inscopeFailNanos, failstart) - } else { - if (Statistics.canEnable) Statistics.stopTimer(inscopeSucceedNanos, succstart) - if (Statistics.canEnable) Statistics.incCounter(inscopeImplicitHits) + if (stats) { + if (result.isFailure) Statistics.stopTimer(inscopeFailNanos, failstart) + else { + Statistics.stopTimer(inscopeSucceedNanos, succstart) + Statistics.incCounter(inscopeImplicitHits) + } } + if (result.isFailure) { - val previousErrs = context.flushAndReturnBuffer() - val failstart = if (Statistics.canEnable) Statistics.startTimer(oftypeFailNanos) else null - val succstart = if (Statistics.canEnable) Statistics.startTimer(oftypeSucceedNanos) else null + val failstart = if (stats) Statistics.startTimer(oftypeFailNanos) else null + val succstart = if (stats) Statistics.startTimer(oftypeSucceedNanos) else null + + // SI-6667, never search companions after an ambiguous error in in-scope implicits + val wasAmbigious = result.isAmbiguousFailure + + // TODO: encapsulate + val previousErrs = context.reporter.errors + context.reporter.clearAllErrors() - val wasAmbigious = result.isAmbiguousFailure // SI-6667, never search companions after an ambiguous error in in-scope implicits result = materializeImplicit(pt) + // `materializeImplicit` does some preprocessing for `pt` // is it only meant for manifests/tags or we need to do the same for `implicitsOfExpectedType`? 
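The restructured bestImplicit above stashes the errors buffered by the in-scope stage, records whether that stage failed with an ambiguity, and then falls back to materialization and, per SI-6667, to the expected type's implicit scope only when the in-scope failure was not ambiguous. A compact model of that staging (the Result values are toys, not the compiler's SearchResult):

    object StagedSearchSketch {
      sealed trait Result { def isFailure: Boolean; def isAmbiguous: Boolean = false }
      case object Found     extends Result { def isFailure = false }
      case object NotFound  extends Result { def isFailure = true }
      case object Ambiguous extends Result { def isFailure = true; override def isAmbiguous = true }

      /** Stage the search: in-scope implicits first, then the fallbacks,
        * but never consult the expected type's implicit scope after an in-scope ambiguity. */
      def bestImplicit(inScope: => Result, materialize: => Result, ofExpectedType: => Result): Result = {
        var result = inScope
        if (result.isFailure) {
          val wasAmbiguous = result.isAmbiguous // SI-6667: an ambiguity in scope is final
          result = materialize
          if (result.isFailure && !wasAmbiguous)
            result = ofExpectedType
        }
        result
      }

      def main(args: Array[String]): Unit = {
        // A plain miss in scope falls through to the expected type's implicit scope.
        assert(bestImplicit(NotFound, NotFound, Found) == Found)
        // An ambiguity in scope must not be papered over by a later hit.
        assert(bestImplicit(Ambiguous, NotFound, Found).isFailure)
      }
    }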
if (result.isFailure && !wasAmbigious) result = searchImplicit(implicitsOfExpectedType, isLocalToCallsite = false) - if (result.isFailure) { - context.updateBuffer(previousErrs) - if (Statistics.canEnable) Statistics.stopTimer(oftypeFailNanos, failstart) - } else { - if (Statistics.canEnable) Statistics.stopTimer(oftypeSucceedNanos, succstart) - if (Statistics.canEnable) Statistics.incCounter(oftypeImplicitHits) + if (result.isFailure) + context.reporter ++= previousErrs + + if (stats) { + if (result.isFailure) Statistics.stopTimer(oftypeFailNanos, failstart) + else { + Statistics.stopTimer(oftypeSucceedNanos, succstart) + Statistics.incCounter(oftypeImplicitHits) + } } } if (result.isSuccess && isView) { def maybeInvalidConversionError(msg: String) { // We have to check context.ambiguousErrors even though we are calling "issueAmbiguousError" // which ostensibly does exactly that before issuing the error. Why? I have no idea. Test is pos/t7690. + // AM: I would guess it's because ambiguous errors will be buffered in silent mode if they are not reported if (context.ambiguousErrors) context.issueAmbiguousError(AmbiguousImplicitTypeError(tree, msg)) } pt match { case Function1(_, out) => - def prohibit(sym: Symbol) = if (sym.tpe <:< out) { - maybeInvalidConversionError(s"the result type of an implicit conversion must be more specific than ${sym.name}") - result = SearchFailure + // must inline to avoid capturing result + def prohibit(sym: Symbol) = (sym.tpe <:< out) && { + maybeInvalidConversionError(s"the result type of an implicit conversion must be more specific than ${sym.name}") + true } - prohibit(AnyRefClass) - if (settings.isScala211) prohibit(AnyValClass) + if (prohibit(AnyRefClass) || (settings.isScala211 && prohibit(AnyValClass))) + result = SearchFailure case _ => false } if (settings.isScala211 && isInvalidConversionSource(pt)) { @@ -1377,8 +1406,9 @@ trait Implicits { result = SearchFailure } } - if (result.isFailure) - debuglog("no implicits found for "+pt+" "+pt.typeSymbol.info.baseClasses+" "+implicitsOfExpectedType) + + if (result.isFailure && settings.debug) // debuglog is not inlined for some reason + log("no implicits found for "+pt+" "+pt.typeSymbol.info.baseClasses+" "+implicitsOfExpectedType) result } @@ -1400,20 +1430,19 @@ trait Implicits { val eligible = new ImplicitComputation(iss, isLocalToCallsite).eligible eligible.toList.flatMap { (ii: ImplicitInfo) => - // each ImplicitInfo contributes a distinct set of constraints (generated indirectly by typedImplicit) - // thus, start each type var off with a fresh for every typedImplicit - resetTVars() - // any previous errors should not affect us now - context.flushBuffer() - - val res = typedImplicit(ii, ptChecked = false, isLocalToCallsite) - if (res.tree ne EmptyTree) List((res, tvars map (_.constr))) - else Nil + // each ImplicitInfo contributes a distinct set of constraints (generated indirectly by typedImplicit) + // thus, start each type var off with a fresh for every typedImplicit + resetTVars() + // any previous errors should not affect us now + context.reporter.clearAllErrors() + val res = typedImplicit(ii, ptChecked = false, isLocalToCallsite) + if (res.tree ne EmptyTree) List((res, tvars map (_.constr))) + else Nil + } } - } eligibleInfos(context.implicitss, isLocalToCallsite = true) ++ eligibleInfos(implicitsOfExpectedType, isLocalToCallsite = false) - } + } } object ImplicitNotFoundMsg { @@ -1446,8 +1475,10 @@ trait Implicits { }) private lazy val typeParamNames: List[String] = 
sym.typeParams.map(_.decodedName) + private def typeArgsAtSym(paramTp: Type) = paramTp.baseType(sym).typeArgs + + def format(paramName: Name, paramTp: Type): String = format(typeArgsAtSym(paramTp) map (_.toString)) - def format(paramName: Name, paramTp: Type): String = format(paramTp.typeArgs map (_.toString)) def format(typeArgs: List[String]): String = interpolate(msg, Map((typeParamNames zip typeArgs): _*)) // TODO: give access to the name and type of the implicit argument, etc? diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index fc0e2c7c803f..cf97474d9af4 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -199,8 +199,6 @@ trait Infer extends Checkable { def getContext = context - def issue(err: AbsTypeError): Unit = context.issue(err) - def explainTypes(tp1: Type, tp2: Type) = { if (context.reportErrors) withDisambiguation(List(), tp1, tp2)(global.explainTypes(tp1, tp2)) @@ -297,11 +295,17 @@ trait Infer extends Checkable { && !isByNameParamType(tp) && isCompatible(tp, dropByName(pt)) ) + def isCompatibleSam(tp: Type, pt: Type): Boolean = { + val samFun = typer.samToFunctionType(pt) + (samFun ne NoType) && isCompatible(tp, samFun) + } + val tp1 = normalize(tp) ( (tp1 weak_<:< pt) || isCoercible(tp1, pt) || isCompatibleByName(tp, pt) + || isCompatibleSam(tp, pt) ) } def isCompatibleArgs(tps: List[Type], pts: List[Type]) = (tps corresponds pts)(isCompatible) @@ -546,7 +550,14 @@ trait Infer extends Checkable { val targs = solvedTypes(tvars, tparams, tparams map varianceInTypes(formals), upper = false, lubDepth(formals) max lubDepth(argtpes)) // Can warn about inferring Any/AnyVal as long as they don't appear // explicitly anywhere amongst the formal, argument, result, or expected type. - def canWarnAboutAny = !(pt :: restpe :: formals ::: argtpes exists (t => (t contains AnyClass) || (t contains AnyValClass))) + // ...or lower bound of a type param, since they're asking for it. 
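// Illustrative examples (not from this patch) of the infer-any lint whose exemptions are widened
// above: with the lint enabled (e.g. via -Xlint), inferring Any out of nowhere warns, but an explicit
// Any in a type parameter's lower bound now counts as "asking for it" and is exempt.
object InferAnyExamples {
  def pair[T](a: T, b: T): (T, T) = (a, b)
  val warned = pair(1, "one")        // T is inferred as Any -> "a type was inferred to be `Any`..."

  def pairLo[T >: Any](a: T, b: T): (T, T) = (a, b)
  val silent = pairLo(1, "one")      // the lower bound mentions Any explicitly, so no warning
}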
+ def canWarnAboutAny = { + val loBounds = tparams map (_.info.bounds.lo) + val hasAny = pt :: restpe :: formals ::: argtpes ::: loBounds exists (t => + (t contains AnyClass) || (t contains AnyValClass) + ) + !hasAny + } def argumentPosition(idx: Int): Position = context.tree match { case x: ValOrDefDef => x.rhs match { case Apply(fn, args) if idx < args.size => args(idx).pos @@ -554,11 +565,11 @@ trait Infer extends Checkable { } case _ => context.tree.pos } - if (settings.warnInferAny.value && context.reportErrors && canWarnAboutAny) { + if (settings.warnInferAny && context.reportErrors && canWarnAboutAny) { foreachWithIndex(targs) ((targ, idx) => targ.typeSymbol match { case sym @ (AnyClass | AnyValClass) => - context.unit.warning(argumentPosition(idx), s"a type was inferred to be `${sym.name}`; this may indicate a programming error.") + reporter.warning(argumentPosition(idx), s"a type was inferred to be `${sym.name}`; this may indicate a programming error.") case _ => } ) @@ -781,7 +792,7 @@ trait Infer extends Checkable { def applicableExpectingPt(pt: Type): Boolean = { val silent = context.makeSilent(reportAmbiguousErrors = false) val result = newTyper(silent).infer.isApplicable(undetparams, ftpe, argtpes0, pt) - if (silent.hasErrors && !pt.isWildcard) + if (silent.reporter.hasErrors && !pt.isWildcard) applicableExpectingPt(WildcardType) // second try else result @@ -1006,7 +1017,7 @@ trait Infer extends Checkable { /** Substitute free type variables `undetparams` of type constructor * `tree` in pattern, given prototype `pt`. * - * @param tree the constuctor that needs to be instantiated + * @param tree the constructor that needs to be instantiated * @param undetparams the undetermined type parameters * @param pt0 the expected result type of the instance */ @@ -1266,33 +1277,36 @@ trait Infer extends Checkable { * If no alternative matches `pt`, take the parameterless one anyway. */ def inferExprAlternative(tree: Tree, pt: Type): Tree = { - def tryOurBests(pre: Type, alts: List[Symbol], isSecondTry: Boolean): Unit = { - val alts0 = alts filter (alt => isWeaklyCompatible(pre memberType alt, pt)) - val alts1 = if (alts0.isEmpty) alts else alts0 - val bests = bestAlternatives(alts1) { (sym1, sym2) => - val tp1 = pre memberType sym1 - val tp2 = pre memberType sym2 - - ( (tp2 eq ErrorType) - || isWeaklyCompatible(tp1, pt) && !isWeaklyCompatible(tp2, pt) - || isStrictlyMoreSpecific(tp1, tp2, sym1, sym2) - ) - } - // todo: missing test case for bests.isEmpty - bests match { - case best :: Nil => tree setSymbol best setType (pre memberType best) - case best :: competing :: _ if alts0.nonEmpty => - // SI-6912 Don't give up and leave an OverloadedType on the tree. - // Originally I wrote this as `if (secondTry) ... `, but `tryTwice` won't attempt the second try - // unless an error is issued. 
We're not issuing an error, in the assumption that it would be - // spurious in light of the erroneous expected type - if (pt.isErroneous) setError(tree) - else AmbiguousExprAlternativeError(tree, pre, best, competing, pt, isSecondTry) - case _ => if (bests.isEmpty || alts0.isEmpty) NoBestExprAlternativeError(tree, pt, isSecondTry) + val c = context + class InferTwice(pre: Type, alts: List[Symbol]) extends c.TryTwice { + def tryOnce(isSecondTry: Boolean): Unit = { + val alts0 = alts filter (alt => isWeaklyCompatible(pre memberType alt, pt)) + val alts1 = if (alts0.isEmpty) alts else alts0 + val bests = bestAlternatives(alts1) { (sym1, sym2) => + val tp1 = pre memberType sym1 + val tp2 = pre memberType sym2 + + ( (tp2 eq ErrorType) + || isWeaklyCompatible(tp1, pt) && !isWeaklyCompatible(tp2, pt) + || isStrictlyMoreSpecific(tp1, tp2, sym1, sym2) + ) + } + // todo: missing test case for bests.isEmpty + bests match { + case best :: Nil => tree setSymbol best setType (pre memberType best) + case best :: competing :: _ if alts0.nonEmpty => + // SI-6912 Don't give up and leave an OverloadedType on the tree. + // Originally I wrote this as `if (secondTry) ... `, but `tryTwice` won't attempt the second try + // unless an error is issued. We're not issuing an error, in the assumption that it would be + // spurious in light of the erroneous expected type + if (pt.isErroneous) setError(tree) + else AmbiguousExprAlternativeError(tree, pre, best, competing, pt, isSecondTry) + case _ => if (bests.isEmpty || alts0.isEmpty) NoBestExprAlternativeError(tree, pt, isSecondTry) + } } } tree.tpe match { - case OverloadedType(pre, alts) => tryTwice(tryOurBests(pre, alts, _)) ; tree + case OverloadedType(pre, alts) => (new InferTwice(pre, alts)).apply() ; tree case _ => tree } } @@ -1370,70 +1384,41 @@ trait Infer extends Checkable { * @pre tree.tpe is an OverloadedType. */ def inferMethodAlternative(tree: Tree, undetparams: List[Symbol], argtpes0: List[Type], pt0: Type): Unit = { - val OverloadedType(pre, alts) = tree.tpe - var varargsStar = false - val argtpes = argtpes0 mapConserve { - case RepeatedType(tp) => varargsStar = true ; tp - case tp => tp - } - def followType(sym: Symbol) = followApply(pre memberType sym) - def bestForExpectedType(pt: Type, isLastTry: Boolean): Unit = { - val applicable0 = alts filter (alt => context inSilentMode isApplicable(undetparams, followType(alt), argtpes, pt)) - val applicable = overloadsToConsiderBySpecificity(applicable0, argtpes, varargsStar) - val ranked = bestAlternatives(applicable)((sym1, sym2) => - isStrictlyMoreSpecific(followType(sym1), followType(sym2), sym1, sym2) - ) - ranked match { - case best :: competing :: _ => AmbiguousMethodAlternativeError(tree, pre, best, competing, argtpes, pt, isLastTry) // ambiguous - case best :: Nil => tree setSymbol best setType (pre memberType best) // success - case Nil if pt.isWildcard => NoBestMethodAlternativeError(tree, argtpes, pt, isLastTry) // failed - case Nil => bestForExpectedType(WildcardType, isLastTry) // failed, but retry with WildcardType - } - } - // This potentially makes up to four attempts: tryTwice may execute + // This potentially makes up to four attempts: tryOnce may execute // with and without views enabled, and bestForExpectedType will try again // with pt = WildcardType if it fails with pt != WildcardType. 
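// A minimal sketch, with assumed helper signatures, of the control flow that the TryTwice refactoring
// above encapsulates (compare the removed `def tryTwice` below): one attempt with implicit views
// disabled, then a final attempt with views enabled only if the first attempt left errors behind.
object TryTwiceSketch {
  def tryTwice(attempt: Boolean => Unit)(implicitsEnabled: Boolean,
                                         hasErrors: () => Boolean,
                                         clearErrors: () => Unit): Unit =
    if (!implicitsEnabled) attempt(true)                   // implicits are off anyway: single, last try
    else {
      attempt(false)                                       // isLastTry = false, implicit views disabled
      if (hasErrors()) { clearErrors(); attempt(true) }    // isLastTry = true, implicit views enabled
    }
}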
- tryTwice { isLastTry => - val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0 - debuglog(s"infer method alt ${tree.symbol} with alternatives ${alts map pre.memberType} argtpes=$argtpes pt=$pt") - bestForExpectedType(pt, isLastTry) - } - } + val c = context + class InferMethodAlternativeTwice extends c.TryTwice { + private[this] val OverloadedType(pre, alts) = tree.tpe + private[this] var varargsStar = false + private[this] val argtpes = argtpes0 mapConserve { + case RepeatedType(tp) => varargsStar = true ; tp + case tp => tp + } - /** Try inference twice, once without views and once with views, - * unless views are already disabled. - */ - def tryTwice(infer: Boolean => Unit): Unit = { - if (context.implicitsEnabled) { - val savedContextMode = context.contextMode - var fallback = false - context.setBufferErrors() - // We cache the current buffer because it is impossible to - // distinguish errors that occurred before entering tryTwice - // and our first attempt in 'withImplicitsDisabled'. If the - // first attempt fails we try with implicits on *and* clean - // buffer but that would also flush any pre-tryTwice valid - // errors, hence some manual buffer tweaking is necessary. - val errorsToRestore = context.flushAndReturnBuffer() - try { - context.withImplicitsDisabled(infer(false)) - if (context.hasErrors) { - fallback = true - context.contextMode = savedContextMode - context.flushBuffer() - infer(true) + private def followType(sym: Symbol) = followApply(pre memberType sym) + // separate method to help the inliner + private def isAltApplicable(pt: Type)(alt: Symbol) = context inSilentMode { isApplicable(undetparams, followType(alt), argtpes, pt) && !context.reporter.hasErrors } + private def rankAlternatives(sym1: Symbol, sym2: Symbol) = isStrictlyMoreSpecific(followType(sym1), followType(sym2), sym1, sym2) + private def bestForExpectedType(pt: Type, isLastTry: Boolean): Unit = { + val applicable = overloadsToConsiderBySpecificity(alts filter isAltApplicable(pt), argtpes, varargsStar) + val ranked = bestAlternatives(applicable)(rankAlternatives) + ranked match { + case best :: competing :: _ => AmbiguousMethodAlternativeError(tree, pre, best, competing, argtpes, pt, isLastTry) // ambiguous + case best :: Nil => tree setSymbol best setType (pre memberType best) // success + case Nil if pt.isWildcard => NoBestMethodAlternativeError(tree, argtpes, pt, isLastTry) // failed + case Nil => bestForExpectedType(WildcardType, isLastTry) // failed, but retry with WildcardType } - } catch { - case ex: CyclicReference => throw ex - case ex: TypeError => // recoverable cyclic references - context.contextMode = savedContextMode - if (!fallback) infer(true) else () - } finally { - context.contextMode = savedContextMode - context.updateBuffer(errorsToRestore) + } + + private[this] val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0 + def tryOnce(isLastTry: Boolean): Unit = { + debuglog(s"infer method alt ${tree.symbol} with alternatives ${alts map pre.memberType} argtpes=$argtpes pt=$pt") + bestForExpectedType(pt, isLastTry) } } - else infer(true) + + (new InferMethodAlternativeTwice).apply() } /** Assign `tree` the type of all polymorphic alternatives diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 9cf92ca5b943..da7b8b09aa28 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -42,7 +42,7 @@ import Fingerprint._ * 
(Expr(elems)) * (TypeTag(Int)) */ -trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers { +trait Macros extends MacroRuntimes with Traces with Helpers { self: Analyzer => import global._ @@ -50,6 +50,8 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers { import treeInfo.{isRepeatedParamType => _, _} import MacrosStats._ + lazy val fastTrack = new FastTrack[self.type](self) + def globalSettings = global.settings protected def findMacroClassLoader(): ClassLoader = { @@ -224,7 +226,8 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers { val Apply(_, pickledPayload) = wrapped val payload = pickledPayload.map{ case Assign(k, v) => (unpickleAtom(k), unpickleAtom(v)) }.toMap - import typer.TyperErrorGen._ + // TODO: refactor error handling: fail always throws a TypeError, + // and uses global state (analyzer.lastTreeToTyper) to determine the position for the error def fail(msg: String) = MacroCantExpandIncompatibleMacrosError(msg) def unpickle[T](field: String, clazz: Class[T]): T = { def failField(msg: String) = fail(s"$field $msg") @@ -261,7 +264,12 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers { } def isBlackbox(expandee: Tree): Boolean = isBlackbox(dissectApplied(expandee).core.symbol) - def isBlackbox(macroDef: Symbol): Boolean = { + def isBlackbox(macroDef: Symbol): Boolean = pluginsIsBlackbox(macroDef) + + /** Default implementation of `isBlackbox`. + * Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsIsBlackbox for more details) + */ + def standardIsBlackbox(macroDef: Symbol): Boolean = { val fastTrackBoxity = fastTrack.get(macroDef).map(_.isBlackbox) val bindingBoxity = loadMacroImplBinding(macroDef).map(_.isBlackbox) fastTrackBoxity orElse bindingBoxity getOrElse false @@ -415,9 +423,10 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers { val wrappedArgs = mapWithIndex(args)((arg, j) => { val fingerprint = implParams(min(j, implParams.length - 1)) + val duplicatedArg = duplicateAndKeepPositions(arg) fingerprint match { - case LiftedTyped => context.Expr[Nothing](arg.duplicate)(TypeTag.Nothing) // TODO: SI-5752 - case LiftedUntyped => arg.duplicate + case LiftedTyped => context.Expr[Nothing](duplicatedArg)(TypeTag.Nothing) // TODO: SI-5752 + case LiftedUntyped => duplicatedArg case _ => abort(s"unexpected fingerprint $fingerprint in $binding with paramss being $paramss " + s"corresponding to arg $arg in $argss") } @@ -568,7 +577,10 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers { // also see http://groups.google.com/group/scala-internals/browse_thread/thread/492560d941b315cc val expanded1 = try onSuccess(duplicateAndKeepPositions(expanded)) finally popMacroContext() if (!hasMacroExpansionAttachment(expanded1)) linkExpandeeAndExpanded(expandee, expanded1) - if (settings.Ymacroexpand.value == settings.MacroExpand.Discard) expandee.setType(expanded1.tpe) + if (settings.Ymacroexpand.value == settings.MacroExpand.Discard) { + suppressMacroExpansion(expandee) + expandee.setType(expanded1.tpe) + } else expanded1 case Fallback(fallback) => onFallback(fallback) case Delayed(delayed) => onDelayed(delayed) @@ -613,7 +625,7 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers { // `macroExpandApply` is called from `adapt`, where implicit conversions are disabled // therefore we need to re-enable the conversions back temporarily val result = typer.context.withImplicitsEnabled(typer.typed(tree, 
mode, pt)) - if (result.isErrorTyped && macroDebugVerbose) println(s"$label has failed: ${typer.context.reportBuffer.errors}") + if (result.isErrorTyped && macroDebugVerbose) println(s"$label has failed: ${typer.context.reporter.errors}") result } } @@ -706,7 +718,7 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers { sealed abstract class MacroStatus(val result: Tree) case class Success(expanded: Tree) extends MacroStatus(expanded) - case class Fallback(fallback: Tree) extends MacroStatus(fallback) { currentRun.seenMacroExpansionsFallingBack = true } + case class Fallback(fallback: Tree) extends MacroStatus(fallback) { currentRun.reporting.seenMacroExpansionsFallingBack = true } case class Delayed(delayed: Tree) extends MacroStatus(delayed) case class Skipped(skipped: Tree) extends MacroStatus(skipped) case class Failure(failure: Tree) extends MacroStatus(failure) @@ -780,7 +792,7 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers { } } catch { case ex: Throwable => - popMacroContext() + if (openMacros.nonEmpty) popMacroContext() // weirdly we started popping on an empty stack when refactoring fatalWarnings logic val realex = ReflectionUtils.unwrapThrowable(ex) realex match { case ex: AbortMacroException => MacroGeneratedAbort(expandee, ex) diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index ba183fe3e66e..0aa62d771e71 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -212,7 +212,9 @@ trait MethodSynthesis { List(cd, mdef) case _ => // Shouldn't happen, but let's give ourselves a reasonable error when it does - abort("No synthetics for " + meth + ": synthetics contains " + context.unit.synthetics.keys.mkString(", ")) + context.error(cd.pos, s"Internal error: Symbol for synthetic factory method not found among ${context.unit.synthetics.keys.mkString(", ")}") + // Soldier on for the sake of the presentation compiler + List(cd) } case _ => stat :: Nil @@ -355,8 +357,9 @@ trait MethodSynthesis { def derivedSym: Symbol = { // Only methods will do! Don't want to pick up any stray // companion objects of the same name. - val result = enclClass.info decl name suchThat (x => x.isMethod && x.isSynthetic) - assert(result != NoSymbol, "not found: "+name+" in "+enclClass+" "+enclClass.info.decls) + val result = enclClass.info decl name filter (x => x.isMethod && x.isSynthetic) + if (result == NoSymbol || result.isOverloaded) + context.error(tree.pos, s"Internal error: Unable to find the synthetic factory method corresponding to implicit class $name in $enclClass / ${enclClass.info.decls}") result } def derivedTree: DefDef = diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 23dc57d5b925..711cfba24d80 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -171,7 +171,7 @@ trait Namers extends MethodSynthesis { val newFlags = (sym.flags & LOCKED) | flags sym.rawInfo match { case tr: TypeRef => - // !!! needed for: pos/t5954d; the uniques type cache will happilly serve up the same TypeRef + // !!! needed for: pos/t5954d; the uniques type cache will happily serve up the same TypeRef // over this mutated symbol, and we witness a stale cache for `parents`. 
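// What the "synthetic factory method" in the MethodSynthesis change above refers to, sketched at the
// source level (SIP-13 implicit classes): the class is split into a plain class plus an implicit
// conversion method of the same name, and derivedSym later has to locate that synthesized method again.
object IntOps {
  implicit class RichInt(val self: Int) { def squared: Int = self * self }
  // conceptually expanded by the compiler into:
  //   class RichInt(val self: Int) { def squared: Int = self * self }
  //   implicit def RichInt(self: Int): RichInt = new RichInt(self)
}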
tr.invalidateCaches() case _ => @@ -296,7 +296,7 @@ trait Namers extends MethodSynthesis { } tree.symbol match { case NoSymbol => try dispatch() catch typeErrorHandler(tree, this.context) - case sym => enterExistingSym(sym) + case sym => enterExistingSym(sym, tree) } } @@ -413,6 +413,7 @@ trait Namers extends MethodSynthesis { if (isRedefinition) { updatePosFlags(existing, tree.pos, tree.mods.flags) setPrivateWithin(tree, existing) + clearRenamedCaseAccessors(existing) existing } else assignAndEnterSymbol(tree) setFlag inConstructorFlag @@ -443,7 +444,7 @@ trait Namers extends MethodSynthesis { && clazz.exists ) if (fails) { - context.unit.error(tree.pos, ( + reporter.error(tree.pos, ( s"Companions '$clazz' and '$module' must be defined in same file:\n" + s" Found in ${clazz.sourceFile.canonicalPath} and ${module.sourceFile.canonicalPath}") ) @@ -583,7 +584,7 @@ trait Namers extends MethodSynthesis { // more than one hidden name, the second will not be warned. // So it is the position of the actual hidden name. // - // Note: java imports have precence over definitions in the same package + // Note: java imports have precedence over definitions in the same package // so don't warn for them. There is a corresponding special treatment // in the shadowing rules in typedIdent to (SI-7232). In any case, // we shouldn't be emitting warnings for .java source files. @@ -711,17 +712,14 @@ trait Namers extends MethodSynthesis { val m = ensureCompanionObject(tree, caseModuleDef) m.moduleClass.updateAttachment(new ClassForCaseCompanionAttachment(tree)) } - val hasDefault = impl.body exists { - case DefDef(_, nme.CONSTRUCTOR, _, vparamss, _, _) => mexists(vparamss)(_.mods.hasDefault) - case _ => false - } + val hasDefault = impl.body exists treeInfo.isConstructorWithDefault if (hasDefault) { val m = ensureCompanionObject(tree) m.updateAttachment(new ConstructorDefaultsAttachment(tree, null)) } val owner = tree.symbol.owner - if (settings.lint && owner.isPackageObjectClass && !mods.isImplicit) { - context.unit.warning(tree.pos, + if (settings.warnPackageObjectClasses && owner.isPackageObjectClass && !mods.isImplicit) { + reporter.warning(tree.pos, "it is not recommended to define classes/objects inside of package objects.\n" + "If possible, define " + tree.symbol + " in " + owner.skipPackageObject + " instead." ) @@ -733,13 +731,15 @@ trait Namers extends MethodSynthesis { log("enter implicit wrapper "+tree+", owner = "+owner) enterImplicitWrapper(tree) } - else context.unit.error(tree.pos, "implicit classes must accept exactly one primary constructor parameter") + else reporter.error(tree.pos, "implicit classes must accept exactly one primary constructor parameter") } validateCompanionDefs(tree) } // Hooks which are overridden in the presentation compiler - def enterExistingSym(sym: Symbol): Context = this.context + def enterExistingSym(sym: Symbol, tree: Tree): Context = { + this.context + } def enterIfNotThere(sym: Symbol) { } def enterSyntheticSym(tree: Tree): Symbol = { @@ -1043,10 +1043,10 @@ trait Namers extends MethodSynthesis { * so the resulting type is a valid external method type, it does not contain (references to) skolems. 
*/ def thisMethodType(restpe: Type) = { - val checkDependencies = new DependentTypeChecker(context)(this) - checkDependencies check vparamSymss - // DEPMETTODO: check not needed when they become on by default - checkDependencies(restpe) + if (vparamSymss.lengthCompare(0) > 0) { // OPT fast path for methods of 0-1 parameter lists + val checkDependencies = new DependentTypeChecker(context)(this) + checkDependencies check vparamSymss + } val makeMethodType = (vparams: List[Symbol], restpe: Type) => { // TODODEPMET: check that we actually don't need to do anything here @@ -1180,7 +1180,13 @@ trait Namers extends MethodSynthesis { } } - addDefaultGetters(meth, ddef, vparamss, tparams, overriddenSymbol(methResTp)) + val overridden = { + val isConstr = meth.isConstructor + if (isConstr || !methOwner.isClass) NoSymbol else overriddenSymbol(methResTp) + } + val hasDefaults = mexists(vparamss)(_.symbol.hasDefault) || mexists(overridden.paramss)(_.hasDefault) + if (hasDefaults) + addDefaultGetters(meth, ddef, vparamss, tparams, overridden) // fast track macros, i.e. macros defined inside the compiler, are hardcoded // hence we make use of that and let them have whatever right-hand side they need @@ -1222,7 +1228,7 @@ trait Namers extends MethodSynthesis { * typechecked, the corresponding param would not yet have the "defaultparam" * flag. */ - private def addDefaultGetters(meth: Symbol, ddef: DefDef, vparamss: List[List[ValDef]], tparams: List[TypeDef], overriddenSymbol: => Symbol) { + private def addDefaultGetters(meth: Symbol, ddef: DefDef, vparamss: List[List[ValDef]], tparams: List[TypeDef], overridden: Symbol) { val DefDef(_, _, rtparams0, rvparamss0, _, _) = resetAttrs(ddef.duplicate) // having defs here is important to make sure that there's no sneaky tree sharing // in methods with multiple default parameters @@ -1230,7 +1236,6 @@ trait Namers extends MethodSynthesis { def rvparamss = rvparamss0.map(_.map(_.duplicate)) val methOwner = meth.owner val isConstr = meth.isConstructor - val overridden = if (isConstr || !methOwner.isClass) NoSymbol else overriddenSymbol val overrides = overridden != NoSymbol && !overridden.isOverloaded // value parameters of the base class (whose defaults might be overridden) var baseParamss = (vparamss, overridden.tpe.paramss) match { @@ -1492,8 +1497,7 @@ trait Namers extends MethodSynthesis { case defn: MemberDef => val ainfos = defn.mods.annotations filterNot (_ eq null) map { ann => val ctx = typer.context - val annCtx = ctx.make(ann) - annCtx.setReportErrors() + val annCtx = ctx.makeNonSilent(ann) // need to be lazy, #1782. beforeTyper to allow inferView in annotation args, SI-5892. AnnotationInfo lazily { enteringTyper(newTyper(annCtx) typedAnnotation ann) @@ -1642,6 +1646,7 @@ trait Namers extends MethodSynthesis { def symbolAllowsDeferred = ( sym.isValueParameter || sym.isTypeParameterOrSkolem + || (sym.isAbstractType && sym.owner.isClass) || context.tree.isInstanceOf[ExistentialTypeTree] ) // Does the symbol owner require no undefined members? 
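// The default-getter encoding behind the addDefaultGetters change above, sketched at the source level:
// each defaulted parameter gets a synthetic getter named <method>$default$<position>, and call sites
// that omit the argument are completed with a call to it (which is why the overridden symbol's
// defaults have to be taken into account as well).
class Widget {
  def resize(width: Int, height: Int = 10): Unit = ()
  // conceptually produces:
  //   def resize$default$2: Int = 10
  // so that `w.resize(5)` is typed as `w.resize(5, w.resize$default$2)`
}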
@@ -1748,7 +1753,6 @@ trait Namers extends MethodSynthesis { for (p <- vps) this(p.info) // can only refer to symbols in earlier parameter sections - // (if the extension is enabled) okParams ++= vps } } diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index dceb0a47d8b6..50f658f68d68 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -174,8 +174,8 @@ trait NamesDefaults { self: Analyzer => // assigning the correct method symbol, typedSelect will just assign the type. the reason // to still call 'typed' is to correctly infer singleton types, SI-5259. val selectPos = - if(qual.pos.isRange && baseFun.pos.isRange) qual.pos.union(baseFun.pos).withStart(Math.min(qual.pos.end, baseFun.pos.end)) - else baseFun.pos + if(qual.pos.isRange && baseFun1.pos.isRange) qual.pos.union(baseFun1.pos).withStart(Math.min(qual.pos.end, baseFun1.pos.end)) + else baseFun1.pos val f = blockTyper.typedOperator(Select(newQual, selected).setSymbol(baseFun1.symbol).setPos(selectPos)) if (funTargs.isEmpty) f else TypeApply(f, funTargs).setType(baseFun.tpe) @@ -379,18 +379,37 @@ trait NamesDefaults { self: Analyzer => def makeNamedTypes(syms: List[Symbol]) = syms map (sym => NamedType(sym.name, sym.tpe)) - def missingParams[T](args: List[T], params: List[Symbol], argName: T => Option[Name] = nameOfNamedArg _): (List[Symbol], Boolean) = { - val namedArgs = args.dropWhile(arg => { - val n = argName(arg) - n.isEmpty || params.forall(p => p.name != n.get) - }) - val namedParams = params.drop(args.length - namedArgs.length) - // missing: keep those with a name which doesn't exist in namedArgs - val missingParams = namedParams.filter(p => namedArgs.forall(arg => { + /** + * Returns the parameter symbols of an invocation expression that are not defined by the list + * of arguments. + * + * @param args The list of arguments + * @param params The list of parameter symbols of the invoked method + * @param argName A function that extracts the name of an argument expression, if it is a named argument. + */ + def missingParams[T](args: List[T], params: List[Symbol], argName: T => Option[Name]): (List[Symbol], Boolean) = { + // The argument list contains first a mix of positional args and named args that are on the + // right parameter position, and then a number or named args on different positions. 
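// A user-level example (hypothetical names) of the argument shapes missingParams, documented above,
// has to classify: positional arguments first (plus named arguments that happen to sit on their own
// position), then named arguments on changed positions; the parameters left over are the "missing" ones.
object NamedArgsExample {
  def mkGreeting(greeting: String, name: String = "world", punct: String = "!"): String =
    greeting + ", " + name + punct

  // `punct` is named and sits on a changed position; `name` is the missing parameter, and since it
  // has a default the call is completed from its default getter.
  val s = mkGreeting("hello", punct = ".")   // "hello, world."
}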
+ + // collect all named arguments whose position does not match the parameter they define + val namedArgsOnChangedPosition = args.zip(params) dropWhile { + case (arg, param) => + val n = argName(arg) + // drop the argument if + // - it's not named, or + // - it's named, but defines the parameter on its current position, or + // - it's named, but none of the parameter names matches (treated as a positional argument, an assignment expression) + n.isEmpty || n.get == param.name || params.forall(_.name != n.get) + } map (_._1) + + val paramsWithoutPositionalArg = params.drop(args.length - namedArgsOnChangedPosition.length) + + // missing parameters: those with a name which is not specified in one of the namedArgsOnChangedPosition + val missingParams = paramsWithoutPositionalArg.filter(p => namedArgsOnChangedPosition.forall { arg => val n = argName(arg) n.isEmpty || n.get != p.name - })) - val allPositional = missingParams.length == namedParams.length + }) + val allPositional = missingParams.length == paramsWithoutPositionalArg.length (missingParams, allPositional) } @@ -407,7 +426,7 @@ trait NamesDefaults { self: Analyzer => previousArgss: List[List[Tree]], params: List[Symbol], pos: scala.reflect.internal.util.Position, context: Context): (List[Tree], List[Symbol]) = { if (givenArgs.length < params.length) { - val (missing, positional) = missingParams(givenArgs, params) + val (missing, positional) = missingParams(givenArgs, params, nameOfNamedArg) if (missing forall (_.hasDefault)) { val defaultArgs = missing flatMap (p => { val defGetter = defaultGetter(p, context) @@ -536,8 +555,8 @@ trait NamesDefaults { self: Analyzer => def matchesName(param: Symbol) = !param.isSynthetic && ( (param.name == name) || (param.deprecatedParamName match { case Some(`name`) => - context0.unit.deprecationWarning(arg.pos, - "the parameter name "+ name +" has been deprecated. Use "+ param.name +" instead.") + context0.deprecationWarning(arg.pos, param, + s"the parameter name $name has been deprecated. 
Use ${param.name} instead.") true case _ => false }) diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala index cf3f265f0cfe..fa4a764f1b12 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala @@ -261,7 +261,7 @@ trait PatternTypers { def doTypedUnapply(tree: Tree, fun0: Tree, fun: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = { def duplErrTree = setError(treeCopy.Apply(tree, fun0, args)) - def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree } + def duplErrorTree(err: AbsTypeError) = { context.issue(err); duplErrTree } if (args.length > MaxTupleArity) return duplErrorTree(TooManyArgsPatternError(fun)) @@ -309,7 +309,7 @@ trait PatternTypers { // the union of the expected type and the inferred type of the argument to unapply val glbType = glb(ensureFullyDefined(pt) :: unapplyArg.tpe_* :: Nil) val wrapInTypeTest = canRemedy && !(fun1.symbol.owner isNonBottomSubClass ClassTagClass) - val formals = patmat.alignPatterns(fun1, args).unexpandedFormals + val formals = patmat.alignPatterns(context.asInstanceOf[analyzer.Context], fun1, args).unexpandedFormals val args1 = typedArgsForFormals(args, formals, mode) val result = UnApply(fun1, args1) setPos tree.pos setType glbType @@ -336,7 +336,7 @@ trait PatternTypers { val app = atPos(uncheckedPattern.pos)(Apply(classTagExtractor, args)) // must call doTypedUnapply directly, as otherwise we get undesirable rewrites // and re-typechecks of the target of the unapply call in PATTERNmode, - // this breaks down when the classTagExtractor (which defineds the unapply member) is not a simple reference to an object, + // this breaks down when the classTagExtractor (which defines the unapply member) is not a simple reference to an object, // but an arbitrary tree as is the case here val res = doTypedUnapply(app, classTagExtractor, classTagExtractor, args, PATTERNmode, pt) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index b166bf988d4b..d2931ff9e1aa 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -132,13 +132,22 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans defaultMethodNames.toList.distinct foreach { name => val methods = clazz.info.findMember(name, 0L, requiredFlags = METHOD, stableOnly = false).alternatives - val haveDefaults = methods filter (sym => mexists(sym.info.paramss)(_.hasDefault) && !nme.isProtectedAccessorName(sym.name)) + def hasDefaultParam(tpe: Type): Boolean = tpe match { + case MethodType(params, restpe) => (params exists (_.hasDefault)) || hasDefaultParam(restpe) + case _ => false + } + val haveDefaults = methods filter ( + if (settings.isScala211) + (sym => mexists(sym.info.paramss)(_.hasDefault) && !nme.isProtectedAccessorName(sym.name)) + else + (sym => hasDefaultParam(sym.info) && !nme.isProtectedAccessorName(sym.name)) + ) if (haveDefaults.lengthCompare(1) > 0) { val owners = haveDefaults map (_.owner) // constructors of different classes are allowed to have defaults if (haveDefaults.exists(x => !x.isConstructor) || owners.distinct.size < haveDefaults.size) { - unit.error(clazz.pos, + reporter.error(clazz.pos, "in "+ clazz + ", multiple overloaded alternatives of "+ haveDefaults.head + " define default arguments" + ( @@ -153,17 +162,17 @@ 
abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans // Check for doomed attempt to overload applyDynamic if (clazz isSubClass DynamicClass) { for ((_, m1 :: m2 :: _) <- (clazz.info member nme.applyDynamic).alternatives groupBy (_.typeParams.length)) { - unit.error(m1.pos, "implementation restriction: applyDynamic cannot be overloaded except by methods with different numbers of type parameters, e.g. applyDynamic[T1](method: String)(arg: T1) and applyDynamic[T1, T2](method: String)(arg1: T1, arg2: T2)") + reporter.error(m1.pos, "implementation restriction: applyDynamic cannot be overloaded except by methods with different numbers of type parameters, e.g. applyDynamic[T1](method: String)(arg: T1) and applyDynamic[T1, T2](method: String)(arg1: T1, arg2: T2)") } } // This has become noisy with implicit classes. - if (settings.lint && settings.developer) { + if (settings.warnPolyImplicitOverload && settings.developer) { clazz.info.decls filter (x => x.isImplicit && x.typeParams.nonEmpty) foreach { sym => // implicit classes leave both a module symbol and a method symbol as residue val alts = clazz.info.decl(sym.name).alternatives filterNot (_.isModule) if (alts.size > 1) - alts foreach (x => unit.warning(x.pos, "parameterized overloaded implicit methods are not visible as view bounds")) + alts foreach (x => reporter.warning(x.pos, "parameterized overloaded implicit methods are not visible as view bounds")) } } } @@ -272,10 +281,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans mixinOverrideErrors.toList match { case List() => case List(MixinOverrideError(_, msg)) => - unit.error(clazz.pos, msg) + reporter.error(clazz.pos, msg) case MixinOverrideError(member, msg) :: others => val others1 = others.map(_.member.name.decode).filter(member.name.decode != _).distinct - unit.error( + reporter.error( clazz.pos, msg+(if (others1.isEmpty) "" else ";\n other members with override errors are: "+(others1 mkString ", "))) @@ -338,7 +347,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans ) } def emitOverrideError(fullmsg: String) { - if (member.owner == clazz) unit.error(member.pos, fullmsg) + if (member.owner == clazz) reporter.error(member.pos, fullmsg) else mixinOverrideErrors += new MixinOverrideError(member, fullmsg) } @@ -455,7 +464,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans checkOverrideDeprecated() if (settings.warnNullaryOverride) { if (other.paramss.isEmpty && !member.paramss.isEmpty) { - unit.warning(member.pos, "non-nullary method overrides nullary method") + reporter.warning(member.pos, "non-nullary method overrides nullary method") } } } @@ -487,7 +496,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans typer.infer.checkKindBounds(high :: Nil, lowType :: Nil, rootType, low.owner) match { // (1.7.2) case Nil => case kindErrors => - unit.error(member.pos, + reporter.error(member.pos, "The kind of "+member.keyString+" "+member.varianceString + member.nameString+ " does not conform to the expected kind of " + other.defString + other.locationString + "." 
+ kindErrors.toList.mkString("\n", ", ", "")) @@ -498,7 +507,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans typer.infer.checkKindBounds(low :: Nil, lowType.normalize :: Nil, rootType, low.owner) match { case Nil => case kindErrors => - unit.error(member.pos, + reporter.error(member.pos, "The kind of the right-hand side "+lowType.normalize+" of "+low.keyString+" "+ low.varianceString + low.nameString+ " does not conform to its expected kind."+ kindErrors.toList.mkString("\n", ", ", "")) @@ -534,10 +543,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans } def checkOverrideDeprecated() { - if (other.hasDeprecatedOverridingAnnotation) { + if (other.hasDeprecatedOverridingAnnotation && !member.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation)) { val suffix = other.deprecatedOverridingMessage map (": " + _) getOrElse "" val msg = s"overriding ${other.fullLocationString} is deprecated$suffix" - unit.deprecationWarning(member.pos, msg) + currentRun.reporting.deprecationWarning(member.pos, other, msg) } } } @@ -745,7 +754,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans checkNoAbstractDecls(clazz) if (abstractErrors.nonEmpty) - unit.error(clazz.pos, abstractErrorMessage) + reporter.error(clazz.pos, abstractErrorMessage) } else if (clazz.isTrait && !(clazz isSubClass AnyValClass)) { // For non-AnyVal classes, prevent abstract methods in interfaces that override @@ -756,7 +765,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans // override a concrete method in Object. The jvm, however, does not. val overridden = decl.matchingSymbol(ObjectClass, ObjectTpe) if (overridden.isFinal) - unit.error(decl.pos, "trait cannot redefine final method from class AnyRef") + reporter.error(decl.pos, "trait cannot redefine final method from class AnyRef") } } @@ -809,7 +818,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans // for (bc <- clazz.info.baseClasses.tail) Console.println("" + bc + " has " + bc.info.decl(member.name) + ":" + bc.info.decl(member.name).tpe);//DEBUG val nonMatching: List[Symbol] = clazz.info.member(member.name).alternatives.filterNot(_.owner == clazz).filterNot(_.isFinal) - def issueError(suffix: String) = unit.error(member.pos, member.toString() + " overrides nothing" + suffix) + def issueError(suffix: String) = reporter.error(member.pos, member.toString() + " overrides nothing" + suffix) nonMatching match { case Nil => issueError("") @@ -862,7 +871,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans case _ :: Nil => ;// OK case tp1 :: tp2 :: _ => - unit.error(clazz.pos, "illegal inheritance;\n " + clazz + + reporter.error(clazz.pos, "illegal inheritance;\n " + clazz + " inherits different type instances of " + baseClass + ":\n" + tp1 + " and " + tp2) explainTypes(tp1, tp2) @@ -879,7 +888,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans case _ => "type "+tp } override def issueVarianceError(base: Symbol, sym: Symbol, required: Variance) { - currentRun.currentUnit.error(base.pos, + reporter.error(base.pos, s"${sym.variance} $sym occurs in $required position in ${tpString(base.info)} of $base") } } @@ -945,9 +954,9 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans def apply(tp: Type) = mapOver(tp).normalize } - def checkImplicitViewOptionApply(pos: Position, fn: Tree, args: List[Tree]): Unit = if 
(settings.lint) (fn, args) match { + def checkImplicitViewOptionApply(pos: Position, fn: Tree, args: List[Tree]): Unit = if (settings.warnOptionImplicit) (fn, args) match { case (tap@TypeApply(fun, targs), List(view: ApplyImplicitView)) if fun.symbol == currentRun.runDefinitions.Option_apply => - unit.warning(pos, s"Suspicious application of an implicit view (${view.fun}) in the argument to Option.apply.") // SI-6567 + reporter.warning(pos, s"Suspicious application of an implicit view (${view.fun}) in the argument to Option.apply.") // SI-6567 case _ => } @@ -1022,7 +1031,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans def nonSensibleWarning(what: String, alwaysEqual: Boolean) = { val msg = alwaysEqual == (name == nme.EQ || name == nme.eq) - unit.warning(pos, s"comparing $what using `${name.decode}' will always yield $msg") + reporter.warning(pos, s"comparing $what using `${name.decode}' will always yield $msg") isNonSensible = true } def nonSensible(pre: String, alwaysEqual: Boolean) = @@ -1037,7 +1046,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans } def unrelatedTypes() = if (!isNonSensible) { val weaselWord = if (isEitherValueClass) "" else " most likely" - unit.warning(pos, s"$typesString are unrelated: they will$weaselWord $unrelatedMsg") + reporter.warning(pos, s"$typesString are unrelated: they will$weaselWord $unrelatedMsg") } if (nullCount == 2) // null == null @@ -1086,7 +1095,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans // better to have lubbed and lost def warnIfLubless(): Unit = { val common = global.lub(List(actual.tpe, receiver.tpe)) - if (ObjectTpe <:< common) + if (ObjectTpe <:< common && !(ObjectTpe <:< actual.tpe && ObjectTpe <:< receiver.tpe)) unrelatedTypes() } // warn if actual has a case parent that is not same as receiver's; @@ -1132,7 +1141,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans && callsSelf ) if (trivialInifiniteLoop) - unit.warning(valOrDef.rhs.pos, s"${valOrDef.symbol.fullLocationString} does nothing other than call itself recursively") + reporter.warning(valOrDef.rhs.pos, s"${valOrDef.symbol.fullLocationString} does nothing other than call itself recursively") } // Transformation ------------------------------------------------------------ @@ -1222,7 +1231,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans finally if (currentLevel.maxindex > 0) { // An implementation restriction to avoid VerifyErrors and lazyvals mishaps; see SI-4717 debuglog("refsym = " + currentLevel.refsym) - unit.error(currentLevel.refpos, "forward reference not allowed from self constructor invocation") + reporter.error(currentLevel.refpos, "forward reference not allowed from self constructor invocation") } case ModuleDef(_, _, _) => eliminateModuleDefs(tree) case ValDef(_, _, _, _) => @@ -1232,7 +1241,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans val lazySym = tree.symbol.lazyAccessorOrSelf if (lazySym.isLocalToBlock && index <= currentLevel.maxindex) { debuglog("refsym = " + currentLevel.refsym) - unit.error(currentLevel.refpos, "forward reference extends over definition of " + lazySym) + reporter.error(currentLevel.refpos, "forward reference extends over definition of " + lazySym) } tree1 :: Nil } @@ -1246,7 +1255,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans try typer.infer.checkBounds(tree0, pre, owner, 
tparams, argtps, "") catch { case ex: TypeError => - unit.error(tree0.pos, ex.getMessage()) + reporter.error(tree0.pos, ex.getMessage()) if (settings.explaintypes) { val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, argtps).bounds) (argtps, bounds).zipped map ((targ, bound) => explainTypes(bound.lo, targ)) @@ -1278,11 +1287,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans private def checkUndesiredProperties(sym: Symbol, pos: Position) { // If symbol is deprecated, and the point of reference is not enclosed // in either a deprecated member or a scala bridge method, issue a warning. - if (sym.isDeprecated && !currentOwner.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation)) { - unit.deprecationWarning(pos, "%s%s is deprecated%s".format( - sym, sym.locationString, sym.deprecationMessage map (": " + _) getOrElse "") - ) - } + // TODO: x.hasBridgeAnnotation doesn't seem to be needed here... + if (sym.isDeprecated && !currentOwner.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation)) + currentRun.reporting.deprecationWarning(pos, sym) + // Similar to deprecation: check if the symbol is marked with @migration // indicating it has changed semantics between versions. if (sym.hasMigrationAnnotation && settings.Xmigration.value != NoScalaVersion) { @@ -1290,20 +1298,20 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans settings.Xmigration.value < ScalaVersion(sym.migrationVersion.get) catch { case e : NumberFormatException => - unit.warning(pos, s"${sym.fullLocationString} has an unparsable version number: ${e.getMessage()}") + reporter.warning(pos, s"${sym.fullLocationString} has an unparsable version number: ${e.getMessage()}") // if we can't parse the format on the migration annotation just conservatively assume it changed true } if (changed) - unit.warning(pos, s"${sym.fullLocationString} has changed semantics in version ${sym.migrationVersion.get}:\n${sym.migrationMessage.get}") + reporter.warning(pos, s"${sym.fullLocationString} has changed semantics in version ${sym.migrationVersion.get}:\n${sym.migrationMessage.get}") } // See an explanation of compileTimeOnly in its scaladoc at scala.annotation.compileTimeOnly. - if (sym.isCompileTimeOnly) { + if (sym.isCompileTimeOnly && !currentOwner.ownerChain.exists(x => x.isCompileTimeOnly)) { def defaultMsg = sm"""Reference to ${sym.fullLocationString} should not have survived past type checking, |it should have been processed and eliminated during expansion of an enclosing macro.""" // The getOrElse part should never happen, it's just here as a backstop. 
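// Usage sketch (hypothetical example) for the @compileTimeOnly check above: any reference to the
// annotated member that is still around after type checking -- i.e. was not rewritten away by a
// macro -- is reported with the annotation's message; after this change, references made from inside
// other compileTimeOnly members are exempt.
import scala.annotation.compileTimeOnly

object Placeholders {
  @compileTimeOnly("`placeholder` may only be used inside the enclosing macro invocation")
  def placeholder: Nothing = sys.error("unreachable: eliminated during macro expansion")
}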
- unit.error(pos, sym.compileTimeOnlyMessage getOrElse defaultMsg) + reporter.error(pos, sym.compileTimeOnlyMessage getOrElse defaultMsg) } } @@ -1313,8 +1321,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans && !qual.tpe.isInstanceOf[ThisType] && sym.accessedOrSelf.isVal ) - if (settings.lint.value && isLikelyUninitialized) - unit.warning(pos, s"Selecting ${sym} from ${sym.owner}, which extends scala.DelayedInit, is likely to yield an uninitialized value") + if (settings.warnDelayedInit && isLikelyUninitialized) + reporter.warning(pos, s"Selecting ${sym} from ${sym.owner}, which extends scala.DelayedInit, is likely to yield an uninitialized value") } private def lessAccessible(otherSym: Symbol, memberSym: Symbol): Boolean = ( @@ -1346,7 +1354,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans if (memberSym.isDeferred) "may be unable to provide a concrete implementation of" else "may be unable to override" - unit.warning(memberSym.pos, + reporter.warning(memberSym.pos, "%s%s references %s %s.".format( memberSym.fullLocationString, comparison, accessFlagsToString(otherSym), otherSym @@ -1380,8 +1388,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans private def checkByNameRightAssociativeDef(tree: DefDef) { tree match { case DefDef(_, name, _, params :: _, _, _) => - if (settings.lint && !treeInfo.isLeftAssoc(name.decodedName) && params.exists(p => isByName(p.symbol))) - unit.warning(tree.pos, + if (settings.warnByNameRightAssociative && !treeInfo.isLeftAssoc(name.decodedName) && params.exists(p => isByName(p.symbol))) + reporter.warning(tree.pos, "by-name parameters will be evaluated eagerly when called as a right-associative infix operator. For more details, see SI-1980.") case _ => } @@ -1396,12 +1404,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans if (symbol.isDeprecated) { val concrOvers = symbol.allOverriddenSymbols.filter(sym => - !sym.isDeprecated && !sym.isDeferred) + !sym.isDeprecated && !sym.isDeferred && !sym.hasDeprecatedOverridingAnnotation && !sym.enclClass.hasDeprecatedInheritanceAnnotation) if(!concrOvers.isEmpty) - unit.deprecationWarning( + currentRun.reporting.deprecationWarning( tree.pos, - symbol.toString + " overrides concrete, non-deprecated symbol(s):" + - concrOvers.map(_.name.decode).mkString(" ", ", ", "")) + symbol, + s"${symbol.toString} overrides concrete, non-deprecated symbol(s): ${concrOvers.map(_.name.decode).mkString(", ")}") } } private def isRepeatedParamArg(tree: Tree) = currentApplication match { @@ -1462,7 +1470,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans applyChecks(sym.annotations) // validate implicitNotFoundMessage analyzer.ImplicitNotFoundMsg.check(sym) foreach { warn => - unit.warning(tree.pos, f"Invalid implicitNotFound message for ${sym}%s${sym.locationString}%s:%n$warn") + reporter.warning(tree.pos, f"Invalid implicitNotFound message for ${sym}%s${sym.locationString}%s:%n$warn") } case tpt@TypeTree() => @@ -1587,7 +1595,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans || sym.allOverriddenSymbols.exists(over => !(over.tpe.resultType =:= sym.tpe.resultType)) ) if (!isOk) - unit.warning(sym.pos, s"side-effecting nullary methods are discouraged: suggest defining as `def ${sym.name.decode}()` instead") + reporter.warning(sym.pos, s"side-effecting nullary methods are discouraged: suggest defining as `def ${sym.name.decode}()` 
instead") case _ => () } @@ -1595,15 +1603,15 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans private def checkAnyValSubclass(clazz: Symbol) = { if (clazz.isDerivedValueClass) { if (clazz.isTrait) - unit.error(clazz.pos, "Only classes (not traits) are allowed to extend AnyVal") + reporter.error(clazz.pos, "Only classes (not traits) are allowed to extend AnyVal") else if (clazz.hasAbstractFlag) - unit.error(clazz.pos, "`abstract' modifier cannot be used with value classes") + reporter.error(clazz.pos, "`abstract' modifier cannot be used with value classes") } } private def checkUnexpandedMacro(t: Tree) = if (!t.isDef && t.hasSymbolField && t.symbol.isTermMacro) - unit.error(t.pos, "macro has not been expanded") + reporter.error(t.pos, "macro has not been expanded") override def transform(tree: Tree): Tree = { val savedLocalTyper = localTyper @@ -1626,7 +1634,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans if (settings.warnNullaryUnit) checkNullaryMethodReturnType(sym) if (settings.warnInaccessible) { - if (!sym.isConstructor && !sym.isEffectivelyFinal && !sym.isSynthetic) + if (!sym.isConstructor && !sym.isEffectivelyFinalOrNotOverridden && !sym.isSynthetic) checkAccessibilityOfReferencedTypes(tree) } tree match { @@ -1698,7 +1706,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans tree case treeInfo.WildcardStarArg(_) if !isRepeatedParamArg(tree) => - unit.error(tree.pos, "no `: _*' annotation allowed here\n"+ + reporter.error(tree.pos, "no `: _*' annotation allowed here\n"+ "(such annotations are only allowed in arguments to *-parameters)") tree @@ -1771,7 +1779,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans } catch { case ex: TypeError => if (settings.debug) ex.printStackTrace() - unit.error(tree.pos, ex.getMessage()) + reporter.error(tree.pos, ex.getMessage()) tree } finally { localTyper = savedLocalTyper diff --git a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala index 57f27a05fd81..ea44b9dc39c5 100644 --- a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala +++ b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala @@ -61,7 +61,7 @@ trait StdAttachments { val metadata = MacroExpansionAttachment(expandee, expanded) expandee updateAttachment metadata expanded match { - case expanded: Tree => expanded updateAttachment metadata + case expanded: Tree if !expanded.isEmpty => expanded updateAttachment metadata case _ => // do nothing } } diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala index 87da56514207..db81eecdf5fc 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala @@ -12,21 +12,47 @@ import scala.collection.{ mutable, immutable } import mutable.ListBuffer import symtab.Flags._ -/** This phase adds super accessors for all super calls that either +/** This phase performs the following functions, each of which could be split out in a + * mini-phase: + * + * (1) Adds super accessors for all super calls that either * appear in a trait or have as a target a member of some outer class. - * It also replaces references to parameter accessors with aliases - * by super references to these aliases. 
The phase also checks that - * symbols accessed from super are not abstract, or are overridden by - * an abstract override. Finally, the phase also mangles the names - * of class-members which are private up to an enclosing non-package - * class, in order to avoid overriding conflicts. * - * This phase also sets SPECIALIZED flag on type parameters with + * (2) Converts references to parameter fields that have the same name as a corresponding + * public parameter field in a superclass to a reference to the superclass + * field (corresponding = super class field is initialized with subclass field). + * This info is pre-computed by the `alias` field in Typer. `dotc` follows a different + * route; it computes everything in SuperAccessors and changes the subclass field + * to a forwarder instead of manipulating references. This is more modular. + * + * (3) Adds protected accessors if the access to the protected member happens + * in a class which is not a subclass of the member's owner. + * + * (4) Mangles the names of class-members which are + * private up to an enclosing non-package class, in order to avoid overriding conflicts. + * This is a dubious, and it would be better to deprecate class-qualified privates. + * + * (5) This phase also sets SPECIALIZED flag on type parameters with * `@specialized` annotation. We put this logic here because the * flag must be set before pickling. * - * @author Martin Odersky - * @version 1.0 + * It also checks that: + * + * (1) Symbols accessed from super are not abstract, or are overridden by + * an abstract override. + * + * (2) If a symbol accessed accessed from super is defined in a real class (not a trait), + * there are no abstract members which override this member in Java's rules + * (see SI-4989; such an access would lead to illegal bytecode) + * + * (3) Super calls do not go to some synthetic members of Any (see isDisallowed) + * + * (4) Super calls do not go to synthetic field accessors + * + * (5) A class and its companion object do not both define a class or module with the + * same name. 
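// Surface-level sketch of point (1) above: a super call inside a trait cannot be bound statically,
// so this phase adds a super accessor (encoded as super$m) to the trait and rewrites `super.m` to go
// through it; the accessor is tied to the concrete superclass only when the trait is mixed in.
class Greeter { def m: String = "hello" }
trait Excited extends Greeter { override def m: String = super.m + "!" }
object Example extends Excited   // Example.m == "hello!": Excited's super$m resolves to Greeter.m here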
+ * + * TODO: Rename phase to "Accessors" because it handles more than just super accessors */ abstract class SuperAccessors extends transform.Transform with transform.TypingTransformers { import global._ @@ -56,11 +82,11 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT val buf = accDefs.getOrElse(clazz, sys.error("no acc def buf for "+clazz)) buf += typers(clazz) typed tree } - private def ensureAccessor(sel: Select) = { + private def ensureAccessor(sel: Select, mixName: TermName = nme.EMPTY) = { val Select(qual, name) = sel val sym = sel.symbol val clazz = qual.symbol - val supername = nme.superName(name) + val supername = nme.superName(name, mixName) val superAcc = clazz.info.decl(supername).suchThat(_.alias == sym) orElse { debuglog(s"add super acc ${sym.fullLocationString} to $clazz") val acc = clazz.newMethod(supername, sel.pos, SUPERACCESSOR | PRIVATE | ARTIFACT) setAlias sym @@ -98,7 +124,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT if (other == NoSymbol) other = linked.info.decl(sym.name.toTermName).filter(_.isModule) if (other != NoSymbol) - unit.error(sym.pos, "name clash: "+sym.owner+" defines "+sym+ + reporter.error(sym.pos, "name clash: "+sym.owner+" defines "+sym+ "\nand its companion "+sym.owner.companionModule+" also defines "+ other) } @@ -113,19 +139,31 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT val member = sym.overridingSymbol(clazz) if (mix != tpnme.EMPTY || member == NoSymbol || !(member.isAbstractOverride && member.isIncompleteIn(clazz))) - unit.error(sel.pos, ""+sym.fullLocationString+" is accessed from super. It may not be abstract "+ + reporter.error(sel.pos, ""+sym.fullLocationString+" is accessed from super. It may not be abstract "+ "unless it is overridden by a member declared `abstract' and `override'") } else if (mix == tpnme.EMPTY && !sym.owner.isTrait){ // SI-4989 Check if an intermediate class between `clazz` and `sym.owner` redeclares the method as abstract. val intermediateClasses = clazz.info.baseClasses.tail.takeWhile(_ != sym.owner) intermediateClasses.map(sym.overridingSymbol).find(s => s.isDeferred && !s.isAbstractOverride && !s.owner.isTrait).foreach { absSym => - unit.error(sel.pos, s"${sym.fullLocationString} cannot be directly accessed from ${clazz} because ${absSym.owner} redeclares it as abstract") + reporter.error(sel.pos, s"${sym.fullLocationString} cannot be directly accessed from ${clazz} because ${absSym.owner} redeclares it as abstract") } } - if (name.isTermName && mix == tpnme.EMPTY && (clazz.isTrait || clazz != currentClass || !validCurrentOwner)) - ensureAccessor(sel) + def mixIsTrait = sup.tpe match { + case SuperType(thisTpe, superTpe) => superTpe.typeSymbol.isTrait + } + + val needAccessor = name.isTermName && { + mix.isEmpty && (clazz.isTrait || clazz != currentClass || !validCurrentOwner) || + // SI-8803. If we access super[A] from an inner class (!= currentClass) or closure (validCurrentOwner), + // where A is the superclass we need an accessor. If A is a parent trait we don't: in this case mixin + // will re-route the super call directly to the impl class (it's statically known). 
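A hedged sketch of the SI-8803 shape that the `needAccessor` condition above now handles (hypothetical user code): a class-qualified super call issued from an inner class needs a mix-named super accessor, while a trait-qualified one is re-routed statically by mixin.

    class B {
      def foo: String = "B"
    }

    trait T extends B {
      override def foo: String = "T"
    }

    class C extends B with T {
      override def foo: String = "C"

      // The qualified super calls happen in an inner class, so clazz != currentClass.
      class Inner {
        def viaClassParent: String = C.super[B].foo  // needs a super accessor named after the mix
        def viaTraitParent: String = C.super[T].foo  // trait parent: handled statically by mixin
      }
    }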
+ !mix.isEmpty && (clazz != currentClass || !validCurrentOwner) && !mixIsTrait + } + + if (needAccessor) + ensureAccessor(sel, mix.toTermName) else sel } @@ -221,12 +259,12 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT // also exists in a superclass, because they may be surprised // to find out that a constructor parameter will shadow a // field. See SI-4762. - if (settings.lint) { + if (settings.warnPrivateShadow) { if (sym.isPrivateLocal && sym.paramss.isEmpty) { qual.symbol.ancestors foreach { parent => parent.info.decls filterNot (x => x.isPrivate || x.isLocalToThis) foreach { m2 => if (sym.name == m2.name && m2.isGetter && m2.accessed.isMutable) { - unit.warning(sel.pos, + reporter.warning(sel.pos, sym.accessString + " " + sym.fullLocationString + " shadows mutable " + m2.name + " inherited from " + m2.owner + ". Changes to " + m2.name + " will not be visible within " + sym.owner + " - you may want to give them distinct names.") @@ -284,9 +322,9 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT case Super(_, mix) => if (sym.isValue && !sym.isMethod || sym.hasAccessorFlag) { if (!settings.overrideVars) - unit.error(tree.pos, "super may be not be used on " + sym.accessedOrSelf) + reporter.error(tree.pos, "super may be not be used on " + sym.accessedOrSelf) } else if (isDisallowed(sym)) { - unit.error(tree.pos, "super not allowed here: use this." + name.decode + " instead") + reporter.error(tree.pos, "super not allowed here: use this." + name.decode + " instead") } transformSuperSelect(sel) @@ -346,12 +384,14 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT * performance hit for the compiler as a whole. */ override def atOwner[A](tree: Tree, owner: Symbol)(trans: => A): A = { + val savedValid = validCurrentOwner if (owner.isClass) validCurrentOwner = true val savedLocalTyper = localTyper localTyper = localTyper.atOwner(tree, if (owner.isModule) owner.moduleClass else owner) typers = typers updated (owner, localTyper) val result = super.atOwner(tree, owner)(trans) localTyper = savedLocalTyper + validCurrentOwner = savedValid typers -= owner result } diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala index 9516f9413590..8f13507fa9bc 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala @@ -54,6 +54,9 @@ trait SyntheticMethods extends ast.TreeDSL { /** Does not force the info of `caseclazz` */ final def caseAccessorName(caseclazz: Symbol, paramName: TermName) = (renamedCaseAccessors get caseclazz).fold(paramName)(_(paramName)) + final def clearRenamedCaseAccessors(caseclazz: Symbol): Unit = { + renamedCaseAccessors -= caseclazz + } /** Add the synthetic methods to case classes. */ @@ -84,24 +87,6 @@ trait SyntheticMethods extends ast.TreeDSL { def accessors = clazz.caseFieldAccessors val arity = accessors.size - // If this is ProductN[T1, T2, ...], accessorLub is the lub of T1, T2, ..., . - // !!! Hidden behind -Xexperimental due to bummer type inference bugs. - // Refining from Iterator[Any] leads to types like - // - // Option[Int] { def productIterator: Iterator[String] } - // - // appearing legitimately, but this breaks invariant places - // like Tags and Arrays which are not robust and infer things - // which they shouldn't. 
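For the constructor-parameter shadowing warning above (now behind its own lint flag rather than bare -Xlint), a hypothetical example of the SI-4762 pattern it reports: a plain constructor parameter that shadows a mutable field inherited from the superclass, so later mutations of the field are invisible through the parameter.

    class Counter(var count: Int) {
      def increment(): Unit = count += 1
    }

    // `count` below is a plain constructor parameter (private[this], no accessor),
    // so it shadows the inherited mutable `count`: increment() updates the parent
    // field while `current` keeps returning the value captured at construction time.
    class LoggingCounter(count: Int) extends Counter(count) {
      def current: Int = count   // warned: shadows mutable count inherited from Counter
    }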
- val accessorLub = ( - if (settings.Xexperimental) { - global.weakLub(accessors map (_.tpe.finalResultType)) match { - case RefinedType(parents, decls) if !decls.isEmpty => intersectionType(parents) - case tp => tp - } - } - else AnyTpe - ) def forwardToRuntime(method: Symbol): Tree = forwardMethod(method, getMember(ScalaRunTimeModule, (method.name prepend "_")))(mkThis :: _) @@ -122,8 +107,8 @@ trait SyntheticMethods extends ast.TreeDSL { } } def productIteratorMethod = { - createMethod(nme.productIterator, iteratorOfType(accessorLub))(_ => - gen.mkMethodCall(ScalaRunTimeModule, nme.typedProductIterator, List(accessorLub), List(mkThis)) + createMethod(nme.productIterator, iteratorOfType(AnyTpe))(_ => + gen.mkMethodCall(ScalaRunTimeModule, nme.typedProductIterator, List(AnyTpe), List(mkThis)) ) } @@ -168,7 +153,7 @@ trait SyntheticMethods extends ast.TreeDSL { def thatCast(eqmeth: Symbol): Tree = gen.mkCast(Ident(eqmeth.firstParam), clazz.tpe) - /* The equality method core for case classes and inline clases. + /* The equality method core for case classes and inline classes. * 1+ args: * (that.isInstanceOf[this.C]) && { * val x$1 = that.asInstanceOf[this.C] @@ -243,7 +228,7 @@ trait SyntheticMethods extends ast.TreeDSL { List( Product_productPrefix -> (() => constantNullary(nme.productPrefix, clazz.name.decode)), Product_productArity -> (() => constantNullary(nme.productArity, arity)), - Product_productElement -> (() => perElementMethod(nme.productElement, accessorLub)(mkThisSelect)), + Product_productElement -> (() => perElementMethod(nme.productElement, AnyTpe)(mkThisSelect)), Product_iterator -> (() => productIteratorMethod), Product_canEqual -> (() => canEqualMethod) // This is disabled pending a reimplementation which doesn't add any @@ -339,12 +324,11 @@ trait SyntheticMethods extends ast.TreeDSL { !hasOverridingImplementation(m) || { clazz.isDerivedValueClass && (m == Any_hashCode || m == Any_equals) && { // Without a means to suppress this warning, I've thought better of it. 
- // - // if (settings.lint) { - // (clazz.info nonPrivateMember m.name) filter (m => (m.owner != AnyClass) && (m.owner != clazz) && !m.isDeferred) andAlso { m => - // currentUnit.warning(clazz.pos, s"Implementation of ${m.name} inherited from ${m.owner} overridden in $clazz to enforce value class semantics") - // } - // } + if (settings.warnValueOverrides) { + (clazz.info nonPrivateMember m.name) filter (m => (m.owner != AnyClass) && (m.owner != clazz) && !m.isDeferred) andAlso { m => + currentUnit.warning(clazz.pos, s"Implementation of ${m.name} inherited from ${m.owner} overridden in $clazz to enforce value class semantics") + } + } true } } @@ -378,7 +362,14 @@ trait SyntheticMethods extends ast.TreeDSL { for (ddef @ DefDef(_, _, _, _, _, _) <- templ.body ; if isRewrite(ddef.symbol)) { val original = ddef.symbol - val newAcc = deriveMethod(ddef.symbol, name => context.unit.freshTermName(name + "$")) { newAcc => + val i = original.owner.caseFieldAccessors.indexOf(original) + def freshAccessorName = { + devWarning(s"Unable to find $original among case accessors of ${original.owner}: ${original.owner.caseFieldAccessors}") + context.unit.freshTermName(original.name + "$") + } + def nameSuffixedByParamIndex = original.name.append(nme.CASE_ACCESSOR + "$" + i).toTermName + val newName = if (i < 0) freshAccessorName else nameSuffixedByParamIndex + val newAcc = deriveMethod(ddef.symbol, name => newName) { newAcc => newAcc.makePublic newAcc resetFlag (ACCESSOR | PARAMACCESSOR | OVERRIDE) ddef.rhs.duplicate diff --git a/src/compiler/scala/tools/nsc/typechecker/Tags.scala b/src/compiler/scala/tools/nsc/typechecker/Tags.scala index 90ec3a89b891..57dc74d2a0d0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Tags.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Tags.scala @@ -11,7 +11,6 @@ trait Tags { self: Typer => private val runDefinitions = currentRun.runDefinitions - import runDefinitions._ private def resolveTag(pos: Position, taggedTp: Type, allowMaterialization: Boolean) = enteringTyper { def wrapper (tree: => Tree): Tree = if (allowMaterialization) (context.withMacrosEnabled[Tree](tree)) else (context.withMacrosDisabled[Tree](tree)) @@ -66,7 +65,7 @@ trait Tags { // if someone requests a type tag, but scala-reflect.jar isn't on the library classpath, then bail if (pre == NoType && ApiUniverseClass == NoSymbol) EmptyTree else { - val tagSym = if (concrete) TypeTagClass else WeakTypeTagClass + val tagSym = if (concrete) runDefinitions.TypeTagClass else runDefinitions.WeakTypeTagClass val tagTp = if (pre == NoType) TypeRef(ApiUniverseClass.toTypeConstructor, tagSym, List(tp)) else singleType(pre, pre member tagSym.name) val taggedTp = appliedType(tagTp, List(tp)) resolveTag(pos, taggedTp, allowMaterialization) diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala index a2f52e190546..743bbe53bd31 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala @@ -17,7 +17,7 @@ abstract class TreeCheckers extends Analyzer { override protected def onTreeCheckerError(pos: Position, msg: String) { if (settings.fatalWarnings) - currentUnit.warning(pos, "\n** Error during internal checking:\n" + msg) + reporter.warning(pos, "\n** Error during internal checking:\n" + msg) } case class DiffResult[T](lost: List[T], gained: List[T]) { @@ -170,7 +170,7 @@ abstract class TreeCheckers extends Analyzer { ) - def errorFn(pos: Position, msg: Any): 
Unit = currentUnit.warning(pos, "[check: %s] %s".format(phase.prev, msg)) + def errorFn(pos: Position, msg: Any): Unit = reporter.warning(pos, "[check: %s] %s".format(phase.prev, msg)) def errorFn(msg: Any): Unit = errorFn(NoPosition, msg) def informFn(msg: Any) { @@ -208,8 +208,7 @@ abstract class TreeCheckers extends Analyzer { } def check(unit: CompilationUnit) { informProgress("checking "+unit) - val context = rootContext(unit) - context.checking = true + val context = rootContext(unit, checking = true) tpeOfTree.clear() SymbolTracker.check(phase, unit) val checker = new TreeChecker(context) diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index 60346e7be17d..0f90c6a47845 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -41,9 +41,9 @@ trait TypeDiagnostics { * indicate that the restriction may be lifted in the future. */ def restrictionWarning(pos: Position, unit: CompilationUnit, msg: String): Unit = - unit.warning(pos, "Implementation restriction: " + msg) + reporter.warning(pos, "Implementation restriction: " + msg) def restrictionError(pos: Position, unit: CompilationUnit, msg: String): Unit = - unit.error(pos, "Implementation restriction: " + msg) + reporter.error(pos, "Implementation restriction: " + msg) /** A map of Positions to addendums - if an error involves a position in * the map, the addendum should also be printed. @@ -435,12 +435,8 @@ trait TypeDiagnostics { trait TyperDiagnostics { self: Typer => - private def contextError(context0: Analyzer#Context, pos: Position, msg: String) = context0.error(pos, msg) - private def contextError(context0: Analyzer#Context, pos: Position, err: Throwable) = context0.error(pos, err) - private def contextWarning(pos: Position, msg: String) = context.unit.warning(pos, msg) - def permanentlyHiddenWarning(pos: Position, hidden: Name, defn: Symbol) = - contextWarning(pos, "imported `%s' is permanently hidden by definition of %s".format(hidden, defn.fullLocationString)) + context.warning(pos, "imported `%s' is permanently hidden by definition of %s".format(hidden, defn.fullLocationString)) object checkUnused { val ignoreNames = Set[TermName]("readResolve", "readObject", "writeObject", "writeReplace") @@ -536,21 +532,21 @@ trait TypeDiagnostics { if (sym.isDefaultGetter) "default argument" else if (sym.isConstructor) "constructor" else if (sym.isVar || sym.isGetter && sym.accessed.isVar) "var" - else if (sym.isVal || sym.isGetter && sym.accessed.isVal) "val" + else if (sym.isVal || sym.isGetter && sym.accessed.isVal || sym.isLazy) "val" else if (sym.isSetter) "setter" else if (sym.isMethod) "method" else if (sym.isModule) "object" else "term" ) - unit.warning(pos, s"$why $what in ${sym.owner} is never used") + reporter.warning(pos, s"$why $what in ${sym.owner} is never used") } p.unsetVars foreach { v => - unit.warning(v.pos, s"local var ${v.name} in ${v.owner} is never set - it could be a val") + reporter.warning(v.pos, s"local var ${v.name} in ${v.owner} is never set - it could be a val") } p.unusedTypes foreach { t => val sym = t.symbol val why = if (sym.isPrivate) "private" else "local" - unit.warning(t.pos, s"$why ${sym.fullLocationString} is never used") + reporter.warning(t.pos, s"$why ${sym.fullLocationString} is never used") } } } @@ -576,11 +572,11 @@ trait TypeDiagnostics { } else f } def apply(tree: Tree): Tree = { - // Error suppression will 
squash some of these warnings unless we circumvent it. + // Error suppression (in context.warning) would squash some of these warnings. // It is presumed if you are using a -Y option you would really like to hear - // the warnings you've requested. + // the warnings you've requested; thus, use reporter.warning. if (settings.warnDeadCode && context.unit.exists && treeOK(tree) && exprOK) - context.warning(tree.pos, "dead code following this construct", force = true) + reporter.warning(tree.pos, "dead code following this construct") tree } @@ -604,6 +600,23 @@ trait TypeDiagnostics { ) } + // warn about class/method/type-members' type parameters that shadow types already in scope + def warnTypeParameterShadow(tparams: List[TypeDef], sym: Symbol): Unit = + if (settings.warnTypeParameterShadow && !isPastTyper && !sym.isSynthetic) { + def enclClassOrMethodOrTypeMember(c: Context): Context = + if (!c.owner.exists || c.owner.isClass || c.owner.isMethod || (c.owner.isType && !c.owner.isParameter)) c + else enclClassOrMethodOrTypeMember(c.outer) + + tparams.filter(_.name != typeNames.WILDCARD).foreach { tp => + // we don't care about type params shadowing other type params in the same declaration + enclClassOrMethodOrTypeMember(context).outer.lookupSymbol(tp.name, s => s != tp.symbol && s.hasRawInfo && reallyExists(s)) match { + case LookupSucceeded(_, sym2) => context.warning(tp.pos, + s"type parameter ${tp.name} defined in $sym shadows $sym2 defined in ${sym2.owner}. You may want to rename your type parameter, or possibly remove it.") + case _ => + } + } + } + /** Report a type error. * * @param pos The position where to report the error @@ -627,13 +640,13 @@ trait TypeDiagnostics { case Import(expr, _) => expr.pos case _ => ex.pos } - contextError(context0, pos, cyclicReferenceMessage(sym, info.tree) getOrElse ex.getMessage()) + context0.error(pos, cyclicReferenceMessage(sym, info.tree) getOrElse ex.getMessage()) if (sym == ObjectClass) throw new FatalError("cannot redefine root "+sym) } case _ => - contextError(context0, ex.pos, ex) + context0.error(ex.pos, ex.msg) } } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 9f3f257529d8..2dd79075ee77 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -155,21 +155,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } else { mkArg = gen.mkNamedArg // don't pass the default argument (if any) here, but start emitting named arguments for the following args if (!param.hasDefault && !paramFailed) { - context.reportBuffer.errors.collectFirst { - case dte: DivergentImplicitTypeError => dte - } match { - case Some(divergent) => - // DivergentImplicit error has higher priority than "no implicit found" - // no need to issue the problem again if we are still in silent mode - if (context.reportErrors) { - context.issue(divergent.withPt(paramTp)) - context.reportBuffer.clearErrors { - case dte: DivergentImplicitTypeError => true - } - } - case _ => - NoImplicitFoundError(fun, param) - } + context.reporter.reportFirstDivergentError(fun, param, paramTp)(context) paramFailed = true } /* else { @@ -197,7 +183,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper !from.isError && !to.isError && context.implicitsEnabled - && (inferView(EmptyTree, from, to, reportAmbiguous = false) != EmptyTree) + && (inferView(context.tree, from, to, reportAmbiguous 
= false, saveErrors = true) != EmptyTree) + // SI-8230 / SI-8463 We'd like to change this to `saveErrors = false`, but can't. + // For now, we can at least pass in `context.tree` rather than `EmptyTree` so as + // to avoid unpositioned type errors. ) def inferView(tree: Tree, from: Type, to: Type, reportAmbiguous: Boolean): Tree = @@ -475,20 +464,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (cond) typerWithLocalContext(c)(f) else f(this) @inline - final def typerWithLocalContext[T](c: Context)(f: Typer => T): T = { - val res = f(newTyper(c)) - if (c.hasErrors) - context.updateBuffer(c.flushAndReturnBuffer()) - res - } - - @inline - final def withSavedContext[T](c: Context)(f: => T) = { - val savedErrors = c.flushAndReturnBuffer() - val res = f - c.updateBuffer(savedErrors) - res - } + final def typerWithLocalContext[T](c: Context)(f: Typer => T): T = + c.reporter.propagatingErrorsTo(context.reporter)(f(newTyper(c))) /** The typer for a label definition. If this is part of a template we * first have to enter the label definition. @@ -561,7 +538,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } val qual = typedQualifier { atPos(tree.pos.makeTransparent) { tree match { - case Ident(_) => Ident(rootMirror.getPackageObjectWithMember(pre, sym)) + case Ident(_) => + val packageObject = + if (sym.owner.isModuleClass) sym.owner.sourceModule // historical optimization, perhaps no longer needed + else pre.typeSymbol.packageObject + Ident(packageObject) case Select(qual, _) => Select(qual, nme.PACKAGEkw) case SelectFromTypeTree(qual, _) => Select(qual, nme.PACKAGEkw) } @@ -681,6 +662,12 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (Statistics.canEnable) Statistics.stopCounter(subtypeFailed, subtypeStart) if (Statistics.canEnable) Statistics.stopTimer(failedSilentNanos, failedSilentStart) } + @inline def wrapResult(reporter: ContextReporter, result: T) = + if (reporter.hasErrors) { + stopStats() + SilentTypeError(reporter.errors: _*) + } else SilentResultValue(result) + try { if (context.reportErrors || reportAmbiguousErrors != context.ambiguousErrors || @@ -694,20 +681,17 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper context.undetparams = context1.undetparams context.savedTypeBounds = context1.savedTypeBounds context.namedApplyBlockInfo = context1.namedApplyBlockInfo - if (context1.hasErrors) { - stopStats() - SilentTypeError(context1.errors: _*) - } else { - // If we have a successful result, emit any warnings it created. - context1.flushAndIssueWarnings() - SilentResultValue(result) - } + + // If we have a successful result, emit any warnings it created.
+ if (!context1.reporter.hasErrors) + context1.reporter.emitWarnings() + + wrapResult(context1.reporter, result) } else { assert(context.bufferErrors || isPastTyper, "silent mode is not available past typer") - withSavedContext(context){ - val res = op(this) - val errorsToReport = context.flushAndReturnBuffer() - if (errorsToReport.isEmpty) SilentResultValue(res) else SilentTypeError(errorsToReport.head) + + context.reporter.withFreshErrorBuffer { + wrapResult(context.reporter, op(this)) } } } catch { @@ -738,26 +722,12 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val featureName = (nestedOwners map (_.name + ".")).mkString + featureTrait.name def action(): Boolean = { def hasImport = inferImplicit(EmptyTree: Tree, featureTrait.tpe, reportAmbiguous = true, isView = false, context).isSuccess - def hasOption = settings.language.value exists (s => s == featureName || s == "_") + def hasOption = settings.language contains featureName val OK = hasImport || hasOption if (!OK) { val Some(AnnotationInfo(_, List(Literal(Constant(featureDesc: String)), Literal(Constant(required: Boolean))), _)) = featureTrait getAnnotation LanguageFeatureAnnot - val req = if (required) "needs to" else "should" - val fqname = "scala.language." + featureName - val explain = ( - if (currentRun.reportedFeature contains featureTrait) "" else - s"""| - |This can be achieved by adding the import clause 'import $fqname' - |or by setting the compiler option -language:$featureName. - |See the Scala docs for value $fqname for a discussion - |why the feature $req be explicitly enabled.""".stripMargin - ) - currentRun.reportedFeature += featureTrait - - val msg = s"$featureDesc $req be enabled\nby making the implicit value $fqname visible.$explain" replace ("#", construct) - if (required) unit.error(pos, msg) - else currentRun.featureWarnings.warn(pos, msg) + context.featureWarning(pos, featureName, featureDesc, featureTrait, construct, required) } OK } @@ -775,6 +745,26 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case _ => } + /** + * Convert a SAM type to the corresponding FunctionType, + * extrapolating BoundedWildcardTypes in the process + * (no type precision is lost by the extrapolation, + * but this facilitates dealing with the types arising from Java's use-site variance). 
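As a user-level illustration of the SAM handling described above (hypothetical code; in this line of development the treatment is still gated by compiler settings): a function literal can be typed against a type with a single abstract method by viewing that type through the method's signature, roughly as the corresponding FunctionN type.

    trait Parser {
      def parse(input: String): Int   // the single abstract method
    }

    object SamDemo {
      // Conceptually, Parser is seen here as String => Int, so the literal's
      // parameter and result types are checked against the sam's signature.
      val wordCount: Parser = (s: String) => s.split(' ').length

      val n: Int = wordCount.parse("one two three")   // n == 3
    }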
+ */ + def samToFunctionType(tp: Type, sam: Symbol = NoSymbol): Type = { + val samSym = sam orElse samOf(tp) + + def correspondingFunctionSymbol = { + val numVparams = samSym.info.params.length + if (numVparams > definitions.MaxFunctionArity) NoSymbol + else FunctionClass(numVparams) + } + + if (samSym.exists && samSym.owner != correspondingFunctionSymbol) // don't treat Functions as SAMs + wildcardExtrapolation(normalize(tp memberInfo samSym)) + else NoType + } + /** Perform the following adaptations of expression, pattern or type `tree` wrt to * given mode `mode` and given prototype `pt`: * (-1) For expressions with annotated types, let AnnotationCheckers decide what to do @@ -827,14 +817,14 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } // avoid throwing spurious DivergentImplicit errors - if (context.hasErrors) + if (context.reporter.hasErrors) setError(tree) else withCondConstrTyper(treeInfo.isSelfOrSuperConstrCall(tree))(typer1 => if (original != EmptyTree && pt != WildcardType) ( typer1 silent { tpr => val withImplicitArgs = tpr.applyImplicitArgs(tree) - if (tpr.context.hasErrors) tree // silent will wrap it in SilentTypeError anyway + if (tpr.context.reporter.hasErrors) tree // silent will wrap it in SilentTypeError anyway else tpr.typed(withImplicitArgs, mode, pt) } orElse { _ => @@ -858,7 +848,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case Block(_, tree1) => tree1.symbol case _ => tree.symbol } - if (!meth.isConstructor && isFunctionType(pt)) { // (4.2) + if (!meth.isConstructor && (isFunctionType(pt) || samOf(pt).exists)) { // (4.2) debuglog(s"eta-expanding $tree: ${tree.tpe} to $pt") checkParamsConvertible(tree, tree.tpe) val tree0 = etaExpand(context.unit, tree, this) @@ -884,13 +874,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def adaptType(): Tree = { // @M When not typing a type constructor (!context.inTypeConstructorAllowed) - // or raw type (tree.symbol.isJavaDefined && context.unit.isJava), types must be of kind *, + // or raw type, types must be of kind *, // and thus parameterized types must be applied to their type arguments // @M TODO: why do kind-* tree's have symbols, while higher-kinded ones don't? def properTypeRequired = ( tree.hasSymbolField && !context.inTypeConstructorAllowed - && !(tree.symbol.isJavaDefined && context.unit.isJava) + && !context.unit.isJava ) // @M: don't check tree.tpe.symbol.typeParams. check tree.tpe.typeParams!!! // (e.g., m[Int] --> tree.tpe.symbol.typeParams.length == 1, tree.tpe.typeParams.length == 0!) @@ -952,16 +942,15 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } def adaptConstant(value: Constant): Tree = { val sym = tree.symbol - if (sym != null && sym.isDeprecated) { - val msg = sym.toString + sym.locationString + " is deprecated: " + sym.deprecationMessage.getOrElse("") - unit.deprecationWarning(tree.pos, msg) - } + if (sym != null && sym.isDeprecated) + context.deprecationWarning(tree.pos, sym) + treeCopy.Literal(tree, value) } // Ignore type errors raised in later phases that are due to mismatching types with existential skolems // We have lift crashing in 2.9 with an adapt failure in the pattern matcher. - // Here's my hypothsis why this happens. The pattern matcher defines a variable of type + // Here's my hypothesis why this happens. 
The pattern matcher defines a variable of type // // val x: T = expr // @@ -1041,11 +1030,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (tree.tpe <:< AnyTpe) pt.dealias match { case TypeRef(_, UnitClass, _) => // (12) if (settings.warnValueDiscard) - context.unit.warning(tree.pos, "discarded non-Unit value") + context.warning(tree.pos, "discarded non-Unit value") return typedPos(tree.pos, mode, pt)(Block(List(tree), Literal(Constant(())))) case TypeRef(_, sym, _) if isNumericValueClass(sym) && isNumericSubType(tree.tpe, pt) => if (settings.warnNumericWiden) - context.unit.warning(tree.pos, "implicit numeric widening") + context.warning(tree.pos, "implicit numeric widening") return typedPos(tree.pos, mode, pt)(Select(tree, "to" + sym.name)) case _ => } @@ -1063,13 +1052,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case coercion => def msg = "inferred view from " + tree.tpe + " to " + pt + " = " + coercion + ":" + coercion.tpe if (settings.logImplicitConv) - unit.echo(tree.pos, msg) + context.echo(tree.pos, msg) debuglog(msg) val silentContext = context.makeImplicit(context.ambiguousErrors) val res = newTyper(silentContext).typed( new ApplyImplicitView(coercion, List(tree)) setPos tree.pos, mode, pt) - silentContext.firstError match { + silentContext.reporter.firstError match { case Some(err) => context.issue(err) case None => return res } @@ -1135,7 +1124,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper adaptConstant(value) case OverloadedType(pre, alts) if !mode.inFunMode => // (1) inferExprAlternative(tree, pt) - adapt(tree, mode, pt, original) + adaptAfterOverloadResolution(tree, mode, pt, original) case NullaryMethodType(restpe) => // (2) adapt(tree setType restpe, mode, pt, original) case TypeRef(_, ByNameParamClass, arg :: Nil) if mode.inExprMode => // (2) @@ -1168,6 +1157,12 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } + // This just exists to help keep track of the spots where we have to adapt a tree after + // overload resolution. These proved hard to find during the fix for SI-8267. + def adaptAfterOverloadResolution(tree: Tree, mode: Mode, pt: Type = WildcardType, original: Tree = EmptyTree): Tree = { + adapt(tree, mode, pt, original) + } + def instantiate(tree: Tree, mode: Mode, pt: Type): Tree = { inferExprInstance(tree, context.extractUndetparams(), pt) adapt(tree, mode, pt) @@ -1223,7 +1218,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case EmptyTree => qual case coercion => if (settings.logImplicitConv) - unit.echo(qual.pos, + context.echo(qual.pos, "applied implicit conversion from %s to %s = %s".format( qual.tpe, searchTemplate, coercion.symbol.defString)) @@ -1288,7 +1283,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper private def validateNoCaseAncestor(clazz: Symbol) = { if (!phase.erasedTypes) { for (ancestor <- clazz.ancestors find (_.isCase)) { - unit.error(clazz.pos, ( + context.error(clazz.pos, ( "case %s has case ancestor %s, but case-to-case inheritance is prohibited."+ " To overcome this limitation, use extractors to pattern match on non-leaf nodes." 
).format(clazz, ancestor.fullName)) @@ -1305,7 +1300,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val isValueClass = !clazz.isTrait def where = if (isValueClass) "value class" else "universal trait extending from class Any" def implRestriction(tree: Tree, what: String) = - unit.error(tree.pos, s"implementation restriction: $what is not allowed in $where" + + context.error(tree.pos, s"implementation restriction: $what is not allowed in $where" + "\nThis restriction is planned to be removed in subsequent releases.") /** * Deeply traverses the tree in search of constructs that are not allowed @@ -1334,7 +1329,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } for (stat <- body) { - def notAllowed(what: String) = unit.error(stat.pos, s"$what is not allowed in $where") + def notAllowed(what: String) = context.error(stat.pos, s"$what is not allowed in $where") stat match { // see https://issues.scala-lang.org/browse/SI-6444 // see https://issues.scala-lang.org/browse/SI-6463 @@ -1362,9 +1357,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper private def validateDerivedValueClass(clazz: Symbol, body: List[Tree]) = { if (clazz.isTrait) - unit.error(clazz.pos, "only classes (not traits) are allowed to extend AnyVal") + context.error(clazz.pos, "only classes (not traits) are allowed to extend AnyVal") if (!clazz.isStatic) - unit.error(clazz.pos, "value class may not be a "+ + context.error(clazz.pos, "value class may not be a "+ (if (clazz.owner.isTerm) "local class" else "member of another class")) if (!clazz.isPrimitiveValueClass) { clazz.primaryConstructor.paramss match { @@ -1372,26 +1367,26 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val decls = clazz.info.decls val paramAccessor = clazz.constrParamAccessors.head if (paramAccessor.isMutable) - unit.error(paramAccessor.pos, "value class parameter must not be a var") + context.error(paramAccessor.pos, "value class parameter must not be a var") val accessor = decls.toList.find(x => x.isMethod && x.accessedOrSelf == paramAccessor) accessor match { case None => - unit.error(paramAccessor.pos, "value class parameter must be a val and not be private[this]") + context.error(paramAccessor.pos, "value class parameter must be a val and not be private[this]") case Some(acc) if acc.isProtectedLocal => - unit.error(paramAccessor.pos, "value class parameter must not be protected[this]") + context.error(paramAccessor.pos, "value class parameter must not be protected[this]") case Some(acc) => if (acc.tpe.typeSymbol.isDerivedValueClass) - unit.error(acc.pos, "value class may not wrap another user-defined value class") + context.error(acc.pos, "value class may not wrap another user-defined value class") checkEphemeral(clazz, body filterNot (stat => stat.symbol != null && stat.symbol.accessedOrSelf == paramAccessor)) } case _ => - unit.error(clazz.pos, "value class needs to have exactly one val parameter") + context.error(clazz.pos, "value class needs to have exactly one val parameter") } } for (tparam <- clazz.typeParams) if (tparam hasAnnotation definitions.SpecializedClass) - unit.error(tparam.pos, "type parameter of value class may not be specialized") + context.error(tparam.pos, "type parameter of value class may not be specialized") } /** Typechecks a parent type reference. 
@@ -1545,7 +1540,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val cbody1 = treeCopy.Block(cbody, preSuperStats, superCall1) val clazz = context.owner assert(clazz != NoSymbol, templ) - val cscope = context.outer.makeNewScope(ctor, context.outer.owner) + val dummy = context.outer.owner.newLocalDummy(templ.pos) + val cscope = context.outer.makeNewScope(ctor, dummy) + if (dummy.isTopLevel) currentRun.symSource(dummy) = currentUnit.source.file val cbody2 = { // called both during completion AND typing. val typer1 = newTyper(cscope) // XXX: see about using the class's symbol.... @@ -1684,10 +1681,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val sameSourceFile = context.unit.source.file == psym.sourceFile - if (psym.hasDeprecatedInheritanceAnnotation && !sameSourceFile) { + if (!isPastTyper && psym.hasDeprecatedInheritanceAnnotation && + !sameSourceFile && !context.owner.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation)) { val suffix = psym.deprecatedInheritanceMessage map (": " + _) getOrElse "" val msg = s"inheritance from ${psym.fullLocationString} is deprecated$suffix" - unit.deprecationWarning(parent.pos, msg) + context.deprecationWarning(parent.pos, psym, msg) } if (psym.isSealed && !phase.erasedTypes) @@ -1754,13 +1752,18 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if ((clazz isNonBottomSubClass ClassfileAnnotationClass) && (clazz != ClassfileAnnotationClass)) { if (!clazz.owner.isPackageClass) - unit.error(clazz.pos, "inner classes cannot be classfile annotations") - else restrictionWarning(cdef.pos, unit, + context.error(clazz.pos, "inner classes cannot be classfile annotations") + // Ignore @SerialVersionUID, because it is special-cased and handled completely differently. + // It only extends ClassfileAnnotationClass instead of StaticAnnotation to get the enforcement + // of constant argument values "for free". Related to SI-7041. + else if (clazz != SerialVersionUIDAttr) restrictionWarning(cdef.pos, unit, """|subclassing Classfile does not |make your annotation visible at runtime. If that is what |you want, you must write the annotation class in Java.""".stripMargin) } + warnTypeParameterShadow(tparams1, clazz) + if (!isPastTyper) { for (ann <- clazz.getAnnotation(DeprecatedAttr)) { val m = companionSymbolOf(clazz, context) @@ -1809,7 +1812,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper private def ensurePredefParentsAreInSameSourceFile(template: Template) = { val parentSyms = template.parents map (_.symbol) filterNot (_ == AnyRefClass) if (parentSyms exists (_.associatedFile != PredefModule.associatedFile)) - unit.error(template.pos, s"All parents of Predef must be defined in ${PredefModule.associatedFile}.") + context.error(template.pos, s"All parents of Predef must be defined in ${PredefModule.associatedFile}.") } /** In order to override this in the TreeCheckers Typer so synthetics aren't re-added * all the time, it is exposed here the module/class typing methods go through it. 
@@ -1880,7 +1883,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper ConstrArgsInParentOfTraitError(parents1.head, clazz) if ((clazz isSubClass ClassfileAnnotationClass) && !clazz.isTopLevel) - unit.error(clazz.pos, "inner classes cannot be classfile annotations") + context.error(clazz.pos, "inner classes cannot be classfile annotations") if (!phase.erasedTypes && !clazz.info.resultType.isError) // @S: prevent crash for duplicated type members checkFinitary(clazz.info.resultType.asInstanceOf[ClassInfoType]) @@ -1908,7 +1911,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (clazz.isTrait) { for (decl <- clazz.info.decls if decl.isTerm && decl.isEarlyInitialized) { - unit.warning(decl.pos, "Implementation restriction: early definitions in traits are not initialized before the super class is initialized.") + context.warning(decl.pos, "Implementation restriction: early definitions in traits are not initialized before the super class is initialized.") } } @@ -2096,7 +2099,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case xs => xs.map(_.nameString).mkString(" (of ", " with ", ")") } def fail(pos: Position, msg: String): Boolean = { - unit.error(pos, msg) + context.error(pos, msg) false } /* Have to examine all parameters in all lists. @@ -2163,6 +2166,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val tparams1 = ddef.tparams mapConserve typedTypeDef val vparamss1 = ddef.vparamss mapConserve (_ mapConserve typedValDef) + warnTypeParameterShadow(tparams1, meth) + meth.annotations.map(_.completeInfo()) for (vparams1 <- vparamss1; vparam1 <- vparams1 dropRight 1) @@ -2239,6 +2244,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val typedMods = typedModifiers(tdef.mods) tdef.symbol.annotations.map(_.completeInfo()) + warnTypeParameterShadow(tparams1, tdef.symbol) + // @specialized should not be pickled when compiling with -no-specialize if (settings.nospecialization && currentRun.compiles(tdef.symbol)) { tdef.symbol.removeAnnotation(definitions.SpecializedClass) @@ -2479,7 +2486,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * an alternative TODO: add partial function AST node or equivalent and get rid of this synthesis --> do everything in uncurry (or later) * however, note that pattern matching codegen is designed to run *before* uncurry */ - def synthesizePartialFunction(paramName: TermName, paramPos: Position, tree: Tree, mode: Mode, pt: Type): Tree = { + def synthesizePartialFunction(paramName: TermName, paramPos: Position, paramSynthetic: Boolean, + tree: Tree, mode: Mode, pt: Type): Tree = { assert(pt.typeSymbol == PartialFunctionClass, s"PartialFunction synthesis for match in $tree requires PartialFunction expected type, but got $pt.") val targs = pt.dealiasWiden.typeArgs @@ -2507,7 +2515,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val casesTrue = cases map (c => deriveCaseDef(c)(x => atPos(x.pos.focus)(TRUE)).duplicate.asInstanceOf[CaseDef]) // must generate a new tree every time - def selector: Tree = gen.mkUnchecked( + def selector(paramSym: Symbol): Tree = gen.mkUnchecked( if (sel != EmptyTree) sel.duplicate else atPos(tree.pos.focusStart)( // SI-6925: subsume type of the selector to `argTp` @@ -2518,7 +2526,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // hence the cast, which will 
be erased in posterasure // (the cast originally caused extremely weird types to show up // in test/scaladoc/run/SI-5933.scala because `variantToSkolem` was missing `tpSym.initialize`) - gen.mkCastPreservingAnnotations(Ident(paramName), argTp) + gen.mkCastPreservingAnnotations(Ident(paramSym), argTp) )) def mkParam(methodSym: Symbol, tp: Type = argTp) = @@ -2546,14 +2554,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper methodSym setInfo polyType(List(A1, B1), MethodType(paramSyms, B1.tpe)) val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym)) - // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it) - paramSyms foreach (methodBodyTyper.context.scope enter _) + if (!paramSynthetic) methodBodyTyper.context.scope enter x // First, type without the default case; only the cases provided // by the user are typed. The LUB of these becomes `B`, the lower // bound of `B1`, which in turn is the result type of the default // case - val match0 = methodBodyTyper.typedMatch(selector, cases, mode, resTp) + val match0 = methodBodyTyper.typedMatch(selector(x), cases, mode, resTp) val matchResTp = match0.tpe B1 setInfo TypeBounds.lower(matchResTp) // patch info @@ -2627,11 +2634,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val paramSym = mkParam(methodSym) val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym)) // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it) - methodBodyTyper.context.scope enter paramSym + if (!paramSynthetic) methodBodyTyper.context.scope enter paramSym methodSym setInfo MethodType(List(paramSym), BooleanTpe) val defaultCase = mkDefaultCase(FALSE) - val match_ = methodBodyTyper.typedMatch(selector, casesTrue :+ defaultCase, mode, BooleanTpe) + val match_ = methodBodyTyper.typedMatch(selector(paramSym), casesTrue :+ defaultCase, mode, BooleanTpe) DefDef(methodSym, methodBodyTyper.virtualizedMatch(match_, mode, BooleanTpe)) } @@ -2645,10 +2652,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper methodSym setInfo MethodType(List(paramSym), AnyTpe) val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym)) - // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it) - methodBodyTyper.context.scope enter paramSym + if (!paramSynthetic) methodBodyTyper.context.scope enter paramSym - val match_ = methodBodyTyper.typedMatch(selector, cases, mode, resTp) + val match_ = methodBodyTyper.typedMatch(selector(paramSym), cases, mode, resTp) val matchResTp = match_.tpe methodSym setInfo MethodType(List(paramSym), matchResTp) // patch info @@ -2699,7 +2705,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * `{ * def apply$body(p1: T1, ..., pN: TN): T = body * new S { - * def apply(p1: T1, ..., pN: TN): T = apply$body(p1,..., pN) + * def apply(p1: T1', ..., pN: TN'): T' = apply$body(p1,..., pN) * } * }` * @@ -2709,6 +2715,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * * The `apply` method is identified by the argument `sam`; `S` corresponds to the argument `samClassTp`, * and `resPt` is derived from `samClassTp` -- it may be fully defined, or not... + * If it is not fully defined, we derive `samClassTpFullyDefined` by inferring any unknown type parameters. 
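For orientation, a hypothetical sketch of what the PartialFunction synthesis above produces for a pattern-matching anonymous function (names and the chosen parent are illustrative; the real expansion uses fresh symbols and the `applyOrElse`/`isDefinedAt` methods built here):

    // What the user writes:
    val pf: PartialFunction[Int, String] = {
      case 1 => "one"
      case 2 => "two"
    }

    // Roughly what the typer synthesizes:
    val expanded: PartialFunction[Int, String] =
      new scala.runtime.AbstractPartialFunction[Int, String] with Serializable {
        override def applyOrElse[A1 <: Int, B1 >: String](x: A1, default: A1 => B1): B1 =
          x match {
            case 1 => "one"
            case 2 => "two"
            case _ => default(x)   // the synthetic default case
          }
        def isDefinedAt(x: Int): Boolean = x match {
          case 1 => true
          case 2 => true           // the `casesTrue` copies of the user's cases
          case _ => false
        }
      }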
+ * + * The types T1' ... TN' and T' are derived from the method signature of the sam method, + * as seen from the fully defined `samClassTpFullyDefined`. * * The function's body is put in a method outside of the class definition to enforce scoping. * S's members should not be in scope in `body`. @@ -2720,6 +2730,22 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * However T must be fully defined before we type the instantiation, as it'll end up as a parent type, * which must be fully defined. Would be nice to have some kind of mechanism to insert type vars in a block of code, * and have the instantiation of the first occurrence propagate to the rest of the block. + * + * TODO: by-name params + * scala> trait LazySink { def accept(a: => Any): Unit } + * defined trait LazySink + * + * scala> val f: LazySink = (a) => (a, a) + * f: LazySink = $anonfun$1@1fb26910 + * + * scala> f(println("!")) + * :10: error: LazySink does not take parameters + * f(println("!")) + * ^ + * + * scala> f.accept(println("!")) + * ! + * ! */ def synthesizeSAMFunction(sam: Symbol, fun: Function, resPt: Type, samClassTp: Type, mode: Mode): Tree = { // assert(fun.vparams forall (vp => isFullyDefined(vp.tpt.tpe))) -- by construction, as we take them from sam's info @@ -2800,14 +2826,21 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper samClassTp } - // `final override def ${sam.name}($p1: $T1, ..., $pN: $TN): $resPt = ${sam.name}\$body'($p1, ..., $pN)` + // what's the signature of the method that we should actually be overriding? + val samMethTp = samClassTpFullyDefined memberInfo sam + // Before the mutation, `tp <:< vpar.tpt.tpe` should hold. + // TODO: error message when this is not the case, as the expansion won't type check + // - Ti' <:< Ti and T <: T' must hold for the samDef body to type check + val funArgTps = foreach2(samMethTp.paramTypes, fun.vparams)((tp, vpar) => vpar.tpt setType tp) + + // `final override def ${sam.name}($p1: $T1', ..., $pN: $TN'): ${samMethTp.finalResultType} = ${sam.name}\$body'($p1, ..., $pN)` val samDef = DefDef(Modifiers(FINAL | OVERRIDE | SYNTHETIC), sam.name.toTermName, Nil, List(fun.vparams), - TypeTree(samBodyDef.tpt.tpe) setPos sampos.focus, - Apply(Ident(bodyName), fun.vparams map (p => Ident(p.name))) + TypeTree(samMethTp.finalResultType) setPos sampos.focus, + Apply(Ident(bodyName), fun.vparams map gen.paramToArg) ) val serializableParentAddendum = @@ -2837,6 +2870,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper ) } + // TODO: improve error reporting -- when we're in silent mode (from `silent(_.doTypedApply(tree, fun, args, mode, pt)) orElse onError`) + // the errors in the function don't get out... + if (block exists (_.isErroneous)) + context.error(fun.pos, s"Could not derive subclass of $samClassTp\n (with SAM `def $sam$samMethTp`)\n based on: $fun.") + classDef.symbol addAnnotation SerialVersionUIDAnnotation block } @@ -2857,7 +2895,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * as `(a => a): Int => Int` should not (yet) get the sam treatment. */ val sam = - if (!settings.Xexperimental || pt.typeSymbol == FunctionSymbol) NoSymbol + if (pt.typeSymbol == FunctionSymbol) NoSymbol else samOf(pt) /* The SAM case comes first so that this works: @@ -2867,15 +2905,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * Note that the arity of the sam must correspond to the arity of the function. 
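A hypothetical, simplified rendering of the block that the SAM expansion above builds, shown for a two-parameter example (identifiers are illustrative; the real synthesis generates fresh names, takes parameter and result types from the sam as seen from the fully defined class type, and adds a SerialVersionUID annotation):

    trait Combiner {
      def combine(a: Int, b: Int): Int   // the single abstract method
    }

    object SamExpansion {
      // The user writes:
      //   val c: Combiner = (a: Int, b: Int) => a + b
      // which is expanded roughly into:
      val c: Combiner = {
        def combine$body(a: Int, b: Int): Int = a + b
        new Combiner with Serializable {
          final override def combine(a: Int, b: Int): Int = combine$body(a, b)
        }
      }
    }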
*/ val samViable = sam.exists && sameLength(sam.info.params, fun.vparams) + val ptNorm = if (samViable) samToFunctionType(pt, sam) else pt val (argpts, respt) = - if (samViable) { - val samInfo = pt memberInfo sam - (samInfo.paramTypes, samInfo.resultType) - } else { - pt baseType FunctionSymbol match { - case TypeRef(_, FunctionSymbol, args :+ res) => (args, res) - case _ => (fun.vparams map (_ => if (pt == ErrorType) ErrorType else NoType), WildcardType) - } + ptNorm baseType FunctionSymbol match { + case TypeRef(_, FunctionSymbol, args :+ res) => (args, res) + case _ => (fun.vparams map (_ => if (pt == ErrorType) ErrorType else NoType), WildcardType) } if (!FunctionSymbol.exists) @@ -2920,7 +2954,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val p = fun.vparams.head if (p.tpt.tpe == null) p.tpt setType outerTyper.typedType(p.tpt).tpe - outerTyper.synthesizePartialFunction(p.name, p.pos, fun.body, mode, pt) + outerTyper.synthesizePartialFunction(p.name, p.pos, paramSynthetic = false, fun.body, mode, pt) // Use synthesizeSAMFunction to expand `(p1: T1, ..., pN: TN) => body` // to an instance of the corresponding anonymous subclass of `pt`. @@ -3003,7 +3037,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper ConstructorsOrderError(stat) } - if (treeInfo.isPureExprForWarningPurposes(result)) context.warning(stat.pos, + if (!isPastTyper && treeInfo.isPureExprForWarningPurposes(result)) context.warning(stat.pos, "a pure expression does nothing in statement position; " + "you may be omitting necessary parentheses" ) @@ -3020,7 +3054,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper || (looker.hasAccessorFlag && !accessed.hasAccessorFlag && accessed.isPrivate) ) - def checkNoDoubleDefs(stats: List[Tree]): Unit = { + def checkNoDoubleDefs: Unit = { val scope = if (inBlock) context.scope else context.owner.info.decls var e = scope.elems while ((e ne null) && e.owner == scope) { @@ -3055,8 +3089,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // the corresponding synthetics to the package class, only to the package object class. def shouldAdd(sym: Symbol) = inBlock || !context.isInPackageObject(sym, context.owner) - for (sym <- scope if shouldAdd(sym)) - for (tree <- context.unit.synthetics get sym) { + for (sym <- scope) + for (tree <- context.unit.synthetics get sym if shouldAdd(sym)) { // OPT: shouldAdd is usually true. Call it here, rather than in the outer loop newStats += typedStat(tree) // might add even more synthetics to the scope context.unit.synthetics -= sym } @@ -3102,7 +3136,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val stats1 = stats mapConserve typedStat if (phase.erasedTypes) stats1 else { - checkNoDoubleDefs(stats1) + // As packages are open, it doesn't make sense to check double definitions here. Furthermore, + // it is expensive if the package is large. 
Instead, such double definitions are checked in `Namers.enterInScope` + if (!context.owner.isPackageClass) + checkNoDoubleDefs addSynthetics(stats1) } } @@ -3159,7 +3196,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def doTypedApply(tree: Tree, fun0: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = { // TODO_NMT: check the assumption that args nonEmpty def duplErrTree = setError(treeCopy.Apply(tree, fun0, args)) - def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree } + def duplErrorTree(err: AbsTypeError) = { context.issue(err); duplErrTree } def preSelectOverloaded(fun: Tree): Tree = { if (fun.hasSymbolField && fun.symbol.isOverloaded) { @@ -3198,7 +3235,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (sym1 != NoSymbol) sym = sym1 } if (sym == NoSymbol) fun - else adapt(fun setSymbol sym setType pre.memberType(sym), mode.forFunMode, WildcardType) + else adaptAfterOverloadResolution(fun setSymbol sym setType pre.memberType(sym), mode.forFunMode) } else fun } @@ -3239,15 +3276,31 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper (arg1, arg1.tpe.deconst) }.unzip } - if (context.hasErrors) + if (context.reporter.hasErrors) setError(tree) else { inferMethodAlternative(fun, undetparams, argTpes, pt) - doTypedApply(tree, adapt(fun, mode.forFunMode, WildcardType), args1, mode, pt) + doTypedApply(tree, adaptAfterOverloadResolution(fun, mode.forFunMode, WildcardType), args1, mode, pt) } } handleOverloaded + case _ if isPolymorphicSignature(fun.symbol) => + // Mimics Java's treatment of polymorphic signatures as described in + // https://docs.oracle.com/javase/specs/jls/se8/html/jls-15.html#jls-15.12.3 + // + // One can think of these methods as being infinitely overloaded. We create + // a fictitious new cloned method symbol for each call site that takes on a signature + // governed by a) the argument types and b) the expected type + val args1 = typedArgs(args, forArgMode(fun, mode)) + val pts = args1.map(_.tpe.deconst) + val clone = fun.symbol.cloneSymbol + val cloneParams = pts map (pt => clone.newValueParameter(currentUnit.freshTermName()).setInfo(pt)) + val resultType = if (isFullyDefined(pt)) pt else ObjectTpe + clone.modifyInfo(mt => copyMethodType(mt, cloneParams, resultType)) + val fun1 = fun.setSymbol(clone).setType(clone.info) + doTypedApply(tree, fun1, args1, mode, resultType).setType(resultType) + case mt @ MethodType(params, _) => val paramTypes = mt.paramTypes // repeat vararg as often as needed, remove by-name @@ -3258,25 +3311,25 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * to that. This is the last thing which is tried (after * default arguments) */ - def tryTupleApply: Tree = ( + def tryTupleApply: Tree = { if (eligibleForTupleConversion(paramTypes, argslen) && !phase.erasedTypes) { val tupleArgs = List(atPos(tree.pos.makeTransparent)(gen.mkTuple(args))) // expected one argument, but got 0 or >1 ==> try applying to tuple // the inner "doTypedApply" does "extractUndetparams" => restore when it fails val savedUndetparams = context.undetparams silent(_.doTypedApply(tree, fun, tupleArgs, mode, pt)) map { t => - // Depending on user options, may warn or error here if - // a Unit or tuple was inserted.
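A hypothetical caller-side illustration of the polymorphic-signature support added above (using the standard java.lang.invoke API; it assumes this change, or a later Scala release, is in place): a call to a signature-polymorphic method such as MethodHandle.invokeExact is typed with a signature built from the actual argument types and the expected type, rather than the declared (Object*)Object one.

    import java.lang.invoke.{MethodHandles, MethodType}

    object PolySigDemo {
      private val lookup = MethodHandles.lookup()

      // Handle for String.concat(String): String
      private val concat = lookup.findVirtual(
        classOf[String], "concat",
        MethodType.methodType(classOf[String], classOf[String]))

      // The ascription supplies the expected type, so the call site is typed
      // as (String, String)String instead of (Object, Object)Object.
      val greeting = concat.invokeExact("Hello, ", "world"): String
    }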
- val keepTree = ( - !mode.typingExprNotFun - || t.symbol == null - || checkValidAdaptation(t, args) - ) - if (keepTree) t else EmptyTree + // Depending on user options, may warn or error here if + // a Unit or tuple was inserted. + val keepTree = ( + !mode.typingExprNotFun // why? introduced in 4e488a60, doc welcome + || t.symbol == null // ditto + || checkValidAdaptation(t, args) + ) + if (keepTree) t else EmptyTree } orElse { _ => context.undetparams = savedUndetparams ; EmptyTree } } else EmptyTree - ) + } /* Treats an application which uses named or default arguments. * Also works if names + a vararg used: when names are used, the vararg @@ -3367,8 +3420,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper duplErrTree } else if (lencmp2 == 0) { // useful when a default doesn't match parameter type, e.g. def f[T](x:T="a"); f[Int]() - val note = "Error occurred in an application involving default arguments." - if (!(context.diagnostic contains note)) context.diagnostic = note :: context.diagnostic + context.diagUsedDefaults = true doTypedApply(tree, if (blockIsEmpty) fun else fun1, allArgs, mode, pt) } else { rollbackNamesDefaultsOwnerChanges() @@ -3675,7 +3727,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } if (annType.typeSymbol == DeprecatedAttr && argss.flatten.size < 2) - unit.deprecationWarning(ann.pos, "@deprecated now takes two arguments; see the scaladoc.") + context.deprecationWarning(ann.pos, DeprecatedAttr, "@deprecated now takes two arguments; see the scaladoc.") if ((typedAnn.tpe == null) || typedAnn.tpe.isErroneous) ErroneousAnnotation else annInfo(typedAnn) @@ -3744,8 +3796,12 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case TypeRef(pre, sym, args) => if (sym.isAliasType && containsLocal(tp) && (tp.dealias ne tp)) apply(tp.dealias) else { - if (pre.isVolatile) - InferTypeWithVolatileTypeSelectionError(tree, pre) + if (pre.isVolatile) pre match { + case SingleType(_, sym) if sym.isSynthetic && isPastTyper => + debuglog(s"ignoring volatility of prefix in pattern matcher generated inferred type: $tp") // See pos/t7459c.scala + case _ => + InferTypeWithVolatileTypeSelectionError(tree, pre) + } mapOver(tp) } case _ => @@ -3827,7 +3883,18 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper protected def typedTypeApply(tree: Tree, mode: Mode, fun: Tree, args: List[Tree]): Tree = fun.tpe match { case OverloadedType(pre, alts) => inferPolyAlternatives(fun, mapList(args)(treeTpe)) - val tparams = fun.symbol.typeParams //@M TODO: fun.symbol.info.typeParams ? (as in typedAppliedTypeTree) + + // SI-8267 `memberType` can introduce existentials *around* a PolyType/MethodType, see AsSeenFromMap#captureThis. + // If we had selected a non-overloaded symbol, `memberType` would have been called in `makeAccessible` + // and the resulting existential type would have been skolemized in `adapt` *before* we typechecked + // the enclosing type-/ value- application. + // + // However, if the selection is overloaded, we defer calling `memberType` until we can select a single + // alternative here. It is therefore necessary to skolemize the existential here. + // + val fun1 = adaptAfterOverloadResolution(fun, mode.forFunMode | TAPPmode) + + val tparams = fun1.symbol.typeParams //@M TODO: fun.symbol.info.typeParams ? 
(as in typedAppliedTypeTree) val args1 = if (sameLength(args, tparams)) { //@M: in case TypeApply we can't check the kind-arities of the type arguments, // as we don't know which alternative to choose... here we do @@ -3841,7 +3908,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // ...actually this was looping anyway, see bug #278. return TypedApplyWrongNumberOfTpeParametersError(fun, fun) - typedTypeApply(tree, mode, fun, args1) + typedTypeApply(tree, mode, fun1, args1) case SingleType(_, _) => typedTypeApply(tree, mode, fun setType fun.tpe.widen, args) case PolyType(tparams, restpe) if tparams.nonEmpty => @@ -4207,7 +4274,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val cases = tree.cases if (selector == EmptyTree) { if (pt.typeSymbol == PartialFunctionClass) - synthesizePartialFunction(newTermName(context.unit.fresh.newName("x")), tree.pos, tree, mode, pt) + synthesizePartialFunction(newTermName(context.unit.fresh.newName("x")), tree.pos, paramSynthetic = true, tree, mode, pt) else { val arity = if (isFunctionType(pt)) pt.dealiasWiden.typeArgs.length - 1 else 1 val params = for (i <- List.range(0, arity)) yield @@ -4249,7 +4316,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // it is non-Unit) so we have to retype it. Fortunately it won't come up much // unless the warning is legitimate. if (typed(expr).tpe.typeSymbol != UnitClass) - unit.warning(tree.pos, "enclosing method " + name + " has result type Unit: return value discarded") + context.warning(tree.pos, "enclosing method " + name + " has result type Unit: return value discarded") } val res = treeCopy.Return(tree, checkDead(expr1)).setSymbol(enclMethod.owner) val tp = pluginsTypedReturn(NothingTpe, this, res, restpt.tpe) @@ -4341,7 +4408,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper c.retyping = true try { val res = newTyper(c).typedArgs(args, mode) - if (c.hasErrors) None else Some(res) + if (c.reporter.hasErrors) None else Some(res) } catch { case ex: CyclicReference => throw ex @@ -4394,7 +4461,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (retry) { val Select(qual, name) = fun tryTypedArgs(args, forArgMode(fun, mode)) match { - case Some(args1) => + case Some(args1) if !args1.exists(arg => arg.exists(_.isErroneous)) => val qual1 = if (!pt.isError) adaptToArguments(qual, name, args1, pt, reportAmbiguous = true, saveErrors = true) else qual @@ -4405,7 +4472,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case _ => () } } - typeErrors foreach issue + typeErrors foreach context.issue setError(treeCopy.Apply(tree, fun, args)) } @@ -4459,7 +4526,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper doTypedApply(tree, fun2, args, mode, pt) case err: SilentTypeError => onError({ - err.reportableErrors foreach issue + err.reportableErrors foreach context.issue args foreach (arg => typed(arg, mode, ErrorType)) setError(tree) }) @@ -4670,8 +4737,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case SilentTypeError(err: AccessTypeError) => (tree1, Some(err)) case SilentTypeError(err) => - context issue err - return setError(tree) + SelectWithUnderlyingError(tree, err) + return tree case SilentResultValue(treeAndPre) => (stabilize(treeAndPre._1, treeAndPre._2, mode, pt), None) } @@ -4696,7 +4763,7 @@ trait Typers extends Adaptations with 
Tags with TypersTracking with PatternTyper else // before failing due to access, try a dynamic call. asDynamicCall getOrElse { - issue(accessibleError.get) + context.issue(accessibleError.get) setError(tree) } case _ => @@ -4707,10 +4774,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // temporarily use `filter` as an alternative for `withFilter` def tryWithFilterAndFilter(tree: Select, qual: Tree): Tree = { - def warn() = unit.deprecationWarning(tree.pos, s"`withFilter' method does not yet exist on ${qual.tpe.widen}, using `filter' method instead") + def warn(sym: Symbol) = context.deprecationWarning(tree.pos, sym, s"`withFilter' method does not yet exist on ${qual.tpe.widen}, using `filter' method instead") silent(_ => typedSelect(tree, qual, nme.withFilter)) orElse { _ => silent(_ => typed1(Select(qual, nme.filter) setPos tree.pos, mode, pt)) match { - case SilentResultValue(res) => warn() ; res + case SilentResultValue(res) => warn(res.symbol) ; res case SilentTypeError(err) => WithFilterError(tree, err) } } @@ -5109,7 +5176,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def isPlausible(m: Symbol) = m.alternatives exists (m => requiresNoArgs(m.info)) def maybeWarn(s: String): Unit = { - def warn(message: String) = context.unit.warning(lit.pos, s"$message Did you forget the interpolator?") + def warn(message: String) = context.warning(lit.pos, s"possible missing interpolator: $message") def suspiciousSym(name: TermName) = context.lookupSymbol(name, _ => true).symbol def suspiciousExpr = InterpolatorCodeRegex findFirstIn s def suspiciousIdents = InterpolatorIdentRegex findAllIn s map (s => suspiciousSym(s drop 1)) @@ -5117,9 +5184,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // heuristics - no warning on e.g. 
a string with only "$asInstanceOf" if (s contains ' ') ( if (suspiciousExpr.nonEmpty) - warn("That looks like an interpolated expression!") // "${...}" + warn("detected an interpolated expression") // "${...}" else - suspiciousIdents find isPlausible foreach (sym => warn(s"`$$${sym.name}` looks like an interpolated identifier!")) // "$id" + suspiciousIdents find isPlausible foreach (sym => warn(s"detected interpolated identifier `$$${sym.name}`")) // "$id" ) } lit match { @@ -5129,7 +5196,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } def typedLiteral(tree: Literal) = { - if (settings.lint) warnMissingInterpolator(tree) + if (settings.warnMissingInterpolator) warnMissingInterpolator(tree) tree setType (if (tree.value.tag == UnitTag) UnitTpe else ConstantType(tree.value)) } @@ -5140,16 +5207,19 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper typed(tree.ref, MonoQualifierModes | mode.onlyTypePat, AnyRefTpe) } - if (!refTyped.isErrorTyped) + if (refTyped.isErrorTyped) { + setError(tree) + } else { tree setType refTyped.tpe.resultType - - if (treeInfo.admitsTypeSelection(refTyped)) tree - else UnstableTreeError(refTyped) + if (refTyped.isErrorTyped || treeInfo.admitsTypeSelection(refTyped)) tree + else UnstableTreeError(tree) + } } def typedSelectFromTypeTree(tree: SelectFromTypeTree) = { val qual1 = typedType(tree.qualifier, mode) - if (qual1.tpe.isVolatile) TypeSelectionFromVolatileTypeError(tree, qual1) + if (qual1.isErrorTyped) setError(treeCopy.SelectFromTypeTree(tree, qual1, tree.name)) + else if (qual1.tpe.isVolatile) TypeSelectionFromVolatileTypeError(tree, qual1) else typedSelect(tree, qual1, tree.name) } @@ -5161,7 +5231,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def typedExistentialTypeTree(tree: ExistentialTypeTree) = { val tree1 = typerWithLocalContext(context.makeNewScope(tree, context.owner)){ - _.typedExistentialTypeTree(tree, mode) + typer => + if (context.inTypeConstructorAllowed) + typer.context.withinTypeConstructorAllowed(typer.typedExistentialTypeTree(tree, mode)) + else + typer.typedExistentialTypeTree(tree, mode) } checkExistentialsFeature(tree1.pos, tree1.tpe, "the existential type") tree1 @@ -5487,11 +5561,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val commonMessage = "macro defs must have explicitly specified return types" def reportFailure() = { ddef.symbol.setFlag(IS_ERROR) - unit.error(ddef.pos, commonMessage) + context.error(ddef.pos, commonMessage) } def reportWarning(inferredType: Type) = { val explanation = s"inference of $inferredType from macro impl's c.Expr[$inferredType] is deprecated and is going to stop working in 2.12" - unit.deprecationWarning(ddef.pos, s"$commonMessage ($explanation)") + context.deprecationWarning(ddef.pos, ddef.symbol, s"$commonMessage ($explanation)") } computeMacroDefTypeFromMacroImplRef(ddef, rhs1) match { case ErrorType => ErrorType diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala index cc2d9141ce4d..fc1f45e35809 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala @@ -142,17 +142,30 @@ trait Unapplies extends ast.TreeDSL { /** The unapply method corresponding to a case class */ def caseModuleUnapplyMeth(cdef: ClassDef): DefDef = { - val tparams = constrTparamsInvariant(cdef) - val method = constrParamss(cdef) 
match { + val tparams = constrTparamsInvariant(cdef) + val method = constrParamss(cdef) match { case xs :: _ if xs.nonEmpty && isRepeatedParamType(xs.last.tpt) => nme.unapplySeq case _ => nme.unapply } - val cparams = List(ValDef(Modifiers(PARAM | SYNTHETIC), unapplyParamName, classType(cdef, tparams), EmptyTree)) - val ifNull = if (constrParamss(cdef).head.isEmpty) FALSE else REF(NoneModule) - val body = nullSafe({ case Ident(x) => caseClassUnapplyReturnValue(x, cdef) }, ifNull)(Ident(unapplyParamName)) + val cparams = List(ValDef(Modifiers(PARAM | SYNTHETIC), unapplyParamName, classType(cdef, tparams), EmptyTree)) + val resultType = if (!settings.isScala212) TypeTree() else { // fix for SI-6541 under -Xsource:2.12 + def repeatedToSeq(tp: Tree) = tp match { + case AppliedTypeTree(Select(_, tpnme.REPEATED_PARAM_CLASS_NAME), tps) => AppliedTypeTree(gen.rootScalaDot(tpnme.Seq), tps) + case _ => tp + } + constrParamss(cdef) match { + case Nil | Nil :: _ => + gen.rootScalaDot(tpnme.Boolean) + case params :: _ => + val constrParamTypes = params.map(param => repeatedToSeq(param.tpt)) + AppliedTypeTree(gen.rootScalaDot(tpnme.Option), List(treeBuilder.makeTupleType(constrParamTypes))) + } + } + val ifNull = if (constrParamss(cdef).head.isEmpty) FALSE else REF(NoneModule) + val body = nullSafe({ case Ident(x) => caseClassUnapplyReturnValue(x, cdef) }, ifNull)(Ident(unapplyParamName)) atPos(cdef.pos.focus)( - DefDef(caseMods, method, tparams, List(cparams), TypeTree(), body) + DefDef(caseMods, method, tparams, List(cparams), resultType, body) ) } diff --git a/src/compiler/scala/tools/nsc/util/ClassFileLookup.scala b/src/compiler/scala/tools/nsc/util/ClassFileLookup.scala new file mode 100644 index 000000000000..44516512292a --- /dev/null +++ b/src/compiler/scala/tools/nsc/util/ClassFileLookup.scala @@ -0,0 +1,57 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package scala.tools.nsc.util + +import scala.tools.nsc.io.AbstractFile +import java.net.URL + +/** + * Simple interface that allows us to abstract over how class file lookup is performed + * in different classpath representations. + */ +// TODO at the end, after the possible removal of the old classpath representation, this class shouldn't be generic +// T should be just changed to AbstractFile +trait ClassFileLookup[T] { + def findClassFile(name: String): Option[AbstractFile] + + /** + * It returns both classes from class file and source files (as our base ClassRepresentation). + * So note that it's not so strictly related to findClassFile. + */ + def findClass(name: String): Option[ClassRepresentation[T]] + + /** + * A sequence of URLs representing this classpath. + */ + def asURLs: Seq[URL] + + /** The whole classpath in the form of one String. + */ + def asClassPathString: String + + // for compatibility purposes + @deprecated("Use asClassPathString instead of this one", "2.11.5") + def asClasspathString: String = asClassPathString + + /** The whole sourcepath in the form of one String. + */ + def asSourcePathString: String +} + +/** + * Represents classes which can be loaded with a ClassfileLoader and/or SourcefileLoader. 
+ */ +// TODO at the end, after the possible removal of the old classpath implementation, this class shouldn't be generic +// T should be just changed to AbstractFile +trait ClassRepresentation[T] { + def binary: Option[T] + def source: Option[AbstractFile] + + def name: String +} + +object ClassRepresentation { + def unapply[T](classRep: ClassRepresentation[T]): Option[(Option[T], Option[AbstractFile])] = + Some((classRep.binary, classRep.source)) +} diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala index d2ba61cc0b57..8d4d07759f26 100644 --- a/src/compiler/scala/tools/nsc/util/ClassPath.scala +++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala @@ -7,16 +7,18 @@ package scala.tools.nsc package util +import io.{ AbstractFile, Directory, File, Jar } +import java.net.MalformedURLException import java.net.URL +import java.util.regex.PatternSyntaxException import scala.collection.{ mutable, immutable } -import io.{ File, Directory, Path, Jar, AbstractFile } import scala.reflect.internal.util.StringOps.splitWhere -import Jar.isJarOrZip +import scala.tools.nsc.classpath.FileUtils + import File.pathSeparator -import scala.collection.convert.WrapAsScala.enumerationAsScalaIterator -import java.net.MalformedURLException -import java.util.regex.PatternSyntaxException -import scala.reflect.runtime.ReflectionUtils +import FileUtils.endsClass +import FileUtils.endsScalaOrJava +import Jar.isJarOrZip /**

    * This module provides star expansion of '-classpath' option arguments, behaves the same as @@ -89,7 +91,7 @@ object ClassPath { /** A class modeling aspects of a ClassPath which should be * propagated to any classpaths it creates. */ - abstract class ClassPathContext[T] { + abstract class ClassPathContext[T] extends classpath.ClassPathFactory[ClassPath[T]] { /** A filter which can be used to exclude entities from the classpath * based on their name. */ @@ -99,75 +101,47 @@ object ClassPath { */ def validClassFile(name: String) = endsClass(name) && isValidName(name) def validPackage(name: String) = (name != "META-INF") && (name != "") && (name.charAt(0) != '.') - def validSourceFile(name: String) = endsScala(name) || endsJava(name) + def validSourceFile(name: String) = endsScalaOrJava(name) /** From the representation to its identifier. */ def toBinaryName(rep: T): String - /** Create a new classpath based on the abstract file. - */ - def newClassPath(file: AbstractFile): ClassPath[T] - - /** Creators for sub classpaths which preserve this context. - */ def sourcesInPath(path: String): List[ClassPath[T]] = for (file <- expandPath(path, expandStar = false) ; dir <- Option(AbstractFile getDirectory file)) yield new SourcePath[T](dir, this) - - def contentsOfDirsInPath(path: String): List[ClassPath[T]] = - for (dir <- expandPath(path, expandStar = false) ; name <- expandDir(dir) ; entry <- Option(AbstractFile getDirectory name)) yield - newClassPath(entry) - - def classesInExpandedPath(path: String): IndexedSeq[ClassPath[T]] = - classesInPathImpl(path, expand = true).toIndexedSeq - - def classesInPath(path: String) = classesInPathImpl(path, expand = false) - - // Internal - private def classesInPathImpl(path: String, expand: Boolean) = - for (file <- expandPath(path, expand) ; dir <- Option(AbstractFile getDirectory file)) yield - newClassPath(dir) - - def classesInManifest(used: Boolean) = - if (used) for (url <- manifests) yield newClassPath(AbstractFile getResources url) else Nil } - def manifests = Thread.currentThread().getContextClassLoader().getResources("META-INF/MANIFEST.MF").filter(_.getProtocol() == "jar").toList + def manifests: List[java.net.URL] = { + import scala.collection.convert.WrapAsScala.enumerationAsScalaIterator + Thread.currentThread().getContextClassLoader() + .getResources("META-INF/MANIFEST.MF") + .filter(_.getProtocol == "jar").toList + } class JavaContext extends ClassPathContext[AbstractFile] { def toBinaryName(rep: AbstractFile) = { val name = rep.name assert(endsClass(name), name) - name.substring(0, name.length - 6) + FileUtils.stripClassExtension(name) } + def newClassPath(dir: AbstractFile) = new DirectoryClassPath(dir, this) } object DefaultJavaContext extends JavaContext - private def endsClass(s: String) = s.length > 6 && s.substring(s.length - 6) == ".class" - private def endsScala(s: String) = s.length > 6 && s.substring(s.length - 6) == ".scala" - private def endsJava(s: String) = s.length > 5 && s.substring(s.length - 5) == ".java" - /** From the source file to its identifier. 
*/ - def toSourceName(f: AbstractFile): String = { - val name = f.name - - if (endsScala(name)) name.substring(0, name.length - 6) - else if (endsJava(name)) name.substring(0, name.length - 5) - else throw new FatalError("Unexpected source file ending: " + name) - } + def toSourceName(f: AbstractFile): String = FileUtils.stripSourceExtension(f.name) } + import ClassPath._ /** * Represents a package which contains classes and other packages */ -abstract class ClassPath[T] { - type AnyClassRep = ClassPath[T]#ClassRep - +abstract class ClassPath[T] extends ClassFileLookup[T] { /** * The short name of the package (without prefix) */ @@ -179,28 +153,37 @@ abstract class ClassPath[T] { */ def origin: Option[String] = None - /** A list of URLs representing this classpath. - */ - def asURLs: List[URL] - - /** The whole classpath in the form of one String. - */ - def asClasspathString: String - /** Info which should be propagated to any sub-classpaths. */ def context: ClassPathContext[T] /** Lists of entities. */ - def classes: IndexedSeq[AnyClassRep] + def classes: IndexedSeq[ClassRepresentation[T]] def packages: IndexedSeq[ClassPath[T]] def sourcepaths: IndexedSeq[AbstractFile] + /** The entries this classpath is composed of. In class `ClassPath` it's just the singleton list containing `this`. + * Subclasses such as `MergedClassPath` typically return lists with more elements. + */ + def entries: IndexedSeq[ClassPath[T]] = IndexedSeq(this) + + /** Merge classpath of `platform` and `urls` into merged classpath */ + def mergeUrlsIntoClassPath(urls: URL*): MergedClassPath[T] = { + // Collect our new jars/directories and add them to the existing set of classpaths + val allEntries = + (entries ++ + urls.map(url => context.newClassPath(io.AbstractFile.getURL(url))) + ).distinct + + // Combine all of our classpaths (old and new) into one merged classpath + new MergedClassPath(allEntries, context) + } + /** * Represents classes which can be loaded with a ClassfileLoader and/or SourcefileLoader. */ - case class ClassRep(binary: Option[T], source: Option[AbstractFile]) { + case class ClassRep(binary: Option[T], source: Option[AbstractFile]) extends ClassRepresentation[T] { def name: String = binary match { case Some(x) => context.toBinaryName(x) case _ => @@ -219,25 +202,27 @@ abstract class ClassPath[T] { * Find a ClassRep given a class name of the form "package.subpackage.ClassName". 
* Does not support nested classes on .NET */ - def findClass(name: String): Option[AnyClassRep] = + override def findClass(name: String): Option[ClassRepresentation[T]] = splitWhere(name, _ == '.', doDropIndex = true) match { case Some((pkg, rest)) => val rep = packages find (_.name == pkg) flatMap (_ findClass rest) rep map { - case x: ClassRep => x + case x: ClassRepresentation[T] => x case x => throw new FatalError("Unexpected ClassRep '%s' found searching for name '%s'".format(x, name)) } case _ => classes find (_.name == name) } - def findSourceFile(name: String): Option[AbstractFile] = + override def findClassFile(name: String): Option[AbstractFile] = findClass(name) match { - case Some(ClassRep(Some(x: AbstractFile), _)) => Some(x) + case Some(ClassRepresentation(Some(x: AbstractFile), _)) => Some(x) case _ => None } - def sortString = join(split(asClasspathString).sorted: _*) + override def asSourcePathString: String = sourcepaths.mkString(pathSeparator) + + def sortString = join(split(asClassPathString).sorted: _*) override def equals(that: Any) = that match { case x: ClassPath[_] => this.sortString == x.sortString case _ => false @@ -249,10 +234,12 @@ abstract class ClassPath[T] { * A Classpath containing source files */ class SourcePath[T](dir: AbstractFile, val context: ClassPathContext[T]) extends ClassPath[T] { + import FileUtils.AbstractFileOps + def name = dir.name override def origin = dir.underlyingSource map (_.path) - def asURLs = if (dir.file == null) Nil else List(dir.toURL) - def asClasspathString = dir.path + def asURLs = dir.toURLs() + def asClassPathString = dir.path val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq(dir) private def traverse() = { @@ -275,10 +262,12 @@ class SourcePath[T](dir: AbstractFile, val context: ClassPathContext[T]) extends * A directory (or a .jar file) containing classfiles and packages */ class DirectoryClassPath(val dir: AbstractFile, val context: ClassPathContext[AbstractFile]) extends ClassPath[AbstractFile] { + import FileUtils.AbstractFileOps + def name = dir.name override def origin = dir.underlyingSource map (_.path) - def asURLs = if (dir.file == null) List(new URL(name)) else List(dir.toURL) - def asClasspathString = dir.path + def asURLs = dir.toURLs(default = Seq(new URL(name))) + def asClassPathString = dir.path val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq() // calculates (packages, classes) in one traversal. @@ -322,9 +311,10 @@ extends MergedClassPath[T](original.entries map (e => subst getOrElse (e, e)), o * A classpath unifying multiple class- and sourcepath entries. 
*/ class MergedClassPath[T]( - val entries: IndexedSeq[ClassPath[T]], + override val entries: IndexedSeq[ClassPath[T]], val context: ClassPathContext[T]) extends ClassPath[T] { + def this(entries: TraversableOnce[ClassPath[T]], context: ClassPathContext[T]) = this(entries.toIndexedSeq, context) @@ -333,12 +323,12 @@ extends ClassPath[T] { lazy val sourcepaths: IndexedSeq[AbstractFile] = entries flatMap (_.sourcepaths) override def origin = Some(entries map (x => x.origin getOrElse x.name) mkString ("Merged(", ", ", ")")) - override def asClasspathString: String = join(entries map (_.asClasspathString) : _*) + override def asClassPathString: String = join(entries map (_.asClassPathString) : _*) - lazy val classes: IndexedSeq[AnyClassRep] = { + lazy val classes: IndexedSeq[ClassRepresentation[T]] = { var count = 0 val indices = mutable.HashMap[String, Int]() - val cls = new mutable.ArrayBuffer[AnyClassRep](1024) + val cls = new mutable.ArrayBuffer[ClassRepresentation[T]](1024) for (e <- entries; c <- e.classes) { val name = c.name @@ -347,9 +337,9 @@ extends ClassPath[T] { val existing = cls(idx) if (existing.binary.isEmpty && c.binary.isDefined) - cls(idx) = existing.copy(binary = c.binary) + cls(idx) = ClassRep(binary = c.binary, source = existing.source) if (existing.source.isEmpty && c.source.isDefined) - cls(idx) = existing.copy(source = c.source) + cls(idx) = ClassRep(binary = existing.binary, source = c.source) } else { indices(name) = count @@ -387,10 +377,12 @@ extends ClassPath[T] { } new MergedClassPath[T](newEntries, context) } + def show() { println("ClassPath %s has %d entries and results in:\n".format(name, entries.size)) - asClasspathString split ':' foreach (x => println(" " + x)) + asClassPathString split ':' foreach (x => println(" " + x)) } + override def toString() = "merged classpath "+ entries.mkString("(", "\n", ")") } diff --git a/src/compiler/scala/tools/nsc/util/DocStrings.scala b/src/compiler/scala/tools/nsc/util/DocStrings.scala index ba44126df26c..352816803f86 100755 --- a/src/compiler/scala/tools/nsc/util/DocStrings.scala +++ b/src/compiler/scala/tools/nsc/util/DocStrings.scala @@ -8,7 +8,7 @@ package util import scala.reflect.internal.Chars._ -/** Utilitity methods for doc comment strings +/** Utility methods for doc comment strings */ object DocStrings { diff --git a/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala b/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala index 225f6ca68eec..be245347a89d 100644 --- a/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala +++ b/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala @@ -14,12 +14,10 @@ abstract class StatisticsInfo { import global._ import scala.reflect.internal.TreesStats.nodeByType - val phasesShown = List("parser", "typer", "patmat", "erasure", "cleanup") - val retainedCount = Statistics.newCounter("#retained tree nodes") val retainedByType = Statistics.newByClass("#retained tree nodes by type")(Statistics.newCounter("")) - def print(phase: Phase) = if (phasesShown contains phase.name) { + def print(phase: Phase) = if (settings.Ystatistics contains phase.name) { inform("*** Cumulative statistics at phase " + phase) retainedCount.value = 0 for (c <- retainedByType.keys) diff --git a/src/compiler/scala/tools/reflect/FastTrack.scala b/src/compiler/scala/tools/reflect/FastTrack.scala index 8630ecf69eaa..8fed53c89fb0 100644 --- a/src/compiler/scala/tools/reflect/FastTrack.scala +++ b/src/compiler/scala/tools/reflect/FastTrack.scala @@ -5,19 +5,23 @@ import scala.reflect.reify.Taggers import 
scala.tools.nsc.typechecker.{ Analyzer, Macros } import scala.reflect.runtime.Macros.currentMirror import scala.reflect.api.Universe -import scala.tools.reflect.quasiquotes.{ Quasiquotes => QuasiquoteImpls } +import scala.reflect.quasiquotes.{ Quasiquotes => QuasiquoteImpls } /** Optimizes system macro expansions by hardwiring them directly to their implementations * bypassing standard reflective load and invoke to avoid the overhead of Java/Scala reflection. */ -trait FastTrack { - self: Macros with Analyzer => +class FastTrack[MacrosAndAnalyzer <: Macros with Analyzer](val macros: MacrosAndAnalyzer) { + import macros._ import global._ import definitions._ import scala.language.implicitConversions import treeInfo.Applied + def contains(symbol: Symbol): Boolean = fastTrackCache().contains(symbol) + def apply(symbol: Symbol): FastTrackEntry = fastTrackCache().apply(symbol) + def get(symbol: Symbol): Option[FastTrackEntry] = fastTrackCache().get(symbol) + private implicit def context2taggers(c0: MacroContext): Taggers { val c: c0.type } = new { val c: c0.type = c0 } with Taggers private implicit def context2macroimplementations(c0: MacroContext): FormatInterpolator { val c: c0.type } = @@ -39,7 +43,6 @@ trait FastTrack { } /** A map from a set of pre-established macro symbols to their implementations. */ - def fastTrack: Map[Symbol, FastTrackEntry] = fastTrackCache() private val fastTrackCache = perRunCaches.newGeneric[Map[Symbol, FastTrackEntry]] { val runDefinitions = currentRun.runDefinitions import runDefinitions._ diff --git a/src/compiler/scala/tools/reflect/FormatInterpolator.scala b/src/compiler/scala/tools/reflect/FormatInterpolator.scala index 0258002850bc..b445f1e2bbce 100644 --- a/src/compiler/scala/tools/reflect/FormatInterpolator.scala +++ b/src/compiler/scala/tools/reflect/FormatInterpolator.scala @@ -93,7 +93,8 @@ abstract class FormatInterpolator { case '\n' => "\\n" case '\f' => "\\f" case '\r' => "\\r" - case '\"' => "\\u0022" // $" in future + case '\"' => "${'\"'}" /* avoid lint warn */ + + " or a triple-quoted literal \"\"\"with embedded \" or \\u0022\"\"\"" // $" in future case '\'' => "'" case '\\' => """\\""" case x => "\\u%04x" format x @@ -116,7 +117,7 @@ abstract class FormatInterpolator { c.error(errPoint, msg("unsupported")) s0 } else { - c.enclosingUnit.deprecationWarning(errPoint, msg("deprecated")) + currentRun.reporting.deprecationWarning(errPoint, msg("deprecated")) try StringContext.treatEscapes(s0) catch escapeHatch } } @@ -181,13 +182,23 @@ abstract class FormatInterpolator { case (part, n) => copyPart(part, n) } - //q"{..$evals; ${fstring.toString}.format(..$ids)}" - locally { + //q"{..$evals; new StringOps(${fstring.toString}).format(..$ids)}" + val format = fstring.toString + if (ids.isEmpty && !format.contains("%")) Literal(Constant(format)) + else { + val scalaPackage = Select(Ident(nme.ROOTPKG), TermName("scala")) + val newStringOps = Select( + New(Select(Select(Select(scalaPackage, + TermName("collection")), TermName("immutable")), TypeName("StringOps"))), + termNames.CONSTRUCTOR + ) val expr = Apply( Select( - Literal(Constant(fstring.toString)), - newTermName("format")), + Apply( + newStringOps, + List(Literal(Constant(format)))), + TermName("format")), ids.toList ) val p = c.macroApplication.pos diff --git a/src/compiler/scala/tools/reflect/ReflectMain.scala b/src/compiler/scala/tools/reflect/ReflectMain.scala index 3ae21b6b9887..8d8418945a4e 100644 --- a/src/compiler/scala/tools/reflect/ReflectMain.scala +++ 
b/src/compiler/scala/tools/reflect/ReflectMain.scala @@ -1,17 +1,17 @@ package scala.tools package reflect +import scala.reflect.internal.util.ScalaClassLoader import scala.tools.nsc.Driver import scala.tools.nsc.Global import scala.tools.nsc.Settings -import scala.tools.nsc.util.ScalaClassLoader -import scala.tools.util.PathResolver +import scala.tools.util.PathResolverFactory object ReflectMain extends Driver { private def classloaderFromSettings(settings: Settings) = { - val classpath = new PathResolver(settings).result - ScalaClassLoader.fromURLs(classpath.asURLs, getClass.getClassLoader) + val classPathURLs = PathResolverFactory.create(settings).resultAsURLs + ScalaClassLoader.fromURLs(classPathURLs, getClass.getClassLoader) } override def newCompiler(): Global = new ReflectGlobal(settings, reporter, classloaderFromSettings(settings)) diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala index 3b12086cc740..1643e0061fc9 100644 --- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala +++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala @@ -141,7 +141,8 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => val run = new Run run.symSource(ownerClass) = NoAbstractFile // need to set file to something different from null, so that currentRun.defines works phase = run.typerPhase // need to set a phase to something <= typerPhase, otherwise implicits in typedSelect will be disabled - currentTyper.context.setReportErrors() // need to manually set context mode, otherwise typer.silent will throw exceptions + globalPhase = run.typerPhase // amazing... looks like phase and globalPhase are different things, so we need to set them separately + currentTyper.context.initRootContext() // need to manually set context mode, otherwise typer.silent will throw exceptions reporter.reset() val expr3 = withContext(transform(currentTyper, expr2)) diff --git a/src/compiler/scala/tools/util/Javap.scala b/src/compiler/scala/tools/util/Javap.scala deleted file mode 100644 index 3cfc1eb2a1c8..000000000000 --- a/src/compiler/scala/tools/util/Javap.scala +++ /dev/null @@ -1,32 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools -package util - -import scala.tools.nsc.util.ScalaClassLoader -import java.io.PrintWriter - -trait JpResult { - def isError: Boolean - def value: Any - def show(): Unit -} - -trait Javap { - def loader: ScalaClassLoader - def printWriter: PrintWriter - def apply(args: Seq[String]): List[JpResult] - def tryFile(path: String): Option[Array[Byte]] - def tryClass(path: String): Array[Byte] -} - -object NoJavap extends Javap { - def loader: ScalaClassLoader = getClass.getClassLoader - def printWriter: PrintWriter = new PrintWriter(System.err, true) - def apply(args: Seq[String]): List[JpResult] = Nil - def tryFile(path: String): Option[Array[Byte]] = None - def tryClass(path: String): Array[Byte] = Array() -} diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala index 5526660509eb..8e5b1e0a5c1f 100644 --- a/src/compiler/scala/tools/util/PathResolver.scala +++ b/src/compiler/scala/tools/util/PathResolver.scala @@ -7,14 +7,17 @@ package scala package tools package util +import java.net.URL import scala.tools.reflect.WrappedProperties.AccessControl -import scala.tools.nsc.{ Settings } -import scala.tools.nsc.util.{ ClassPath, JavaClassPath } +import 
scala.tools.nsc.Settings +import scala.tools.nsc.util.{ ClassFileLookup, ClassPath, JavaClassPath } import scala.reflect.io.{ File, Directory, Path, AbstractFile } import scala.reflect.runtime.ReflectionUtils import ClassPath.{ JavaContext, DefaultJavaContext, join, split } import PartialFunction.condOpt import scala.language.postfixOps +import scala.tools.nsc.classpath.{ AggregateFlatClassPath, ClassPathFactory, FlatClassPath, FlatClassPathFactory } +import scala.tools.nsc.settings.ClassPathRepresentationType // Loosely based on the draft specification at: // https://wiki.scala-lang.org/display/SIW/Classpath @@ -48,9 +51,8 @@ object PathResolver { /** Values found solely by inspecting environment or property variables. */ object Environment { - private def searchForBootClasspath = ( + private def searchForBootClasspath = systemProperties find (_._1 endsWith ".boot.class.path") map (_._2) getOrElse "" - ) /** Environment variables which java pays attention to so it * seems we do as well. @@ -104,7 +106,7 @@ object PathResolver { else if (scalaLibAsDir.isDirectory) scalaLibAsDir.path else "" - // XXX It must be time for someone to figure out what all these things + // TODO It must be time for someone to figure out what all these things // are intended to do. This is disabled here because it was causing all // the scala jars to end up on the classpath twice: one on the boot // classpath as set up by the runner (or regular classpath under -nobootcp) @@ -170,39 +172,48 @@ object PathResolver { !ReflectionUtils.scalacShouldntLoadClassfile(name) } - // called from scalap + @deprecated("This method is no longer used by scalap and will be deleted", "2.11.5") def fromPathString(path: String, context: JavaContext = DefaultJavaContext): JavaClassPath = { val s = new Settings() s.classpath.value = path - new PathResolver(s, context) result + new PathResolver(s, context).result } /** With no arguments, show the interesting values in Environment and Defaults. * If there are arguments, show those in Calculated as if those options had been * given to a scala runner.
*/ - def main(args: Array[String]): Unit = { + def main(args: Array[String]): Unit = if (args.isEmpty) { println(Environment) println(Defaults) - } - else { + } else { val settings = new Settings() val rest = settings.processArguments(args.toList, processAll = false)._2 - val pr = new PathResolver(settings) - println(" COMMAND: 'scala %s'".format(args.mkString(" "))) + val pr = PathResolverFactory.create(settings) + println("COMMAND: 'scala %s'".format(args.mkString(" "))) println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" "))) - pr.result.show() + + pr.result match { + case cp: JavaClassPath => + cp.show() + case cp: AggregateFlatClassPath => + println(s"ClassPath has ${cp.aggregates.size} entries and results in:\n${cp.asClassPathStrings}") + } } - } } -class PathResolver(settings: Settings, context: JavaContext) { - import PathResolver.{ Defaults, Environment, AsLines, MkLines, ppcp } +trait PathResolverResult { + def result: ClassFileLookup[AbstractFile] - def this(settings: Settings) = this(settings, - if (settings.YnoLoadImplClass) PathResolver.NoImplClassJavaContext - else DefaultJavaContext) + def resultAsURLs: Seq[URL] = result.asURLs +} + +abstract class PathResolverBase[BaseClassPathType <: ClassFileLookup[AbstractFile], ResultClassPathType <: BaseClassPathType] +(settings: Settings, classPathFactory: ClassPathFactory[BaseClassPathType]) + extends PathResolverResult { + + import PathResolver.{ AsLines, Defaults, ppcp } private def cmdLineOrElse(name: String, alt: String) = { (commandLineFor(name) match { @@ -232,6 +243,7 @@ class PathResolver(settings: Settings, context: JavaContext) { def javaUserClassPath = if (useJavaClassPath) Defaults.javaUserClassPath else "" def scalaBootClassPath = cmdLineOrElse("bootclasspath", Defaults.scalaBootClassPath) def scalaExtDirs = cmdLineOrElse("extdirs", Defaults.scalaExtDirs) + /** Scaladoc doesn't need any bootstrapping, otherwise will create errors such as: * [scaladoc] ../scala-trunk/src/reflect/scala/reflect/macros/Reifiers.scala:89: error: object api is not a member of package reflect * [scaladoc] case class ReificationException(val pos: reflect.api.PositionApi, val msg: String) extends Throwable(msg) @@ -250,16 +262,14 @@ class PathResolver(settings: Settings, context: JavaContext) { * - Otherwise, if CLASSPATH is set, it is that * - If neither of those, then "." is used. */ - def userClassPath = ( - if (!settings.classpath.isDefault) - settings.classpath.value + def userClassPath = + if (!settings.classpath.isDefault) settings.classpath.value else sys.env.getOrElse("CLASSPATH", ".") - ) - import context._ + import classPathFactory._ // Assemble the elements! - def basis = List[Traversable[ClassPath[AbstractFile]]]( + def basis = List[Traversable[BaseClassPathType]]( classesInPath(javaBootClassPath), // 1. The Java bootstrap class path. contentsOfDirsInPath(javaExtDirs), // 2. The Java extension class path. classesInExpandedPath(javaUserClassPath), // 3. The Java application class path. 
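// Illustrative sketch, not part of the patch: one way the factory-based API defined
// in this file (PathResolverFactory, PathResolverResult, ClassFileLookup) might be
// exercised from client code. Depending on -YclasspathImpl it yields either the
// recursive or the flat classpath implementation, both consumed through the
// ClassFileLookup interface. The object name ClassPathDemo is hypothetical.
import scala.tools.nsc.Settings
import scala.tools.util.PathResolverFactory

object ClassPathDemo {
  def main(args: Array[String]): Unit = {
    val settings = new Settings()
    settings.usejavacp.value = true                      // reuse the JVM's own classpath
    val resolved = PathResolverFactory.create(settings)  // a PathResolverResult
    resolved.resultAsURLs foreach println                // the URLs a classloader would see
    println(resolved.result.findClassFile("scala.Option").isDefined)
  }
}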
@@ -278,7 +288,7 @@ class PathResolver(settings: Settings, context: JavaContext) { | javaBootClassPath = ${ppcp(javaBootClassPath)} | javaExtDirs = ${ppcp(javaExtDirs)} | javaUserClassPath = ${ppcp(javaUserClassPath)} - | useJavaClassPath = $useJavaClassPath + | useJavaClassPath = $useJavaClassPath | scalaBootClassPath = ${ppcp(scalaBootClassPath)} | scalaExtDirs = ${ppcp(scalaExtDirs)} | userClassPath = ${ppcp(userClassPath)} @@ -288,8 +298,10 @@ class PathResolver(settings: Settings, context: JavaContext) { def containers = Calculated.containers - lazy val result = { - val cp = new JavaClassPath(containers.toIndexedSeq, context) + import PathResolver.MkLines + + def result: ResultClassPathType = { + val cp = computeResult() if (settings.Ylogcp) { Console print f"Classpath built from ${settings.toConciseString} %n" Console print s"Defaults: ${PathResolver.Defaults}" @@ -301,5 +313,37 @@ class PathResolver(settings: Settings, context: JavaContext) { cp } - def asURLs = result.asURLs + @deprecated("Use resultAsURLs instead of this one", "2.11.5") + def asURLs: List[URL] = resultAsURLs.toList + + protected def computeResult(): ResultClassPathType +} + +class PathResolver(settings: Settings, context: JavaContext) + extends PathResolverBase[ClassPath[AbstractFile], JavaClassPath](settings, context) { + + def this(settings: Settings) = + this(settings, + if (settings.YnoLoadImplClass) PathResolver.NoImplClassJavaContext + else DefaultJavaContext) + + override protected def computeResult(): JavaClassPath = + new JavaClassPath(containers.toIndexedSeq, context) +} + +class FlatClassPathResolver(settings: Settings, flatClassPathFactory: ClassPathFactory[FlatClassPath]) + extends PathResolverBase[FlatClassPath, AggregateFlatClassPath](settings, flatClassPathFactory) { + + def this(settings: Settings) = this(settings, new FlatClassPathFactory(settings)) + + override protected def computeResult(): AggregateFlatClassPath = AggregateFlatClassPath(containers.toIndexedSeq) +} + +object PathResolverFactory { + + def create(settings: Settings): PathResolverResult = + settings.YclasspathImpl.value match { + case ClassPathRepresentationType.Flat => new FlatClassPathResolver(settings) + case ClassPathRepresentationType.Recursive => new PathResolver(settings) + } } diff --git a/src/compiler/scala/tools/util/SocketServer.scala b/src/compiler/scala/tools/util/SocketServer.scala index 1d39a59cf4d6..7858bf06581b 100644 --- a/src/compiler/scala/tools/util/SocketServer.scala +++ b/src/compiler/scala/tools/util/SocketServer.scala @@ -28,12 +28,12 @@ trait CompileOutputCommon { * @author Martin Odersky * @version 1.0 */ -abstract class SocketServer extends CompileOutputCommon { +abstract class SocketServer(fixPort: Int = 0) extends CompileOutputCommon { def shutdown: Boolean def session(): Unit def timeout(): Unit = () // called after a timeout is detected for subclasses to cleanup // a hook for subclasses - protected def createServerSocket(): ServerSocket = new ServerSocket(0) + protected def createServerSocket(): ServerSocket = new ServerSocket(fixPort) var in: BufferedReader = _ var out: PrintWriter = _ diff --git a/src/eclipse/partest/.classpath b/src/eclipse/partest/.classpath index c2aab19f1890..7e2f119193fc 100644 --- a/src/eclipse/partest/.classpath +++ b/src/eclipse/partest/.classpath @@ -4,11 +4,11 @@ - - + + - - + + diff --git a/src/eclipse/repl/.classpath b/src/eclipse/repl/.classpath index 601a231aeb2e..cbaabb9af1af 100644 --- a/src/eclipse/repl/.classpath +++ b/src/eclipse/repl/.classpath @@ -1,11 +1,10 
@@ - - - - - - - - + + + + + + + diff --git a/src/eclipse/scaladoc/.classpath b/src/eclipse/scaladoc/.classpath index c8f0e89b8a8c..ee6427176a79 100644 --- a/src/eclipse/scaladoc/.classpath +++ b/src/eclipse/scaladoc/.classpath @@ -6,8 +6,8 @@ - - - + + + diff --git a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java index 657850415586..9bd378c61c26 100644 --- a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java +++ b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java @@ -23,6 +23,7 @@ * @since 1.8 * @author Doug Lea */ +@Deprecated /*public*/ abstract class CountedCompleter extends ForkJoinTask { private static final long serialVersionUID = 5232453752276485070L; @@ -471,6 +472,7 @@ private static sun.misc.Unsafe getUnsafe() { * @since 1.7 * @author Doug Lea */ +@Deprecated public class ForkJoinPool extends AbstractExecutorService { /* @@ -3578,6 +3580,7 @@ static void quiesceCommonPool() { * } * }} */ + @Deprecated public static interface ManagedBlocker { /** * Possibly blocks the current thread, for example waiting for diff --git a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java index fd1e132b07ea..b4f5c24ca93f 100644 --- a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java +++ b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java @@ -180,6 +180,7 @@ * @since 1.7 * @author Doug Lea */ +@Deprecated public abstract class ForkJoinTask implements Future, Serializable { /* @@ -391,6 +392,7 @@ private int doInvoke() { * any ForkJoinPool will call helpExpungeStaleExceptions when its * pool becomes isQuiescent. */ + @Deprecated static final class ExceptionNode extends WeakReference> { final Throwable ex; ExceptionNode next; @@ -1330,6 +1332,7 @@ public final boolean compareAndSetForkJoinTaskTag(short e, short tag) { * to be compliant with AbstractExecutorService constraints * when used in ForkJoinPool. 
*/ + @Deprecated static final class AdaptedRunnable extends ForkJoinTask implements RunnableFuture { final Runnable runnable; @@ -1349,6 +1352,7 @@ static final class AdaptedRunnable extends ForkJoinTask /** * Adaptor for Runnables without results */ + @Deprecated static final class AdaptedRunnableAction extends ForkJoinTask implements RunnableFuture { final Runnable runnable; @@ -1366,6 +1370,7 @@ public final void setRawResult(Void v) { } /** * Adaptor for Callables */ + @Deprecated static final class AdaptedCallable extends ForkJoinTask implements RunnableFuture { final Callable callable; diff --git a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java index e62fc6eb713e..e00fb5cc4371 100644 --- a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java +++ b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java @@ -20,6 +20,7 @@ * @since 1.7 * @author Doug Lea */ +@Deprecated public class ForkJoinWorkerThread extends Thread { /* * ForkJoinWorkerThreads are managed by ForkJoinPools and perform diff --git a/src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java b/src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java index 07e81b395df5..47d52af89527 100644 --- a/src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java +++ b/src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java @@ -53,6 +53,7 @@ * @author Doug Lea * @param the type of elements held in this collection */ +@Deprecated public class LinkedTransferQueue extends AbstractQueue implements TransferQueue, java.io.Serializable { private static final long serialVersionUID = -3223113410248163686L; @@ -416,6 +417,7 @@ public class LinkedTransferQueue extends AbstractQueue * unnecessary ordering constraints: Writes that are intrinsically * ordered wrt other accesses or CASes use simple relaxed forms. 
*/ + @Deprecated static final class Node { final boolean isData; // false if this is a request node volatile Object item; // initially non-null if isData; CASed to match @@ -789,6 +791,7 @@ private int countOfMode(boolean data) { return count; } + @Deprecated final class Itr implements Iterator { private Node nextNode; // next node to return item for private E nextItem; // the corresponding item diff --git a/src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java b/src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java index 1e7cdd952dc0..f4a77f0f61dd 100644 --- a/src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java +++ b/src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java @@ -133,6 +133,7 @@ * @since 1.7 * @author Doug Lea */ +@Deprecated public abstract class RecursiveAction extends ForkJoinTask { private static final long serialVersionUID = 5232453952276485070L; diff --git a/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java b/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java index d1e15471431c..097b7cda1fe1 100644 --- a/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java +++ b/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java @@ -36,6 +36,7 @@ * @since 1.7 * @author Doug Lea */ +@Deprecated public abstract class RecursiveTask extends ForkJoinTask { private static final long serialVersionUID = 5232453952276485270L; diff --git a/src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java b/src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java index 19237c909289..3ea1af66bc55 100644 --- a/src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java +++ b/src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java @@ -32,6 +32,7 @@ * @since 1.7 * @author Doug Lea */ +@Deprecated public class ThreadLocalRandom extends Random { // same constants as Random, but must be redeclared because private private static final long multiplier = 0x5DEECE66DL; @@ -80,6 +81,7 @@ protected ThreadLocalRandom initialValue() { * * @return the current thread's {@code ThreadLocalRandom} */ + @Deprecated public static ThreadLocalRandom current() { return localRandom.get(); } diff --git a/src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java b/src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java index 7d149c7ae541..4fcd8ea601e9 100644 --- a/src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java +++ b/src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java @@ -37,6 +37,7 @@ * @author Doug Lea * @param the type of elements held in this collection */ +@Deprecated public interface TransferQueue extends BlockingQueue { /** * Transfers the element to a waiting consumer immediately, if possible. diff --git a/src/forkjoin/scala/concurrent/util/Unsafe.java b/src/forkjoin/scala/concurrent/util/Unsafe.java index ef893c94d979..d82e4bbdd510 100644 --- a/src/forkjoin/scala/concurrent/util/Unsafe.java +++ b/src/forkjoin/scala/concurrent/util/Unsafe.java @@ -7,14 +7,12 @@ \* */ package scala.concurrent.util; - - - import java.lang.reflect.Field; - +@Deprecated public final class Unsafe { + @Deprecated public final static sun.misc.Unsafe instance; static { try { diff --git a/src/intellij-14/README b/src/intellij-14/README new file mode 100644 index 000000000000..310a766a2088 --- /dev/null +++ b/src/intellij-14/README @@ -0,0 +1,12 @@ +Use the latest IntelliJ IDEA release and install the Scala plugin from within the IDE. + +Compilation withing IDEA is performed in "-Dlocker.skip=1" mode: the sources are built +directly using the STARR compiler. 
+ +The following steps are required to use IntelliJ IDEA on Scala trunk + - Run "ant init". This will download some JARs from to ./build/deps, which are + included in IntelliJ's classpath. + - Run src/intellij-14/setup.sh + - Open ./src/intellij-14/scala.ipr in IntelliJ + - File, Project Settings, Project, SDK. Create an SDK entry named "1.6" containing the + Java 1.6 SDK diff --git a/src/intellij-14/actors.iml.SAMPLE b/src/intellij-14/actors.iml.SAMPLE new file mode 100644 index 000000000000..3da7a5f77773 --- /dev/null +++ b/src/intellij-14/actors.iml.SAMPLE @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/intellij-14/asm.iml.SAMPLE b/src/intellij-14/asm.iml.SAMPLE new file mode 100644 index 000000000000..9b2fd58ce7dc --- /dev/null +++ b/src/intellij-14/asm.iml.SAMPLE @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/src/intellij-14/compiler.iml.SAMPLE b/src/intellij-14/compiler.iml.SAMPLE new file mode 100644 index 000000000000..858ca2f2c2e4 --- /dev/null +++ b/src/intellij-14/compiler.iml.SAMPLE @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/intellij-14/diff.sh b/src/intellij-14/diff.sh new file mode 100755 index 000000000000..54f9248608fb --- /dev/null +++ b/src/intellij-14/diff.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash +# +# Diffs the SAMPLE files against the working project config. +# +export SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )" +for f in "$SCRIPT_DIR"/*.{iml,ipr}; do + echo $f; diff -u $f.SAMPLE $f; +done diff --git a/src/intellij-14/forkjoin.iml.SAMPLE b/src/intellij-14/forkjoin.iml.SAMPLE new file mode 100644 index 000000000000..42507b2911e2 --- /dev/null +++ b/src/intellij-14/forkjoin.iml.SAMPLE @@ -0,0 +1,11 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/src/intellij-14/interactive.iml.SAMPLE b/src/intellij-14/interactive.iml.SAMPLE new file mode 100644 index 000000000000..db12a7dc9ba0 --- /dev/null +++ b/src/intellij-14/interactive.iml.SAMPLE @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/intellij-14/library.iml.SAMPLE b/src/intellij-14/library.iml.SAMPLE new file mode 100644 index 000000000000..08cccba4b923 --- /dev/null +++ b/src/intellij-14/library.iml.SAMPLE @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/intellij-14/manual.iml.SAMPLE b/src/intellij-14/manual.iml.SAMPLE new file mode 100644 index 000000000000..2e67076e2812 --- /dev/null +++ b/src/intellij-14/manual.iml.SAMPLE @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/intellij-14/partest-extras.iml.SAMPLE b/src/intellij-14/partest-extras.iml.SAMPLE new file mode 100644 index 000000000000..b3537a949ad5 --- /dev/null +++ b/src/intellij-14/partest-extras.iml.SAMPLE @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/intellij-14/partest-javaagent.iml.SAMPLE b/src/intellij-14/partest-javaagent.iml.SAMPLE new file mode 100644 index 000000000000..3a387aab0fa6 --- /dev/null +++ b/src/intellij-14/partest-javaagent.iml.SAMPLE @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/intellij-14/reflect.iml.SAMPLE b/src/intellij-14/reflect.iml.SAMPLE new file mode 100644 index 000000000000..87da13777b92 --- /dev/null +++ b/src/intellij-14/reflect.iml.SAMPLE @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/intellij-14/repl.iml.SAMPLE 
b/src/intellij-14/repl.iml.SAMPLE new file mode 100644 index 000000000000..5a7476b1ef34 --- /dev/null +++ b/src/intellij-14/repl.iml.SAMPLE @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/intellij-14/scala.iml.SAMPLE b/src/intellij-14/scala.iml.SAMPLE new file mode 100644 index 000000000000..9e8718dd45a5 --- /dev/null +++ b/src/intellij-14/scala.iml.SAMPLE @@ -0,0 +1,11 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/src/intellij-14/scala.ipr.SAMPLE b/src/intellij-14/scala.ipr.SAMPLE new file mode 100644 index 000000000000..7c2022f3a9e6 --- /dev/null +++ b/src/intellij-14/scala.ipr.SAMPLE @@ -0,0 +1,261 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + localhost + 5050 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/intellij-14/scaladoc.iml.SAMPLE b/src/intellij-14/scaladoc.iml.SAMPLE new file mode 100644 index 000000000000..1e7621ffedca --- /dev/null +++ b/src/intellij-14/scaladoc.iml.SAMPLE @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/intellij-14/scalap.iml.SAMPLE b/src/intellij-14/scalap.iml.SAMPLE new file mode 100644 index 000000000000..e09b8d11b604 --- /dev/null +++ b/src/intellij-14/scalap.iml.SAMPLE @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/intellij-14/setup.sh b/src/intellij-14/setup.sh new file mode 100755 index 000000000000..ec303778edf7 --- /dev/null +++ b/src/intellij-14/setup.sh @@ -0,0 +1,14 @@ +#!/usr/bin/env bash +# +# Generates IntelliJ IDEA project files based on the checked-in samples. +# + +set -e +export SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )" +echo "About to delete .ipr and .iml files and replace with the .SAMPLE files. Press enter to continue or CTRL-C to cancel." +read + +for f in "$SCRIPT_DIR"/*.SAMPLE; do + g=${f%.SAMPLE} + cp $f $g +done diff --git a/src/intellij-14/test-junit.iml.SAMPLE b/src/intellij-14/test-junit.iml.SAMPLE new file mode 100644 index 000000000000..786f02e2e2c2 --- /dev/null +++ b/src/intellij-14/test-junit.iml.SAMPLE @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/intellij-14/test.iml.SAMPLE b/src/intellij-14/test.iml.SAMPLE new file mode 100644 index 000000000000..a384d7226610 --- /dev/null +++ b/src/intellij-14/test.iml.SAMPLE @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/intellij-14/update.sh b/src/intellij-14/update.sh new file mode 100755 index 000000000000..eb6fea782f37 --- /dev/null +++ b/src/intellij-14/update.sh @@ -0,0 +1,22 @@ +#!/usr/bin/env bash +# +# Updates the .SAMPLE files with the current project files. +# + +set -e +export SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )" + +echo "About to create overwrite the .ipr.SAMPLE and .iml.SAMPLE files with the current project files. Press enter to continue or CTRL-C to cancel." +read + +for f in "$SCRIPT_DIR"/*.{iml,ipr}; do + cp $f $f.SAMPLE +done + +for f in "$SCRIPT_DIR"/*.SAMPLE; do + g=${f%.SAMPLE} + if [[ ! 
-f $g ]]; then + echo "Stale sample file, deleting $f" + rm $f + fi +done diff --git a/src/intellij/README b/src/intellij/README index ade87749cd07..a39691f4f066 100644 --- a/src/intellij/README +++ b/src/intellij/README @@ -1,8 +1,12 @@ Use the latest IntelliJ IDEA release and install the Scala plugin from within the IDE. +Compilation withing IDEA is performed in "-Dlocker.skip=1" mode: the sources are built +directly using the STARR compiler. + The following steps are required to use IntelliJ IDEA on Scala trunk - - compile "locker" using "ant locker.done". This will also download some JARs from - Maven to ./build/deps, which are included in IntelliJ's classpath. + - Run "ant init". This will download some JARs from to ./build/deps, which are + included in IntelliJ's classpath. - Run src/intellij/setup.sh - Open ./src/intellij/scala-lang.ipr in IntelliJ - - File, Project Settings, Project, SDK. Create an SDK entry named "1.6" containing the java 1.6 SDK + - File, Project Settings, Project, SDK. Create an SDK entry named "1.6" containing the + java 1.6 SDK diff --git a/src/intellij/actors.iml.SAMPLE b/src/intellij/actors.iml.SAMPLE index 896c4966ffd2..b15af8b11071 100644 --- a/src/intellij/actors.iml.SAMPLE +++ b/src/intellij/actors.iml.SAMPLE @@ -4,7 +4,8 @@ diff --git a/src/intellij/compiler.iml.SAMPLE b/src/intellij/compiler.iml.SAMPLE index 9fb9cd55eb14..50253000ab3c 100644 --- a/src/intellij/compiler.iml.SAMPLE +++ b/src/intellij/compiler.iml.SAMPLE @@ -4,7 +4,8 @@ @@ -19,8 +20,8 @@ - - + + diff --git a/src/intellij/interactive.iml.SAMPLE b/src/intellij/interactive.iml.SAMPLE index c6c8ebb60618..83178021d311 100644 --- a/src/intellij/interactive.iml.SAMPLE +++ b/src/intellij/interactive.iml.SAMPLE @@ -4,7 +4,8 @@ @@ -23,3 +24,4 @@ + diff --git a/src/intellij/library.iml.SAMPLE b/src/intellij/library.iml.SAMPLE index cac53dff15d2..137ce6eb9c04 100644 --- a/src/intellij/library.iml.SAMPLE +++ b/src/intellij/library.iml.SAMPLE @@ -4,8 +4,9 @@ diff --git a/src/intellij/manual.iml.SAMPLE b/src/intellij/manual.iml.SAMPLE index 3295a4a877a3..8babde73eab3 100644 --- a/src/intellij/manual.iml.SAMPLE +++ b/src/intellij/manual.iml.SAMPLE @@ -4,7 +4,8 @@ @@ -18,8 +19,8 @@ - + diff --git a/src/intellij/partest-extras.iml.SAMPLE b/src/intellij/partest-extras.iml.SAMPLE new file mode 100644 index 000000000000..c2ada43493e8 --- /dev/null +++ b/src/intellij/partest-extras.iml.SAMPLE @@ -0,0 +1,27 @@ + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/intellij/partest-javaagent.iml.SAMPLE b/src/intellij/partest-javaagent.iml.SAMPLE new file mode 100644 index 000000000000..e47e0f634940 --- /dev/null +++ b/src/intellij/partest-javaagent.iml.SAMPLE @@ -0,0 +1,13 @@ + + + + + + + + + + + + + diff --git a/src/intellij/reflect.iml.SAMPLE b/src/intellij/reflect.iml.SAMPLE index 7d1052282683..d206304896d3 100644 --- a/src/intellij/reflect.iml.SAMPLE +++ b/src/intellij/reflect.iml.SAMPLE @@ -4,8 +4,9 @@ @@ -19,7 +20,6 @@ - diff --git a/src/intellij/repl.iml.SAMPLE b/src/intellij/repl.iml.SAMPLE index fc78ffe8c231..83791f4f6e0f 100644 --- a/src/intellij/repl.iml.SAMPLE +++ b/src/intellij/repl.iml.SAMPLE @@ -4,7 +4,8 @@ @@ -23,3 +24,4 @@ + diff --git a/src/intellij/scala-lang.ipr.SAMPLE b/src/intellij/scala-lang.ipr.SAMPLE index a0765b3e99d3..0cd3fdae6aa2 100644 --- a/src/intellij/scala-lang.ipr.SAMPLE +++ b/src/intellij/scala-lang.ipr.SAMPLE @@ -1,8 +1,5 @@ - - - - - + + + + + + @@ -41,11 +40,16 @@ - @@ -234,45 +242,46 @@ - - - - - - - - - - - - - + - - + - - - + - - + - - - + + + - + + + + + + + + 
+ + + + + + + + + + diff --git a/src/intellij/scaladoc.iml.SAMPLE b/src/intellij/scaladoc.iml.SAMPLE index 07bea5bf5d70..8f9a0d83443d 100644 --- a/src/intellij/scaladoc.iml.SAMPLE +++ b/src/intellij/scaladoc.iml.SAMPLE @@ -4,7 +4,8 @@ @@ -20,8 +21,8 @@ - - - + + + diff --git a/src/intellij/scalap.iml.SAMPLE b/src/intellij/scalap.iml.SAMPLE index 77eea7c38f8f..27ae45136978 100644 --- a/src/intellij/scalap.iml.SAMPLE +++ b/src/intellij/scalap.iml.SAMPLE @@ -4,7 +4,8 @@ diff --git a/src/intellij/setup.sh b/src/intellij/setup.sh index bd324ba5bd63..ec303778edf7 100755 --- a/src/intellij/setup.sh +++ b/src/intellij/setup.sh @@ -5,19 +5,10 @@ set -e export SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )" -export BASE="$( cd "$( dirname "$0" )"/../.. && pwd )" echo "About to delete .ipr and .iml files and replace with the .SAMPLE files. Press enter to continue or CTRL-C to cancel." read -(rm -f *.ipr *.iml 2>/dev/null) -for f in $(ls "$SCRIPT_DIR"/*.SAMPLE); do - NEW_FILE=`echo $f | perl -pe 's/.SAMPLE//'`; - - cp $f $NEW_FILE - - # IntelliJ doesn't process the "compilerOptions" setting for variable - # replacement. If it did, we would just use "$PROJECT_DIR$". Instead, - # we do this replacement ourselves. - perl -pi -e 's/\$BASE_DIR\$/$ENV{"BASE"}/g' $NEW_FILE - echo "Created $NEW_FILE" +for f in "$SCRIPT_DIR"/*.SAMPLE; do + g=${f%.SAMPLE} + cp $f $g done diff --git a/src/intellij/test-junit.iml.SAMPLE b/src/intellij/test-junit.iml.SAMPLE new file mode 100644 index 000000000000..bb51c30a4f6b --- /dev/null +++ b/src/intellij/test-junit.iml.SAMPLE @@ -0,0 +1,30 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/intellij/test-osgi.iml.SAMPLE b/src/intellij/test-osgi.iml.SAMPLE new file mode 100644 index 000000000000..a589aaa0a9ab --- /dev/null +++ b/src/intellij/test-osgi.iml.SAMPLE @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/intellij/test.iml.SAMPLE b/src/intellij/test.iml.SAMPLE index 423be2062c8b..cb4a8568a17e 100644 --- a/src/intellij/test.iml.SAMPLE +++ b/src/intellij/test.iml.SAMPLE @@ -1,21 +1,31 @@ + + + + + + - + + + - - + - - - + + + diff --git a/src/intellij/test/files/neg/virtpatmat_exhaust_big.check b/src/intellij/test/files/neg/virtpatmat_exhaust_big.check new file mode 100644 index 000000000000..fddc85a36295 --- /dev/null +++ b/src/intellij/test/files/neg/virtpatmat_exhaust_big.check @@ -0,0 +1,7 @@ +virtpatmat_exhaust_big.scala:27: warning: match may not be exhaustive. +It would fail on the following input: Z11() + def foo(z: Z) = z match { + ^ +error: No warnings can be incurred under -Xfatal-warnings. 
+one warning found +one error found diff --git a/src/intellij/test/files/neg/virtpatmat_exhaust_big.flags b/src/intellij/test/files/neg/virtpatmat_exhaust_big.flags new file mode 100644 index 000000000000..b5a874865273 --- /dev/null +++ b/src/intellij/test/files/neg/virtpatmat_exhaust_big.flags @@ -0,0 +1 @@ +-Xfatal-warnings -unchecked diff --git a/src/intellij/test/files/neg/virtpatmat_exhaust_big.scala b/src/intellij/test/files/neg/virtpatmat_exhaust_big.scala new file mode 100644 index 000000000000..dd639eb56ee0 --- /dev/null +++ b/src/intellij/test/files/neg/virtpatmat_exhaust_big.scala @@ -0,0 +1,32 @@ +sealed abstract class Z +object Z { + object Z0 extends Z + case class Z1() extends Z + object Z2 extends Z + case class Z3() extends Z + object Z4 extends Z + case class Z5() extends Z + object Z6 extends Z + case class Z7() extends Z + object Z8 extends Z + case class Z9() extends Z + object Z10 extends Z + case class Z11() extends Z + object Z12 extends Z + case class Z13() extends Z + object Z14 extends Z + case class Z15() extends Z + object Z16 extends Z + case class Z17() extends Z + object Z18 extends Z + case class Z19() extends Z +} + +object Test { + import Z._ + def foo(z: Z) = z match { + case Z0 | Z1() | Z2 | Z3() | Z4 | Z5() | Z6 | Z7() | Z8 | Z9() | + Z10 | Z12 | Z13() | Z14 | Z15() | Z16 | Z17() | Z18 | Z19() + => + } +} diff --git a/src/intellij/test/files/pos/virtpatmat_exhaust_big.scala b/src/intellij/test/files/pos/virtpatmat_exhaust_big.scala new file mode 100644 index 000000000000..41aef3226e41 --- /dev/null +++ b/src/intellij/test/files/pos/virtpatmat_exhaust_big.scala @@ -0,0 +1,34 @@ +sealed abstract class Z +object Z { + object Z0 extends Z + case class Z1() extends Z + object Z2 extends Z + case class Z3() extends Z + object Z4 extends Z + case class Z5() extends Z + object Z6 extends Z + case class Z7() extends Z + object Z8 extends Z + case class Z9() extends Z + object Z10 extends Z + case class Z11() extends Z + object Z12 extends Z + case class Z13() extends Z + object Z14 extends Z + case class Z15() extends Z + object Z16 extends Z + case class Z17() extends Z + object Z18 extends Z + case class Z19() extends Z +} + +// drop any case and it will report an error +object Test { + import Z._ + def foo(z: Z) = z match { + case Z0 | Z1() | Z2 | Z3() | Z4 | Z5() | Z6 | Z7() | Z8 | Z9() | + Z10 | Z11() | Z12 | Z13() | Z14 | Z15() | Z16 | Z17() | Z18 | Z19() + => + } +} +-
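The neg/pos pair added above exercises the pattern matcher's exhaustiveness checker on a 20-member sealed hierarchy: the neg variant leaves Z11() out of the match, and -Xfatal-warnings (from the .flags file) turns the resulting warning into the error recorded in the .check file. A minimal sketch of the same mechanism, using a hypothetical two-member hierarchy rather than the test's Z types:

    sealed abstract class Shape
    object Shape {
      case object Dot extends Shape
      case class Box(width: Int) extends Shape
    }

    object ExhaustivenessSketch {
      import Shape._
      // Compiled with -Xfatal-warnings -unchecked, this match is rejected because
      // the checker sees that Box(_) is not covered by any case.
      def describe(s: Shape): String = s match {
        case Dot => "a dot"
      }
    }

Adding a `case Box(_)` alternative makes the sketch compile cleanly, which mirrors the difference between the pos and neg versions of the test.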
diff --git a/src/intellij/update.sh b/src/intellij/update.sh new file mode 100755 index 000000000000..eb6fea782f37 --- /dev/null +++ b/src/intellij/update.sh @@ -0,0 +1,22 @@ +#!/usr/bin/env bash +# +# Updates the .SAMPLE files with the current project files. +# + +set -e +export SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )" + +echo "About to overwrite the .ipr.SAMPLE and .iml.SAMPLE files with the current project files. Press enter to continue or CTRL-C to cancel." +read + +for f in "$SCRIPT_DIR"/*.{iml,ipr}; do + cp $f $f.SAMPLE +done + +for f in "$SCRIPT_DIR"/*.SAMPLE; do + g=${f%.SAMPLE} + if [[ ! -f $g ]]; then + echo "Stale sample file, deleting $f" + rm $f + fi +done diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala index 95027a26b120..a192dd37383d 100644 --- a/src/interactive/scala/tools/nsc/interactive/Global.scala +++ b/src/interactive/scala/tools/nsc/interactive/Global.scala @@ -19,6 +19,8 @@ import scala.annotation.{ elidable, tailrec } import scala.language.implicitConversions import scala.tools.nsc.typechecker.Typers import scala.util.control.Breaks._ +import java.util.concurrent.ConcurrentHashMap +import scala.collection.JavaConverters.mapAsScalaMapConverter /** * This trait allows the IDE to have an instance of the PC that @@ -64,7 +66,9 @@ trait InteractiveAnalyzer extends Analyzer { // that case the definitions that were already attributed as // well as any default parameters of such methods need to be // re-entered in the current scope. - override def enterExistingSym(sym: Symbol): Context = { + // + // Tested in test/files/presentation/t8941b + override def enterExistingSym(sym: Symbol, tree: Tree): Context = { if (sym != null && sym.owner.isTerm) { enterIfNotThere(sym) if (sym.isLazy) @@ -72,8 +76,17 @@ for (defAtt <- sym.attachments.get[DefaultsOfLocalMethodAttachment]) defAtt.defaultGetters foreach enterIfNotThere + } else if (sym != null && sym.isClass && sym.isImplicit) { + val owningInfo = sym.owner.info + val existingDerivedSym = owningInfo.decl(sym.name.toTermName).filter(sym => sym.isSynthetic && sym.isMethod) + existingDerivedSym.alternatives foreach (owningInfo.decls.unlink) + val defTree = tree match { + case dd: DocDef => dd.definition // See SI-9011, Scala IDE's presentation compiler incorporates ScalaDocGlobal with InteractiveGlobal, so we have to unwrap DocDefs. + case _ => tree + } + enterImplicitWrapper(defTree.asInstanceOf[ClassDef]) } - super.enterExistingSym(sym) + super.enterExistingSym(sym, tree) } override def enterIfNotThere(sym: Symbol) { val scope = context.scope @@ -121,8 +134,8 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") else NullLogger import log.logreplay - debugLog("logger: " + log.getClass + " writing to " + (new java.io.File(logName)).getAbsolutePath) - debugLog("classpath: "+classPath) + debugLog(s"logger: ${log.getClass} writing to ${(new java.io.File(logName)).getAbsolutePath}") + debugLog(s"classpath: $classPath") private var curTime = System.nanoTime private def timeStep = { @@ -142,8 +155,6 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") // don't keep the original owner in presentation compiler runs // (the map will grow indefinitely, and the only use case is the backend) override protected def saveOriginalOwner(sym: Symbol) { } - override protected def originalEnclosingMethod(sym: Symbol) = - abort("originalOwner is not kept in presentation compiler runs.") override def forInteractive = true override protected def synchronizeNames = true
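The enterExistingSym hunk above now receives the defining tree along with the symbol; for an implicit class it unlinks any previously entered synthetic wrapper method and re-enters the wrapper, first unwrapping a DocDef when scaladoc support is mixed into the presentation compiler. A toy illustration of that unwrap-before-downcast step, with made-up tree types rather than the compiler's AST:

    sealed trait TreeSketch
    case class ClassDefSketch(name: String) extends TreeSketch
    case class DocDefSketch(comment: String, definition: TreeSketch) extends TreeSketch

    object UnwrapSketch {
      // Peel off documentation wrappers until the underlying class definition is reached.
      def underlyingClass(tree: TreeSketch): ClassDefSketch = tree match {
        case DocDefSketch(_, definition) => underlyingClass(definition)
        case cd: ClassDefSketch          => cd
      }
    }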
@@ -162,19 +173,18 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") /** A map of all loaded files to the rich compilation units that correspond to them. */ - val unitOfFile = new LinkedHashMap[AbstractFile, RichCompilationUnit] with - SynchronizedMap[AbstractFile, RichCompilationUnit] { + val unitOfFile = mapAsScalaMapConverter(new ConcurrentHashMap[AbstractFile, RichCompilationUnit] { override def put(key: AbstractFile, value: RichCompilationUnit) = { val r = super.put(key, value) - if (r.isEmpty) debugLog("added unit for "+key) + if (r == null) debugLog("added unit for "+key) r } - override def remove(key: AbstractFile) = { + override def remove(key: Any) = { val r = super.remove(key) - if (r.nonEmpty) debugLog("removed unit for "+key) + if (r != null) debugLog("removed unit for "+key) r } - } + }).asScala /** A set containing all those files that need to be removed * Units are removed by getUnit, typically once a unit has finished compiling. @@ -517,7 +527,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") /** The current presentation compiler runner */ @volatile private[interactive] var compileRunner: Thread = newRunnerThread() - /** Check that the currenyly executing thread is the presentation compiler thread. + /** Check that the currently executing thread is the presentation compiler thread. * * Compiler initialization may happen on a different thread (signalled by globalPhase being NoPhase) */ @@ -734,7 +744,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") } } - private def reloadSource(source: SourceFile) { + private[interactive] def reloadSource(source: SourceFile) { val unit = new RichCompilationUnit(source) unitOfFile(source.file) = unit toBeRemoved -= source.file @@ -783,7 +793,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") } /** A fully attributed tree located at position `pos` */ - private def typedTreeAt(pos: Position): Tree = getUnit(pos.source) match { + private[interactive] def typedTreeAt(pos: Position): Tree = getUnit(pos.source) match { case None => reloadSources(List(pos.source)) try typedTreeAt(pos) @@ -1093,7 +1103,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") val implicitlyAdded = viaView != NoSymbol members.add(sym, pre, implicitlyAdded) { (s, st) => new TypeMember(s, st, - context.isAccessible(if (s.hasGetter) s.getter(s.owner) else s, pre, superAccess && !implicitlyAdded), + context.isAccessible(if (s.hasGetter) s.getterIn(s.owner) else s, pre, superAccess && !implicitlyAdded), inherited, viaView) } @@ -1183,7 +1193,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") } } - /** Parses and enters given source file, stroring parse tree in response */ + /** Parses and enters given source file, storing parse tree in response */ private def getParsedEnteredNow(source: SourceFile, response: Response[Tree]) { respond(response) { onUnitOf(source) { unit =>
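The unitOfFile hunk above swaps a LinkedHashMap with SynchronizedMap for a java.util.concurrent.ConcurrentHashMap exposed through JavaConverters, which is why the logging overrides now test for null (the java.util.Map contract) rather than an empty Option. A rough sketch of the same wrapping pattern, with plain String keys and values standing in for AbstractFile and RichCompilationUnit:

    import java.util.concurrent.ConcurrentHashMap
    import scala.collection.JavaConverters.mapAsScalaMapConverter
    import scala.collection.mutable

    object UnitMapSketch {
      // Subclassing the Java map lets put/remove be hooked for logging;
      // both return null when no previous mapping existed.
      private val underlying = new ConcurrentHashMap[String, String] {
        override def put(key: String, value: String): String = {
          val previous = super.put(key, value)
          if (previous == null) println(s"added unit for $key")
          previous
        }
        override def remove(key: Any): String = {
          val removed = super.remove(key)
          if (removed != null) println(s"removed unit for $key")
          removed
        }
      }

      // The Scala-side view behaves like an ordinary mutable.Map.
      val unitOfFile: mutable.Map[String, String] = underlying.asScala
    }

Updates made through the wrapper, such as UnitMapSketch.unitOfFile("A.scala") = "unit", are forwarded to the overridden put, so the debug logging should fire just as it did with the old synchronized map.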
diff --git a/src/interactive/scala/tools/nsc/interactive/Main.scala b/src/interactive/scala/tools/nsc/interactive/Main.scala index c838606f02b6..7796c656709a 100644 --- a/src/interactive/scala/tools/nsc/interactive/Main.scala +++ b/src/interactive/scala/tools/nsc/interactive/Main.scala @@ -12,7 +12,7 @@ package interactive */ object Main extends nsc.MainClass { override def processSettingsHook(): Boolean = { - if (this.settings.Yidedebug) { + def run(): Unit = { this.settings.Xprintpos.value = true this.settings.Yrangepos.value = true val compiler = new interactive.Global(this.settings, this.reporter) @@ -27,8 +27,9 @@ object Main extends nsc.MainClass { case None => reporter.reset() // Causes other compiler errors to be ignored } askShutdown - false } - else true + super.processSettingsHook() && ( + if (this.settings.Yidedebug) { run() ; false } else true + ) } } diff --git a/src/interactive/scala/tools/nsc/interactive/Pickler.scala b/src/interactive/scala/tools/nsc/interactive/Pickler.scala index 83f3fab9256d..ddc0c8a068f0 100644 --- a/src/interactive/scala/tools/nsc/interactive/Pickler.scala +++ b/src/interactive/scala/tools/nsc/interactive/Pickler.scala @@ -6,7 +6,7 @@ import scala.language.implicitConversions import scala.reflect.ClassTag /** An abstract class for writing and reading Scala objects to and - * from a legible representation. The presesentation follows the following grammar: + * from a legible representation. The representation follows the following grammar: * {{{ * Pickled = `true` | `false` | `null` | NumericLit | StringLit | * Labelled | Pickled `,` Pickled @@ -85,7 +85,7 @@ abstract class Pickler[T] { object Pickler { /** A base class representing unpickler result. It has two subclasses: - * `UnpickleSucess` for successful unpicklings and `UnpickleFailure` for failures, + * `UnpickleSuccess` for successful unpicklings and `UnpickleFailure` for failures, * where a value of the given type `T` could not be unpickled from input. * @tparam T the type of unpickled values in case of success. */ @@ -154,7 +154,7 @@ object Pickler { */ def pkl[T: Pickler] = implicitly[Pickler[T]] - /** A class represenenting `~`-pairs */ + /** A class representing `~`-pairs */ case class ~[+S, +T](fst: S, snd: T) /** A wrapper class to be able to use `~` as an infix method */ diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala index 681204172b4c..4962d80a8b53 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala @@ -4,7 +4,7 @@ import scala.tools.nsc.io.Path /** Common settings for the test. */ private[tests] trait TestSettings { - protected final val TIMEOUT = 10000 // timeout in milliseconds + protected final val TIMEOUT = 30000 // timeout in milliseconds /** The root directory for this test suite, usually the test kind ("test/files/presentation"). */ protected val outDir = Path(Option(System.getProperty("partest.cwd")).getOrElse(".")) diff --git a/src/jline/LICENSE.txt b/src/jline/LICENSE.txt deleted file mode 100644 index 1cdc44c211bd..000000000000 --- a/src/jline/LICENSE.txt +++ /dev/null @@ -1,33 +0,0 @@ -Copyright (c) 2002-2006, Marc Prud'hommeaux -All rights reserved. - -Redistribution and use in source and binary forms, with or -without modification, are permitted provided that the following -conditions are met: - -Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - -Redistributions in binary form must reproduce the above copyright -notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with -the distribution. - -Neither the name of JLine nor the names of its contributors -may be used to endorse or promote products derived from this -software without specific prior written permission.
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, -BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY -AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO -EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, -OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, -PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED -AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING -IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED -OF THE POSSIBILITY OF SUCH DAMAGE. - diff --git a/src/jline/README.md b/src/jline/README.md deleted file mode 100644 index 829476145d4e..000000000000 --- a/src/jline/README.md +++ /dev/null @@ -1,24 +0,0 @@ -Description ------------ - -JLine 2.x - -License -------- - -BSD - -Building --------- - -### Requirements - -* SBT -* Java 5+ - -This is a fork with scala specific modifications. -The original repository was: git://github.com/jdillon/jline2.git - -You can now build with sbt: - - sbt update proguard diff --git a/src/jline/build.sbt b/src/jline/build.sbt deleted file mode 100644 index 873f7574f129..000000000000 --- a/src/jline/build.sbt +++ /dev/null @@ -1,49 +0,0 @@ -seq(ProguardPlugin.proguardSettings :_*) - -name := "jline" - -organization := "org.scala-lang" - -version := "2.11.0-SNAPSHOT" - -scalaVersion := "2.10.1" - -// Only need these because of weird testing jline issues. -retrieveManaged := true - -parallelExecution in Test := false - -libraryDependencies ++= Seq( - "org.fusesource.jansi" % "jansi" % "1.10", - "com.novocode" % "junit-interface" % "0.9" % "test->default" -) - -javacOptions ++= Seq("-source", "1.5", "-target", "1.5") - -proguardOptions ++= Seq( - "-dontshrink", - "-keep class *", - "-keepdirectories" -) - -proguardInJars := Nil - -makeInJarFilter ~= { prevFilter => - val jansiFilter = List( - "!META-INF/MANIFEST.MF", - "org/fusesource/hawtjni/runtime", - "org/fusesource/hawtjni/runtime/Callback.class", - "org/fusesource/hawtjni/runtime/Library.class", - "!org/fusesource/hawtjni/**", - "!META-INF/maven/org.fusesource.hawtjni", - "!META-INF/maven/org.fusesource.jansi", - "!META-INF/maven/org.fusesource.hawtjni/**", - "!META-INF/maven/org.fusesource.jansi/**" - ).mkString(",") - // In sbt 0.9.8 the scala-library.jar line was not necessary, - // but in 0.9.9 it started showing up here. Who knows. - file => - if (file startsWith "jansi-") jansiFilter - else if (file == "scala-library.jar") "!**" - else prevFilter(file) -} diff --git a/src/jline/manual-test.sh b/src/jline/manual-test.sh deleted file mode 100755 index 744e1756e8aa..000000000000 --- a/src/jline/manual-test.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/env bash -# -# Apparently the jline bundled with sbt interferes with testing some -# changes: for instance after changing the keybindings I kept seeing -# failures until I realized what was happening and bypassed sbt, like this. 
-CP=lib_managed/jars/com.novocode/junit-interface/junit-interface-0.9.jar:lib_managed/jars/junit/junit-dep/junit-dep-4.8.2.jar:lib_managed/jars/org.fusesource.jansi/jansi/jansi-1.10.jar:lib_managed/jars/org.hamcrest/hamcrest-core/hamcrest-core-1.1.jar:lib_managed/jars/org.scala-tools.testing/test-interface/test-interface-0.5.jar:target/scala-2.10/test-classes:target/scala-2.10/jline_2.10-2.11.0-SNAPSHOT.min.jar - -sbt proguard -java -cp $CP org.junit.runner.JUnitCore scala.tools.jline.console.EditLineTest diff --git a/src/jline/project/build.properties b/src/jline/project/build.properties deleted file mode 100644 index 9b860e23c51a..000000000000 --- a/src/jline/project/build.properties +++ /dev/null @@ -1 +0,0 @@ -sbt.version=0.12.3 diff --git a/src/jline/project/plugins.sbt b/src/jline/project/plugins.sbt deleted file mode 100644 index 9c13de92d8ed..000000000000 --- a/src/jline/project/plugins.sbt +++ /dev/null @@ -1,3 +0,0 @@ -resolvers += Resolver.url("sbt-plugin-releases-scalasbt", url("http://repo.scala-sbt.org/scalasbt/sbt-plugin-releases/"))(Resolver.ivyStylePatterns) - -addSbtPlugin("org.scala-sbt" % "xsbt-proguard-plugin" % "0.1.3") diff --git a/src/jline/src/main/java/scala/tools/jline/AnsiWindowsTerminal.java b/src/jline/src/main/java/scala/tools/jline/AnsiWindowsTerminal.java deleted file mode 100644 index 94697137d307..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/AnsiWindowsTerminal.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (C) 2009 the original author(s). - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * MODIFICATIONS: methods to deal with wrapping the output stream. - */ - -package scala.tools.jline; - -import org.fusesource.jansi.AnsiConsole; -import org.fusesource.jansi.AnsiOutputStream; -import org.fusesource.jansi.WindowsAnsiOutputStream; - -import java.io.ByteArrayOutputStream; -import java.io.OutputStream; - -/** - * ANSI-supported {@link WindowsTerminal}. - * - * @since 2.0 - */ -public class AnsiWindowsTerminal - extends WindowsTerminal -{ - private final boolean ansiSupported = detectAnsiSupport(); - - @Override - public OutputStream wrapOutIfNeeded(OutputStream out) { - return wrapOutputStream(out); - } - - /** - * Returns an ansi output stream handler. We return whatever was - * passed if we determine we cannot handle ansi based on Kernel32 calls. - * - * @return an @{link AltWindowAnsiOutputStream} instance or the passed - * stream. - */ - private static OutputStream wrapOutputStream(final OutputStream stream) { - String os = System.getProperty("os.name"); - if( os.startsWith("Windows") ) { - // On windows we know the console does not interpret ANSI codes.. - try { - return new WindowsAnsiOutputStream(stream); - } catch (Throwable ignore) { - // this happens when JNA is not in the path.. or - // this happens when the stdout is being redirected to a file. - } - // Use the ANSIOutputStream to strip out the ANSI escape sequences. 
- return new AnsiOutputStream(stream); - } - return stream; - } - - private static boolean detectAnsiSupport() { - OutputStream out = AnsiConsole.wrapOutputStream(new ByteArrayOutputStream()); - try { - out.close(); - } - catch (Exception e) { - // ignore; - } - return out instanceof WindowsAnsiOutputStream; - } - - public AnsiWindowsTerminal() throws Exception { - super(); - } - - @Override - public boolean isAnsiSupported() { - return ansiSupported; - } - - @Override - public boolean hasWeirdWrap() { - return false; - } -} diff --git a/src/jline/src/main/java/scala/tools/jline/NoInterruptUnixTerminal.java b/src/jline/src/main/java/scala/tools/jline/NoInterruptUnixTerminal.java deleted file mode 100644 index ef7cf23c4a3a..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/NoInterruptUnixTerminal.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (C) 2009 the original author(s). - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package scala.tools.jline; - -// Based on Apache Karaf impl - -/** - * Non-interruptable (via CTRL-C) {@link UnixTerminal}. - * - * @since 2.0 - */ -public class NoInterruptUnixTerminal - extends UnixTerminal -{ - public NoInterruptUnixTerminal() throws Exception { - super(); - } - - @Override - public void init() throws Exception { - super.init(); - getSettings().set("intr undef"); - } - - @Override - public void restore() throws Exception { - getSettings().set("intr ^C"); - super.restore(); - } -} diff --git a/src/jline/src/main/java/scala/tools/jline/Terminal.java b/src/jline/src/main/java/scala/tools/jline/Terminal.java deleted file mode 100644 index 79611c244d0b..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/Terminal.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved. - * - * This software is distributable under the BSD license. See the terms of the - * BSD license in the documentation provided with this software. - */ - -package scala.tools.jline; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; - -/** - * Representation of the input terminal for a platform. - * - * @author Marc Prud'hommeaux - * @author Jason Dillon - * @since 2.0 - */ -public interface Terminal -{ - void init() throws Exception; - - void restore() throws Exception; - - void reset() throws Exception; - - boolean isSupported(); - - int getWidth(); - - int getHeight(); - - boolean isAnsiSupported(); - - /** - * When ANSI is not natively handled, the output will have to be wrapped. - */ - OutputStream wrapOutIfNeeded(OutputStream out); - - /** - * For terminals that don't wrap when character is written in last column, - * only when the next character is written. 
- * These are the ones that have 'am' and 'xn' termcap attributes (xterm and - * rxvt flavors falls under that category) - */ - boolean hasWeirdWrap(); - - boolean isEchoEnabled(); - - void setEchoEnabled(boolean enabled); - - int readCharacter(InputStream in) throws IOException; - - int readVirtualKey(InputStream in) throws IOException; - - InputStream getDefaultBindings(); -} diff --git a/src/jline/src/main/java/scala/tools/jline/TerminalFactory.java b/src/jline/src/main/java/scala/tools/jline/TerminalFactory.java deleted file mode 100644 index 95b7c28bd555..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/TerminalFactory.java +++ /dev/null @@ -1,173 +0,0 @@ -/* - * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved. - * - * This software is distributable under the BSD license. See the terms of the - * BSD license in the documentation provided with this software. - */ - -package scala.tools.jline; - -import scala.tools.jline.internal.Configuration; -import scala.tools.jline.internal.Log; - -import java.text.MessageFormat; -import java.util.HashMap; -import java.util.Map; - -/** - * Creates terminal instances. - * - * @author Jason Dillon - * @since 2.0 - */ -public class TerminalFactory -{ - public static final String JLINE_TERMINAL = "jline.terminal"; - - public static final String AUTO = "auto"; - - public static final String UNIX = "unix"; - - public static final String WIN = "win"; - - public static final String WINDOWS = "windows"; - - public static final String NONE = "none"; - - public static final String OFF = "off"; - - public static final String FALSE = "false"; - - private static final InheritableThreadLocal holder = new InheritableThreadLocal(); - - public static synchronized Terminal create() { - if (Log.TRACE) { - //noinspection ThrowableInstanceNeverThrown - Log.trace(new Throwable("CREATE MARKER")); - } - - String type = Configuration.getString(JLINE_TERMINAL); - if (type == null) { - type = AUTO; - } - - Log.debug("Creating terminal; type=", type); - - Terminal t; - try { - String tmp = type.toLowerCase(); - - if (tmp.equals(UNIX)) { - t = getFlavor(Flavor.UNIX); - } - else if (tmp.equals(WIN) | tmp.equals(WINDOWS)) { - t = getFlavor(Flavor.WINDOWS); - } - else if (tmp.equals(NONE) || tmp.equals(OFF) || tmp.equals(FALSE)) { - t = new UnsupportedTerminal(); - } - else { - if (tmp.equals(AUTO)) { - String os = Configuration.getOsName(); - Flavor flavor = Flavor.UNIX; - if (os.contains(WINDOWS)) { - flavor = Flavor.WINDOWS; - } - t = getFlavor(flavor); - } - else { - try { - t = (Terminal) Thread.currentThread().getContextClassLoader().loadClass(type).newInstance(); - } - catch (Exception e) { - throw new IllegalArgumentException(MessageFormat.format("Invalid terminal type: {0}", type), e); - } - } - } - } - catch (Exception e) { - Log.error("Failed to construct terminal; falling back to unsupported", e); - t = new UnsupportedTerminal(); - } - - Log.debug("Created Terminal: ", t); - - try { - t.init(); - } - catch (Exception e) { - Log.error("Terminal initialization failed; falling back to unsupported", e); - return new UnsupportedTerminal(); - } - - return t; - } - - public static synchronized void reset() { - holder.remove(); - } - - public static synchronized void resetIf(final Terminal t) { - if (holder.get() == t) { - reset(); - } - } - - public static enum Type - { - AUTO, - WINDOWS, - UNIX, - NONE - } - - public static synchronized void configure(final String type) { - assert type != null; - System.setProperty(JLINE_TERMINAL, type); - } - - 
public static synchronized void configure(final Type type) { - assert type != null; - configure(type.name().toLowerCase()); - } - - // - // Flavor Support - // - - public static enum Flavor - { - WINDOWS, - UNIX - } - - private static final Map> FLAVORS = new HashMap>(); - - static { - registerFlavor(Flavor.WINDOWS, AnsiWindowsTerminal.class); - registerFlavor(Flavor.UNIX, UnixTerminal.class); - } - - public static synchronized Terminal get() { - Terminal t = holder.get(); - if (t == null) { - t = create(); - holder.set(t); - } - return t; - } - - public static Terminal getFlavor(final Flavor flavor) throws Exception { - Class type = FLAVORS.get(flavor); - if (type != null) { - return type.newInstance(); - } - - throw new InternalError(); - } - - public static void registerFlavor(final Flavor flavor, final Class type) { - FLAVORS.put(flavor, type); - } - -} \ No newline at end of file diff --git a/src/jline/src/main/java/scala/tools/jline/TerminalSupport.java b/src/jline/src/main/java/scala/tools/jline/TerminalSupport.java deleted file mode 100644 index 1ca12cb73f9c..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/TerminalSupport.java +++ /dev/null @@ -1,179 +0,0 @@ -/* - * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved. - * - * This software is distributable under the BSD license. See the terms of the - * BSD license in the documentation provided with this software. - */ - -package scala.tools.jline; - -import scala.tools.jline.internal.Log; -import scala.tools.jline.internal.Configuration; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; - -/** - * Provides support for {@link Terminal} instances. - * - * @author Jason Dillon - * @since 2.0 - */ -public abstract class TerminalSupport - implements Terminal -{ - public static String DEFAULT_KEYBINDINGS_PROPERTIES = "keybindings.properties"; - - public static final String JLINE_SHUTDOWNHOOK = "jline.shutdownhook"; - - public static final int DEFAULT_WIDTH = 80; - - public static final int DEFAULT_HEIGHT = 24; - - private Thread shutdownHook; - - private boolean shutdownHookEnabled; - - private boolean supported; - - private boolean echoEnabled; - - private boolean ansiSupported; - - protected TerminalSupport(final boolean supported) { - this.supported = supported; - this.shutdownHookEnabled = Configuration.getBoolean(JLINE_SHUTDOWNHOOK, false); - } - - public void init() throws Exception { - installShutdownHook(new RestoreHook()); - } - - public void restore() throws Exception { - TerminalFactory.resetIf(this); - removeShutdownHook(); - } - - public void reset() throws Exception { - restore(); - init(); - } - - // Shutdown hooks causes classloader leakage in sbt, - // so they are only installed if -Djline.shutdownhook is true. - protected void installShutdownHook(final Thread hook) { - if (!shutdownHookEnabled) { - Log.debug("Not install shutdown hook " + hook + " because they are disabled."); - return; - } - - assert hook != null; - - if (shutdownHook != null) { - throw new IllegalStateException("Shutdown hook already installed"); - } - - try { - Runtime.getRuntime().addShutdownHook(hook); - shutdownHook = hook; - } - catch (AbstractMethodError e) { - // JDK 1.3+ only method. Bummer. 
- Log.trace("Failed to register shutdown hook: ", e); - } - } - - protected void removeShutdownHook() { - if (!shutdownHookEnabled) - return; - - if (shutdownHook != null) { - try { - Runtime.getRuntime().removeShutdownHook(shutdownHook); - } - catch (AbstractMethodError e) { - // JDK 1.3+ only method. Bummer. - Log.trace("Failed to remove shutdown hook: ", e); - } - catch (IllegalStateException e) { - // The VM is shutting down, not a big deal; ignore - } - shutdownHook = null; - } - } - - public final boolean isSupported() { - return supported; - } - - public synchronized boolean isAnsiSupported() { - return ansiSupported; - } - - protected synchronized void setAnsiSupported(final boolean supported) { - this.ansiSupported = supported; - Log.debug("Ansi supported: ", supported); - } - - /** - * Subclass to change behavior if needed. - * @return the passed out - */ - public OutputStream wrapOutIfNeeded(OutputStream out) { - return out; - } - - /** - * Defaults to true which was the behaviour before this method was added. - */ - public boolean hasWeirdWrap() { - return true; - } - - public int getWidth() { - return DEFAULT_WIDTH; - } - - public int getHeight() { - return DEFAULT_HEIGHT; - } - - public synchronized boolean isEchoEnabled() { - return echoEnabled; - } - - public synchronized void setEchoEnabled(final boolean enabled) { - this.echoEnabled = enabled; - Log.debug("Echo enabled: ", enabled); - } - - public int readCharacter(final InputStream in) throws IOException { - return in.read(); - } - - public int readVirtualKey(final InputStream in) throws IOException { - return readCharacter(in); - } - - public InputStream getDefaultBindings() { - return TerminalSupport.class.getResourceAsStream(DEFAULT_KEYBINDINGS_PROPERTIES); - } - - // - // RestoreHook - // - - protected class RestoreHook - extends Thread - { - public void start() { - try { - restore(); - } - catch (Exception e) { - Log.trace("Failed to restore: ", e); - } - } - } -} \ No newline at end of file diff --git a/src/jline/src/main/java/scala/tools/jline/UnixTerminal.java b/src/jline/src/main/java/scala/tools/jline/UnixTerminal.java deleted file mode 100644 index 94a1b98c0d09..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/UnixTerminal.java +++ /dev/null @@ -1,248 +0,0 @@ -/* - * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved. - * - * This software is distributable under the BSD license. See the terms of the - * BSD license in the documentation provided with this software. - */ - -package scala.tools.jline; - -import scala.tools.jline.console.Key; -import scala.tools.jline.internal.Configuration; -import scala.tools.jline.internal.Log; -import scala.tools.jline.internal.ReplayPrefixOneCharInputStream; -import scala.tools.jline.internal.TerminalLineSettings; - -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.util.HashMap; -import java.util.Map; - -import static scala.tools.jline.UnixTerminal.UnixKey.*; -import static scala.tools.jline.console.Key.*; - -/** - * Terminal that is used for unix platforms. Terminal initialization - * is handled by issuing the stty command against the - * /dev/tty file to disable character echoing and enable - * character input. All known unix systems (including - * Linux and Macintosh OS X) support the stty), so this - * implementation should work for an reasonable POSIX system. 
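The UnixTerminal being deleted here drives the console by running stty against /dev/tty, as the comment above describes; further down in the file its init() issues "-icanon min 1" to switch to character-buffered input and "-echo" to disable echoing. A hedged sketch of that approach from Scala, shelling out with sys.process instead of jline's TerminalLineSettings (the helper names are illustrative, and restoring simply re-enables the canonical defaults rather than replaying saved settings):

    import scala.sys.process._

    object SttySketch {
      // Apply one stty setting to the controlling terminal; returns the command's exit code.
      private def stty(args: String): Int =
        Seq("sh", "-c", s"stty $args < /dev/tty").!

      def characterMode(): Unit = { stty("-icanon min 1"); stty("-echo") }
      def lineMode(): Unit      = { stty("icanon"); stty("echo") }
    }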
- * - * @author Marc Prud'hommeaux - * @author Dale Kemp - * @author Jason Dillon - * @author Jean-Baptiste Onofré - * @since 2.0 - */ -public class UnixTerminal - extends TerminalSupport -{ - private final TerminalLineSettings settings = new TerminalLineSettings(); - - private final ReplayPrefixOneCharInputStream replayStream; - - private final InputStreamReader replayReader; - - public UnixTerminal() throws Exception { - super(true); - - this.replayStream = new ReplayPrefixOneCharInputStream(Configuration.getInputEncoding()); - this.replayReader = new InputStreamReader(replayStream, replayStream.getEncoding()); - } - - protected TerminalLineSettings getSettings() { - return settings; - } - - /** - * Remove line-buffered input by invoking "stty -icanon min 1" - * against the current terminal. - */ - @Override - public void init() throws Exception { - super.init(); - - setAnsiSupported(true); - - // set the console to be character-buffered instead of line-buffered - settings.set("-icanon min 1"); - - setEchoEnabled(false); - } - - /** - * Restore the original terminal configuration, which can be used when - * shutting down the console reader. The ConsoleReader cannot be - * used after calling this method. - */ - @Override - public void restore() throws Exception { - settings.restore(); - super.restore(); - // print a newline after the terminal exits. - // this should probably be a configurable. - System.out.println(); - } - - /** - * Returns the value of stty columns param. - */ - @Override - public int getWidth() { - int w = settings.getProperty("columns"); - return w < 1 ? DEFAULT_WIDTH : w; - } - - /** - * Returns the value of stty rows>/tt> param. - */ - @Override - public int getHeight() { - int h = settings.getProperty("rows"); - return h < 1 ? DEFAULT_HEIGHT : h; - } - - @Override - public synchronized void setEchoEnabled(final boolean enabled) { - try { - if (enabled) { - settings.set("echo"); - } - else { - settings.set("-echo"); - } - super.setEchoEnabled(enabled); - } - catch (Exception e) { - Log.error("Failed to ", (enabled ? "enable" : "disable"), " echo: ", e); - } - } - - @Override - public int readVirtualKey(final InputStream in) throws IOException { - int c = readCharacter(in); - - if (Key.valueOf(c) == DELETE && settings.getProperty("erase") == DELETE.code) { - c = BACKSPACE.code; - } - - UnixKey key = UnixKey.valueOf(c); - - // in Unix terminals, arrow keys are represented by a sequence of 3 characters. 
E.g., the up arrow key yields 27, 91, 68 - if (key == ARROW_START) { - // also the escape key is 27 thats why we read until we have something different than 27 - // this is a bugfix, because otherwise pressing escape and than an arrow key was an undefined state - while (key == ARROW_START) { - c = readCharacter(in); - key = UnixKey.valueOf(c); - } - - if (key == ARROW_PREFIX || key == O_PREFIX) { - c = readCharacter(in); - key = UnixKey.valueOf(c); - - if (key == ARROW_UP) { - return CTRL_P.code; - } - else if (key == ARROW_DOWN) { - return CTRL_N.code; - } - else if (key == ARROW_LEFT) { - return CTRL_B.code; - } - else if (key == ARROW_RIGHT) { - return CTRL_F.code; - } - else if (key == HOME_CODE) { - return CTRL_A.code; - } - else if (key == END_CODE) { - return CTRL_E.code; - } - else if (key == DEL_THIRD) { - readCharacter(in); // read 4th & ignore - return DELETE.code; - } - } - else if (c == 'b') { // alt-b: go back a word - return CTRL_O.code; // PREV_WORD - } - else if (c == 'f') { // alt-f: go forward a word - return CTRL_T.code; // NEXT_WORD - } - else if (key == DEL) { // alt-backspace: delete previous word - return CTRL_W.code; // DELETE_PREV_WORD - } - else if (c == 'd') { // alt-d: delete next word - return CTRL_X.code; // DELETE_NEXT_WORD - } - - } - - // handle unicode characters, thanks for a patch from amyi@inf.ed.ac.uk - if (c > 128) { - // handle unicode characters longer than 2 bytes, - // thanks to Marc.Herbert@continuent.com - replayStream.setInput(c, in); - // replayReader = new InputStreamReader(replayStream, encoding); - c = replayReader.read(); - } - - return c; - } - - /** - * Unix keys. - */ - public static enum UnixKey - { - ARROW_START(27), - - ARROW_PREFIX(91), - - ARROW_LEFT(68), - - ARROW_RIGHT(67), - - ARROW_UP(65), - - ARROW_DOWN(66), - - O_PREFIX(79), - - HOME_CODE(72), - - END_CODE(70), - - DEL_THIRD(51), - - DEL_SECOND(126), - - DEL(127); - - - public final short code; - - UnixKey(final int code) { - this.code = (short) code; - } - - private static final Map codes; - - static { - Map map = new HashMap(); - - for (UnixKey key : UnixKey.values()) { - map.put(key.code, key); - } - - codes = map; - } - - public static UnixKey valueOf(final int code) { - return codes.get((short) code); - } - } -} \ No newline at end of file diff --git a/src/jline/src/main/java/scala/tools/jline/UnsupportedTerminal.java b/src/jline/src/main/java/scala/tools/jline/UnsupportedTerminal.java deleted file mode 100644 index 04fe4f7f1612..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/UnsupportedTerminal.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved. - * - * This software is distributable under the BSD license. See the terms of the - * BSD license in the documentation provided with this software. - */ - -package scala.tools.jline; - -/** - * An unsupported terminal. - * - * @author Marc Prud'hommeaux - * @author Jason Dillon - * @since 2.0 - */ -public class UnsupportedTerminal - extends TerminalSupport -{ - public UnsupportedTerminal() { - super(false); - setAnsiSupported(false); - setEchoEnabled(true); - } -} diff --git a/src/jline/src/main/java/scala/tools/jline/WindowsTerminal.java b/src/jline/src/main/java/scala/tools/jline/WindowsTerminal.java deleted file mode 100644 index 4c70155f59fb..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/WindowsTerminal.java +++ /dev/null @@ -1,468 +0,0 @@ -/* - * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved. 
- * - * This software is distributable under the BSD license. See the terms of the - * BSD license in the documentation provided with this software. - */ - -package scala.tools.jline; - -import java.io.FileDescriptor; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.util.HashMap; -import java.util.Map; - -import scala.tools.jline.internal.Configuration; -import org.fusesource.jansi.internal.WindowsSupport; - -import scala.tools.jline.internal.Log; -import scala.tools.jline.internal.ReplayPrefixOneCharInputStream; - -import static scala.tools.jline.WindowsTerminal.ConsoleMode.*; -import static scala.tools.jline.WindowsTerminal.WindowsKey.*; -import static scala.tools.jline.console.Key.*; - -/** - * Terminal implementation for Microsoft Windows. Terminal initialization in - * {@link #init} is accomplished by extracting the - * jline_version.dll, saving it to the system temporary - * directoy (determined by the setting of the java.io.tmpdir System - * property), loading the library, and then calling the Win32 APIs SetConsoleMode and - * GetConsoleMode to - * disable character echoing. - *

    - *

    - * By default, the {@link #readCharacter} method will attempt to test to see if - * the specified {@link InputStream} is {@link System#in} or a wrapper around - * {@link FileDescriptor#in}, and if so, will bypass the character reading to - * directly invoke the readc() method in the JNI library. This is so the class - * can read special keys (like arrow keys) which are otherwise inaccessible via - * the {@link System#in} stream. Using JNI reading can be bypassed by setting - * the jline.WindowsTerminal.directConsole system property - * to false. - *

    - * - * @author Marc Prud'hommeaux - * @author Jason Dillon - * @since 2.0 - */ -public class WindowsTerminal - extends TerminalSupport -{ - public static final String JLINE_WINDOWS_TERMINAL_INPUT_ENCODING = "jline.WindowsTerminal.input.encoding"; - - public static final String JLINE_WINDOWS_TERMINAL_OUTPUT_ENCODING = "jline.WindowsTerminal.output.encoding"; - - public static final String JLINE_WINDOWS_TERMINAL_DIRECT_CONSOLE = "jline.WindowsTerminal.directConsole"; - - public static final String WINDOWSBINDINGS_PROPERTIES = "windowsbindings.properties"; - - public static final String ANSI = WindowsTerminal.class.getName() + ".ansi"; - - private boolean directConsole; - - private int originalMode; - - private final ReplayPrefixOneCharInputStream replayStream; - - private final InputStreamReader replayReader; - - public WindowsTerminal() throws Exception { - super(true); - - this.replayStream = - new ReplayPrefixOneCharInputStream(Configuration.getString(JLINE_WINDOWS_TERMINAL_INPUT_ENCODING, Configuration.getFileEncoding())); - this.replayReader = new InputStreamReader(replayStream, replayStream.getEncoding()); - } - - @Override - public void init() throws Exception { - super.init(); - - setAnsiSupported(Boolean.getBoolean(ANSI)); - - // - // FIXME: Need a way to disable direct console and sysin detection muck - // - - setDirectConsole(Boolean.getBoolean(JLINE_WINDOWS_TERMINAL_DIRECT_CONSOLE)); - - this.originalMode = getConsoleMode(); - setConsoleMode(originalMode & ~ENABLE_ECHO_INPUT.code); - setEchoEnabled(false); - } - - /** - * Restore the original terminal configuration, which can be used when - * shutting down the console reader. The ConsoleReader cannot be - * used after calling this method. - */ - @Override - public void restore() throws Exception { - // restore the old console mode - setConsoleMode(originalMode); - super.restore(); - } - - @Override - public int getWidth() { - int w = getWindowsTerminalWidth(); - return w < 1 ? DEFAULT_WIDTH : w; - } - - @Override - public int getHeight() { - int h = getWindowsTerminalHeight(); - return h < 1 ? DEFAULT_HEIGHT : h; - } - - @Override - public void setEchoEnabled(final boolean enabled) { - // Must set these four modes at the same time to make it work fine. - if (enabled) { - setConsoleMode(getConsoleMode() | - ENABLE_ECHO_INPUT.code | - ENABLE_LINE_INPUT.code | - ENABLE_PROCESSED_INPUT.code | - ENABLE_WINDOW_INPUT.code); - } - else { - setConsoleMode(getConsoleMode() & - ~(ENABLE_LINE_INPUT.code | - ENABLE_ECHO_INPUT.code | - ENABLE_PROCESSED_INPUT.code | - ENABLE_WINDOW_INPUT.code)); - } - super.setEchoEnabled(enabled); - } - - /** - * Whether or not to allow the use of the JNI console interaction. - */ - public void setDirectConsole(final boolean flag) { - this.directConsole = flag; - Log.debug("Direct console: ", flag); - } - - /** - * Whether or not to allow the use of the JNI console interaction. 
- */ - public Boolean getDirectConsole() { - return directConsole; - } - - - @Override - public int readCharacter(final InputStream in) throws IOException { - // if we can detect that we are directly wrapping the system - // input, then bypass the input stream and read directly (which - // allows us to access otherwise unreadable strokes, such as - // the arrow keys) - - if (directConsole || isSystemIn(in)) { - return readByte(); - } - else { - return super.readCharacter(in); - } - } - - private boolean isSystemIn(final InputStream in) throws IOException { - assert in != null; - - if (in == System.in) { - return true; - } - else if (in instanceof FileInputStream && ((FileInputStream) in).getFD() == FileDescriptor.in) { - return true; - } - - return false; - } - - @Override - public int readVirtualKey(final InputStream in) throws IOException { - int indicator = readCharacter(in); - - // in Windows terminals, arrow keys are represented by - // a sequence of 2 characters. E.g., the up arrow - // key yields 224, 72 - if (indicator == SPECIAL_KEY_INDICATOR.code || indicator == NUMPAD_KEY_INDICATOR.code) { - int c = readCharacter(in); - WindowsKey key = WindowsKey.valueOf(c); - if (key == null) - return 0; - - switch (key) { - case UP_ARROW_KEY: - return CTRL_P.code; // translate UP -> CTRL-P - - case LEFT_ARROW_KEY: - return CTRL_B.code; // translate LEFT -> CTRL-B - - case RIGHT_ARROW_KEY: - return CTRL_F.code; // translate RIGHT -> CTRL-F - - case DOWN_ARROW_KEY: - return CTRL_N.code; // translate DOWN -> CTRL-N - - case DELETE_KEY: - return CTRL_QM.code; // translate DELETE -> CTRL-? - - case HOME_KEY: - return CTRL_A.code; - - case END_KEY: - return CTRL_E.code; - - case PAGE_UP_KEY: - return CTRL_K.code; - - case PAGE_DOWN_KEY: - return CTRL_L.code; - - case ESCAPE_KEY: - return CTRL_OB.code; // translate ESCAPE -> CTRL-[ - - case INSERT_KEY: - return CTRL_C.code; - - default: - return 0; - } - } - else if (indicator > 128) { - // handle unicode characters longer than 2 bytes, - // thanks to Marc.Herbert@continuent.com - replayStream.setInput(indicator, in); - // replayReader = new InputStreamReader(replayStream, encoding); - indicator = replayReader.read(); - - } - - return indicator; - } - - @Override - public InputStream getDefaultBindings() { - return WindowsTerminal.class.getResourceAsStream(WINDOWSBINDINGS_PROPERTIES); - } - - // - // Native Bits - // - private int getConsoleMode() { - return WindowsSupport.getConsoleMode(); - } - - private void setConsoleMode(int mode) { - WindowsSupport.setConsoleMode(mode); - } - - private int readByte() { - return WindowsSupport.readByte(); - } - - private int getWindowsTerminalWidth() { - return WindowsSupport.getWindowsTerminalWidth(); - } - - private int getWindowsTerminalHeight() { - return WindowsSupport.getWindowsTerminalHeight(); - } - - /** - * Console mode - *

    - * Constants copied wincon.h. - */ - public static enum ConsoleMode - { - /** - * The ReadFile or ReadConsole function returns only when a carriage return - * character is read. If this mode is disable, the functions return when one - * or more characters are available. - */ - ENABLE_LINE_INPUT(2), - - /** - * Characters read by the ReadFile or ReadConsole function are written to - * the active screen buffer as they are read. This mode can be used only if - * the ENABLE_LINE_INPUT mode is also enabled. - */ - ENABLE_ECHO_INPUT(4), - - /** - * CTRL+C is processed by the system and is not placed in the input buffer. - * If the input buffer is being read by ReadFile or ReadConsole, other - * control keys are processed by the system and are not returned in the - * ReadFile or ReadConsole buffer. If the ENABLE_LINE_INPUT mode is also - * enabled, backspace, carriage return, and linefeed characters are handled - * by the system. - */ - ENABLE_PROCESSED_INPUT(1), - - /** - * User interactions that change the size of the console screen buffer are - * reported in the console's input buffee. Information about these events - * can be read from the input buffer by applications using - * theReadConsoleInput function, but not by those using ReadFile - * orReadConsole. - */ - ENABLE_WINDOW_INPUT(8), - - /** - * If the mouse pointer is within the borders of the console window and the - * window has the keyboard focus, mouse events generated by mouse movement - * and button presses are placed in the input buffer. These events are - * discarded by ReadFile or ReadConsole, even when this mode is enabled. - */ - ENABLE_MOUSE_INPUT(16), - - /** - * When enabled, text entered in a console window will be inserted at the - * current cursor location and all text following that location will not be - * overwritten. When disabled, all following text will be overwritten. An OR - * operation must be performed with this flag and the ENABLE_EXTENDED_FLAGS - * flag to enable this functionality. - */ - ENABLE_PROCESSED_OUTPUT(1), - - /** - * This flag enables the user to use the mouse to select and edit text. To - * enable this option, use the OR to combine this flag with - * ENABLE_EXTENDED_FLAGS. - */ - ENABLE_WRAP_AT_EOL_OUTPUT(2),; - - public final int code; - - ConsoleMode(final int code) { - this.code = code; - } - } - - /** - * Windows keys. - *

    - * Constants copied wincon.h. - */ - public static enum WindowsKey - { - /** - * On windows terminals, this character indicates that a 'special' key has - * been pressed. This means that a key such as an arrow key, or delete, or - * home, etc. will be indicated by the next character. - */ - SPECIAL_KEY_INDICATOR(224), - - /** - * On windows terminals, this character indicates that a special key on the - * number pad has been pressed. - */ - NUMPAD_KEY_INDICATOR(0), - - /** - * When following the SPECIAL_KEY_INDICATOR or NUMPAD_KEY_INDICATOR, - * this character indicates an left arrow key press. - */ - LEFT_ARROW_KEY(75), - - /** - * When following the SPECIAL_KEY_INDICATOR or NUMPAD_KEY_INDICATOR - * this character indicates an - * right arrow key press. - */ - RIGHT_ARROW_KEY(77), - - /** - * When following the SPECIAL_KEY_INDICATOR or NUMPAD_KEY_INDICATOR - * this character indicates an up - * arrow key press. - */ - UP_ARROW_KEY(72), - - /** - * When following the SPECIAL_KEY_INDICATOR or NUMPAD_KEY_INDICATOR - * this character indicates an - * down arrow key press. - */ - DOWN_ARROW_KEY(80), - - /** - * When following the SPECIAL_KEY_INDICATOR or NUMPAD_KEY_INDICATOR - * this character indicates that - * the delete key was pressed. - */ - DELETE_KEY(83), - - /** - * When following the SPECIAL_KEY_INDICATOR or NUMPAD_KEY_INDICATOR - * this character indicates that - * the home key was pressed. - */ - HOME_KEY(71), - - /** - * When following the SPECIAL_KEY_INDICATOR or NUMPAD_KEY_INDICATOR - * this character indicates that - * the end key was pressed. - */ - END_KEY(79), - - /** - * When following the SPECIAL_KEY_INDICATOR or NUMPAD_KEY_INDICATOR - * this character indicates that - * the page up key was pressed. - */ - PAGE_UP_KEY(73), - - /** - * When following the SPECIAL_KEY_INDICATOR or NUMPAD_KEY_INDICATOR - * this character indicates that - * the page down key was pressed. - */ - PAGE_DOWN_KEY(81), - - /** - * When following the SPECIAL_KEY_INDICATOR or NUMPAD_KEY_INDICATOR - * this character indicates that - * the insert key was pressed. - */ - INSERT_KEY(82), - - /** - * When following the SPECIAL_KEY_INDICATOR or NUMPAD_KEY_INDICATOR, - * this character indicates that the escape key was pressed. - */ - ESCAPE_KEY(0),; - - public final int code; - - WindowsKey(final int code) { - this.code = code; - } - - private static final Map codes; - - static { - Map map = new HashMap(); - - for (WindowsKey key : WindowsKey.values()) { - map.put(key.code, key); - } - - codes = map; - } - - public static WindowsKey valueOf(final int code) { - return codes.get(code); - } - } -} diff --git a/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java b/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java deleted file mode 100644 index a375b84a5cee..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java +++ /dev/null @@ -1,2185 +0,0 @@ -/* - * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved. - * - * This software is distributable under the BSD license. See the terms of the - * BSD license in the documentation provided with this software. 
- */ - -package scala.tools.jline.console; - -import scala.tools.jline.Terminal; -import scala.tools.jline.TerminalFactory; -import scala.tools.jline.console.completer.CandidateListCompletionHandler; -import scala.tools.jline.console.completer.Completer; -import scala.tools.jline.console.completer.CompletionHandler; -import scala.tools.jline.console.history.History; -import scala.tools.jline.console.history.MemoryHistory; -import scala.tools.jline.internal.Configuration; -import scala.tools.jline.internal.Log; -import org.fusesource.jansi.AnsiOutputStream; - -import java.awt.Toolkit; -import java.awt.datatransfer.Clipboard; -import java.awt.datatransfer.DataFlavor; -import java.awt.datatransfer.Transferable; -import java.awt.datatransfer.UnsupportedFlavorException; -import java.awt.event.ActionListener; -import java.io.*; -import java.util.*; - -/** - * A reader for console applications. It supports custom tab-completion, - * saveable command history, and command line editing. On some platforms, - * platform-specific commands will need to be issued before the reader will - * function properly. See {@link jline.Terminal#init} for convenience - * methods for issuing platform-specific setup commands. - * - * @author Marc Prud'hommeaux - * @author Jason Dillon - */ -public class ConsoleReader -{ - public static final String JLINE_NOBELL = "jline.nobell"; - - public static final String JLINE_EXPANDEVENTS = "jline.expandevents"; - - public static final char BACKSPACE = '\b'; - - public static final char RESET_LINE = '\r'; - - public static final char KEYBOARD_BELL = '\07'; - - public static final char NULL_MASK = 0; - - public static final int TAB_WIDTH = 4; - - private static final ResourceBundle - resources = ResourceBundle.getBundle(CandidateListCompletionHandler.class.getName()); - - private final Terminal terminal; - - private InputStream in; - - private final Writer out; - - private final CursorBuffer buf = new CursorBuffer(); - - private String prompt; - - private boolean bellEnabled = true; - - private boolean expandEvents = false; - - private Character mask; - - private Character echoCharacter; - - private StringBuffer searchTerm = null; - - private String previousSearchTerm = ""; - - private int searchIndex = -1; - - public ConsoleReader(final InputStream in, final OutputStream out, final InputStream bindings, final Terminal term) throws - IOException - { - this.in = in; - this.terminal = term != null ? term : TerminalFactory.get(); - this.out = new PrintWriter(getTerminal().wrapOutIfNeeded(out)); - this.keyBindings = loadKeyBindings(bindings); - - setBellEnabled(!Configuration.getBoolean(JLINE_NOBELL, false)); - setExpandEvents(Configuration.getBoolean(JLINE_EXPANDEVENTS, false)); - } - - /** - * @deprecated use {@link #ConsoleReader(InputStream, OutputStream, InputStream, Terminal)} - * to let the terminal wrap the output stream if needed. - */ - public ConsoleReader(final InputStream in, final Writer out, final InputStream bindings, final Terminal term) throws - IOException - { - this.in = in; - this.out = out; - this.terminal = term != null ? term : TerminalFactory.get(); - this.keyBindings = loadKeyBindings(bindings); - - setBellEnabled(!Configuration.getBoolean(JLINE_NOBELL, false)); - } - - /** - * @deprecated use {@link #ConsoleReader(InputStream, OutputStream, InputStream, Terminal)} - * to let the terminal wrap the output stream if needed. 
- */ - public ConsoleReader(final InputStream in, final Writer out, final Terminal term) throws IOException { - this(in, out, null, term); - } - - /** - * @deprecated use {@link #ConsoleReader(InputStream, OutputStream, InputStream, Terminal)} - * to let the terminal wrap the output stream if needed. - */ - public ConsoleReader(final InputStream in, final Writer out) throws IOException - { - this(in, out, null, null); - } - - /** - * Create a new reader using {@link FileDescriptor#in} for input and - * {@link System#out} for output. - *
    - * {@link FileDescriptor#in} is used because it has a better chance of not being buffered. - */ - public ConsoleReader() throws IOException { - this(new FileInputStream(FileDescriptor.in), System.out, null, null ); - } - - // FIXME: Only used for tests - - void setInput(final InputStream in) { - this.in = in; - } - - public InputStream getInput() { - return in; - } - - public Writer getOutput() { - return out; - } - - public Terminal getTerminal() { - return terminal; - } - - public CursorBuffer getCursorBuffer() { - return buf; - } - - public void setBellEnabled(final boolean enabled) { - this.bellEnabled = enabled; - } - - public boolean isBellEnabled() { - return bellEnabled; - } - - public void setExpandEvents(final boolean expand) { - this.expandEvents = expand; - } - - public boolean getExpandEvents() { - return expandEvents; - } - - public void setPrompt(final String prompt) { - this.prompt = prompt; - } - - public String getPrompt() { - return prompt; - } - - /** - * Set the echo character. For example, to have "*" entered when a password is typed: - *
    -     * myConsoleReader.setEchoCharacter(new Character('*'));
    - *
    - * Setting the character to
    -     * null
    - * will restore normal character echoing. Setting the character to
    -     * new Character(0)
    - *
    - * will cause nothing to be echoed. - * - * @param c the character to echo to the console in place of the typed character. - */ - public void setEchoCharacter(final Character c) { - this.echoCharacter = c; - } - - /** - * Returns the echo character. - */ - public Character getEchoCharacter() { - return echoCharacter; - } - - /** - * Erase the current line. - * - * @return false if we failed (e.g., the buffer was empty) - */ - protected final boolean resetLine() throws IOException { - if (buf.cursor == 0) { - return false; - } - - backspaceAll(); - - return true; - } - - int getCursorPosition() { - // FIXME: does not handle anything but a line with a prompt absolute position - String prompt = getPrompt(); - return ((prompt == null) ? 0 : stripAnsi(lastLine(prompt)).length()) + buf.cursor; - } - - /** - * Returns the text after the last '\n'. - * prompt is returned if no '\n' characters are present. - * null is returned if prompt is null. - */ - private String lastLine(String str) { - if (str == null) return ""; - int last = str.lastIndexOf("\n"); - - if (last >= 0) { - return str.substring(last + 1, str.length()); - } - - return str; - } - - private String stripAnsi(String str) { - if (str == null) return ""; - try { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - AnsiOutputStream aos = new AnsiOutputStream(baos); - aos.write(str.getBytes()); - aos.flush(); - return baos.toString(); - } catch (IOException e) { - return str; - } - } - - /** - * Move the cursor position to the specified absolute index. - */ - public final boolean setCursorPosition(final int position) throws IOException { - return moveCursor(position - buf.cursor) != 0; - } - - /** - * Set the current buffer's content to the specified {@link String}. The - * visual console will be modified to show the current buffer. - * - * @param buffer the new contents of the buffer. - */ - private void setBuffer(final String buffer) throws IOException { - // don't bother modifying it if it is unchanged - if (buffer.equals(buf.buffer.toString())) { - return; - } - - // obtain the difference between the current buffer and the new one - int sameIndex = 0; - - for (int i = 0, l1 = buffer.length(), l2 = buf.buffer.length(); (i < l1) - && (i < l2); i++) { - if (buffer.charAt(i) == buf.buffer.charAt(i)) { - sameIndex++; - } - else { - break; - } - } - - int diff = buf.cursor - sameIndex; - if (diff < 0) { // we can't backspace here so try from the end of the buffer - moveToEnd(); - diff = buf.buffer.length() - sameIndex; - } - - backspace(diff); // go back for the differences - killLine(); // clear to the end of the line - buf.buffer.setLength(sameIndex); // the new length - putString(buffer.substring(sameIndex)); // append the differences - } - - private void setBuffer(final CharSequence buffer) throws IOException { - setBuffer(String.valueOf(buffer)); - } - - /** - * Output put the prompt + the current buffer - */ - public final void drawLine() throws IOException { - String prompt = getPrompt(); - if (prompt != null) { - print(prompt); - } - - print(buf.buffer.toString()); - - if (buf.length() != buf.cursor) { // not at end of line - back(buf.length() - buf.cursor - 1); - } - // force drawBuffer to check for weird wrap (after clear screen) - drawBuffer(); - } - - /** - * Clear the line and redraw it. - */ - public final void redrawLine() throws IOException { - print(RESET_LINE); -// flush(); - drawLine(); - } - - /** - * Clear the buffer and add its contents to the history. - * - * @return the former contents of the buffer. 
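[Illustration only, not part of this patch.] The echo-character behaviour documented above boils down to three states: a non-null mask character is echoed in place of input, Character(0) suppresses echo entirely, and null restores normal echoing. A rough usage sketch against the API being removed here; it assumes the pre-removal scala.tools.jline classes are still on the classpath:

    import scala.tools.jline.console.ConsoleReader;

    // Sketch only: password-style input using setEchoCharacter, then restoring normal echo.
    class EchoCharSketch {
        public static void main(String[] args) throws Exception {
            ConsoleReader reader = new ConsoleReader();
            reader.setEchoCharacter('*');                  // echo '*' for every typed character
            String password = reader.readLine("password> ");
            reader.setEchoCharacter(null);                 // back to normal echoing
            System.out.println("read " + (password == null ? 0 : password.length()) + " characters");
        }
    }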
- */ - final String finishBuffer() throws IOException { // FIXME: Package protected because used by tests - String str = buf.buffer.toString(); - - if (expandEvents) { - str = expandEvents(str); - } - - // we only add it to the history if the buffer is not empty - // and if mask is null, since having a mask typically means - // the string was a password. We clear the mask after this call - if (str.length() > 0) { - if (mask == null && isHistoryEnabled()) { - history.add(str); - } - else { - mask = null; - } - } - - history.moveToEnd(); - - buf.buffer.setLength(0); - buf.cursor = 0; - - return str; - } - - /** - * Expand event designator such as !!, !#, !3, etc... - * See http://www.gnu.org/software/bash/manual/html_node/Event-Designators.html - * - * @param str - * @return - */ - protected String expandEvents(String str) throws IOException { - StringBuilder sb = new StringBuilder(); - for (int i = 0; i < str.length(); i++) { - char c = str.charAt(i); - switch (c) { - case '!': - if (i + 1 < str.length()) { - c = str.charAt(++i); - boolean neg = false; - String rep = null; - int i1, idx; - switch (c) { - case '!': - if (history.size() == 0) { - throw new IllegalArgumentException("!!: event not found"); - } - rep = history.get(history.index() - 1).toString(); - break; - case '#': - sb.append(sb.toString()); - break; - case '?': - i1 = str.indexOf('?', i + 1); - if (i1 < 0) { - i1 = str.length(); - } - String sc = str.substring(i + 1, i1); - i = i1; - idx = searchBackwards(sc); - if (idx < 0) { - throw new IllegalArgumentException("!?" + sc + ": event not found"); - } else { - rep = history.get(idx).toString(); - } - break; - case ' ': - case '\t': - sb.append('!'); - sb.append(c); - break; - case '-': - neg = true; - i++; - // fall through - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - i1 = i; - for (; i < str.length(); i++) { - c = str.charAt(i); - if (c < '0' || c > '9') { - break; - } - } - idx = 0; - try { - idx = Integer.parseInt(str.substring(i1, i)); - } catch (NumberFormatException e) { - throw new IllegalArgumentException((neg ? "!-" : "!") + str.substring(i1, i) + ": event not found"); - } - if (neg) { - if (idx < history.size()) { - rep = (history.get(history.index() - idx)).toString(); - } else { - throw new IllegalArgumentException((neg ? "!-" : "!") + str.substring(i1, i) + ": event not found"); - } - } else { - if (idx >= history.index() - history.size() && idx < history.index()) { - rep = (history.get(idx)).toString(); - } else { - throw new IllegalArgumentException((neg ? "!-" : "!") + str.substring(i1, i) + ": event not found"); - } - } - break; - default: - String ss = str.substring(i); - i = str.length(); - idx = searchBackwards(ss, history.index(), true); - if (idx < 0) { - throw new IllegalArgumentException("!" 
+ ss + ": event not found"); - } else { - rep = history.get(idx).toString(); - } - break; - } - if (rep != null) { - sb.append(rep); - } - } else { - sb.append(c); - } - break; - case '^': - if (i == 0) { - int i1 = str.indexOf('^', i + 1); - int i2 = str.indexOf('^', i1 + 1); - if (i2 < 0) { - i2 = str.length(); - } - if (i1 > 0 && i2 > 0) { - String s1 = str.substring(i + 1, i1); - String s2 = str.substring(i1 + 1, i2); - String s = history.get(history.index() - 1).toString().replace(s1, s2); - sb.append(s); - i = i2 + 1; - break; - } - } - sb.append(c); - break; - default: - sb.append(c); - break; - } - } - String result = sb.toString(); - if (!str.equals(result)) { - print(result); - println(); - flush(); - } - return result; - - } - - /** - * Write out the specified string to the buffer and the output stream. - */ - public final void putString(final CharSequence str) throws IOException { - buf.write(str); - print(str); - drawBuffer(); - } - - /** - * Output the specified character, both to the buffer and the output stream. - */ - private void putChar(final int c, final boolean print) throws IOException { - buf.write((char) c); - - if (print) { - if (mask == null) { - // no masking - print(c); - } - else if (mask == NULL_MASK) { - // Don't print anything - } - else { - print(mask); - } - - drawBuffer(); - } - } - - /** - * Redraw the rest of the buffer from the cursor onwards. This is necessary - * for inserting text into the buffer. - * - * @param clear the number of characters to clear after the end of the buffer - */ - private void drawBuffer(final int clear) throws IOException { - // debug ("drawBuffer: " + clear); - if (buf.cursor == buf.length() && clear == 0) { - } else { - char[] chars = buf.buffer.substring(buf.cursor).toCharArray(); - if (mask != null) { - Arrays.fill(chars, mask); - } - if (getTerminal().hasWeirdWrap()) { - // need to determine if wrapping will occur: - int width = getTerminal().getWidth(); - int pos = getCursorPosition(); - for (int i = 0; i < chars.length; i++) { - print(chars[i]); - if ((pos + i + 1) % width == 0) { - print(32); // move cursor to next line by printing dummy space - print(13); // CR / not newline. - } - } - } else { - print(chars); - } - clearAhead(clear, chars.length); - if (getTerminal().isAnsiSupported()) { - if (chars.length > 0) { - back(chars.length); - } - } else { - back(chars.length); - } - } - if (getTerminal().hasWeirdWrap()) { - int width = getTerminal().getWidth(); - // best guess on whether the cursor is in that weird location... - // Need to do this without calling ansi cursor location methods - // otherwise it breaks paste of wrapped lines in xterm. - if (getCursorPosition() > 0 && (getCursorPosition() % width == 0) - && buf.cursor == buf.length() && clear == 0) { - // the following workaround is reverse-engineered from looking - // at what bash sent to the terminal in the same situation - print(32); // move cursor to next line by printing dummy space - print(13); // CR / not newline. - } - } - } - - /** - * Redraw the rest of the buffer from the cursor onwards. This is necessary - * for inserting text into the buffer. - */ - private void drawBuffer() throws IOException { - drawBuffer(0); - } - - /** - * Clear ahead the specified number of characters without moving the cursor. 
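[Illustration only, not part of this patch.] The expandEvents handling earlier in this hunk follows bash-style history event designators: !! repeats the previous line, !n and !-n pick entries by index, !?text searches backwards, and ^old^new substitutes text in the previous line. A minimal standalone sketch of the two most common cases over a plain history list (a simplification, not the removed implementation):

    import java.util.Arrays;
    import java.util.List;

    // Sketch only: "!!" repeats the last history entry; "^old^new" substitutes in it.
    class EventDesignatorSketch {
        static String expand(String input, List<String> history) {
            String last = history.isEmpty() ? null : history.get(history.size() - 1);
            if (input.equals("!!")) {
                if (last == null) throw new IllegalArgumentException("!!: event not found");
                return last;
            }
            if (input.startsWith("^") && last != null) {
                String[] parts = input.split("\\^");       // ["", old, new]
                if (parts.length >= 3) {
                    return last.replace(parts[1], parts[2]);
                }
            }
            return input;                                  // anything else passes through unchanged
        }

        public static void main(String[] args) {
            List<String> history = Arrays.asList("ls -la", "grep foo bar.txt");
            System.out.println(expand("!!", history));         // grep foo bar.txt
            System.out.println(expand("^foo^baz", history));   // grep baz bar.txt
        }
    }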
- * - * @param num the number of characters to clear - * @param delta the difference between the internal cursor and the screen - * cursor - if > 0, assume some stuff was printed and weird wrap has to be - * checked - */ - private void clearAhead(final int num, int delta) throws IOException { - if (num == 0) { - return; - } - - if (getTerminal().isAnsiSupported()) { - int width = getTerminal().getWidth(); - int screenCursorCol = getCursorPosition() + delta; - // clear current line - printAnsiSequence("K"); - // if cursor+num wraps, then we need to clear the line(s) below too - int curCol = screenCursorCol % width; - int endCol = (screenCursorCol + num - 1) % width; - int lines = num / width; - if (endCol < curCol) lines++; - for (int i = 0; i < lines; i++) { - printAnsiSequence("B"); - printAnsiSequence("2K"); - } - for (int i = 0; i < lines; i++) { - printAnsiSequence("A"); - } - return; - } - - // print blank extra characters - print(' ', num); - - // we need to flush here so a "clever" console doesn't just ignore the redundancy - // of a space followed by a backspace. -// flush(); - - // reset the visual cursor - back(num); - -// flush(); - } - - /** - * Move the visual cursor backwards without modifying the buffer cursor. - */ - protected void back(final int num) throws IOException { - if (num == 0) return; - if (getTerminal().isAnsiSupported()) { - int width = getTerminal().getWidth(); - int cursor = getCursorPosition(); - int realCursor = cursor + num; - int realCol = realCursor % width; - int newCol = cursor % width; - int moveup = num / width; - int delta = realCol - newCol; - if (delta < 0) moveup++; - if (moveup > 0) { - printAnsiSequence(moveup + "A"); - } - printAnsiSequence((1 + newCol) + "G"); - return; - } - print(BACKSPACE, num); -// flush(); - } - - /** - * Flush the console output stream. This is important for printout out single characters (like a backspace or - * keyboard) that we want the console to handle immediately. - */ - public void flush() throws IOException { - out.flush(); - } - - private int backspaceAll() throws IOException { - return backspace(Integer.MAX_VALUE); - } - - /** - * Issue num backspaces. - * - * @return the number of characters backed up - */ - private int backspace(final int num) throws IOException { - if (buf.cursor == 0) { - return 0; - } - - int count = 0; - - int termwidth = getTerminal().getWidth(); - int lines = getCursorPosition() / termwidth; - count = moveCursor(-1 * num) * -1; - buf.buffer.delete(buf.cursor, buf.cursor + count); - if (getCursorPosition() / termwidth != lines) { - if (getTerminal().isAnsiSupported()) { - // debug("doing backspace redraw: " + getCursorPosition() + " on " + termwidth + ": " + lines); - printAnsiSequence("K"); - // if cursor+num wraps, then we need to clear the line(s) below too - // last char printed is one pos less than cursor so we subtract - // one -/* - // TODO: fixme (does not work - test with reverse search with wrapping line and CTRL-E) - int endCol = (getCursorPosition() + num - 1) % termwidth; - int curCol = getCursorPosition() % termwidth; - if (endCol < curCol) lines++; - for (int i = 1; i < lines; i++) { - printAnsiSequence("B"); - printAnsiSequence("2K"); - } - for (int i = 1; i < lines; i++) { - printAnsiSequence("A"); - } - return count; -*/ - } - } - drawBuffer(count); - - return count; - } - - /** - * Issue a backspace. 
- * - * @return true if successful - */ - public boolean backspace() throws IOException { - return backspace(1) == 1; - } - - protected boolean moveToEnd() throws IOException { - return moveCursor(buf.length() - buf.cursor) > 0; - } - - /** - * Delete the character at the current position and redraw the remainder of the buffer. - */ - private boolean deleteCurrentCharacter() throws IOException { - if (buf.length() == 0 || buf.cursor == buf.length()) { - return false; - } - - buf.buffer.deleteCharAt(buf.cursor); - drawBuffer(1); - return true; - } - - private boolean previousWord() throws IOException { - while (isDelimiter(buf.charLeftOfCursor()) && (moveCursor(-1) != 0)) { - // nothing - } - - while (!isDelimiter(buf.charLeftOfCursor()) && (moveCursor(-1) != 0)) { - // nothing - } - - return true; - } - - private boolean nextWord() throws IOException { - while (isDelimiter(buf.charAtCursor()) && (moveCursor(1) != 0)) { - // nothing - } - - while (!isDelimiter(buf.charAtCursor()) && (moveCursor(1) != 0)) { - // nothing - } - - return true; - } - - private boolean deletePreviousWord() throws IOException { - while (isDelimiter(buf.charLeftOfCursor()) && backspace()) { - // nothing - } - - while (!isDelimiter(buf.charLeftOfCursor()) && backspace()) { - // nothing - } - - return true; - } - - private boolean deleteNextWord() throws IOException { - while (isDelimiter(buf.charAtCursor()) && deleteCurrentCharacter()) { - // nothing - } - - while (!isDelimiter(buf.charAtCursor()) && deleteCurrentCharacter()) { - // nothing - } - - return true; - } - - /** - * Move the cursor where characters. - * - * @param num If less than 0, move abs(where) to the left, otherwise move where to the right. - * @return The number of spaces we moved - */ - public int moveCursor(final int num) throws IOException { - int where = num; - - if ((buf.cursor == 0) && (where <= 0)) { - return 0; - } - - if ((buf.cursor == buf.buffer.length()) && (where >= 0)) { - return 0; - } - - if ((buf.cursor + where) < 0) { - where = -buf.cursor; - } - else if ((buf.cursor + where) > buf.buffer.length()) { - where = buf.buffer.length() - buf.cursor; - } - - moveInternal(where); - - return where; - } - - /** - * Move the cursor where characters, without checking the current buffer. - * - * @param where the number of characters to move to the right or left. 
- */ - private void moveInternal(final int where) throws IOException { - // debug ("move cursor " + where + " (" - // + buf.cursor + " => " + (buf.cursor + where) + ")"); - buf.cursor += where; - - if (getTerminal().isAnsiSupported()) { - if (where < 0) { - back(Math.abs(where)); - } else { - int width = getTerminal().getWidth(); - int cursor = getCursorPosition(); - int oldLine = (cursor - where) / width; - int newLine = cursor / width; - if (newLine > oldLine) { - if (getTerminal().hasWeirdWrap()) { - // scroll up if at bottom - // note: - // on rxvt cywgin getTerminal().getHeight() is incorrect - // MacOs xterm does not seem to support scrolling - if (getCurrentAnsiRow() == getTerminal().getHeight()) { - printAnsiSequence((newLine - oldLine) + "S"); - } - } - printAnsiSequence((newLine - oldLine) + "B"); - } - printAnsiSequence(1 +(cursor % width) + "G"); - } -// flush(); - return; - } - - char c; - - if (where < 0) { - int len = 0; - for (int i = buf.cursor; i < buf.cursor - where; i++) { - if (buf.buffer.charAt(i) == '\t') { - len += TAB_WIDTH; - } - else { - len++; - } - } - - char chars[] = new char[len]; - Arrays.fill(chars, BACKSPACE); - out.write(chars); - - return; - } - else if (buf.cursor == 0) { - return; - } - else if (mask != null) { - c = mask; - } - else { - print(buf.buffer.substring(buf.cursor - where, buf.cursor).toCharArray()); - return; - } - - // null character mask: don't output anything - if (mask == NULL_MASK) { - return; - } - - print(c, Math.abs(where)); - } - - // FIXME: replace() is not used - - public final boolean replace(final int num, final String replacement) { - buf.buffer.replace(buf.cursor - num, buf.cursor, replacement); - try { - moveCursor(-num); - drawBuffer(Math.max(0, num - replacement.length())); - moveCursor(replacement.length()); - } - catch (IOException e) { - e.printStackTrace(); - return false; - } - return true; - } - - // - // Key reading - // - - /** - * Read a character from the console. - * - * @return the character, or -1 if an EOF is received. - */ - public final int readVirtualKey() throws IOException { - int c = getTerminal().readVirtualKey(in); - - Log.trace("Keystroke: ", c); - - // clear any echo characters - clearEcho(c); - - return c; - } - - /** - * Clear the echoed characters for the specified character code. - */ - private int clearEcho(final int c) throws IOException { - // if the terminal is not echoing, then ignore - if (!getTerminal().isEchoEnabled()) { - return 0; - } - - // otherwise, clear - int num = countEchoCharacters((char) c); - back(num); - drawBuffer(num); - - return num; - } - - private int countEchoCharacters(final char c) { - // tabs as special: we need to determine the number of spaces - // to cancel based on what out current cursor position is - if (c == 9) { - int tabStop = 8; // will this ever be different? - int position = getCursorPosition(); - - return tabStop - (position % tabStop); - } - - return getPrintableCharacters(c).length(); - } - - /** - * Return the number of characters that will be printed when the specified - * character is echoed to the screen - * - * Adapted from cat by Torbjorn Granlund, as repeated in stty by David MacKenzie. 
- */ - private StringBuilder getPrintableCharacters(final char ch) { - StringBuilder sbuff = new StringBuilder(); - - if (ch >= 32) { - if (ch < 127) { - sbuff.append(ch); - } - else if (ch == 127) { - sbuff.append('^'); - sbuff.append('?'); - } - else { - sbuff.append('M'); - sbuff.append('-'); - - if (ch >= (128 + 32)) { - if (ch < (128 + 127)) { - sbuff.append((char) (ch - 128)); - } - else { - sbuff.append('^'); - sbuff.append('?'); - } - } - else { - sbuff.append('^'); - sbuff.append((char) (ch - 128 + 64)); - } - } - } - else { - sbuff.append('^'); - sbuff.append((char) (ch + 64)); - } - - return sbuff; - } - - public final int readCharacter(final char... allowed) throws IOException { - // if we restrict to a limited set and the current character is not in the set, then try again. - char c; - - Arrays.sort(allowed); // always need to sort before binarySearch - - while (Arrays.binarySearch(allowed, c = (char) readVirtualKey()) < 0) { - // nothing - } - - return c; - } - - // - // Key Bindings - // - - public static final String JLINE_COMPLETION_THRESHOLD = "jline.completion.threshold"; - - public static final String JLINE_KEYBINDINGS = "jline.keybindings"; - - public static final String JLINEBINDINGS_PROPERTIES = ".jlinebindings.properties"; - - /** - * The map for logical operations. - */ - private final short[] keyBindings; - - private short[] loadKeyBindings(InputStream input) throws IOException { - if (input == null) { - try { - File file = new File(Configuration.getUserHome(), JLINEBINDINGS_PROPERTIES); - - String path = Configuration.getString(JLINE_KEYBINDINGS); - if (path != null) { - file = new File(path); - } - - if (file.isFile()) { - Log.debug("Loading user bindings from: ", file); - input = new FileInputStream(file); - } - } - catch (Exception e) { - Log.error("Failed to load user bindings", e); - } - } - - if (input == null) { - Log.debug("Using default bindings"); - input = getTerminal().getDefaultBindings(); - } - - short[] keyBindings = new short[Character.MAX_VALUE * 2]; - - Arrays.fill(keyBindings, Operation.UNKNOWN.code); - - // Loads the key bindings. Bindings file is in the format: - // - // keycode: operation name - - if (input != null) { - input = new BufferedInputStream(input); - Properties p = new Properties(); - p.load(input); - input.close(); - - for (Object key : p.keySet()) { - String val = (String) key; - - try { - short code = Short.parseShort(val); - String name = p.getProperty(val); - Operation op = Operation.valueOf(name); - keyBindings[code] = op.code; - } - catch (NumberFormatException e) { - Log.error("Failed to convert binding code: ", val, e); - } - } - - // hardwired arrow key bindings - // keybindings[VK_UP] = PREV_HISTORY; - // keybindings[VK_DOWN] = NEXT_HISTORY; - // keybindings[VK_LEFT] = PREV_CHAR; - // keybindings[VK_RIGHT] = NEXT_CHAR; - } - - return keyBindings; - } - - int getKeyForAction(final short logicalAction) { - for (int i = 0; i < keyBindings.length; i++) { - if (keyBindings[i] == logicalAction) { - return i; - } - } - - return -1; - } - - int getKeyForAction(final Operation op) { - assert op != null; - return getKeyForAction(op.code); - } - - public void printBindings() { - System.out.println("printBindings(): keyBindings.length = " + keyBindings.length); - for (int i = 0; i < keyBindings.length; i++) { - if (keyBindings[i] != Operation.UNKNOWN.code) { - System.out.println("keyBindings[" + i + "] = " + keyBindings[i]); - } - } - } - - /** - * Reads the console input and returns an array of the form [raw, key binding]. 
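[Illustration only, not part of this patch.] loadKeyBindings above reads a properties file whose entries map a raw key code to an operation name and turns it into a flat short[] lookup table indexed by key code. A standalone sketch of that translation, keeping the operation names as plain strings; the example bindings are hypothetical:

    import java.io.IOException;
    import java.io.StringReader;
    import java.util.Properties;

    // Sketch only: "keycode = operation name" properties into a code-indexed lookup table.
    class KeyBindingSketch {
        public static void main(String[] args) throws IOException {
            Properties p = new Properties();
            // hypothetical bindings: 2 -> PREV_CHAR, 6 -> NEXT_CHAR, 14 -> NEXT_HISTORY
            p.load(new StringReader("2=PREV_CHAR\n6=NEXT_CHAR\n14=NEXT_HISTORY\n"));

            String[] table = new String[Character.MAX_VALUE * 2];   // same sizing as the removed code
            for (String key : p.stringPropertyNames()) {
                table[Short.parseShort(key)] = p.getProperty(key);
            }
            System.out.println("keycode 6 -> " + table[6]);         // NEXT_CHAR
        }
    }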
- */ - private int[] readBinding() throws IOException { - int c = readVirtualKey(); - - if (c == -1) { - return null; - } - - // extract the appropriate key binding - short code = keyBindings[c]; - - Log.trace("Translated: ", c, " -> ", code); - - return new int[]{c, code}; - } - - // - // Line Reading - // - - /** - * Read the next line and return the contents of the buffer. - */ - public String readLine() throws IOException { - return readLine((String) null); - } - - /** - * Read the next line with the specified character mask. If null, then - * characters will be echoed. If 0, then no characters will be echoed. - */ - public String readLine(final Character mask) throws IOException { - return readLine(null, mask); - } - - public String readLine(final String prompt) throws IOException { - return readLine(prompt, null); - } - - /** - * Read a line from the in {@link InputStream}, and return the line - * (without any trailing newlines). - * - * @param prompt The prompt to issue to the console, may be null. - * @return A line that is read from the terminal, or null if there was null input (e.g., CTRL-D - * was pressed). - */ - public String readLine(String prompt, final Character mask) throws IOException { - // prompt may be null - // mask may be null - - // FIXME: This blows, each call to readLine will reset the console's state which doesn't seem very nice. - this.mask = mask; - if (prompt != null) { - setPrompt(prompt); - } - else { - prompt = getPrompt(); - } - - try { - if (!getTerminal().isSupported()) { - beforeReadLine(prompt, mask); - } - - if (prompt != null && prompt.length() > 0) { - out.write(prompt); - out.flush(); - } - - // if the terminal is unsupported, just use plain-java reading - if (!getTerminal().isSupported()) { - return readLine(in); - } - - String originalPrompt = this.prompt; - - final int NORMAL = 1; - final int SEARCH = 2; - int state = NORMAL; - - boolean success = true; - - while (true) { - int[] next = readBinding(); - - if (next == null) { - return null; - } - - int c = next[0]; - // int code = next[1]; - Operation code = Operation.valueOf(next[1]); - - if (c == -1) { - return null; - } - - // Search mode. - // - // Note that we have to do this first, because if there is a command - // not linked to a search command, we leave the search mode and fall - // through to the normal state. - if (state == SEARCH) { - int cursorDest = -1; - - switch (code) { - // This doesn't work right now, it seems CTRL-G is not passed - // down correctly. :( - case ABORT: - state = NORMAL; - break; - - case SEARCH_PREV: - if (searchTerm.length() == 0) { - searchTerm.append(previousSearchTerm); - } - - if (searchIndex == -1) { - searchIndex = searchBackwards(searchTerm.toString()); - } else { - searchIndex = searchBackwards(searchTerm.toString(), searchIndex); - } - break; - - case DELETE_PREV_CHAR: - if (searchTerm.length() > 0) { - searchTerm.deleteCharAt(searchTerm.length() - 1); - searchIndex = searchBackwards(searchTerm.toString()); - } - break; - - case UNKNOWN: - searchTerm.appendCodePoint(c); - searchIndex = searchBackwards(searchTerm.toString()); - break; - - default: - // Set buffer and cursor position to the found string. 
- if (searchIndex != -1) { - history.moveTo(searchIndex); - // set cursor position to the found string - cursorDest = history.current().toString().indexOf(searchTerm.toString()); - } - state = NORMAL; - break; - } - - // if we're still in search mode, print the search status - if (state == SEARCH) { - if (searchTerm.length() == 0) { - printSearchStatus("", ""); - searchIndex = -1; - } else { - if (searchIndex == -1) { - beep(); - } else { - printSearchStatus(searchTerm.toString(), history.get(searchIndex).toString()); - } - } - } - // otherwise, restore the line - else { - restoreLine(originalPrompt, cursorDest); - } - } - - if (state == NORMAL) { - switch (code) { - case EXIT: // ctrl-d - if (buf.buffer.length() == 0) { - return null; - } else { - success = deleteCurrentCharacter(); - } - break; - - case COMPLETE: // tab - success = complete(); - break; - - case MOVE_TO_BEG: - success = setCursorPosition(0); - break; - - case KILL_LINE: // CTRL-K - success = killLine(); - break; - - case CLEAR_SCREEN: // CTRL-L - success = clearScreen(); - break; - - case KILL_LINE_PREV: // CTRL-U - success = resetLine(); - break; - - case NEWLINE: // enter - moveToEnd(); - println(); // output newline - flush(); - return finishBuffer(); - - case DELETE_PREV_CHAR: // backspace - success = backspace(); - break; - - case DELETE_NEXT_CHAR: // delete - success = deleteCurrentCharacter(); - break; - - case MOVE_TO_END: - success = moveToEnd(); - break; - - case PREV_CHAR: - success = moveCursor(-1) != 0; - break; - - case NEXT_CHAR: - success = moveCursor(1) != 0; - break; - - case NEXT_HISTORY: - success = moveHistory(true); - break; - - case PREV_HISTORY: - success = moveHistory(false); - break; - - case ABORT: - case REDISPLAY: - break; - - case PASTE: - success = paste(); - break; - - case DELETE_PREV_WORD: - success = deletePreviousWord(); - break; - - case DELETE_NEXT_WORD: - success = deleteNextWord(); - break; - - case PREV_WORD: - success = previousWord(); - break; - - case NEXT_WORD: - success = nextWord(); - break; - - case START_OF_HISTORY: - success = history.moveToFirst(); - if (success) { - setBuffer(history.current()); - } - break; - - case END_OF_HISTORY: - success = history.moveToLast(); - if (success) { - setBuffer(history.current()); - } - break; - - case CLEAR_LINE: - moveInternal(-(buf.cursor)); - killLine(); - break; - - case INSERT: - buf.setOverTyping(!buf.isOverTyping()); - break; - - case SEARCH_PREV: // CTRL-R - if (searchTerm != null) { - previousSearchTerm = searchTerm.toString(); - } - searchTerm = new StringBuffer(buf.buffer); - state = SEARCH; - if (searchTerm.length() > 0) { - searchIndex = searchBackwards(searchTerm.toString()); - if (searchIndex == -1) { - beep(); - } - printSearchStatus(searchTerm.toString(), - searchIndex > -1 ? history.get(searchIndex).toString() : ""); - } else { - searchIndex = -1; - printSearchStatus("", ""); - } - break; - - case UNKNOWN: - default: - if (c != 0) { // ignore null chars - ActionListener action = triggeredActions.get((char) c); - if (action != null) { - action.actionPerformed(null); - } - else { - putChar(c, true); - } - } - else { - success = false; - } - } - - if (!success) { - beep(); - } - - flush(); - } - } - } - finally { - if (!getTerminal().isSupported()) { - afterReadLine(); - } - } - } - - /** - * Read a line for unsupported terminals. 
- */ - private String readLine(final InputStream in) throws IOException { - StringBuilder buff = new StringBuilder(); - - while (true) { - int i = in.read(); - - if (i == -1 || i == '\n' || i == '\r') { - return buff.toString(); - } - - buff.append((char) i); - } - - // return new BufferedReader (new InputStreamReader (in)).readLine (); - } - - // - // Completion - // - - private final List completers = new LinkedList(); - - private CompletionHandler completionHandler = new CandidateListCompletionHandler(); - - /** - * Add the specified {@link jline.console.completer.Completer} to the list of handlers for tab-completion. - * - * @param completer the {@link jline.console.completer.Completer} to add - * @return true if it was successfully added - */ - public boolean addCompleter(final Completer completer) { - return completers.add(completer); - } - - /** - * Remove the specified {@link jline.console.completer.Completer} from the list of handlers for tab-completion. - * - * @param completer The {@link Completer} to remove - * @return True if it was successfully removed - */ - public boolean removeCompleter(final Completer completer) { - return completers.remove(completer); - } - - /** - * Returns an unmodifiable list of all the completers. - */ - public Collection getCompleters() { - return Collections.unmodifiableList(completers); - } - - public void setCompletionHandler(final CompletionHandler handler) { - assert handler != null; - this.completionHandler = handler; - } - - public CompletionHandler getCompletionHandler() { - return this.completionHandler; - } - - /** - * Use the completers to modify the buffer with the appropriate completions. - * - * @return true if successful - */ - protected boolean complete() throws IOException { - // debug ("tab for (" + buf + ")"); - if (completers.size() == 0) { - return false; - } - - List candidates = new LinkedList(); - String bufstr = buf.buffer.toString(); - int cursor = buf.cursor; - - int position = -1; - - for (Completer comp : completers) { - if ((position = comp.complete(bufstr, cursor, candidates)) != -1) { - break; - } - } - - return candidates.size() != 0 && getCompletionHandler().complete(this, candidates, position); - } - - /** - * The number of tab-completion candidates above which a warning will be - * prompted before showing all the candidates. - */ - private int autoprintThreshold = Integer.getInteger(JLINE_COMPLETION_THRESHOLD, 100); // same default as bash - - /** - * @param threshold the number of candidates to print without issuing a warning. - */ - public void setAutoprintThreshold(final int threshold) { - this.autoprintThreshold = threshold; - } - - /** - * @return the number of candidates to print without issuing a warning. - */ - public int getAutoprintThreshold() { - return autoprintThreshold; - } - - private boolean paginationEnabled; - - /** - * Whether to use pagination when the number of rows of candidates exceeds the height of the terminal. - */ - public void setPaginationEnabled(final boolean enabled) { - this.paginationEnabled = enabled; - } - - /** - * Whether to use pagination when the number of rows of candidates exceeds the height of the terminal. 
- */ - public boolean isPaginationEnabled() { - return paginationEnabled; - } - - // - // History - // - - private History history = new MemoryHistory(); - - public void setHistory(final History history) { - this.history = history; - } - - public History getHistory() { - return history; - } - - private boolean historyEnabled = true; - - /** - * Whether or not to add new commands to the history buffer. - */ - public void setHistoryEnabled(final boolean enabled) { - this.historyEnabled = enabled; - } - - /** - * Whether or not to add new commands to the history buffer. - */ - public boolean isHistoryEnabled() { - return historyEnabled; - } - - /** - * Move up or down the history tree. - */ - private boolean moveHistory(final boolean next) throws IOException { - if (next && !history.next()) { - return false; - } - else if (!next && !history.previous()) { - return false; - } - - setBuffer(history.current()); - - return true; - } - - // - // Printing - // - - public static final String CR = System.getProperty("line.separator"); - - /** - * Output the specified character to the output stream without manipulating the current buffer. - */ - private void print(final int c) throws IOException { - if (c == '\t') { - char chars[] = new char[TAB_WIDTH]; - Arrays.fill(chars, ' '); - out.write(chars); - return; - } - - out.write(c); - } - - /** - * Output the specified characters to the output stream without manipulating the current buffer. - */ - private void print(final char... buff) throws IOException { - int len = 0; - for (char c : buff) { - if (c == '\t') { - len += TAB_WIDTH; - } - else { - len++; - } - } - - char chars[]; - if (len == buff.length) { - chars = buff; - } - else { - chars = new char[len]; - int pos = 0; - for (char c : buff) { - if (c == '\t') { - Arrays.fill(chars, pos, pos + TAB_WIDTH, ' '); - pos += TAB_WIDTH; - } - else { - chars[pos] = c; - pos++; - } - } - } - - out.write(chars); - } - - private void print(final char c, final int num) throws IOException { - if (num == 1) { - print(c); - } - else { - char[] chars = new char[num]; - Arrays.fill(chars, c); - print(chars); - } - } - - /** - * Output the specified string to the output stream (but not the buffer). - */ - public final void print(final CharSequence s) throws IOException { - assert s != null; - print(s.toString().toCharArray()); - } - - public final void println(final CharSequence s) throws IOException { - assert s != null; - print(s.toString().toCharArray()); - println(); - } - - /** - * Output a platform-dependent newline. - */ - public final void println() throws IOException { - print(CR); -// flush(); - } - - // - // Actions - // - - /** - * Issue a delete. - * - * @return true if successful - */ - public final boolean delete() throws IOException { - return delete(1) == 1; - } - - // FIXME: delete(int) only used by above + the return is always 1 and num is ignored - - /** - * Issue num deletes. - * - * @return the number of characters backed up - */ - private int delete(final int num) throws IOException { - // TODO: Try to use jansi for this - - /* Commented out because of DWA-2949: - if (buf.cursor == 0) { - return 0; - } - */ - - buf.buffer.delete(buf.cursor, buf.cursor + 1); - drawBuffer(1); - - return 1; - } - - /** - * Kill the buffer ahead of the current cursor position. 
- * - * @return true if successful - */ - public boolean killLine() throws IOException { - int cp = buf.cursor; - int len = buf.buffer.length(); - - if (cp >= len) { - return false; - } - - int num = buf.buffer.length() - cp; - clearAhead(num, 0); - - for (int i = 0; i < num; i++) { - buf.buffer.deleteCharAt(len - i - 1); - } - - return true; - } - - /** - * Clear the screen by issuing the ANSI "clear screen" code. - */ - public boolean clearScreen() throws IOException { - if (!getTerminal().isAnsiSupported()) { - return false; - } - - // send the ANSI code to clear the screen - printAnsiSequence("2J"); - - // then send the ANSI code to go to position 1,1 - printAnsiSequence("1;1H"); - - redrawLine(); - - return true; - } - - /** - * Issue an audible keyboard bell, if {@link #isBellEnabled} return true. - */ - public void beep() throws IOException { - if (isBellEnabled()) { - print(KEYBOARD_BELL); - // need to flush so the console actually beeps - flush(); - } - } - - /** - * Paste the contents of the clipboard into the console buffer - * - * @return true if clipboard contents pasted - */ - public boolean paste() throws IOException { - Clipboard clipboard; - try { // May throw ugly exception on system without X - clipboard = Toolkit.getDefaultToolkit().getSystemClipboard(); - } - catch (Exception e) { - return false; - } - - if (clipboard == null) { - return false; - } - - Transferable transferable = clipboard.getContents(null); - - if (transferable == null) { - return false; - } - - try { - Object content = transferable.getTransferData(DataFlavor.plainTextFlavor); - - // This fix was suggested in bug #1060649 at - // http://sourceforge.net/tracker/index.php?func=detail&aid=1060649&group_id=64033&atid=506056 - // to get around the deprecated DataFlavor.plainTextFlavor, but it - // raises a UnsupportedFlavorException on Mac OS X - - if (content == null) { - try { - content = new DataFlavor().getReaderForText(transferable); - } - catch (Exception e) { - // ignore - } - } - - if (content == null) { - return false; - } - - String value; - - if (content instanceof Reader) { - // TODO: we might want instead connect to the input stream - // so we can interpret individual lines - value = ""; - String line; - - BufferedReader read = new BufferedReader((Reader) content); - while ((line = read.readLine()) != null) { - if (value.length() > 0) { - value += "\n"; - } - - value += line; - } - } - else { - value = content.toString(); - } - - if (value == null) { - return true; - } - - putString(value); - - return true; - } - catch (UnsupportedFlavorException e) { - Log.error("Paste failed: ", e); - - return false; - } - } - - // - // Triggered Actions - // - - private final Map triggeredActions = new HashMap(); - - /** - * Adding a triggered Action allows to give another curse of action if a character passed the pre-processing. - *
    - * Say you want to close the application if the user enter q. - * addTriggerAction('q', new ActionListener(){ System.exit(0); }); would do the trick. - */ - public void addTriggeredAction(final char c, final ActionListener listener) { - triggeredActions.put(c, listener); - } - - // - // Formatted Output - // - - /** - * Output the specified {@link Collection} in proper columns. - */ - public void printColumns(final Collection items) throws IOException { - if (items == null || items.isEmpty()) { - return; - } - - int width = getTerminal().getWidth(); - int height = getTerminal().getHeight(); - - int maxWidth = 0; - for (CharSequence item : items) { - maxWidth = Math.max(maxWidth, item.length()); - } - Log.debug("Max width: ", maxWidth); - - int showLines; - if (isPaginationEnabled()) { - showLines = height - 1; // page limit - } - else { - showLines = Integer.MAX_VALUE; - } - - StringBuilder buff = new StringBuilder(); - for (CharSequence item : items) { - if ((buff.length() + maxWidth) > width) { - println(buff); - buff.setLength(0); - - if (--showLines == 0) { - // Overflow - print(resources.getString("display-more")); - flush(); - int c = readVirtualKey(); - if (c == '\r' || c == '\n') { - // one step forward - showLines = 1; - } - else if (c != 'q') { - // page forward - showLines = height - 1; - } - - back(resources.getString("display-more").length()); - if (c == 'q') { - // cancel - break; - } - } - } - - // NOTE: toString() is important here due to AnsiString being retarded - buff.append(item.toString()); - for (int i = 0; i < (maxWidth + 3 - item.length()); i++) { - buff.append(' '); - } - } - - if (buff.length() > 0) { - println(buff); - } - } - - // - // Non-supported Terminal Support - // - - private Thread maskThread; - - private void beforeReadLine(final String prompt, final Character mask) { - if (mask != null && maskThread == null) { - final String fullPrompt = "\r" + prompt - + " " - + " " - + " " - + "\r" + prompt; - - maskThread = new Thread() - { - public void run() { - while (!interrupted()) { - try { - Writer out = getOutput(); - out.write(fullPrompt); - out.flush(); - sleep(3); - } - catch (IOException e) { - return; - } - catch (InterruptedException e) { - return; - } - } - } - }; - - maskThread.setPriority(Thread.MAX_PRIORITY); - maskThread.setDaemon(true); - maskThread.start(); - } - } - - private void afterReadLine() { - if (maskThread != null && maskThread.isAlive()) { - maskThread.interrupt(); - } - - maskThread = null; - } - - /** - * Erases the current line with the existing prompt, then redraws the line - * with the provided prompt and buffer - * @param prompt - * the new prompt - * @param buffer - * the buffer to be drawn - * @param cursorDest - * where you want the cursor set when the line has been drawn. - * -1 for end of line. 
- * */ - public void resetPromptLine(String prompt, String buffer, int cursorDest) throws IOException { - // move cursor to end of line - moveToEnd(); - - // backspace all text, including prompt - buf.buffer.append(this.prompt); - buf.cursor += this.prompt.length(); - this.prompt = ""; - backspaceAll(); - - this.prompt = prompt; - redrawLine(); - setBuffer(buffer); - - // move cursor to destination (-1 will move to end of line) - if (cursorDest < 0) cursorDest = buffer.length(); - setCursorPosition(cursorDest); - - flush(); - } - - public void printSearchStatus(String searchTerm, String match) throws IOException { - String prompt = "(reverse-i-search)`" + searchTerm + "': "; - String buffer = match; - int cursorDest = match.indexOf(searchTerm); - resetPromptLine(prompt, buffer, cursorDest); - } - - public void restoreLine(String originalPrompt, int cursorDest) throws IOException { - // TODO move cursor to matched string - String prompt = lastLine(originalPrompt); - String buffer = buf.buffer.toString(); - resetPromptLine(prompt, buffer, cursorDest); - } - - // - // History search - // - /** - * Search backward in history from a given position. - * - * @param searchTerm substring to search for. - * @param startIndex the index from which on to search - * @return index where this substring has been found, or -1 else. - */ - public int searchBackwards(String searchTerm, int startIndex) { - return searchBackwards(searchTerm, startIndex, false); - } - - /** - * Search backwards in history from the current position. - * - * @param searchTerm substring to search for. - * @return index where the substring has been found, or -1 else. - */ - public int searchBackwards(String searchTerm) { - return searchBackwards(searchTerm, history.index()); - } - - - public int searchBackwards(String searchTerm, int startIndex, boolean startsWith) { - ListIterator it = history.entries(startIndex); - while (it.hasPrevious()) { - History.Entry e = it.previous(); - if (startsWith) { - if (e.value().toString().startsWith(searchTerm)) { - return e.index(); - } - } else { - if (e.value().toString().contains(searchTerm)) { - return e.index(); - } - } - } - return -1; - } - - // - // Helpers - // - - /** - * Checks to see if the specified character is a delimiter. We consider a - * character a delimiter if it is anything but a letter or digit. - * - * @param c The character to test - * @return True if it is a delimiter - */ - private boolean isDelimiter(final char c) { - return !Character.isLetterOrDigit(c); - } - - private void printAnsiSequence(String sequence) throws IOException { - print(27); - print('['); - print(sequence); - flush(); // helps with step debugging - } - - // return column position, reported by the terminal - private int getCurrentPosition() { - // check for ByteArrayInputStream to disable for unit tests - if (getTerminal().isAnsiSupported() && !(in instanceof ByteArrayInputStream)) { - try { - printAnsiSequence("6n"); - flush(); - StringBuffer b = new StringBuffer(8); - // position is sent as [{ROW};{COLUMN}R - int r; - while((r = in.read()) > -1 && r != 'R') { - if (r != 27 && r != '[') { - b.append((char) r); - } - } - String[] pos = b.toString().split(";"); - return Integer.parseInt(pos[1]); - } catch (Exception x) { - // no luck - } - } - - return -1; // TODO: throw exception instead? 
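[Illustration only, not part of this patch.] getCurrentPosition above asks the terminal for its cursor position with the ANSI "ESC [ 6 n" query and then parses the "ESC [ {row} ; {col} R" report that comes back on the input stream. A standalone sketch of just the parsing step:

    // Sketch only: parse an ANSI cursor-position report of the form ESC [ {row} ; {col} R.
    class CursorReportSketch {
        /** Returns {row, col}, or null if the report is malformed. */
        static int[] parse(String report) {
            StringBuilder digits = new StringBuilder();
            for (char ch : report.toCharArray()) {
                if (ch == 'R') break;             // report terminator
                if (ch != 27 && ch != '[') {      // skip ESC and '[', as the removed code does
                    digits.append(ch);
                }
            }
            String[] parts = digits.toString().split(";");
            if (parts.length != 2) return null;
            return new int[]{ Integer.parseInt(parts[0]), Integer.parseInt(parts[1]) };
        }

        public static void main(String[] args) {
            int[] pos = parse("\u001b[24;80R");
            System.out.println("row=" + pos[0] + ", col=" + pos[1]);   // row=24, col=80
        }
    }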
- } - - // return row position, reported by the terminal - // needed to know whether to scroll up on cursor move in last col for weird - // wrapping terminals - not tested for anything else - private int getCurrentAnsiRow() { - // check for ByteArrayInputStream to disable for unit tests - if (getTerminal().isAnsiSupported() && !(in instanceof ByteArrayInputStream)) { - try { - printAnsiSequence("6n"); - flush(); - StringBuffer b = new StringBuffer(8); - // position is sent as [{ROW};{COLUMN}R - int r; - while((r = in.read()) > -1 && r != 'R') { - if (r != 27 && r != '[') { - b.append((char) r); - } - } - String[] pos = b.toString().split(";"); - return Integer.parseInt(pos[0]); - } catch (Exception x) { - // no luck - } - } - - return -1; // TODO: throw exception instead? - } -} diff --git a/src/jline/src/main/java/scala/tools/jline/console/CursorBuffer.java b/src/jline/src/main/java/scala/tools/jline/console/CursorBuffer.java deleted file mode 100644 index 7993def00210..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/console/CursorBuffer.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved. - * - * This software is distributable under the BSD license. See the terms of the - * BSD license in the documentation provided with this software. - */ - -package scala.tools.jline.console; - -/** - * A holder for a {@link StringBuilder} that also contains the current cursor position. - * - * @author Marc Prud'hommeaux - * @author Jason Dillon - * @since 2.0 - */ -public class CursorBuffer -{ - private boolean overTyping = false; - - public int cursor = 0; - - public final StringBuilder buffer = new StringBuilder(); - - public boolean isOverTyping() { - return overTyping; - } - - public void setOverTyping(final boolean b) { - overTyping = b; - } - - public int length() { - return buffer.length(); - } - - /** - * Gets the character to the left of the cursor. - */ - public char charLeftOfCursor() { - if (cursor <= 0) { - return 0; - } - - return buffer.charAt(cursor - 1); - } - - /** - * Gets the character at the cursor. - */ - public char charAtCursor() { - if (cursor < 0 || cursor >= buffer.length()) { - return 0; - } - return buffer.charAt(cursor); - } - - /** - * Write the specific character into the buffer, setting the cursor position - * ahead one. The text may overwrite or insert based on the current setting - * of {@link #isOverTyping}. - * - * @param c the character to insert - */ - public void write(final char c) { - buffer.insert(cursor++, c); - if (isOverTyping() && cursor < buffer.length()) { - buffer.deleteCharAt(cursor); - } - } - - /** - * Insert the specified chars into the buffer, setting the cursor to the end of the insertion point. 
- */ - public void write(final CharSequence str) { - assert str != null; - - if (buffer.length() == 0) { - buffer.append(str); - } - else { - buffer.insert(cursor, str); - } - - cursor += str.length(); - - if (isOverTyping() && cursor < buffer.length()) { - buffer.delete(cursor, (cursor + str.length())); - } - } - - public boolean clear() { - if (buffer.length() == 0) { - return false; - } - - buffer.delete(0, buffer.length()); - cursor = 0; - return true; - } - - @Override - public String toString() { - return buffer.toString(); - } -} diff --git a/src/jline/src/main/java/scala/tools/jline/console/Key.java b/src/jline/src/main/java/scala/tools/jline/console/Key.java deleted file mode 100644 index 2e713a7da2c6..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/console/Key.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved. - * - * This software is distributable under the BSD license. See the terms of the - * BSD license in the documentation provided with this software. - */ - -package scala.tools.jline.console; - -import java.util.HashMap; -import java.util.Map; - -/** - * Map from key name to key codes. - * - * @author Marc Prud'hommeaux - * @author Jason Dillon - * @see java.awt.event.KeyEvent - * @since 2.0 - */ -public enum Key -{ - CTRL_A(1), - - CTRL_B(2), - - CTRL_C(3), - - CTRL_D(4), - - CTRL_E(5), - - CTRL_F(6), - - CTRL_G(7), - - CTRL_K(11), - - CTRL_L(12), - - CTRL_N(14), - - CTRL_O(15), - - CTRL_P(16), - - CTRL_T(20), - - CTRL_W(23), - - CTRL_X(24), - - CTRL_OB(27), - - CTRL_QM(127), - - BACKSPACE('\b'), - - DELETE(127),; - - public final short code; - - Key(final int code) { - this.code = (short) code; - } - - private static final Map codes; - - static { - Map map = new HashMap(); - - for (Key op : Key.values()) { - map.put(op.code, op); - } - - codes = map; - } - - public static Key valueOf(final int code) { - return codes.get((short) code); - } -} \ No newline at end of file diff --git a/src/jline/src/main/java/scala/tools/jline/console/Operation.java b/src/jline/src/main/java/scala/tools/jline/console/Operation.java deleted file mode 100644 index 59ee878d4544..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/console/Operation.java +++ /dev/null @@ -1,291 +0,0 @@ -/* - * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved. - * - * This software is distributable under the BSD license. See the terms of the - * BSD license in the documentation provided with this software. - */ - -package scala.tools.jline.console; - -import java.util.HashMap; -import java.util.Map; - -/** - * Map for console operation to virtual key bindings. - * - * @author Marc Prud'hommeaux - * @author Jason Dillon - * @see java.awt.event.KeyEvent - * @since 2.0 - */ -public enum Operation -{ - /** - * Unknown operation. - */ - UNKNOWN(-99), - - /** - * Operation that moves to the beginning of the buffer. - */ - MOVE_TO_BEG(-1), - - /** - * Operation that moves to the end of the buffer. - */ - MOVE_TO_END(-3), - - /** - * Operation that moved to the previous character in the buffer. - */ - PREV_CHAR(-4), - - /** - * Operation that issues a newline. - */ - NEWLINE(-6), - - /** - * Operation that deletes the buffer from the current character to the end. - */ - KILL_LINE(-7), - - /** - * Operation that clears the screen. - */ - CLEAR_SCREEN(-8), - - /** - * Operation that sets the buffer to the next history item. - */ - NEXT_HISTORY(-9), - - /** - * Operation that sets the buffer to the previous history item. 
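[Illustration only, not part of this patch.] Key above, Operation below, and WindowsKey earlier all rely on the same reverse-lookup idiom: a static Map built once from values(), so that a raw code read from the terminal can be translated back into its constant. A self-contained sketch of the idiom; the names here are invented for the example:

    import java.util.HashMap;
    import java.util.Map;

    // Sketch only: the code-to-constant reverse-lookup idiom used by Key, Operation and WindowsKey.
    enum SketchKey {
        CTRL_A(1), CTRL_B(2), BACKSPACE('\b');

        public final short code;

        SketchKey(int code) { this.code = (short) code; }

        private static final Map<Short, SketchKey> CODES = new HashMap<Short, SketchKey>();
        static {
            for (SketchKey k : values()) {
                CODES.put(k.code, k);        // build the reverse index once, at class-load time
            }
        }

        public static SketchKey fromCode(int code) {
            return CODES.get((short) code);  // null when the code is unbound
        }
    }

    class SketchKeyDemo {
        public static void main(String[] args) {
            System.out.println(SketchKey.fromCode(2));   // CTRL_B
        }
    }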
- */ - PREV_HISTORY(-11), - - /** - * Operation that redisplays the current buffer. - */ - REDISPLAY(-13), - - /** - * Operation that deletes the buffer from the cursor to the beginning. - */ - KILL_LINE_PREV(-15), - - /** - * Operation that deletes the previous word in the buffer. - */ - DELETE_PREV_WORD(-16), - - /** - * Operation that moves to the next character in the buffer. - */ - NEXT_CHAR(-19), - - /** - * Operation that moves to the previous character in the buffer. - */ - REPEAT_PREV_CHAR(-20), - - /** - * Operation that searches backwards in the command history. - */ - SEARCH_PREV(-21), - - /** - * Operation that repeats the character. - */ - REPEAT_NEXT_CHAR(-24), - - /** - * Operation that searches forward in the command history. - */ - SEARCH_NEXT(-25), - - /** - * Operation that moved to the previous whitespace. - */ - PREV_SPACE_WORD(-27), - - /** - * Operation that moved to the end of the current word. - */ - TO_END_WORD(-29), - - /** - * Operation that - */ - REPEAT_SEARCH_PREV(-34), - - /** - * Operation that - */ - PASTE_PREV(-36), - - /** - * Operation that - */ - REPLACE_MODE(-37), - - /** - * Operation that - */ - SUBSTITUTE_LINE(-38), - - /** - * Operation that - */ - TO_PREV_CHAR(-39), - - /** - * Operation that - */ - NEXT_SPACE_WORD(-40), - - /** - * Operation that - */ - DELETE_PREV_CHAR(-41), - - /** - * Operation that - */ - ADD(-42), - - /** - * Operation that - */ - PREV_WORD(-43), - - /** - * Operation that - */ - CHANGE_META(-44), - - /** - * Operation that - */ - DELETE_META(-45), - - /** - * Operation that - */ - END_WORD(-46), - - /** - * Operation that toggles insert/overtype - */ - INSERT(-48), - - /** - * Operation that - */ - REPEAT_SEARCH_NEXT(-49), - - /** - * Operation that - */ - PASTE_NEXT(-50), - - /** - * Operation that - */ - REPLACE_CHAR(-51), - - /** - * Operation that - */ - SUBSTITUTE_CHAR(-52), - - /** - * Operation that - */ - TO_NEXT_CHAR(-53), - - /** - * Operation that undoes the previous operation. - */ - UNDO(-54), - - /** - * Operation that moved to the next word. - */ - NEXT_WORD(-55), - - /** - * Operation that deletes the previous character. - */ - DELETE_NEXT_CHAR(-56), - - /** - * Operation that toggles between uppercase and lowercase. - */ - CHANGE_CASE(-57), - - /** - * Operation that performs completion operation on the current word. - */ - COMPLETE(-58), - - /** - * Operation that exits the command prompt. - */ - EXIT(-59), - - /** - * Operation that pastes the contents of the clipboard into the line - */ - PASTE(-60), - - /** - * Operation that moves the current History to the beginning. - */ - START_OF_HISTORY(-61), - - /** - * Operation that moves the current History to the end. - */ - END_OF_HISTORY(-62), - - /** - * Operation that clears whatever text is on the current line. 
- */ - CLEAR_LINE(-63), - - /** - * Cancel search - */ - ABORT(-64), - - /** - * Delete next word - */ - DELETE_NEXT_WORD(-65), - - ; - - public final short code; - - Operation(final int code) { - this.code = (short) code; - } - - private static final Map codes; - - static { - Map map = new HashMap(); - - for (Operation op : Operation.values()) { - map.put(op.code, op); - } - - codes = map; - } - - public static Operation valueOf(final int code) { - return codes.get((short) code); - } -} \ No newline at end of file diff --git a/src/jline/src/main/java/scala/tools/jline/console/completer/AggregateCompleter.java b/src/jline/src/main/java/scala/tools/jline/console/completer/AggregateCompleter.java deleted file mode 100644 index 3170bd1c6883..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/console/completer/AggregateCompleter.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package scala.tools.jline.console.completer; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.LinkedList; -import java.util.List; - -/** - * Completer which contains multiple completers and aggregates them together. - * - * @author Jason Dillon - * @since 2.3 - */ -public class AggregateCompleter - implements Completer -{ - private final List completers = new ArrayList(); - - public AggregateCompleter() { - // empty - } - - public AggregateCompleter(final Collection completers) { - assert completers != null; - this.completers.addAll(completers); - } - - public AggregateCompleter(final Completer... 
completers) { - this(Arrays.asList(completers)); - } - - public Collection getCompleters() { - return completers; - } - - public int complete(final String buffer, final int cursor, final List candidates) { - // buffer could be null - assert candidates != null; - - List completions = new ArrayList(completers.size()); - - // Run each completer, saving its completion results - int max = -1; - for (Completer completer : completers) { - Completion completion = new Completion(candidates); - completion.complete(completer, buffer, cursor); - - // Compute the max cursor position - max = Math.max(max, completion.cursor); - - completions.add(completion); - } - - // Append candidates from completions which have the same cursor position as max - for (Completion completion : completions) { - if (completion.cursor == max) { - candidates.addAll(completion.candidates); - } - } - - return max; - } - - @Override - public String toString() { - return getClass().getSimpleName() + "{" + - "completers=" + completers + - '}'; - } - - private class Completion - { - public final List candidates; - - public int cursor; - - public Completion(final List candidates) { - assert candidates != null; - this.candidates = new LinkedList(candidates); - } - - public void complete(final Completer completer, final String buffer, final int cursor) { - assert completer != null; - - this.cursor = completer.complete(buffer, cursor, candidates); - } - } -} diff --git a/src/jline/src/main/java/scala/tools/jline/console/completer/ArgumentCompleter.java b/src/jline/src/main/java/scala/tools/jline/console/completer/ArgumentCompleter.java deleted file mode 100644 index 6f60029a1d85..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/console/completer/ArgumentCompleter.java +++ /dev/null @@ -1,398 +0,0 @@ -/* - * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved. - * - * This software is distributable under the BSD license. See the terms of the - * BSD license in the documentation provided with this software. - */ - -package scala.tools.jline.console.completer; - -import scala.tools.jline.internal.Log; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.LinkedList; -import java.util.List; - -/** - * A {@link Completer} implementation that invokes a child completer using the appropriate separator argument. - * This can be used instead of the individual completers having to know about argument parsing semantics. - * - * @author Marc Prud'hommeaux - * @author Jason Dillon - * @since 2.3 - */ -public class ArgumentCompleter - implements Completer -{ - private final ArgumentDelimiter delimiter; - - private final List completers = new ArrayList(); - - private boolean strict = true; - - /** - * Create a new completer with the specified argument delimiter. - * - * @param delimiter The delimiter for parsing arguments - * @param completers The embedded completers - */ - public ArgumentCompleter(final ArgumentDelimiter delimiter, final Collection completers) { - assert delimiter != null; - this.delimiter = delimiter; - assert completers != null; - this.completers.addAll(completers); - } - - /** - * Create a new completer with the specified argument delimiter. - * - * @param delimiter The delimiter for parsing arguments - * @param completers The embedded completers - */ - public ArgumentCompleter(final ArgumentDelimiter delimiter, final Completer... 
completers) { - this(delimiter, Arrays.asList(completers)); - } - - /** - * Create a new completer with the default {@link WhitespaceArgumentDelimiter}. - * - * @param completers The embedded completers - */ - public ArgumentCompleter(final Completer... completers) { - this(new WhitespaceArgumentDelimiter(), completers); - } - - /** - * Create a new completer with the default {@link WhitespaceArgumentDelimiter}. - * - * @param completers The embedded completers - */ - public ArgumentCompleter(final List completers) { - this(new WhitespaceArgumentDelimiter(), completers); - } - - /** - * If true, a completion at argument index N will only succeed - * if all the completions from 0-(N-1) also succeed. - */ - public void setStrict(final boolean strict) { - this.strict = strict; - } - - /** - * Returns whether a completion at argument index N will success - * if all the completions from arguments 0-(N-1) also succeed. - * - * @return True if strict. - * @since 2.3 - */ - public boolean isStrict() { - return this.strict; - } - - /** - * @since 2.3 - */ - public ArgumentDelimiter getDelimiter() { - return delimiter; - } - - /** - * @since 2.3 - */ - public List getCompleters() { - return completers; - } - - public int complete(final String buffer, final int cursor, final List candidates) { - // buffer can be null - assert candidates != null; - - ArgumentDelimiter delim = getDelimiter(); - ArgumentList list = delim.delimit(buffer, cursor); - int argpos = list.getArgumentPosition(); - int argIndex = list.getCursorArgumentIndex(); - - if (argIndex < 0) { - return -1; - } - - List completers = getCompleters(); - Completer completer; - - // if we are beyond the end of the completers, just use the last one - if (argIndex >= completers.size()) { - completer = completers.get(completers.size() - 1); - } - else { - completer = completers.get(argIndex); - } - - // ensure that all the previous completers are successful before allowing this completer to pass (only if strict). - for (int i = 0; isStrict() && (i < argIndex); i++) { - Completer sub = completers.get(i >= completers.size() ? (completers.size() - 1) : i); - String[] args = list.getArguments(); - String arg = (args == null || i >= args.length) ? "" : args[i]; - - List subCandidates = new LinkedList(); - - if (sub.complete(arg, arg.length(), subCandidates) == -1) { - return -1; - } - - if (subCandidates.size() == 0) { - return -1; - } - } - - int ret = completer.complete(list.getCursorArgument(), argpos, candidates); - - if (ret == -1) { - return -1; - } - - int pos = ret + list.getBufferPosition() - argpos; - - // Special case: when completing in the middle of a line, and the area under the cursor is a delimiter, - // then trim any delimiters from the candidates, since we do not need to have an extra delimiter. - // - // E.g., if we have a completion for "foo", and we enter "f bar" into the buffer, and move to after the "f" - // and hit TAB, we want "foo bar" instead of "foo bar". 
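For reviewers of this removal, a minimal usage sketch of the AggregateCompleter deleted above. It relies on the StringsCompleter that is also deleted later in this diff; the demo class name and the sample command words are invented for illustration, so treat this as a sketch rather than code from the patch:

    // Sketch only: exercises the deleted AggregateCompleter.
    import java.util.ArrayList;
    import java.util.List;
    import scala.tools.jline.console.completer.AggregateCompleter;
    import scala.tools.jline.console.completer.StringsCompleter;

    public class AggregateCompleterDemo {
        public static void main(String[] args) {
            AggregateCompleter aggregate = new AggregateCompleter(
                new StringsCompleter("load", "list"),
                new StringsCompleter("quit"));
            List<CharSequence> candidates = new ArrayList<CharSequence>();
            // Each child completer runs on its own copy of the candidate list; only the
            // candidates from completers that reached the maximum cursor are kept.
            int pos = aggregate.complete("l", 1, candidates);
            System.out.println(pos + " " + candidates);   // prints: 0 [list, load]
        }
    }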
- - if ((cursor != buffer.length()) && delim.isDelimiter(buffer, cursor)) { - for (int i = 0; i < candidates.size(); i++) { - CharSequence val = candidates.get(i); - - while (val.length() > 0 && delim.isDelimiter(val, val.length() - 1)) { - val = val.subSequence(0, val.length() - 1); - } - - candidates.set(i, val); - } - } - - Log.trace("Completing ", buffer, " (pos=", cursor, ") with: ", candidates, ": offset=", pos); - - return pos; - } - - /** - * The {@link ArgumentCompleter.ArgumentDelimiter} allows custom breaking up of a {@link String} into individual - * arguments in order to dispatch the arguments to the nested {@link Completer}. - * - * @author Marc Prud'hommeaux - */ - public static interface ArgumentDelimiter - { - /** - * Break the specified buffer into individual tokens that can be completed on their own. - * - * @param buffer The buffer to split - * @param pos The current position of the cursor in the buffer - * @return The tokens - */ - ArgumentList delimit(CharSequence buffer, int pos); - - /** - * Returns true if the specified character is a whitespace parameter. - * - * @param buffer The complete command buffer - * @param pos The index of the character in the buffer - * @return True if the character should be a delimiter - */ - boolean isDelimiter(CharSequence buffer, int pos); - } - - /** - * Abstract implementation of a delimiter that uses the {@link #isDelimiter} method to determine if a particular - * character should be used as a delimiter. - * - * @author Marc Prud'hommeaux - */ - public abstract static class AbstractArgumentDelimiter - implements ArgumentDelimiter - { - // TODO: handle argument quoting and escape characters - - private char[] quoteChars = {'\'', '"'}; - - private char[] escapeChars = {'\\'}; - - public void setQuoteChars(final char[] chars) { - this.quoteChars = chars; - } - - public char[] getQuoteChars() { - return this.quoteChars; - } - - public void setEscapeChars(final char[] chars) { - this.escapeChars = chars; - } - - public char[] getEscapeChars() { - return this.escapeChars; - } - - public ArgumentList delimit(final CharSequence buffer, final int cursor) { - List args = new LinkedList(); - StringBuilder arg = new StringBuilder(); - int argpos = -1; - int bindex = -1; - - for (int i = 0; (buffer != null) && (i <= buffer.length()); i++) { - // once we reach the cursor, set the - // position of the selected index - if (i == cursor) { - bindex = args.size(); - // the position in the current argument is just the - // length of the current argument - argpos = arg.length(); - } - - if ((i == buffer.length()) || isDelimiter(buffer, i)) { - if (arg.length() > 0) { - args.add(arg.toString()); - arg.setLength(0); // reset the arg - } - } - else { - arg.append(buffer.charAt(i)); - } - } - - return new ArgumentList(args.toArray(new String[args.size()]), bindex, argpos, cursor); - } - - /** - * Returns true if the specified character is a whitespace parameter. Check to ensure that the character is not - * escaped by any of {@link #getQuoteChars}, and is not escaped by ant of the {@link #getEscapeChars}, and - * returns true from {@link #isDelimiterChar}. 
- * - * @param buffer The complete command buffer - * @param pos The index of the character in the buffer - * @return True if the character should be a delimiter - */ - public boolean isDelimiter(final CharSequence buffer, final int pos) { - return !isQuoted(buffer, pos) && !isEscaped(buffer, pos) && isDelimiterChar(buffer, pos); - } - - public boolean isQuoted(final CharSequence buffer, final int pos) { - return false; - } - - public boolean isEscaped(final CharSequence buffer, final int pos) { - if (pos <= 0) { - return false; - } - - for (int i = 0; (escapeChars != null) && (i < escapeChars.length); - i++) { - if (buffer.charAt(pos) == escapeChars[i]) { - return !isEscaped(buffer, pos - 1); // escape escape - } - } - - return false; - } - - /** - * Returns true if the character at the specified position if a delimiter. This method will only be called if - * the character is not enclosed in any of the {@link #getQuoteChars}, and is not escaped by ant of the - * {@link #getEscapeChars}. To perform escaping manually, override {@link #isDelimiter} instead. - */ - public abstract boolean isDelimiterChar(CharSequence buffer, int pos); - } - - /** - * {@link ArgumentCompleter.ArgumentDelimiter} implementation that counts all whitespace (as reported by - * {@link Character#isWhitespace}) as being a delimiter. - * - * @author Marc Prud'hommeaux - */ - public static class WhitespaceArgumentDelimiter - extends AbstractArgumentDelimiter - { - /** - * The character is a delimiter if it is whitespace, and the - * preceding character is not an escape character. - */ - @Override - public boolean isDelimiterChar(final CharSequence buffer, final int pos) { - return Character.isWhitespace(buffer.charAt(pos)); - } - } - - /** - * The result of a delimited buffer. - * - * @author Marc Prud'hommeaux - */ - public static class ArgumentList - { - private String[] arguments; - - private int cursorArgumentIndex; - - private int argumentPosition; - - private int bufferPosition; - - /** - * @param arguments The array of tokens - * @param cursorArgumentIndex The token index of the cursor - * @param argumentPosition The position of the cursor in the current token - * @param bufferPosition The position of the cursor in the whole buffer - */ - public ArgumentList(final String[] arguments, final int cursorArgumentIndex, final int argumentPosition, final int bufferPosition) { - assert arguments != null; - - this.arguments = arguments; - this.cursorArgumentIndex = cursorArgumentIndex; - this.argumentPosition = argumentPosition; - this.bufferPosition = bufferPosition; - } - - public void setCursorArgumentIndex(final int i) { - this.cursorArgumentIndex = i; - } - - public int getCursorArgumentIndex() { - return this.cursorArgumentIndex; - } - - public String getCursorArgument() { - if ((cursorArgumentIndex < 0) || (cursorArgumentIndex >= arguments.length)) { - return null; - } - - return arguments[cursorArgumentIndex]; - } - - public void setArgumentPosition(final int pos) { - this.argumentPosition = pos; - } - - public int getArgumentPosition() { - return this.argumentPosition; - } - - public void setArguments(final String[] arguments) { - this.arguments = arguments; - } - - public String[] getArguments() { - return this.arguments; - } - - public void setBufferPosition(final int pos) { - this.bufferPosition = pos; - } - - public int getBufferPosition() { - return this.bufferPosition; - } - } -} diff --git a/src/jline/src/main/java/scala/tools/jline/console/completer/CandidateListCompletionHandler.java 
b/src/jline/src/main/java/scala/tools/jline/console/completer/CandidateListCompletionHandler.java deleted file mode 100644 index fa5bfd277777..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/console/completer/CandidateListCompletionHandler.java +++ /dev/null @@ -1,193 +0,0 @@ -/* - * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved. - * - * This software is distributable under the BSD license. See the terms of the - * BSD license in the documentation provided with this software. - */ - -package scala.tools.jline.console.completer; - -import scala.tools.jline.console.ConsoleReader; -import scala.tools.jline.console.CursorBuffer; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashSet; -import java.util.List; -import java.util.Locale; -import java.util.ResourceBundle; -import java.util.Set; - -/** - * A {@link CompletionHandler} that deals with multiple distinct completions - * by outputting the complete list of possibilities to the console. This - * mimics the behavior of the - * readline library. - * - * @author Marc Prud'hommeaux - * @author Jason Dillon - * @since 2.3 - */ -public class CandidateListCompletionHandler - implements CompletionHandler -{ - // TODO: handle quotes and escaped quotes && enable automatic escaping of whitespace - - public boolean complete(final ConsoleReader reader, final List candidates, final int pos) throws - IOException - { - CursorBuffer buf = reader.getCursorBuffer(); - - // if there is only one completion, then fill in the buffer - if (candidates.size() == 1) { - CharSequence value = candidates.get(0); - - // fail if the only candidate is the same as the current buffer - if (value.equals(buf.toString())) { - return false; - } - - setBuffer(reader, value, pos); - - return true; - } - else if (candidates.size() > 1) { - String value = getUnambiguousCompletions(candidates); - setBuffer(reader, value, pos); - } - - printCandidates(reader, candidates); - - // redraw the current console buffer - reader.drawLine(); - - return true; - } - - public static void setBuffer(final ConsoleReader reader, final CharSequence value, final int offset) throws - IOException - { - while ((reader.getCursorBuffer().cursor > offset) && reader.backspace()) { - // empty - } - - reader.putString(value); - reader.setCursorPosition(offset + value.length()); - } - - /** - * Print out the candidates. If the size of the candidates is greater than the - * {@link ConsoleReader#getAutoprintThreshold}, they prompt with a warning. - * - * @param candidates the list of candidates to print - */ - public static void printCandidates(final ConsoleReader reader, Collection candidates) throws - IOException - { - Set distinct = new HashSet(candidates); - - if (distinct.size() > reader.getAutoprintThreshold()) { - //noinspection StringConcatenation - reader.print(Messages.DISPLAY_CANDIDATES.format(candidates.size())); - reader.flush(); - - int c; - - String noOpt = Messages.DISPLAY_CANDIDATES_NO.format(); - String yesOpt = Messages.DISPLAY_CANDIDATES_YES.format(); - char[] allowed = {yesOpt.charAt(0), noOpt.charAt(0)}; - - while ((c = reader.readCharacter(allowed)) != -1) { - String tmp = new String(new char[]{(char) c}); - - if (noOpt.startsWith(tmp)) { - reader.println(); - return; - } - else if (yesOpt.startsWith(tmp)) { - break; - } - else { - reader.beep(); - } - } - } - - // copy the values and make them distinct, without otherwise affecting the ordering. Only do it if the sizes differ. 
- if (distinct.size() != candidates.size()) { - Collection copy = new ArrayList(); - - for (CharSequence next : candidates) { - if (!copy.contains(next)) { - copy.add(next); - } - } - - candidates = copy; - } - - reader.println(); - reader.printColumns(candidates); - } - - /** - * Returns a root that matches all the {@link String} elements of the specified {@link List}, - * or null if there are no commonalities. For example, if the list contains - * foobar, foobaz, foobuz, the method will return foob. - */ - private String getUnambiguousCompletions(final List candidates) { - if (candidates == null || candidates.isEmpty()) { - return null; - } - - // convert to an array for speed - String[] strings = candidates.toArray(new String[candidates.size()]); - - String first = strings[0]; - StringBuilder candidate = new StringBuilder(); - - for (int i = 0; i < first.length(); i++) { - if (startsWith(first.substring(0, i + 1), strings)) { - candidate.append(first.charAt(i)); - } - else { - break; - } - } - - return candidate.toString(); - } - - /** - * @return true is all the elements of candidates start with starts - */ - private boolean startsWith(final String starts, final String[] candidates) { - for (String candidate : candidates) { - if (!candidate.startsWith(starts)) { - return false; - } - } - - return true; - } - - private static enum Messages - { - DISPLAY_CANDIDATES, - DISPLAY_CANDIDATES_YES, - DISPLAY_CANDIDATES_NO,; - - private static final - ResourceBundle - bundle = - ResourceBundle.getBundle(CandidateListCompletionHandler.class.getName(), Locale.getDefault()); - - public String format(final Object... args) { - if (bundle == null) - return ""; - else - return String.format(bundle.getString(name()), args); - } - } -} diff --git a/src/jline/src/main/java/scala/tools/jline/console/completer/Completer.java b/src/jline/src/main/java/scala/tools/jline/console/completer/Completer.java deleted file mode 100644 index 52d33847f28d..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/console/completer/Completer.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved. - * - * This software is distributable under the BSD license. See the terms of the - * BSD license in the documentation provided with this software. - */ - -package scala.tools.jline.console.completer; - -import java.util.List; - -/** - * A completer is the mechanism by which tab-completion candidates will be resolved. - * - * @author Marc Prud'hommeaux - * @author Jason Dillon - * @since 2.3 - */ -public interface Completer -{ - // - // FIXME: Check if we can use CharSequece for buffer? - // - - /** - * Populates candidates with a list of possible completions for the buffer. - * - * The candidates list will not be sorted before being displayed to the user: thus, the - * complete method should sort the {@link List} before returning. 
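For context on the ArgumentCompleter removed earlier in this diff, a usage sketch under the same caveats: the demo class name, command word, and file names are invented, and StringsCompleter is the class deleted later in this patch.

    // Sketch only: per-argument completion with whitespace delimiting.
    import java.util.ArrayList;
    import java.util.List;
    import scala.tools.jline.console.completer.ArgumentCompleter;
    import scala.tools.jline.console.completer.StringsCompleter;

    public class ArgumentCompleterDemo {
        public static void main(String[] args) {
            ArgumentCompleter completer = new ArgumentCompleter(
                new StringsCompleter("open"),            // completes argument 0
                new StringsCompleter("file1", "file2")); // completes argument 1 and later
            List<CharSequence> candidates = new ArrayList<CharSequence>();
            String buffer = "open fi";
            // In strict mode (the default), argument 0 must complete before argument 1 is tried.
            int pos = completer.complete(buffer, buffer.length(), candidates);
            System.out.println(pos + " " + candidates);  // prints: 5 [file1, file2]
        }
    }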
- * - * @param buffer The buffer - * @param cursor The current position of the cursor in the buffer - * @param candidates The {@link List} of candidates to populate - * @return The index of the buffer for which the completion will be relative - */ - int complete(String buffer, int cursor, List candidates); -} diff --git a/src/jline/src/main/java/scala/tools/jline/console/completer/CompletionHandler.java b/src/jline/src/main/java/scala/tools/jline/console/completer/CompletionHandler.java deleted file mode 100644 index 030dc84205ce..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/console/completer/CompletionHandler.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved. - * - * This software is distributable under the BSD license. See the terms of the - * BSD license in the documentation provided with this software. - */ - -package scala.tools.jline.console.completer; - -import scala.tools.jline.console.ConsoleReader; - -import java.io.IOException; -import java.util.List; - -/** - * Handler for dealing with candidates for tab-completion. - * - * @author Marc Prud'hommeaux - * @author Jason Dillon - * @since 2.3 - */ -public interface CompletionHandler -{ - boolean complete(ConsoleReader reader, List candidates, int position) throws IOException; -} diff --git a/src/jline/src/main/java/scala/tools/jline/console/completer/EnumCompleter.java b/src/jline/src/main/java/scala/tools/jline/console/completer/EnumCompleter.java deleted file mode 100644 index 5ad049b857a3..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/console/completer/EnumCompleter.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (C) 2009 the original author(s). - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package scala.tools.jline.console.completer; - -/** - * {@link Completer} for {@link Enum} names. - * - * @author Jason Dillon - * @since 2.3 - */ -public class EnumCompleter - extends StringsCompleter -{ - public EnumCompleter(Class source) { - assert source != null; - - for (Enum n : source.getEnumConstants()) { - this.getStrings().add(n.name().toLowerCase()); - } - } -} \ No newline at end of file diff --git a/src/jline/src/main/java/scala/tools/jline/console/completer/FileNameCompleter.java b/src/jline/src/main/java/scala/tools/jline/console/completer/FileNameCompleter.java deleted file mode 100644 index 655613876999..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/console/completer/FileNameCompleter.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved. - * - * This software is distributable under the BSD license. See the terms of the - * BSD license in the documentation provided with this software. - */ - -package scala.tools.jline.console.completer; - -import scala.tools.jline.internal.Configuration; - -import java.io.File; -import java.util.List; - -/** - * A file name completer takes the buffer and issues a list of - * potential completions. - *

- * This completer tries to behave as similar as possible to - * bash's file name completion (using GNU readline) - * with the following exceptions: - * - * <ul> - * <li>Candidates that are directories will end with "/"</li> - * <li>Wildcard regular expressions are not evaluated or replaced</li> - * <li>The "~" character can be used to represent the user's home, - * but it cannot complete to other users' homes, since java does - * not provide any way of determining that easily</li> - * </ul>
    - * - * @author Marc Prud'hommeaux - * @author Jason Dillon - * @since 2.3 - */ -public class FileNameCompleter - implements Completer -{ - // TODO: Handle files with spaces in them - - private static final boolean OS_IS_WINDOWS; - - static { - String os = Configuration.getOsName(); - OS_IS_WINDOWS = os.contains("windows"); - } - - public int complete(String buffer, final int cursor, final List candidates) { - // buffer can be null - assert candidates != null; - - if (buffer == null) { - buffer = ""; - } - - if (OS_IS_WINDOWS) { - buffer = buffer.replace('/', '\\'); - } - - String translated = buffer; - - File homeDir = getUserHome(); - - // Special character: ~ maps to the user's home directory - if (translated.startsWith("~" + separator())) { - translated = homeDir.getPath() + translated.substring(1); - } - else if (translated.startsWith("~")) { - translated = homeDir.getParentFile().getAbsolutePath(); - } - else if (!(translated.startsWith(separator()))) { - String cwd = getUserDir().getAbsolutePath(); - translated = cwd + separator() + translated; - } - - File file = new File(translated); - final File dir; - - if (translated.endsWith(separator())) { - dir = file; - } - else { - dir = file.getParentFile(); - } - - File[] entries = dir == null ? new File[0] : dir.listFiles(); - - return matchFiles(buffer, translated, entries, candidates); - } - - protected String separator() { - return File.separator; - } - - protected File getUserHome() { - return Configuration.getUserHome(); - } - - protected File getUserDir() { - return new File("."); - } - - protected int matchFiles(final String buffer, final String translated, final File[] files, final List candidates) { - if (files == null) { - return -1; - } - - int matches = 0; - - // first pass: just count the matches - for (File file : files) { - if (file.getAbsolutePath().startsWith(translated)) { - matches++; - } - } - for (File file : files) { - if (file.getAbsolutePath().startsWith(translated)) { - CharSequence name = file.getName() + (matches == 1 && file.isDirectory() ? separator() : " "); - candidates.add(render(file, name).toString()); - } - } - - final int index = buffer.lastIndexOf(separator()); - - return index + separator().length(); - } - - protected CharSequence render(final File file, final CharSequence name) { - assert file != null; - assert name != null; - - return name; - } -} diff --git a/src/jline/src/main/java/scala/tools/jline/console/completer/NullCompleter.java b/src/jline/src/main/java/scala/tools/jline/console/completer/NullCompleter.java deleted file mode 100644 index 93cf563bcd10..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/console/completer/NullCompleter.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
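A usage sketch of the FileNameCompleter removed above; the results depend entirely on the local filesystem, and the demo class name and partial path are invented. Assumes a Unix-like system.

    // Sketch only: stand-alone use of the deleted FileNameCompleter.
    import java.util.ArrayList;
    import java.util.List;
    import scala.tools.jline.console.completer.FileNameCompleter;

    public class FileNameCompleterDemo {
        public static void main(String[] args) {
            FileNameCompleter completer = new FileNameCompleter();
            List<CharSequence> candidates = new ArrayList<CharSequence>();
            String buffer = "~/Doc";   // "~" is expanded to the current user's home
            int anchor = completer.complete(buffer, buffer.length(), candidates);
            // `anchor` is the buffer offset the candidates complete from (-1 if the
            // directory could not be listed); a single directory match gets a "/" suffix.
            System.out.println(anchor + " " + candidates);
        }
    }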
- */ - -package scala.tools.jline.console.completer; - -import java.util.List; - -/** - * Null completer. - * - * @author Marc Prud'hommeaux - * @author Jason Dillon - * @since 2.3 - */ -public final class NullCompleter - implements Completer -{ - public static final NullCompleter INSTANCE = new NullCompleter(); - - public int complete(final String buffer, final int cursor, final List candidates) { - return -1; - } -} \ No newline at end of file diff --git a/src/jline/src/main/java/scala/tools/jline/console/completer/StringsCompleter.java b/src/jline/src/main/java/scala/tools/jline/console/completer/StringsCompleter.java deleted file mode 100644 index 2abfdd03406d..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/console/completer/StringsCompleter.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package scala.tools.jline.console.completer; - -import java.util.Arrays; -import java.util.Collection; -import java.util.List; -import java.util.SortedSet; -import java.util.TreeSet; - -/** - * Completer for a set of strings. - * - * @author Jason Dillon - * @since 2.3 - */ -public class StringsCompleter - implements Completer -{ - private final SortedSet strings = new TreeSet(); - - public StringsCompleter() { - // empty - } - - public StringsCompleter(final Collection strings) { - assert strings != null; - getStrings().addAll(strings); - } - - public StringsCompleter(final String... strings) { - this(Arrays.asList(strings)); - } - - public Collection getStrings() { - return strings; - } - - public int complete(final String buffer, final int cursor, final List candidates) { - // buffer could be null - assert candidates != null; - - if (buffer == null) { - candidates.addAll(strings); - } - else { - for (String match : strings.tailSet(buffer)) { - if (!match.startsWith(buffer)) { - break; - } - - candidates.add(match); - } - } - - if (candidates.size() == 1) { - candidates.set(0, candidates.get(0) + " "); - } - - return candidates.isEmpty() ? -1 : 0; - } -} \ No newline at end of file diff --git a/src/jline/src/main/java/scala/tools/jline/console/completer/package-info.java b/src/jline/src/main/java/scala/tools/jline/console/completer/package-info.java deleted file mode 100644 index 8150710cfcaa..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/console/completer/package-info.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (C) 2010 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
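The StringsCompleter removed above has a small, self-describing contract; a sketch with invented class name and words:

    // Sketch only: completes from a sorted set of fixed strings.
    import java.util.ArrayList;
    import java.util.List;
    import scala.tools.jline.console.completer.StringsCompleter;

    public class StringsCompleterDemo {
        public static void main(String[] args) {
            StringsCompleter completer = new StringsCompleter("foo", "foobar", "bar");
            List<CharSequence> candidates = new ArrayList<CharSequence>();
            System.out.println(completer.complete("fo", 2, candidates)); // 0
            System.out.println(candidates);                              // [foo, foobar]

            candidates.clear();
            completer.complete("b", 1, candidates);
            System.out.println(candidates);  // [bar ]  (a single match gets a trailing space)
        }
    }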
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * Console completer support. - * - * @since 2.3 - */ -package scala.tools.jline.console.completer; \ No newline at end of file diff --git a/src/jline/src/main/java/scala/tools/jline/console/history/FileHistory.java b/src/jline/src/main/java/scala/tools/jline/console/history/FileHistory.java deleted file mode 100644 index 5eccba3ce51b..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/console/history/FileHistory.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved. - * - * This software is distributable under the BSD license. See the terms of the - * BSD license in the documentation provided with this software. - */ - -package scala.tools.jline.console.history; - -import scala.tools.jline.internal.Log; - -import java.io.BufferedOutputStream; -import java.io.BufferedReader; -import java.io.File; -import java.io.FileOutputStream; -import java.io.FileReader; -import java.io.Flushable; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.PrintStream; -import java.io.Reader; -import java.util.ListIterator; - -/** - * {@link History} using a file for persistent backing. - *

    - * Implementers should install shutdown hook to call {@link FileHistory#flush} - * to save history to disk. - * - * @author Jason Dillon - * @since 2.0 - */ -public class FileHistory - extends MemoryHistory - implements PersistentHistory, Flushable -{ - private final File file; - - public FileHistory(final File file) throws IOException { - assert file != null; - this.file = file; - load(file); - } - - public File getFile() { - return file; - } - - public void load(final File file) throws IOException { - assert file != null; - if (file.exists()) { - Log.trace("Loading history from: ", file); - load(new FileReader(file)); - } - } - - public void load(final InputStream input) throws IOException { - assert input != null; - load(new InputStreamReader(input)); - } - - public void load(final Reader reader) throws IOException { - assert reader != null; - BufferedReader input = new BufferedReader(reader); - - String item; - while ((item = input.readLine()) != null) { - add(item); - } - } - - public void flush() throws IOException { - Log.trace("Flushing history"); - - if (!file.exists()) { - File dir = file.getParentFile(); - if (!dir.exists() && !dir.mkdirs()) { - Log.warn("Failed to create directory: ", dir); - } - if (!file.createNewFile()) { - Log.warn("Failed to create file: ", file); - } - } - - PrintStream out = new PrintStream(new BufferedOutputStream(new FileOutputStream(file))); - try { - for (Entry entry : this) { - out.println(entry.value()); - } - } - finally { - out.close(); - } - } - - public void purge() throws IOException { - Log.trace("Purging history"); - - clear(); - - if (!file.delete()) { - Log.warn("Failed to delete history file: ", file); - } - } -} \ No newline at end of file diff --git a/src/jline/src/main/java/scala/tools/jline/console/history/History.java b/src/jline/src/main/java/scala/tools/jline/console/history/History.java deleted file mode 100644 index d8602f215056..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/console/history/History.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved. - * - * This software is distributable under the BSD license. See the terms of the - * BSD license in the documentation provided with this software. - */ - -package scala.tools.jline.console.history; - -import java.util.Iterator; -import java.util.ListIterator; - -/** - * Console history. - * - * @author Marc Prud'hommeaux - * @author Jason Dillon - * @since 2.3 - */ -public interface History - extends Iterable -{ - int size(); - - boolean isEmpty(); - - int index(); - - void clear(); - - CharSequence get(int index); - - void add(CharSequence line); - - void replace(CharSequence item); - - // - // Entries - // - - interface Entry - { - int index(); - - CharSequence value(); - } - - ListIterator entries(int index); - - ListIterator entries(); - - Iterator iterator(); - - // - // Navigation - // - - CharSequence current(); - - boolean previous(); - - boolean next(); - - boolean moveToFirst(); - - boolean moveToLast(); - - boolean moveTo(int index); - - void moveToEnd(); -} diff --git a/src/jline/src/main/java/scala/tools/jline/console/history/MemoryHistory.java b/src/jline/src/main/java/scala/tools/jline/console/history/MemoryHistory.java deleted file mode 100644 index 3af936428a38..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/console/history/MemoryHistory.java +++ /dev/null @@ -1,318 +0,0 @@ -/* - * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved. 
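The FileHistory javadoc above recommends flushing from a shutdown hook; a minimal sketch of that pattern follows. The history file name is an arbitrary example, not something defined by the patch.

    // Sketch only: persist history on JVM exit, as the deleted FileHistory javadoc suggests.
    import java.io.File;
    import java.io.IOException;
    import scala.tools.jline.console.history.FileHistory;

    public class FileHistoryDemo {
        public static void main(String[] args) throws IOException {
            final FileHistory history =
                new FileHistory(new File(System.getProperty("user.home"), ".demo_history"));
            Runtime.getRuntime().addShutdownHook(new Thread() {
                @Override public void run() {
                    try {
                        history.flush();   // write all entries back to the file
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            });
            history.add("println(42)");    // recorded in memory, saved by the hook on exit
        }
    }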
- * - * This software is distributable under the BSD license. See the terms of the - * BSD license in the documentation provided with this software. - */ - -package scala.tools.jline.console.history; - -import java.util.Iterator; -import java.util.LinkedList; -import java.util.ListIterator; -import java.util.NoSuchElementException; - -/** - * Non-persistent {@link History}. - * - * @author Marc Prud'hommeaux - * @author Jason Dillon - * @since 2.3 - */ -public class MemoryHistory - implements History -{ - public static final int DEFAULT_MAX_SIZE = 500; - - private final LinkedList items = new LinkedList(); - - private int maxSize = DEFAULT_MAX_SIZE; - - private boolean ignoreDuplicates = true; - - private boolean autoTrim = false; - - // NOTE: These are all ideas from looking at the Bash man page: - - // TODO: Add ignore space? (lines starting with a space are ignored) - - // TODO: Add ignore patterns? - - // TODO: Add history timestamp? - - // TODO: Add erase dups? - - private int offset = 0; - - private int index = 0; - - public void setMaxSize(final int maxSize) { - this.maxSize = maxSize; - maybeResize(); - } - - public int getMaxSize() { - return maxSize; - } - - public boolean isIgnoreDuplicates() { - return ignoreDuplicates; - } - - public void setIgnoreDuplicates(final boolean flag) { - this.ignoreDuplicates = flag; - } - - public boolean isAutoTrim() { - return autoTrim; - } - - public void setAutoTrim(final boolean flag) { - this.autoTrim = flag; - } - - public int size() { - return items.size(); - } - - public boolean isEmpty() { - return items.isEmpty(); - } - - public int index() { - return offset + index; - } - - public void clear() { - items.clear(); - offset = 0; - index = 0; - } - - public CharSequence get(final int index) { - return items.get(index - offset); - } - - public void add(CharSequence item) { - assert item != null; - - if (isAutoTrim()) { - item = String.valueOf(item).trim(); - } - - if (isIgnoreDuplicates()) { - if (!items.isEmpty() && item.equals(items.getLast())) { - return; - } - } - - items.add(item); - - maybeResize(); - } - - public void replace(final CharSequence item) { - items.removeLast(); - add(item); - } - - private void maybeResize() { - while (size() > getMaxSize()) { - items.removeFirst(); - offset++; - } - - index = size(); - } - - public ListIterator entries(final int index) { - return new EntriesIterator(index - offset); - } - - public ListIterator entries() { - return entries(offset); - } - - public Iterator iterator() { - return entries(); - } - - private static class EntryImpl - implements Entry - { - private final int index; - - private final CharSequence value; - - public EntryImpl(int index, CharSequence value) { - this.index = index; - this.value = value; - } - - public int index() { - return index; - } - - public CharSequence value() { - return value; - } - - @Override - public String toString() { - return String.format("%d: %s", index, value); - } - } - - private class EntriesIterator - implements ListIterator - { - private final ListIterator source; - - private EntriesIterator(final int index) { - source = items.listIterator(index); - } - - public Entry next() { - if (!source.hasNext()) { - throw new NoSuchElementException(); - } - return new EntryImpl(offset + source.nextIndex(), source.next()); - } - - public Entry previous() { - if (!source.hasPrevious()) { - throw new NoSuchElementException(); - } - return new EntryImpl(offset + source.previousIndex(), source.previous()); - } - - public int nextIndex() { - return offset + 
source.nextIndex(); - } - - public int previousIndex() { - return offset + source.previousIndex(); - } - - public boolean hasNext() { - return source.hasNext(); - } - - public boolean hasPrevious() { - return source.hasPrevious(); - } - - public void remove() { - throw new UnsupportedOperationException(); - } - - public void set(final Entry entry) { - throw new UnsupportedOperationException(); - } - - public void add(final Entry entry) { - throw new UnsupportedOperationException(); - } - } - - // - // Navigation - // - - /** - * This moves the history to the last entry. This entry is one position - * before the moveToEnd() position. - * - * @return Returns false if there were no history entries or the history - * index was already at the last entry. - */ - public boolean moveToLast() { - int lastEntry = size() - 1; - if (lastEntry >= 0 && lastEntry != index) { - index = size() - 1; - return true; - } - - return false; - } - - /** - * Move to the specified index in the history - * @param index - * @return - */ - public boolean moveTo(int index) { - index -= offset; - if (index >= 0 && index < size() ) { - this.index = index; - return true; - } - return false; - } - - /** - * Moves the history index to the first entry. - * - * @return Return false if there are no entries in the history or if the - * history is already at the beginning. - */ - public boolean moveToFirst() { - if (size() > 0 && index != 0) { - index = 0; - return true; - } - - return false; - } - - /** - * Move to the end of the history buffer. This will be a blank entry, after - * all of the other entries. - */ - public void moveToEnd() { - index = size(); - } - - /** - * Return the content of the current buffer. - */ - public CharSequence current() { - if (index >= size()) { - return ""; - } - - return items.get(index); - } - - /** - * Move the pointer to the previous element in the buffer. - * - * @return true if we successfully went to the previous element - */ - public boolean previous() { - if (index <= 0) { - return false; - } - - index--; - - return true; - } - - /** - * Move the pointer to the next element in the buffer. - * - * @return true if we successfully went to the next element - */ - public boolean next() { - if (index >= size()) { - return false; - } - - index++; - - return true; - } - - -} \ No newline at end of file diff --git a/src/jline/src/main/java/scala/tools/jline/console/history/PersistentHistory.java b/src/jline/src/main/java/scala/tools/jline/console/history/PersistentHistory.java deleted file mode 100644 index 916532e7fc7c..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/console/history/PersistentHistory.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved. - * - * This software is distributable under the BSD license. See the terms of the - * BSD license in the documentation provided with this software. - */ - -package scala.tools.jline.console.history; - -import java.io.IOException; - -/** - * Persistent {@link History}. - * - * @author Jason Dillon - * @since 2.3 - */ -public interface PersistentHistory - extends History -{ - /** - * Flush all items to persistent storage. - * - * @throws IOException Flush failed - */ - void flush() throws IOException; - - /** - * Purge persistent storage and {@link #clear}. 
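The navigation contract of the MemoryHistory removed above (moveToEnd, previous, current, moveToFirst) in a short sketch; the demo class name and entries are invented:

    // Sketch only: walking history entries backwards, as the up-arrow key would.
    import scala.tools.jline.console.history.MemoryHistory;

    public class MemoryHistoryDemo {
        public static void main(String[] args) {
            MemoryHistory history = new MemoryHistory();
            history.add("first");
            history.add("second");
            history.moveToEnd();                 // index just past the newest entry
            while (history.previous()) {
                System.out.println(history.current());  // prints "second", then "first"
            }
            System.out.println(history.moveToFirst());  // false: already at the first entry
        }
    }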
- * - * @throws IOException Purge failed - */ - void purge() throws IOException; -} \ No newline at end of file diff --git a/src/jline/src/main/java/scala/tools/jline/console/history/package-info.java b/src/jline/src/main/java/scala/tools/jline/console/history/package-info.java deleted file mode 100644 index 4635752898b1..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/console/history/package-info.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (C) 2009 the original author(s). - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * Console history support. - * - * @since 2.0 - */ -package scala.tools.jline.console.history; \ No newline at end of file diff --git a/src/jline/src/main/java/scala/tools/jline/console/package-info.java b/src/jline/src/main/java/scala/tools/jline/console/package-info.java deleted file mode 100644 index 9f284e9c05c1..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/console/package-info.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (C) 2009 the original author(s). - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * Console support. - * - * @since 2.0 - */ -package scala.tools.jline.console; \ No newline at end of file diff --git a/src/jline/src/main/java/scala/tools/jline/internal/Configuration.java b/src/jline/src/main/java/scala/tools/jline/internal/Configuration.java deleted file mode 100644 index 5350d6c19edb..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/internal/Configuration.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package scala.tools.jline.internal; - -import java.io.BufferedInputStream; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.util.Properties; - -/** - * Provides access to configuration values. - * - * @author Jason Dillon - * @since 2.4 - */ -public final class Configuration -{ - public static final String JLINE_RC = ".jline.rc"; - - private static final Properties userprops; - - static { - Properties props = new Properties(); - - File file = new File(getUserHome(), JLINE_RC); - if (file.exists() && file.canRead()) { - try { - InputStream input = new BufferedInputStream(new FileInputStream(file)); - try { - props.load(input); - Log.debug("Loaded user configuration: ", file); - } - finally { - input.close(); - } - } - catch (IOException e) { - Log.warn("Unable to read user configuration: ", file, e); - } - } - else { - Log.trace("User configuration file missing or unreadable: ", file); - } - - userprops = props; - } - - private static boolean isEmpty(final String value) { - return value == null || value.trim().length() == 0; - } - - public static String getString(final String name, final String defaultValue) { - assert name != null; - - String value; - - // Check sysprops first, it always wins - value = System.getProperty(name); - - if (isEmpty(value)) { - // Next try userprops - value = userprops.getProperty(name); - - if (isEmpty(value)) { - // else use the default - value = defaultValue; - } - } - - return value; - } - - public static String getString(final String name) { - return getString(name, null); - } - - public static Boolean getBoolean(final String name, final Boolean defaultValue) { - String value = getString(name); - if (isEmpty(value)) { - return defaultValue; - } - return Boolean.valueOf(value); - } - - public static Boolean getBoolean(final String name) { - return getBoolean(name, null); - } - - // - // System property helpers - // - - public static File getUserHome() { - return new File(System.getProperty("user.home")); - } - - public static String getOsName() { - return System.getProperty("os.name").toLowerCase(); - } - - public static String getFileEncoding() { - return System.getProperty("file.encoding"); - } - - public static String getInputEncoding() { - return System.getProperty("input.encoding", "UTF-8"); - } -} \ No newline at end of file diff --git a/src/jline/src/main/java/scala/tools/jline/internal/Log.java b/src/jline/src/main/java/scala/tools/jline/internal/Log.java deleted file mode 100644 index b226a105325c..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/internal/Log.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
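The lookup order implemented by the Configuration class removed above is: system property first, then ~/.jline.rc, then the supplied default. A sketch; the property names and demo class are invented.

    // Sketch only: resolution order of the deleted Configuration helper.
    import scala.tools.jline.internal.Configuration;

    public class ConfigurationDemo {
        public static void main(String[] args) {
            System.setProperty("demo.answer", "42");  // hypothetical property name
            System.out.println(Configuration.getString("demo.answer", "fallback"));      // 42
            System.out.println(Configuration.getString("demo.missing", "fallback"));     // fallback, unless set in ~/.jline.rc
            System.out.println(Configuration.getBoolean("demo.missing", Boolean.FALSE)); // false
        }
    }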
- */ - -package scala.tools.jline.internal; - -import java.io.PrintStream; - -/** - * Internal logger. - * - * @author Jason Dillon - * @since 2.0 - */ -public final class Log -{ - ///CLOVER:OFF - - public static enum Level - { - TRACE, - DEBUG, - INFO, - WARN, - ERROR - } - - @SuppressWarnings({"StringConcatenation"}) - public static final boolean DEBUG = Boolean.getBoolean(Log.class.getName() + ".debug"); - - @SuppressWarnings({"StringConcatenation"}) - public static final boolean TRACE = Boolean.getBoolean(Log.class.getName() + ".trace"); - - private static PrintStream output = System.err; - - public static PrintStream getOutput() { - return output; - } - - public static void setOutput(final PrintStream out) { - assert out != null; - output = out; - } - - private static void print(final Object message) { - if (message instanceof Throwable) { - ((Throwable) message).printStackTrace(); - } - else if (message.getClass().isArray()) { - Object[] array = (Object[]) message; - - for (int i = 0; i < array.length; i++) { - output.print(array[i]); - if (i + 1 < array.length) { - output.print(","); - } - } - } - else { - output.print(message); - } - } - - private static void log(final Level level, final Object[] messages) { - //noinspection SynchronizeOnNonFinalField - synchronized (output) { - output.format("[%s] ", level); - - for (Object message : messages) { - print(message); - } - - output.println(); - output.flush(); - } - } - - public static void trace(final Object... messages) { - if (TRACE) { - log(Level.TRACE, messages); - } - } - - public static void debug(final Object... messages) { - if (TRACE || DEBUG) { - log(Level.DEBUG, messages); - } - } - - public static void warn(final Object... messages) { - log(Level.WARN, messages); - } - - public static void error(final Object... 
messages) { - log(Level.ERROR, messages); - } -} \ No newline at end of file diff --git a/src/jline/src/main/java/scala/tools/jline/internal/ReplayPrefixOneCharInputStream.java b/src/jline/src/main/java/scala/tools/jline/internal/ReplayPrefixOneCharInputStream.java deleted file mode 100644 index 2adabdd2abd7..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/internal/ReplayPrefixOneCharInputStream.java +++ /dev/null @@ -1,95 +0,0 @@ -package scala.tools.jline.internal; - -import java.io.IOException; -import java.io.InputStream; -import java.text.MessageFormat; - -/** - * This is awkward and inefficient, but probably the minimal way to add UTF-8 support to JLine - * - * @author Marc Herbert - * @author Jason Dillon - * @since 2.0 - */ -public final class ReplayPrefixOneCharInputStream - extends InputStream -{ - private byte firstByte; - - private int byteLength; - - private InputStream wrappedStream; - - private int byteRead; - - private final String encoding; - - public ReplayPrefixOneCharInputStream(final String encoding) { - assert encoding != null; - this.encoding = encoding; - } - - public String getEncoding() { - return encoding; - } - - public void setInput(final int recorded, final InputStream wrapped) throws IOException { - this.byteRead = 0; - this.firstByte = (byte) recorded; - this.wrappedStream = wrapped; - - byteLength = 1; - if (encoding.equalsIgnoreCase("UTF-8")) { - setInputUTF8(recorded, wrapped); - } - else if (encoding.equalsIgnoreCase("UTF-16")) { - byteLength = 2; - } - else if (encoding.equalsIgnoreCase("UTF-32")) { - byteLength = 4; - } - } - - - public void setInputUTF8(final int recorded, final InputStream wrapped) throws IOException { - // 110yyyyy 10zzzzzz - if ((firstByte & (byte) 0xE0) == (byte) 0xC0) { - this.byteLength = 2; - } - // 1110xxxx 10yyyyyy 10zzzzzz - else if ((firstByte & (byte) 0xF0) == (byte) 0xE0) { - this.byteLength = 3; - } - // 11110www 10xxxxxx 10yyyyyy 10zzzzzz - else if ((firstByte & (byte) 0xF8) == (byte) 0xF0) { - this.byteLength = 4; - } - else { - throw new IOException(MessageFormat.format("Invalid UTF-8 first byte: {0}", firstByte)); - } - } - - public int read() throws IOException { - if (available() == 0) { - return -1; - } - - byteRead++; - - if (byteRead == 1) { - return firstByte; - } - - return wrappedStream.read(); - } - - /** - * InputStreamReader is greedy and will try to read bytes in advance. We - * do NOT want this to happen since we use a temporary/"losing bytes" - * InputStreamReader above, that's why we hide the real - * wrappedStream.available() here. - */ - public int available() { - return byteLength - byteRead; - } -} \ No newline at end of file diff --git a/src/jline/src/main/java/scala/tools/jline/internal/TerminalLineSettings.java b/src/jline/src/main/java/scala/tools/jline/internal/TerminalLineSettings.java deleted file mode 100644 index 151862c14d6c..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/internal/TerminalLineSettings.java +++ /dev/null @@ -1,217 +0,0 @@ -/* - * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved. - * - * This software is distributable under the BSD license. See the terms of the - * BSD license in the documentation provided with this software. 
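The Log helper removed above is gated by system properties derived from its class name; a sketch of typical use, with invented messages:

    // Sketch only: the deleted Log prints WARN/ERROR always, and DEBUG/TRACE only when
    // -Dscala.tools.jline.internal.Log.debug=true (or ...Log.trace=true) is set at startup.
    import scala.tools.jline.internal.Log;

    public class LogDemo {
        public static void main(String[] args) {
            Log.debug("loaded user configuration: ", "~/.jline.rc");  // silent unless debug/trace enabled
            Log.warn("failed to read terminal settings");             // always printed (to stderr by default)
        }
    }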
- */ - -package scala.tools.jline.internal; - -import java.io.ByteArrayOutputStream; -import java.io.Closeable; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.text.MessageFormat; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - * Provides access to terminal line settings via stty. - * - * @author Marc Prud'hommeaux - * @author Dale Kemp - * @author Jason Dillon - * @author Jean-Baptiste Onofré - * @since 2.0 - */ -public final class TerminalLineSettings -{ - public static final String JLINE_STTY = "jline.stty"; - - public static final String DEFAULT_STTY = "stty"; - - public static final String JLINE_SH = "jline.sh"; - - public static final String DEFAULT_SH = "sh"; - - private static String sttyCommand = Configuration.getString(JLINE_STTY, DEFAULT_STTY); - - private static String shCommand = Configuration.getString(JLINE_SH, DEFAULT_SH); - - private String config; - - private long configLastFetched; - - public TerminalLineSettings() throws IOException, InterruptedException { - config = get("-a"); - configLastFetched = System.currentTimeMillis(); - - Log.debug("Config: ", config); - - // sanity check - if (config.length() == 0) { - throw new IOException(MessageFormat.format("Unrecognized stty code: {0}", config)); - } - } - - public String getConfig() { - return config; - } - - public void restore() throws IOException, InterruptedException { - set("sane"); - } - - public String get(final String args) throws IOException, InterruptedException { - return stty(args); - } - - public void set(final String args) throws IOException, InterruptedException { - stty(args); - } - - /** - *

- * Get the value of a stty property, including the management of a cache.
- * - * @param name the stty property. - * @return the stty property value. - */ - public int getProperty(String name) { - assert name != null; - try { - // tty properties are cached so we don't have to worry too much about getting term width/height - if (config == null || System.currentTimeMillis() - configLastFetched > 1000 ) { - config = get("-a"); - configLastFetched = System.currentTimeMillis(); - } - return this.getProperty(name, config); - } catch (Exception e) { - Log.warn("Failed to query stty ", name, e); - return -1; - } - } - - /** - *

- * Parses a stty output (provided by stty -a) and returns the value of a given property.
    - * - * @param name property name. - * @param stty string resulting of stty -a execution. - * @return value of the given property. - */ - protected int getProperty(String name, String stty) { - // try the first kind of regex - Pattern pattern = Pattern.compile(name + "\\s+=\\s+([^;]*)[;\\n\\r]"); - Matcher matcher = pattern.matcher(stty); - if (!matcher.find()) { - // try a second kind of regex - pattern = Pattern.compile(name + "\\s+([^;]*)[;\\n\\r]"); - matcher = pattern.matcher(stty); - if (!matcher.find()) { - // try a second try of regex - pattern = Pattern.compile("(\\S*)\\s+" + name); - matcher = pattern.matcher(stty); - if (!matcher.find()) { - return -1; - } - } - } - return parseControlChar(matcher.group(1)); - } - - private int parseControlChar(String str) { - // under - if ("".equals(str)) { - return -1; - } - // octal - if (str.charAt(0) == '0') { - return Integer.parseInt(str, 8); - } - // decimal - if (str.charAt(0) >= '1' && str.charAt(0) <= '9') { - return Integer.parseInt(str, 10); - } - // control char - if (str.charAt(0) == '^') { - if (str.charAt(1) == '?') { - return 127; - } else { - return str.charAt(1) - 64; - } - } else if (str.charAt(0) == 'M' && str.charAt(1) == '-') { - if (str.charAt(2) == '^') { - if (str.charAt(3) == '?') { - return 127 + 128; - } else { - return str.charAt(3) - 64 + 128; - } - } else { - return str.charAt(2) + 128; - } - } else { - return str.charAt(0); - } - } - - private static String stty(final String args) throws IOException, InterruptedException { - assert args != null; - return exec(String.format("%s %s < /dev/tty", sttyCommand, args)); - } - - private static String exec(final String cmd) throws IOException, InterruptedException { - assert cmd != null; - return exec(shCommand, "-c", cmd); - } - - private static String exec(final String... cmd) throws IOException, InterruptedException { - assert cmd != null; - - ByteArrayOutputStream bout = new ByteArrayOutputStream(); - - Log.trace("Running: ", cmd); - - Process p = Runtime.getRuntime().exec(cmd); - - InputStream in = null; - InputStream err = null; - OutputStream out = null; - try { - int c; - in = p.getInputStream(); - while ((c = in.read()) != -1) { - bout.write(c); - } - err = p.getErrorStream(); - while ((c = err.read()) != -1) { - bout.write(c); - } - out = p.getOutputStream(); - p.waitFor(); - } - finally { - close(in, out, err); - } - - String result = bout.toString(); - - Log.trace("Result: ", result); - - return result; - } - - private static void close(final Closeable... closeables) { - for (Closeable c : closeables) { - try { - c.close(); - } - catch (Exception e) { - // Ignore - } - } - } -} \ No newline at end of file diff --git a/src/jline/src/main/java/scala/tools/jline/internal/package-info.java b/src/jline/src/main/java/scala/tools/jline/internal/package-info.java deleted file mode 100644 index d27444cfdfc5..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/internal/package-info.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (C) 2009 the original author(s). - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
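The deleted getProperty/parseControlChar pair recovers terminal settings from `stty -a` output: the property is located with a couple of regex shapes (`erase = ^?;` versus `columns 244;`), and control-character notation such as `^C`, `^?` or `M-^?` is decoded into its numeric code. A rough Scala sketch of the same decoding rules, independent of the jline classes (names are illustrative):

{{{
object SttyParse {
  import scala.util.matching.Regex

  /** Decode stty control-character notation: "^C", "^?", "M-^?", octal or decimal codes. */
  def parseControlChar(s: String): Int =
    if (s.isEmpty) -1
    else if (s.charAt(0) == '0') Integer.parseInt(s, 8)     // octal
    else if (s.charAt(0).isDigit) Integer.parseInt(s, 10)   // decimal
    else if (s.startsWith("M-^?")) 127 + 128                // meta + DEL
    else if (s.startsWith("M-^")) s.charAt(3) - 64 + 128    // meta + control char
    else if (s.startsWith("M-")) s.charAt(2) + 128          // meta + plain char
    else if (s.startsWith("^?")) 127                        // DEL
    else if (s.startsWith("^")) s.charAt(1) - 64            // ^C -> 3
    else s.charAt(0)                                        // plain character

  /** Pull a named property out of `stty -a` output, trying the common layouts. */
  def property(name: String, sttyOutput: String): Option[Int] = {
    val patterns = Seq(
      new Regex(name + """\s+=\s+([^;]*)[;\n\r]"""),  // "erase = ^?;"
      new Regex(name + """\s+([^;]*)[;\n\r]"""),      // "columns 244;"
      new Regex("""(\S*)\s+""" + name)                // "155 columns;" (BSD layout)
    )
    patterns.iterator.map(_.findFirstMatchIn(sttyOutput)).collectFirst {
      case Some(m) => parseControlChar(m.group(1))
    }
  }

  def main(args: Array[String]): Unit = {
    val sample = "speed 38400 baud; rows 85; columns 244; line = 0;\nerase = ^?; intr = ^C;\n"
    println(property("columns", sample)) // Some(244)
    println(property("erase", sample))   // Some(127)
    println(property("intr", sample))    // Some(3)
  }
}
}}}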
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * Internal support. - * - * @since 2.0 - */ -package scala.tools.jline.internal; \ No newline at end of file diff --git a/src/jline/src/main/java/scala/tools/jline/package-info.java b/src/jline/src/main/java/scala/tools/jline/package-info.java deleted file mode 100644 index fde16f98dee6..000000000000 --- a/src/jline/src/main/java/scala/tools/jline/package-info.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (C) 2009 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * JLine 2. - * - * @since 2.0 - */ -package scala.tools.jline; \ No newline at end of file diff --git a/src/jline/src/main/resources/scala/tools/jline/console/completer/CandidateListCompletionHandler.properties b/src/jline/src/main/resources/scala/tools/jline/console/completer/CandidateListCompletionHandler.properties deleted file mode 100644 index fd097efb8a79..000000000000 --- a/src/jline/src/main/resources/scala/tools/jline/console/completer/CandidateListCompletionHandler.properties +++ /dev/null @@ -1,4 +0,0 @@ -DISPLAY_CANDIDATES=Display all %d possibilities? (y or n) -DISPLAY_CANDIDATES_YES=y -DISPLAY_CANDIDATES_NO=n -DISPLAY_MORE=--More-- diff --git a/src/jline/src/main/resources/scala/tools/jline/keybindings.properties b/src/jline/src/main/resources/scala/tools/jline/keybindings.properties deleted file mode 100644 index ad932d2a8039..000000000000 --- a/src/jline/src/main/resources/scala/tools/jline/keybindings.properties +++ /dev/null @@ -1,71 +0,0 @@ -# Keybinding mapping for JLine. 
The format is: -# [key code]=[logical operation] - -# CTRL-A: move to the beginning of the line -1=MOVE_TO_BEG - -# CTRL-B: move to the previous character -2=PREV_CHAR - -# CTRL-D: close out the input stream -4=EXIT - -# CTRL-E: move the cursor to the end of the line -5=MOVE_TO_END - -# CTRL-F: move to the next character -6=NEXT_CHAR - -# CTRL-G: abort -7=ABORT - -# BACKSPACE, CTRL-H: delete the previous character -# 8 is the ASCII code for backspace and therefor -# deleting the previous character -8=DELETE_PREV_CHAR - -# TAB, CTRL-I: signal that console completion should be attempted -9=COMPLETE - -# CTRL-J, CTRL-M: newline -10=NEWLINE - -# CTRL-K: erase the current line -11=KILL_LINE - -# CTRL-L: clear screen -12=CLEAR_SCREEN - -# ENTER: newline -13=NEWLINE - -# CTRL-N: scroll to the next element in the history buffer -14=NEXT_HISTORY - -# CTRL-O: move to the previous word -15=PREV_WORD - -# CTRL-P: scroll to the previous element in the history buffer -16=PREV_HISTORY - -# CTRL-R: search history -18=SEARCH_PREV - -# CTRL-T: move to next word -20=NEXT_WORD - -# CTRL-U: delete all the characters before the cursor position -21=KILL_LINE_PREV - -# CTRL-V: paste the contents of the clipboard (useful for Windows terminal) -22=PASTE - -# CTRL-W: delete the word directly before the cursor -23=DELETE_PREV_WORD - -# CTRL-X: delete the word directly after the cursor -24=DELETE_NEXT_WORD - -# DELETE, CTRL-?: delete the next character -# 127 is the ASCII code for delete -127=DELETE_NEXT_CHAR diff --git a/src/jline/src/main/resources/scala/tools/jline/windowsbindings.properties b/src/jline/src/main/resources/scala/tools/jline/windowsbindings.properties deleted file mode 100644 index 340b5aa5b98f..000000000000 --- a/src/jline/src/main/resources/scala/tools/jline/windowsbindings.properties +++ /dev/null @@ -1,71 +0,0 @@ -# Keybinding mapping for JLine. The format is: -# [key code]=[logical operation] - -# CTRL-A: move to the beginning of the line -1=MOVE_TO_BEG - -# CTRL-B: move to the previous character -2=PREV_CHAR - -# CTRL-C: toggle overtype mode (frankly, I wasn't sure where to bind this) -3=INSERT - -# CTRL-D: close out the input stream -4=EXIT - -# CTRL-E: move the cursor to the end of the line -5=MOVE_TO_END - -# CTRL-F: move to the next character -6=NEXT_CHAR - -# CTRL-G: move to the previous word -7=ABORT - -# CTRL-H: delete the previous character -8=DELETE_PREV_CHAR - -# TAB, CTRL-I: signal that console completion should be attempted -9=COMPLETE - -# CTRL-J, CTRL-M: newline -10=NEWLINE - -# CTRL-K: erase the current line -11=KILL_LINE - -# CTRL-L: clear screen -12=CLEAR_SCREEN - -# ENTER: newline -13=NEWLINE - -# CTRL-N: scroll to the next element in the history buffer -14=NEXT_HISTORY - -# CTRL-O: move to the previous word -15=PREV_WORD - -# CTRL-P: scroll to the previous element in the history buffer -16=PREV_HISTORY - -# CTRL-R: search backwards in history -18=SEARCH_PREV - -# CTRL-S: Move to the end of the history -19=END_OF_HISTORY - -# CTRL-U: delete all the characters before the cursor position -21=KILL_LINE_PREV - -# CTRL-V: paste the contents of the clipboard (useful for Windows terminal) -22=PASTE - -# CTRL-W: delete the word directly before the cursor -23=DELETE_PREV_WORD - -# CTRL-[: escape - clear the current line. 
-27=CLEAR_LINE - -# CTRL-?: delete the previous character -127=DELETE_NEXT_CHAR diff --git a/src/jline/src/test/java/scala/tools/jline/TerminalFactoryTest.java b/src/jline/src/test/java/scala/tools/jline/TerminalFactoryTest.java deleted file mode 100644 index c0c070bdfde7..000000000000 --- a/src/jline/src/test/java/scala/tools/jline/TerminalFactoryTest.java +++ /dev/null @@ -1,34 +0,0 @@ -package scala.tools.jline; - -import org.junit.Before; -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -/** - * Tests for the {@link TerminalFactory}. - */ -public class TerminalFactoryTest -{ - @Before - public void setUp() throws Exception { - TerminalFactory.reset(); - } - - @Test - public void testConfigureNone() { - TerminalFactory.configure(TerminalFactory.NONE); - Terminal t = TerminalFactory.get(); - assertNotNull(t); - assertEquals(UnsupportedTerminal.class.getName(), t.getClass().getName()); - } - - @Test - public void testConfigureUnsupportedTerminal() { - TerminalFactory.configure(UnsupportedTerminal.class.getName()); - Terminal t = TerminalFactory.get(); - assertNotNull(t); - assertEquals(UnsupportedTerminal.class.getName(), t.getClass().getName()); - } -} \ No newline at end of file diff --git a/src/jline/src/test/java/scala/tools/jline/console/ConsoleReaderTest.java b/src/jline/src/test/java/scala/tools/jline/console/ConsoleReaderTest.java deleted file mode 100644 index 0e6cba15a0d8..000000000000 --- a/src/jline/src/test/java/scala/tools/jline/console/ConsoleReaderTest.java +++ /dev/null @@ -1,261 +0,0 @@ -package scala.tools.jline.console; - -import scala.tools.jline.TerminalFactory; -import scala.tools.jline.WindowsTerminal; -import scala.tools.jline.console.history.History; -import scala.tools.jline.console.history.MemoryHistory; -import org.junit.Before; -import org.junit.Test; - -import java.io.ByteArrayInputStream; -import java.io.InputStream; -import java.io.StringWriter; -import java.io.Writer; - -import static scala.tools.jline.WindowsTerminal.WindowsKey.DELETE_KEY; -import static scala.tools.jline.WindowsTerminal.WindowsKey.END_KEY; -import static scala.tools.jline.WindowsTerminal.WindowsKey.ESCAPE_KEY; -import static scala.tools.jline.WindowsTerminal.WindowsKey.HOME_KEY; -import static scala.tools.jline.WindowsTerminal.WindowsKey.INSERT_KEY; -import static scala.tools.jline.WindowsTerminal.WindowsKey.LEFT_ARROW_KEY; -import static scala.tools.jline.WindowsTerminal.WindowsKey.NUMPAD_KEY_INDICATOR; -import static scala.tools.jline.WindowsTerminal.WindowsKey.PAGE_DOWN_KEY; -import static scala.tools.jline.WindowsTerminal.WindowsKey.PAGE_UP_KEY; -import static scala.tools.jline.WindowsTerminal.WindowsKey.SPECIAL_KEY_INDICATOR; -import static scala.tools.jline.console.Operation.DELETE_NEXT_CHAR; -import static scala.tools.jline.console.Operation.DELETE_PREV_CHAR; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -/** - * Tests for the {@link ConsoleReader}. 
- */ -public class ConsoleReaderTest -{ - @Before - public void setUp() throws Exception { - System.setProperty(WindowsTerminal.JLINE_WINDOWS_TERMINAL_DIRECT_CONSOLE, "false"); - } - - private void assertWindowsKeyBehavior(String expected, char[] input) throws Exception { - StringBuilder buffer = new StringBuilder(); - buffer.append(input); - ConsoleReader reader = createConsole(buffer.toString().getBytes()); - assertNotNull(reader); - String line = reader.readLine(); - assertEquals(expected, line); - } - - private ConsoleReader createConsole(byte[] bytes) throws Exception { - InputStream in = new ByteArrayInputStream(bytes); - Writer writer = new StringWriter(); - ConsoleReader reader = new ConsoleReader(in, writer); - reader.setHistory(createSeededHistory()); - return reader; - } - - private History createSeededHistory() { - History history = new MemoryHistory(); - history.add("dir"); - history.add("cd c:\\"); - history.add("mkdir monkey"); - return history; - } - - @Test - public void testDeleteAndBackspaceKeymappings() throws Exception { - // test only works on Windows - if (!(TerminalFactory.get() instanceof WindowsTerminal)) { - return; - } - - ConsoleReader consoleReader = new ConsoleReader(); - assertNotNull(consoleReader); - assertEquals(127, consoleReader.getKeyForAction(DELETE_NEXT_CHAR)); - assertEquals(8, consoleReader.getKeyForAction(DELETE_PREV_CHAR)); - } - - @Test - public void testReadline() throws Exception { - ConsoleReader consoleReader = createConsole("Sample String\r\n".getBytes()); - assertNotNull(consoleReader); - String line = consoleReader.readLine(); - assertEquals("Sample String", line); - } - - @Test - public void testDeleteOnWindowsTerminal() throws Exception { - // test only works on Windows - if (!(TerminalFactory.get() instanceof WindowsTerminal)) { - return; - } - - char[] characters = new char[]{ - 'S', 's', - (char) SPECIAL_KEY_INDICATOR.code, - (char) LEFT_ARROW_KEY.code, - (char) SPECIAL_KEY_INDICATOR.code, - (char) DELETE_KEY.code, '\r', 'n' - }; - assertWindowsKeyBehavior("S", characters); - } - - @Test - public void testNumpadDeleteOnWindowsTerminal() throws Exception { - // test only works on Windows - if (!(TerminalFactory.get() instanceof WindowsTerminal)) { - return; - } - - char[] characters = new char[]{ - 'S', 's', - (char) NUMPAD_KEY_INDICATOR.code, - (char) LEFT_ARROW_KEY.code, - (char) NUMPAD_KEY_INDICATOR.code, - (char) DELETE_KEY.code, '\r', 'n' - }; - assertWindowsKeyBehavior("S", characters); - } - - @Test - public void testHomeKeyOnWindowsTerminal() throws Exception { - // test only works on Windows - if (!(TerminalFactory.get() instanceof WindowsTerminal)) { - return; - } - - char[] characters = new char[]{ - 'S', 's', - (char) SPECIAL_KEY_INDICATOR.code, - (char) HOME_KEY.code, 'x', '\r', '\n' - }; - assertWindowsKeyBehavior("xSs", characters); - - } - - @Test - public void testEndKeyOnWindowsTerminal() throws Exception { - // test only works on Windows - if (!(TerminalFactory.get() instanceof WindowsTerminal)) { - return; - } - - char[] characters = new char[]{ - 'S', 's', - (char) SPECIAL_KEY_INDICATOR.code, - (char) HOME_KEY.code, 'x', - (char) SPECIAL_KEY_INDICATOR.code, (char) END_KEY.code, - 'j', '\r', '\n' - }; - assertWindowsKeyBehavior("xSsj", characters); - } - - @Test - public void testPageUpOnWindowsTerminal() throws Exception { - // test only works on Windows - if (!(TerminalFactory.get() instanceof WindowsTerminal)) { - return; - } - - char[] characters = new char[]{ - (char) SPECIAL_KEY_INDICATOR.code, - (char) 
PAGE_UP_KEY.code, '\r', '\n' - }; - assertWindowsKeyBehavior("dir", characters); - } - - @Test - public void testPageDownOnWindowsTerminal() throws Exception { - // test only works on Windows - if (!(TerminalFactory.get() instanceof WindowsTerminal)) { - return; - } - - char[] characters = new char[]{ - (char) SPECIAL_KEY_INDICATOR.code, - (char) PAGE_DOWN_KEY.code, '\r', '\n' - }; - assertWindowsKeyBehavior("mkdir monkey", characters); - } - - @Test - public void testEscapeOnWindowsTerminal() throws Exception { - // test only works on Windows - if (!(TerminalFactory.get() instanceof WindowsTerminal)) { - return; - } - - char[] characters = new char[]{ - 's', 's', 's', - (char) SPECIAL_KEY_INDICATOR.code, - (char) ESCAPE_KEY.code, '\r', '\n' - }; - assertWindowsKeyBehavior("", characters); - } - - @Test - public void testInsertOnWindowsTerminal() throws Exception { - // test only works on Windows - if (!(TerminalFactory.get() instanceof WindowsTerminal)) { - return; - } - - char[] characters = new char[]{ - 'o', 'p', 's', - (char) SPECIAL_KEY_INDICATOR.code, - (char) HOME_KEY.code, - (char) SPECIAL_KEY_INDICATOR.code, - (char) INSERT_KEY.code, 'o', 'o', 'p', 's', '\r', '\n' - }; - assertWindowsKeyBehavior("oops", characters); - } - - @Test - public void testExpansion() throws Exception { - ConsoleReader reader = new ConsoleReader(); - MemoryHistory history = new MemoryHistory(); - history.setMaxSize(3); - history.add("foo"); - history.add("dir"); - history.add("cd c:\\"); - history.add("mkdir monkey"); - reader.setHistory(history); - - assertEquals("echo a!", reader.expandEvents("echo a!")); - assertEquals("mkdir monkey ; echo a!", reader.expandEvents("!! ; echo a!")); - assertEquals("echo ! a", reader.expandEvents("echo ! a")); - assertEquals("echo !\ta", reader.expandEvents("echo !\ta")); - - assertEquals("mkdir barey", reader.expandEvents("^monk^bar^")); - assertEquals("mkdir barey", reader.expandEvents("^monk^bar")); - assertEquals("a^monk^bar", reader.expandEvents("a^monk^bar")); - - assertEquals("mkdir monkey", reader.expandEvents("!!")); - assertEquals("echo echo a", reader.expandEvents("echo !#a")); - - assertEquals("mkdir monkey", reader.expandEvents("!mk")); - try { - reader.expandEvents("!mz"); - } catch (IllegalArgumentException e) { - assertEquals("!mz: event not found", e.getMessage()); - } - - assertEquals("mkdir monkey", reader.expandEvents("!?mo")); - assertEquals("mkdir monkey", reader.expandEvents("!?mo?")); - - assertEquals("mkdir monkey", reader.expandEvents("!-1")); - assertEquals("cd c:\\", reader.expandEvents("!-2")); - assertEquals("cd c:\\", reader.expandEvents("!2")); - assertEquals("mkdir monkey", reader.expandEvents("!3")); - try { - reader.expandEvents("!20"); - } catch (IllegalArgumentException e) { - assertEquals("!20: event not found", e.getMessage()); - } - try { - reader.expandEvents("!-20"); - } catch (IllegalArgumentException e) { - assertEquals("!-20: event not found", e.getMessage()); - } - } -} diff --git a/src/jline/src/test/java/scala/tools/jline/console/ConsoleReaderTestSupport.java b/src/jline/src/test/java/scala/tools/jline/console/ConsoleReaderTestSupport.java deleted file mode 100644 index c19099f0b248..000000000000 --- a/src/jline/src/test/java/scala/tools/jline/console/ConsoleReaderTestSupport.java +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved. - * - * This software is distributable under the BSD license. 
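testExpansion above pins down the csh-style history expansion rules: `!!` repeats the last entry, `!mk` the most recent entry starting with a prefix, `!?mo` the most recent entry containing a substring, `!-2` counts back from the newest entry, and `^old^new^` rewrites the previous entry. A simplified Scala sketch of a few of those rules over a plain list of entries (this is not jline's implementation and it ignores event-number offsets):

{{{
object HistoryExpansion {
  /** Expand a csh-style event designator against history (newest entry last). */
  def expand(input: String, history: List[String]): String = {
    def notFound = throw new IllegalArgumentException(s"$input: event not found")
    def last     = history.lastOption.getOrElse(notFound)
    input match {
      case "!!" => last
      case s if s.startsWith("!?") =>                       // !?mo -> newest entry containing "mo"
        val sub = s.stripPrefix("!?").stripSuffix("?")
        history.reverse.find(_.contains(sub)).getOrElse(notFound)
      case s if s.startsWith("!-") =>                       // !-2 -> second newest entry
        val n = s.stripPrefix("!-").toInt
        if (n >= 1 && n <= history.size) history(history.size - n) else notFound
      case s if s.startsWith("!") && s.drop(1).forall(_.isDigit) => // !2 -> second oldest entry
        val n = s.drop(1).toInt
        if (n >= 1 && n <= history.size) history(n - 1) else notFound
      case s if s.startsWith("!") =>                        // !mk -> newest entry starting with "mk"
        history.reverse.find(_.startsWith(s.drop(1))).getOrElse(notFound)
      case s if s.startsWith("^") =>                        // ^old^new^ -> substitute in last entry
        s.split('^').filter(_.nonEmpty) match {
          case Array(oldS, newS) => last.replace(oldS, newS)
          case _                 => s
        }
      case other => other
    }
  }

  def main(args: Array[String]): Unit = {
    val h = List("foo", "dir", "cd c:\\", "mkdir monkey")
    println(expand("!!", h))         // mkdir monkey
    println(expand("!mk", h))        // mkdir monkey
    println(expand("!?mo?", h))      // mkdir monkey
    println(expand("!-2", h))        // cd c:\
    println(expand("^monk^bar^", h)) // mkdir barey
  }
}
}}}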
See the terms of the - * BSD license in the documentation provided with this software. - */ -package scala.tools.jline.console; - -import scala.tools.jline.UnixTerminal; -import org.junit.Before; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.OutputStreamWriter; -import java.io.PrintWriter; - -import static scala.tools.jline.UnixTerminal.UnixKey.ARROW_DOWN; -import static scala.tools.jline.UnixTerminal.UnixKey.ARROW_LEFT; -import static scala.tools.jline.UnixTerminal.UnixKey.ARROW_PREFIX; -import static scala.tools.jline.UnixTerminal.UnixKey.ARROW_RIGHT; -import static scala.tools.jline.UnixTerminal.UnixKey.ARROW_START; -import static scala.tools.jline.UnixTerminal.UnixKey.ARROW_UP; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; - -/** - * Provides support for console reader tests. - */ -public abstract class ConsoleReaderTestSupport -{ - protected ConsoleReader console; - - @Before - public void setUp() throws Exception { - console = new ConsoleReader(null, new PrintWriter(new OutputStreamWriter(new ByteArrayOutputStream())), new UnixTerminal()); - } - - protected void assertBuffer(final String expected, final Buffer buffer) throws IOException { - assertBuffer(expected, buffer, true); - } - - protected void assertBuffer(final String expected, final Buffer buffer, final boolean clear) throws IOException { - // clear current buffer, if any - if (clear) { - console.finishBuffer(); - console.getHistory().clear(); - } - - console.setInput(new ByteArrayInputStream(buffer.getBytes())); - - // run it through the reader - while (console.readLine((String) null) != null) { - // ignore - } - - assertEquals(expected, console.getCursorBuffer().toString()); - } - - private int getKeyForAction(final Operation key) { - return getKeyForAction(key.code); - } - - private int getKeyForAction(final short logicalAction) { - int action = console.getKeyForAction(logicalAction); - - if (action == -1) { - console.printBindings(); - fail("Keystroke for logical action " + logicalAction + " was not bound in the console"); - } - - return action; - } - - protected class Buffer - { - private final ByteArrayOutputStream out = new ByteArrayOutputStream(); - - public Buffer() { - // nothing - } - - public Buffer(final String str) { - append(str); - } - - public byte[] getBytes() { - return out.toByteArray(); - } - - public Buffer op(final short operation) { - return append(getKeyForAction(operation)); - } - - public Buffer op(final Operation op) { - return op(op.code); - } - - public Buffer ctrlA() { - return append(getKeyForAction(Operation.MOVE_TO_BEG)); - } - - public Buffer ctrlU() { - return append(getKeyForAction(Operation.KILL_LINE_PREV)); - } - - public Buffer tab() { - return append(getKeyForAction(Operation.COMPLETE)); - } - - public Buffer back() { - return append(getKeyForAction(Operation.DELETE_PREV_CHAR)); - } - - public Buffer left() { - return append(ARROW_START.code).append(ARROW_PREFIX.code).append(ARROW_LEFT.code); - } - - public Buffer right() { - return append(ARROW_START.code).append(ARROW_PREFIX.code).append(ARROW_RIGHT.code); - } - - public Buffer up() { - return append(ARROW_START.code).append(ARROW_PREFIX.code).append(ARROW_UP.code); - } - - public Buffer down() { - return append(ARROW_START.code).append(ARROW_PREFIX.code).append(ARROW_DOWN.code); - } - - public Buffer append(final String str) { - for (byte b : str.getBytes()) { - append(b); - } - return this; - } - - public Buffer 
append(final int i) { - out.write((byte) i); - return this; - } - } -} diff --git a/src/jline/src/test/java/scala/tools/jline/console/EditLineTest.java b/src/jline/src/test/java/scala/tools/jline/console/EditLineTest.java deleted file mode 100644 index 6f5d46121e17..000000000000 --- a/src/jline/src/test/java/scala/tools/jline/console/EditLineTest.java +++ /dev/null @@ -1,208 +0,0 @@ -/* - * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved. - * - * This software is distributable under the BSD license. See the terms of the - * BSD license in the documentation provided with this software. - */ -package scala.tools.jline.console; - -import org.junit.Test; - -import static scala.tools.jline.console.Operation.*; - -/** - * Tests various features of editing lines. - * - * @author Marc Prud'hommeaux - */ -public class EditLineTest - extends ConsoleReaderTestSupport -{ - @Test - public void testDeletePreviousWord() throws Exception { - Buffer b = new Buffer("This is a test"); - - assertBuffer("This is a ", b = b.op(DELETE_PREV_WORD)); - assertBuffer("This is ", b = b.op(DELETE_PREV_WORD)); - assertBuffer("This ", b = b.op(DELETE_PREV_WORD)); - assertBuffer("", b = b.op(DELETE_PREV_WORD)); - assertBuffer("", b = b.op(DELETE_PREV_WORD)); - assertBuffer("", b.op(DELETE_PREV_WORD)); - } - - @Test - public void testDeleteNextWord() throws Exception { - Buffer b = new Buffer("This is a test "); - - assertBuffer(" is a test ", b = b.op(MOVE_TO_BEG).op(DELETE_NEXT_WORD)); - assertBuffer(" a test ", b = b.op(DELETE_NEXT_WORD)); - assertBuffer(" test ", b = b.op(DELETE_NEXT_WORD)); - assertBuffer(" ", b = b.op(DELETE_NEXT_WORD)); - assertBuffer("", b = b.op(DELETE_NEXT_WORD)); - assertBuffer("", b.op(DELETE_NEXT_WORD)); - } - - @Test - public void testMoveToEnd() throws Exception { - Buffer b = new Buffer("This is a test"); - - assertBuffer("This is a XtestX", - new Buffer("This is a test").op(PREV_WORD) - .append('X') - .op(MOVE_TO_END) - .append('X')); - - assertBuffer("This is Xa testX", - new Buffer("This is a test").op(PREV_WORD) - .op(PREV_WORD) - .append('X') - .op(MOVE_TO_END) - .append('X')); - - assertBuffer("This Xis a testX", - new Buffer("This is a test").op(PREV_WORD) - .op(PREV_WORD) - .op(PREV_WORD) - .append('X') - .op(MOVE_TO_END) - .append('X')); - } - - @Test - public void testPreviousWord() throws Exception { - assertBuffer("This is a Xtest", - new Buffer("This is a test").op(PREV_WORD) - .append('X')); - assertBuffer("This is Xa test", - new Buffer("This is a test").op(PREV_WORD) - .op(PREV_WORD) - .append('X')); - assertBuffer("This Xis a test", - new Buffer("This is a test").op(PREV_WORD) - .op(PREV_WORD) - .op(PREV_WORD) - .append('X')); - assertBuffer("XThis is a test", - new Buffer("This is a test").op(PREV_WORD) - .op(PREV_WORD) - .op(PREV_WORD) - .op(PREV_WORD) - .append('X')); - assertBuffer("XThis is a test", - new Buffer("This is a test").op(PREV_WORD) - .op(PREV_WORD) - .op(PREV_WORD) - .op(PREV_WORD) - .op(PREV_WORD) - .append('X')); - assertBuffer("XThis is a test", - new Buffer("This is a test").op(PREV_WORD) - .op(PREV_WORD) - .op(PREV_WORD) - .op(PREV_WORD) - .op(PREV_WORD) - .op(PREV_WORD) - .append('X')); - } - - @Test - public void testNextWord() throws Exception { - assertBuffer("ThisX is a test", - new Buffer("This is a test").op(MOVE_TO_BEG) - .op(NEXT_WORD) - .append('X')); - assertBuffer("This isX a test", - new Buffer("This is a test").op(MOVE_TO_BEG) - .op(NEXT_WORD) - .op(NEXT_WORD) - .append('X')); - assertBuffer("This is aX test", - new 
Buffer("This is a test").op(MOVE_TO_BEG) - .op(NEXT_WORD) - .op(NEXT_WORD) - .op(NEXT_WORD) - .append('X')); - assertBuffer("This is a testX ", - new Buffer("This is a test ").op(MOVE_TO_BEG) - .op(NEXT_WORD) - .op(NEXT_WORD) - .op(NEXT_WORD) - .op(NEXT_WORD) - .append('X')); - } - - @Test - public void testLineStart() throws Exception { - assertBuffer("XThis is a test", - new Buffer("This is a test").ctrlA().append('X')); - assertBuffer("TXhis is a test", - new Buffer("This is a test").ctrlA().right().append('X')); - } - - @Test - public void testClearLine() throws Exception { - assertBuffer("", new Buffer("This is a test").ctrlU()); - assertBuffer("t", new Buffer("This is a test").left().ctrlU()); - assertBuffer("st", new Buffer("This is a test").left().left().ctrlU()); - } - - @Test - public void testRight() throws Exception { - Buffer b = new Buffer("This is a test"); - b = b.left().right().back(); - assertBuffer("This is a tes", b); - b = b.left().left().left().right().left().back(); - assertBuffer("This is ates", b); - b.append('X'); - assertBuffer("This is aXtes", b); - } - - @Test - public void testLeft() throws Exception { - Buffer b = new Buffer("This is a test"); - b = b.left().left().left(); - assertBuffer("This is a est", b = b.back()); - assertBuffer("This is aest", b = b.back()); - assertBuffer("This is est", b = b.back()); - assertBuffer("This isest", b = b.back()); - assertBuffer("This iest", b = b.back()); - assertBuffer("This est", b = b.back()); - assertBuffer("Thisest", b = b.back()); - assertBuffer("Thiest", b = b.back()); - assertBuffer("Thest", b = b.back()); - assertBuffer("Test", b = b.back()); - assertBuffer("est", b = b.back()); - assertBuffer("est", b = b.back()); - assertBuffer("est", b = b.back()); - assertBuffer("est", b = b.back()); - assertBuffer("est", b.back()); - } - - @Test - public void testBackspace() throws Exception { - Buffer b = new Buffer("This is a test"); - assertBuffer("This is a tes", b = b.back()); - assertBuffer("This is a te", b = b.back()); - assertBuffer("This is a t", b = b.back()); - assertBuffer("This is a ", b = b.back()); - assertBuffer("This is a", b = b.back()); - assertBuffer("This is ", b = b.back()); - assertBuffer("This is", b = b.back()); - assertBuffer("This i", b = b.back()); - assertBuffer("This ", b = b.back()); - assertBuffer("This", b = b.back()); - assertBuffer("Thi", b = b.back()); - assertBuffer("Th", b = b.back()); - assertBuffer("T", b = b.back()); - assertBuffer("", b = b.back()); - assertBuffer("", b = b.back()); - assertBuffer("", b = b.back()); - assertBuffer("", b = b.back()); - assertBuffer("", b.back()); - } - - @Test - public void testBuffer() throws Exception { - assertBuffer("This is a test", new Buffer("This is a test")); - } -} diff --git a/src/jline/src/test/java/scala/tools/jline/console/completer/ArgumentCompleterTest.java b/src/jline/src/test/java/scala/tools/jline/console/completer/ArgumentCompleterTest.java deleted file mode 100644 index 9e2a2ab031ee..000000000000 --- a/src/jline/src/test/java/scala/tools/jline/console/completer/ArgumentCompleterTest.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (C) 2010 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package scala.tools.jline.console.completer; - -import scala.tools.jline.console.ConsoleReaderTestSupport; -import scala.tools.jline.console.completer.ArgumentCompleter; -import scala.tools.jline.console.completer.StringsCompleter; -import org.junit.Test; - -/** - * Tests for {@link jline.console.completer.ArgumentCompleter}. - * - * @author Marc Prud'hommeaux - */ -public class ArgumentCompleterTest - extends ConsoleReaderTestSupport -{ - @Test - public void test1() throws Exception { - console.addCompleter(new ArgumentCompleter(new StringsCompleter("foo", "bar", "baz"))); - - assertBuffer("foo foo ", new Buffer("foo f").tab()); - assertBuffer("foo ba", new Buffer("foo b").tab()); - assertBuffer("foo ba", new Buffer("foo ba").tab()); - assertBuffer("foo baz ", new Buffer("foo baz").tab()); - - // test completion in the mid range - assertBuffer("foo baz", new Buffer("f baz").left().left().left().left().tab()); - assertBuffer("ba foo", new Buffer("b foo").left().left().left().left().tab()); - assertBuffer("foo ba baz", new Buffer("foo b baz").left().left().left().left().tab()); - assertBuffer("foo foo baz", new Buffer("foo f baz").left().left().left().left().tab()); - } -} \ No newline at end of file diff --git a/src/jline/src/test/java/scala/tools/jline/console/completer/NullCompleterTest.java b/src/jline/src/test/java/scala/tools/jline/console/completer/NullCompleterTest.java deleted file mode 100644 index 70a4c3b554b0..000000000000 --- a/src/jline/src/test/java/scala/tools/jline/console/completer/NullCompleterTest.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (C) 2010 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package scala.tools.jline.console.completer; - -import scala.tools.jline.console.ConsoleReaderTestSupport; -import scala.tools.jline.console.completer.NullCompleter; -import org.junit.Test; - -/** - * Tests for {@link NullCompleter}. 
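The completer tests in this block exercise plain prefix completion: a unique candidate is completed in full plus a trailing space, while an ambiguous input is only extended to the longest common prefix (`b` becomes `ba` when the candidates are `bar` and `baz`). A minimal Scala sketch of that rule, independent of the jline Completer API:

{{{
object PrefixCompletion {
  /** Longest common prefix of a non-empty sequence of strings. */
  private def commonPrefix(xs: Seq[String]): String =
    xs.reduce((a, b) => a.zip(b).takeWhile { case (x, y) => x == y }.map(_._1).mkString)

  /** Complete `input` against `candidates`; None if nothing matches. */
  def complete(input: String, candidates: Seq[String]): Option[String] =
    candidates.filter(_.startsWith(input)) match {
      case Seq()       => None
      case Seq(single) => Some(single + " ")        // unambiguous: complete fully
      case many        => Some(commonPrefix(many))  // ambiguous: extend to common prefix
    }

  def main(args: Array[String]): Unit = {
    val cs = Seq("foo", "bar", "baz")
    println(complete("f", cs))   // Some(foo )
    println(complete("b", cs))   // Some(ba)
    println(complete("baz", cs)) // Some(baz )
  }
}
}}}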
- * - * @author Jason Dillon - */ -public class NullCompleterTest - extends ConsoleReaderTestSupport -{ - @Test - public void test1() throws Exception { - console.addCompleter(NullCompleter.INSTANCE); - - assertBuffer("f", new Buffer("f").tab()); - assertBuffer("ba", new Buffer("ba").tab()); - assertBuffer("baz", new Buffer("baz").tab()); - } -} \ No newline at end of file diff --git a/src/jline/src/test/java/scala/tools/jline/console/completer/StringsCompleterTest.java b/src/jline/src/test/java/scala/tools/jline/console/completer/StringsCompleterTest.java deleted file mode 100644 index 518b88d03174..000000000000 --- a/src/jline/src/test/java/scala/tools/jline/console/completer/StringsCompleterTest.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (C) 2010 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package scala.tools.jline.console.completer; - -import scala.tools.jline.console.ConsoleReaderTestSupport; -import scala.tools.jline.console.completer.StringsCompleter; -import org.junit.Test; - -/** - * Tests for {@link jline.console.completer.StringsCompleter}. - * - * @author Marc Prud'hommeaux - */ -public class StringsCompleterTest - extends ConsoleReaderTestSupport -{ - @Test - public void test1() throws Exception { - console.addCompleter(new StringsCompleter("foo", "bar", "baz")); - - assertBuffer("foo ", new Buffer("f").tab()); - // single tab completes to unambiguous "ba" - assertBuffer("ba", new Buffer("b").tab()); - assertBuffer("ba", new Buffer("ba").tab()); - assertBuffer("baz ", new Buffer("baz").tab()); - } -} \ No newline at end of file diff --git a/src/jline/src/test/java/scala/tools/jline/console/history/HistoryTest.java b/src/jline/src/test/java/scala/tools/jline/console/history/HistoryTest.java deleted file mode 100644 index 0a987b2b2662..000000000000 --- a/src/jline/src/test/java/scala/tools/jline/console/history/HistoryTest.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved. - * - * This software is distributable under the BSD license. See the terms of the - * BSD license in the documentation provided with this software. - */ -package scala.tools.jline.console.history; - -import scala.tools.jline.console.ConsoleReaderTestSupport; -import org.junit.Test; - -import static scala.tools.jline.console.Operation.MOVE_TO_BEG; -import static scala.tools.jline.console.Operation.NEWLINE; -import static scala.tools.jline.console.Operation.NEXT_HISTORY; -import static scala.tools.jline.console.Operation.PREV_HISTORY; -import static scala.tools.jline.console.Operation.PREV_CHAR; - -/** - * Tests command history. - * - * @author Marc Prud'hommeaux - */ -public class HistoryTest - extends ConsoleReaderTestSupport -{ - @Test - public void testSingleHistory() throws Exception { - Buffer b = new Buffer(). - append("test line 1").op(NEWLINE). - append("test line 2").op(NEWLINE). - append("test line 3").op(NEWLINE). - append("test line 4").op(NEWLINE). 
- append("test line 5").op(NEWLINE). - append(""); - - assertBuffer("", b); - - assertBuffer("test line 5", b = b.op(PREV_HISTORY)); - assertBuffer("test line 5", b = b.op(PREV_CHAR)); - assertBuffer("test line 4", b = b.op(PREV_HISTORY)); - assertBuffer("test line 5", b = b.op(NEXT_HISTORY)); - assertBuffer("test line 4", b = b.op(PREV_HISTORY)); - assertBuffer("test line 3", b = b.op(PREV_HISTORY)); - assertBuffer("test line 2", b = b.op(PREV_HISTORY)); - assertBuffer("test line 1", b = b.op(PREV_HISTORY)); - - // beginning of history - assertBuffer("test line 1", b = b.op(PREV_HISTORY)); - assertBuffer("test line 1", b = b.op(PREV_HISTORY)); - assertBuffer("test line 1", b = b.op(PREV_HISTORY)); - assertBuffer("test line 1", b = b.op(PREV_HISTORY)); - - assertBuffer("test line 2", b = b.op(NEXT_HISTORY)); - assertBuffer("test line 3", b = b.op(NEXT_HISTORY)); - assertBuffer("test line 4", b = b.op(NEXT_HISTORY)); - assertBuffer("test line 5", b = b.op(NEXT_HISTORY)); - - // end of history - assertBuffer("", b = b.op(NEXT_HISTORY)); - assertBuffer("", b = b.op(NEXT_HISTORY)); - assertBuffer("", b = b.op(NEXT_HISTORY)); - - assertBuffer("test line 5", b = b.op(PREV_HISTORY)); - assertBuffer("test line 4", b = b.op(PREV_HISTORY)); - b = b.op(MOVE_TO_BEG).append("XXX").op(NEWLINE); - assertBuffer("XXXtest line 4", b = b.op(PREV_HISTORY)); - assertBuffer("test line 5", b = b.op(PREV_HISTORY)); - assertBuffer("test line 4", b = b.op(PREV_HISTORY)); - assertBuffer("test line 5", b = b.op(NEXT_HISTORY)); - assertBuffer("XXXtest line 4", b = b.op(NEXT_HISTORY)); - assertBuffer("", b = b.op(NEXT_HISTORY)); - - assertBuffer("XXXtest line 4", b = b.op(PREV_HISTORY)); - assertBuffer("XXXtest line 4", b = b.op(NEWLINE).op(PREV_HISTORY)); - assertBuffer("XXXtest line 4", b = b.op(NEWLINE).op(PREV_HISTORY)); - assertBuffer("XXXtest line 4", b = b.op(NEWLINE).op(PREV_HISTORY)); - assertBuffer("XXXtest line 4", b = b.op(NEWLINE).op(PREV_HISTORY)); - } -} diff --git a/src/jline/src/test/java/scala/tools/jline/console/history/MemoryHistoryTest.java b/src/jline/src/test/java/scala/tools/jline/console/history/MemoryHistoryTest.java deleted file mode 100644 index 91b81548c8c9..000000000000 --- a/src/jline/src/test/java/scala/tools/jline/console/history/MemoryHistoryTest.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright (C) 2010 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package scala.tools.jline.console.history; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import static junit.framework.Assert.*; - -/** - * Tests for {@link MemoryHistory}. 
- * - * @author Marc Prud'hommeaux - */ -public class MemoryHistoryTest -{ - private MemoryHistory history; - - @Before - public void setUp() { - history = new MemoryHistory(); - } - - @After - public void tearDown() { - history = null; - } - - @Test - public void testAdd() { - assertEquals(0, history.size()); - - history.add("test"); - - assertEquals(1, history.size()); - assertEquals("test", history.get(0)); - assertEquals(1, history.index()); - } - - private void assertHistoryContains(final int offset, final String... items) { - assertEquals(items.length, history.size()); - int i=0; - for (History.Entry entry : history) { - assertEquals(offset + i, entry.index()); - assertEquals(items[i++], entry.value()); - } - } - - @Test - public void testOffset() { - history.setMaxSize(5); - - assertEquals(0, history.size()); - assertEquals(0, history.index()); - - history.add("a"); - history.add("b"); - history.add("c"); - history.add("d"); - history.add("e"); - - assertEquals(5, history.size()); - assertEquals(5, history.index()); - assertHistoryContains(0, "a", "b", "c", "d", "e"); - - history.add("f"); - - assertEquals(5, history.size()); - assertEquals(6, history.index()); - - assertHistoryContains(1, "b", "c", "d", "e", "f"); - assertEquals("f", history.get(5)); - } - - @Test - public void testReplace() { - assertEquals(0, history.size()); - - history.add("a"); - history.add("b"); - history.replace("c"); - - assertHistoryContains(0, "a", "c"); - } -} \ No newline at end of file diff --git a/src/jline/src/test/java/scala/tools/jline/example/Example.java b/src/jline/src/test/java/scala/tools/jline/example/Example.java deleted file mode 100644 index a89a09c5c971..000000000000 --- a/src/jline/src/test/java/scala/tools/jline/example/Example.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright (c) 2002-2006, Marc Prud'hommeaux. All rights reserved. - * - * This software is distributable under the BSD license. See the terms of the - * BSD license in the documentation provided with this software. 
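testOffset documents the bounded-history bookkeeping that matters here: once maxSize entries exist, adding another drops the oldest entry while the running index keeps growing, so retained entries keep stable absolute indices. A tiny Scala sketch of that bookkeeping (not the MemoryHistory implementation; names are illustrative):

{{{
import scala.collection.mutable

class BoundedHistory(maxSize: Int) {
  private val items  = mutable.Queue.empty[String]
  private var offset = 0                             // absolute index of the oldest retained entry

  def add(item: String): Unit = {
    items.enqueue(item)
    if (items.size > maxSize) { items.dequeue(); offset += 1 }
  }

  def size: Int           = items.size
  def index: Int          = offset + items.size      // where the next entry would land
  def get(i: Int): String = items(i - offset)        // look up by absolute index
}

object BoundedHistoryDemo extends App {
  val h = new BoundedHistory(5)
  Seq("a", "b", "c", "d", "e", "f").foreach(h.add)
  println(h.size)   // 5
  println(h.index)  // 6
  println(h.get(5)) // f
}
}}}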
- */ -package scala.tools.jline.example; - -import scala.tools.jline.console.completer.*; -import scala.tools.jline.console.ConsoleReader; - -import java.io.*; -import java.util.*; - -public class Example -{ - public static void usage() { - System.out.println("Usage: java " + Example.class.getName() - + " [none/simple/files/dictionary [trigger mask]]"); - System.out.println(" none - no completors"); - System.out.println(" simple - a simple completor that comples " - + "\"foo\", \"bar\", and \"baz\""); - System.out - .println(" files - a completor that comples " + "file names"); - System.out.println(" classes - a completor that comples " - + "java class names"); - System.out - .println(" trigger - a special word which causes it to assume " - + "the next line is a password"); - System.out.println(" mask - is the character to print in place of " - + "the actual password character"); - System.out.println(" color - colored prompt and feedback"); - System.out.println("\n E.g - java Example simple su '*'\n" - + "will use the simple compleator with 'su' triggering\n" - + "the use of '*' as a password mask."); - } - - public static void main(String[] args) throws IOException { - Character mask = null; - String trigger = null; - boolean color = false; - - ConsoleReader reader = new ConsoleReader(); - - reader.setBellEnabled(false); - reader.setPrompt("prompt> "); - - if ((args == null) || (args.length == 0)) { - usage(); - - return; - } - - List completors = new LinkedList(); - - if (args.length > 0) { - if (args[0].equals("none")) { - } - else if (args[0].equals("files")) { - completors.add(new FileNameCompleter()); - } - else if (args[0].equals("simple")) { - completors.add(new StringsCompleter("foo", "bar", "baz")); - } - else if (args[0].equals("color")) { - color = true; - reader.setPrompt("\u001B[1mfoo\u001B[0m@bar\u001B[32m@baz\u001B[0m> "); - } - else { - usage(); - - return; - } - } - - if (args.length == 3) { - mask = args[2].charAt(0); - trigger = args[1]; - } - - for (Completer c : completors) { - reader.addCompleter(c); - } - - String line; - PrintWriter out = new PrintWriter( - reader.getTerminal().wrapOutIfNeeded(System.out)); - - while ((line = reader.readLine()) != null) { - if (color){ - out.println("\u001B[33m======>\u001B[0m\"" + line + "\""); - } else { - out.println("======>\"" + line + "\""); - } - out.flush(); - - // If we input the special word then we will mask - // the next line. - if ((trigger != null) && (line.compareTo(trigger) == 0)) { - line = reader.readLine("password> ", mask); - } - if (line.equalsIgnoreCase("quit") || line.equalsIgnoreCase("exit")) { - break; - } - } - } -} diff --git a/src/jline/src/test/java/scala/tools/jline/internal/TerminalLineSettingsTest.java b/src/jline/src/test/java/scala/tools/jline/internal/TerminalLineSettingsTest.java deleted file mode 100644 index 3af10887f144..000000000000 --- a/src/jline/src/test/java/scala/tools/jline/internal/TerminalLineSettingsTest.java +++ /dev/null @@ -1,146 +0,0 @@ -package scala.tools.jline.internal; - -import org.junit.Before; -import org.junit.Test; - -import static org.junit.Assert.assertEquals; - -/** - * Tests for the {@link TerminalLineSettings}. 
- * - * @author Jean-Baptiste Onofré - */ -public class TerminalLineSettingsTest -{ - private TerminalLineSettings settings; - - private final String linuxSttySample = "speed 38400 baud; rows 85; columns 244; line = 0;\n" + - "intr = ^C; quit = ^\\; erase = ^?; kill = ^U; eof = ^D; eol = M-^?; eol2 = M-^?; swtch = M-^?; start = ^Q; stop = ^S; susp = ^Z; rprnt = ^R; werase = ^W; lnext = ^V; flush = ^O; min = 1; time = 0;\n" + - "-parenb -parodd cs8 hupcl -cstopb cread -clocal -crtscts\n" + - "-ignbrk brkint -ignpar -parmrk -inpck -istrip -inlcr -igncr icrnl ixon -ixoff -iuclc ixany imaxbel iutf8\n" + - "opost -olcuc -ocrnl onlcr -onocr -onlret -ofill -ofdel nl0 cr0 tab0 bs0 vt0 ff0\n" + - "isig icanon iexten echo echoe echok -echonl -noflsh -xcase -tostop -echoprt echoctl echoke"; - - private final String solarisSttySample = "speed 38400 baud; \n" + - "rows = 85; columns = 244; ypixels = 0; xpixels = 0;\n" + - "csdata ?\n" + - "eucw 1:0:0:0, scrw 1:0:0:0\n" + - "intr = ^c; quit = ^\\; erase = ^?; kill = ^u;\n" + - "eof = ^d; eol = -^?; eol2 = -^?; swtch = ;\n" + - "start = ^q; stop = ^s; susp = ^z; dsusp = ^y;\n" + - "rprnt = ^r; flush = ^o; werase = ^w; lnext = ^v;\n" + - "-parenb -parodd cs8 -cstopb -hupcl cread -clocal -loblk -crtscts -crtsxoff -parext \n" + - "-ignbrk brkint -ignpar -parmrk -inpck -istrip -inlcr -igncr icrnl -iuclc \n" + - "ixon ixany -ixoff imaxbel \n" + - "isig icanon -xcase echo echoe echok -echonl -noflsh \n" + - "-tostop echoctl -echoprt echoke -defecho -flusho -pendin iexten \n" + - "opost -olcuc onlcr -ocrnl -onocr -onlret -ofill -ofdel tab3"; - - private final String aixSttySample = "speed 38400 baud; 85 rows; 244 columns;\n" + - "eucw 1:1:0:0, scrw 1:1:0:0:\n" + - "intr = ^C; quit = ^\\; erase = ^?; kill = ^U; eof = ^D; eol = \n" + - "eol2 = ; start = ^Q; stop = ^S; susp = ^Z; dsusp = ^Y; reprint = ^R\n" + - "discard = ^O; werase = ^W; lnext = ^V\n" + - "-parenb -parodd cs8 -cstopb -hupcl cread -clocal -parext \n" + - "-ignbrk brkint -ignpar -parmrk -inpck -istrip -inlcr -igncr icrnl -iuclc \n" + - "ixon ixany -ixoff imaxbel \n" + - "isig icanon -xcase echo echoe echok -echonl -noflsh \n" + - "-tostop echoctl -echoprt echoke -flusho -pending iexten \n" + - "opost -olcuc onlcr -ocrnl -onocr -onlret -ofill -ofdel tab3"; - - private final String macOsSttySample = "speed 9600 baud; 47 rows; 155 columns;\n" + - "lflags: icanon isig iexten echo echoe -echok echoke -echonl echoctl\n" + - "-echoprt -altwerase -noflsh -tostop -flusho pendin -nokerninfo\n" + - "-extproc\n" + - "iflags: -istrip icrnl -inlcr -igncr ixon -ixoff ixany imaxbel iutf8\n" + - "-ignbrk brkint -inpck -ignpar -parmrk\n" + - "oflags: opost onlcr -oxtabs -onocr -onlret\n" + - "cflags: cread cs8 -parenb -parodd hupcl -clocal -cstopb -crtscts -dsrflow\n" + - "-dtrflow -mdmbuf\n" + - "cchars: discard = ^O; dsusp = ^Y; eof = ^D; eol = ;\n" + - "eol2 = ; erase = ^?; intr = ^C; kill = ^U; lnext = ^V;\n" + - "min = 1; quit = ^\\; reprint = ^R; start = ^Q; status = ^T;\n" + - "stop = ^S; susp = ^Z; time = 0; werase = ^W;"; - - private final String netBsdSttySample = "speed 38400 baud; 85 rows; 244 columns;\n" + - "lflags: icanon isig iexten echo echoe echok echoke -echonl echoctl\n" + - " -echoprt -altwerase -noflsh -tostop -flusho pendin -nokerninfo\n" + - " -extproc\n" + - "iflags: -istrip icrnl -inlcr -igncr ixon -ixoff ixany imaxbel -ignbrk\n" + - " brkint -inpck -ignpar -parmrk\n" + - "oflags: opost onlcr -ocrnl oxtabs onocr onlret\n" + - "cflags: cread cs8 -parenb -parodd hupcl -clocal -cstopb -crtscts 
-mdmbuf\n" + - " -cdtrcts\n" + - "cchars: discard = ^O; dsusp = ^Y; eof = ^D; eol = ;\n" + - " eol2 = ; erase = ^?; intr = ^C; kill = ^U; lnext = ^V;\n" + - " min = 1; quit = ^\\; reprint = ^R; start = ^Q; status = ^T;\n" + - " stop = ^S; susp = ^Z; time = 0; werase = ^W;"; - - private final String freeBsdSttySample = "speed 9600 baud; 32 rows; 199 columns;\n" + - "lflags: icanon isig iexten echo echoe echok echoke -echonl echoctl\n" + - " -echoprt -altwerase -noflsh -tostop -flusho -pendin -nokerninfo\n" + - " -extproc\n" + - "iflags: -istrip icrnl -inlcr -igncr ixon -ixoff ixany imaxbel -ignbrk\n" + - " brkint -inpck -ignpar -parmrk\n" + - "oflags: opost onlcr -ocrnl tab0 -onocr -onlret\n" + - "cflags: cread cs8 -parenb -parodd hupcl -clocal -cstopb -crtscts -dsrflow\n" + - " -dtrflow -mdmbuf\n" + - "cchars: discard = ^O; dsusp = ^Y; eof = ^D; eol = ;\n" + - " eol2 = ; erase = ^?; erase2 = ^H; intr = ^C; kill = ^U;\n" + - " lnext = ^V; min = 1; quit = ^\\; reprint = ^R; start = ^Q;\n" + - " status = ^T; stop = ^S; susp = ^Z; time = 0; werase = ^W;"; - - @Before - public void setUp() throws Exception { - settings = new TerminalLineSettings(); - } - - @Test - public void testGetConfig() { - String config = settings.getConfig(); - System.out.println(config); - } - - @Test - public void testLinuxSttyParsing() { - assertEquals(0x7f, settings.getProperty("erase", linuxSttySample)); - assertEquals(244, settings.getProperty("columns", linuxSttySample)); - assertEquals(85, settings.getProperty("rows", linuxSttySample)); - } - - @Test - public void testSolarisSttyParsing() { - assertEquals(0x7f, settings.getProperty("erase", solarisSttySample)); - assertEquals(244, settings.getProperty("columns", solarisSttySample)); - assertEquals(85, settings.getProperty("rows", solarisSttySample)); - } - - @Test - public void testAixSttyParsing() { - assertEquals(0x7f, settings.getProperty("erase", aixSttySample)); - assertEquals(244, settings.getProperty("columns", aixSttySample)); - assertEquals(85, settings.getProperty("rows", aixSttySample)); - } - - @Test - public void testMacOsSttyParsing() { - assertEquals(0x7f, settings.getProperty("erase", macOsSttySample)); - assertEquals(155, settings.getProperty("columns", macOsSttySample)); - assertEquals(47, settings.getProperty("rows", macOsSttySample)); - } - - @Test - public void testNetBsdSttyParsing() { - assertEquals(0x7f, settings.getProperty("erase", netBsdSttySample)); - assertEquals(244, settings.getProperty("columns", netBsdSttySample)); - assertEquals(85, settings.getProperty("rows", netBsdSttySample)); - } - - @Test - public void testFreeBsdSttyParsing() { - assertEquals(0x7f, settings.getProperty("erase", freeBsdSttySample)); - assertEquals(199, settings.getProperty("columns", freeBsdSttySample)); - assertEquals(32, settings.getProperty("rows", freeBsdSttySample)); - } - -} \ No newline at end of file diff --git a/src/library/rootdoc.txt b/src/library/rootdoc.txt index 4795a47efe63..e84942b8c4bc 100644 --- a/src/library/rootdoc.txt +++ b/src/library/rootdoc.txt @@ -12,7 +12,7 @@ Notable packages include: - [[scala.collection.immutable `scala.collection.immutable`]] - Immutable, sequential data-structures such as [[scala.collection.immutable.Vector `Vector`]], [[scala.collection.immutable.List `List`]], [[scala.collection.immutable.Range `Range`]], [[scala.collection.immutable.HashMap `HashMap`]] or - [[scala.collection.immutable.HashSet `HasSet`]] + [[scala.collection.immutable.HashSet `HashSet`]] - [[scala.collection.mutable 
`scala.collection.mutable`]] - Mutable, sequential data-structures such as [[scala.collection.mutable.ArrayBuffer `ArrayBuffer`]], [[scala.collection.mutable.StringBuilder `StringBuilder`]], diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala index d4b9c17eab95..c4aa511cd75f 100644 --- a/src/library/scala/Enumeration.scala +++ b/src/library/scala/Enumeration.scala @@ -121,7 +121,8 @@ abstract class Enumeration (initial: Int) extends Serializable { * @throws NoSuchElementException if no `Value` with a matching * name is in this `Enumeration` */ - final def withName(s: String): Value = values.find(_.toString == s).get + final def withName(s: String): Value = values.find(_.toString == s).getOrElse( + throw new NoSuchElementException(s"No value found for '$s'")) /** Creates a fresh value, part of this enumeration. */ protected final def Value: Value = Value(nextId) @@ -239,6 +240,7 @@ abstract class Enumeration (initial: Int) extends Serializable { * * @param nnIds The set of ids of values (adjusted so that the lowest value does * not fall below zero), organized as a `BitSet`. + * @define Coll `collection.immutable.SortedSet` */ class ValueSet private[ValueSet] (private[this] var nnIds: immutable.BitSet) extends AbstractSet[Value] diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala index 905e925f578f..f134f5ce3d6a 100644 --- a/src/library/scala/Option.scala +++ b/src/library/scala/Option.scala @@ -94,6 +94,7 @@ object Option { * @define bfinfo an implicit value of class `CanBuildFrom` which determines the result class `That` from the current * representation type `Repr` and the new element type `B`. */ +@SerialVersionUID(-114498752079829388L) // value computed by serialver for 2.11.2, annotation added in 2.11.4 sealed abstract class Option[+A] extends Product with Serializable { self => @@ -107,7 +108,7 @@ sealed abstract class Option[+A] extends Product with Serializable { /** Returns the option's value. * @note The option must be nonEmpty. - * @throws Predef.NoSuchElementException if the option is empty. + * @throws java.util.NoSuchElementException if the option is empty. */ def get: A @@ -124,8 +125,8 @@ sealed abstract class Option[+A] extends Product with Serializable { * Although the use of null is discouraged, code written to use * $option must often interface with code that expects and returns nulls. * @example {{{ - * val initalText: Option[String] = getInitialText - * val textField = new JComponent(initalText.orNull,20) + * val initialText: Option[String] = getInitialText + * val textField = new JComponent(initialText.orNull,20) * }}} */ @inline final def orNull[A1 >: A](implicit ev: Null <:< A1): A1 = this getOrElse ev(null) @@ -210,6 +211,17 @@ sealed abstract class Option[+A] extends Product with Serializable { } /** Tests whether the option contains a given value as an element. + * + * @example {{{ + * // Returns true because Some instance contains string "something" which equals "something". + * Some("something") contains "something" + * + * // Returns false because "something" != "anything". + * Some("something") contains "anything" + * + * // Returns false when method called on None. + * None contains "anything" + * }}} * * @param elem the element to test. * @return `true` if the option has an element that is equal (as @@ -251,6 +263,17 @@ sealed abstract class Option[+A] extends Product with Serializable { * nonempty '''and''' `pf` is defined for that value. * Returns $none otherwise. 
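The withName change above replaces a bare Option.get with an explicit NoSuchElementException that names the missing value, so a failed lookup reports which name was not found instead of an opaque "None.get". Roughly, with an illustrative enumeration:

{{{
object WeekDay extends Enumeration {
  val Mon, Tue, Wed = Value
}

object WithNameDemo extends App {
  println(WeekDay.withName("Tue"))   // Tue
  try WeekDay.withName("Sun")
  catch {
    // after this change the message points at the offending name,
    // e.g. "No value found for 'Sun'", rather than "None.get"
    case e: NoSuchElementException => println(e.getMessage)
  }
}
}}}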
* + * @example {{{ + * // Returns Some(HTTP) because the partial function covers the case. + * Some("http") collect {case "http" => "HTTP"} + * + * // Returns None because the partial function doesn't cover the case. + * Some("ftp") collect {case "http" => "HTTP"} + * + * // Returns None because None is passed to the collect method. + * None collect {case value => value} + * }}} + * * @param pf the partial function. * @return the result of applying `pf` to this $option's * value (if possible), or $none. @@ -306,6 +329,7 @@ sealed abstract class Option[+A] extends Product with Serializable { * @author Martin Odersky * @version 1.0, 16/07/2003 */ +@SerialVersionUID(1234815782226070388L) // value computed by serialver for 2.11.2, annotation added in 2.11.4 final case class Some[+A](x: A) extends Option[A] { def isEmpty = false def get = x @@ -317,6 +341,7 @@ final case class Some[+A](x: A) extends Option[A] { * @author Martin Odersky * @version 1.0, 16/07/2003 */ +@SerialVersionUID(5066590221178148012L) // value computed by serialver for 2.11.2, annotation added in 2.11.4 case object None extends Option[Nothing] { def isEmpty = true def get = throw new NoSuchElementException("None.get") diff --git a/src/library/scala/PartialFunction.scala b/src/library/scala/PartialFunction.scala index 7f4a9dc45d70..98dd35d306e6 100644 --- a/src/library/scala/PartialFunction.scala +++ b/src/library/scala/PartialFunction.scala @@ -20,6 +20,11 @@ package scala * {{{ * val f: PartialFunction[Int, Any] = { case _ => 1/0 } * }}} + * + * It is the responsibility of the caller to call `isDefinedAt` before + * calling `apply`, because if `isDefinedAt` is false, it is not guaranteed + * `apply` will throw an exception to indicate an error condition. If an + * exception is not thrown, evaluation may result in an arbitrary value. 
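The caveat added to the PartialFunction Scaladoc is easy to trip over: outside its domain, apply may return an arbitrary value rather than throw. A short illustration, using applyOrElse to combine the domain test and the call in one step (the example function is made up):

{{{
object PartialFunctionCaveat extends App {
  // A partial function whose apply does not guard its own domain.
  val half: PartialFunction[Int, Int] = new PartialFunction[Int, Int] {
    def isDefinedAt(x: Int) = x % 2 == 0
    def apply(x: Int)       = x / 2      // no domain check: 3 / 2 just yields 1
  }

  println(half.isDefinedAt(3)) // false
  println(half(3))             // 1  -- an "arbitrary value", not an exception

  // applyOrElse performs the check and the call together, with a fallback.
  println(half.applyOrElse(3, (n: Int) => -1)) // -1
  println(half.applyOrElse(4, (n: Int) => -1)) // 2
}
}}}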
* * The main distinction between `PartialFunction` and [[scala.Function1]] is * that the user of a `PartialFunction` may choose to do something different @@ -156,10 +161,10 @@ trait PartialFunction[-A, +B] extends (A => B) { self => object PartialFunction { /** Composite function produced by `PartialFunction#orElse` method */ - private class OrElse[-A, +B] (f1: PartialFunction[A, B], f2: PartialFunction[A, B]) extends PartialFunction[A, B] { + private class OrElse[-A, +B] (f1: PartialFunction[A, B], f2: PartialFunction[A, B]) extends scala.runtime.AbstractPartialFunction[A, B] { def isDefinedAt(x: A) = f1.isDefinedAt(x) || f2.isDefinedAt(x) - def apply(x: A): B = f1.applyOrElse(x, f2) + override def apply(x: A): B = f1.applyOrElse(x, f2) override def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = { val z = f1.applyOrElse(x, checkFallback[B]) diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala index faeb1dcbe2bc..060ecbfead19 100644 --- a/src/library/scala/Predef.scala +++ b/src/library/scala/Predef.scala @@ -126,7 +126,7 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef { def optManifest[T](implicit m: OptManifest[T]) = m // Minor variations on identity functions - def identity[A](x: A): A = x // @see `conforms` for the implicit version + @inline def identity[A](x: A): A = x // @see `conforms` for the implicit version @inline def implicitly[T](implicit e: T) = e // for summoning implicit values from the nether world -- TODO: when dependent method types are on by default, give this result type `e.type`, so that inliner has better chance of knowing which method to inline in calls like `implicitly[MatchingStrategy[Option]].zero` @inline def locally[T](x: T): T = x // to communicate intent and avoid unmoored statements @@ -220,7 +220,7 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef { } /** `???` can be used for marking methods that remain to be implemented. - * @throws A `NotImplementedError` + * @throws NotImplementedError */ def ??? : Nothing = throw new NotImplementedError @@ -303,7 +303,7 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef { @inline implicit def augmentString(x: String): StringOps = new StringOps(x) @inline implicit def unaugmentString(x: StringOps): String = x.repr - // printing and reading ----------------------------------------------- + // printing ----------------------------------------------------------- def print(x: Any) = Console.print(x) def println() = Console.println() diff --git a/src/library/scala/Product.scala b/src/library/scala/Product.scala index 0798587772c4..9cd38ed14838 100644 --- a/src/library/scala/Product.scala +++ b/src/library/scala/Product.scala @@ -22,7 +22,7 @@ trait Product extends Any with Equals { * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 < n < k`. * * @param n the index of the element to return - * @throws `IndexOutOfBoundsException` + * @throws IndexOutOfBoundsException * @return the element `n` elements after the first element */ def productElement(n: Int): Any diff --git a/src/library/scala/StringContext.scala b/src/library/scala/StringContext.scala index cd928a2b61f9..e60fa2f29043 100644 --- a/src/library/scala/StringContext.scala +++ b/src/library/scala/StringContext.scala @@ -8,6 +8,9 @@ package scala +import java.lang.{ StringBuilder => JLSBuilder } +import scala.annotation.tailrec + /** This class provides the basic mechanism to do String Interpolation. 
* String Interpolation allows users * to embed variable references directly in *processed* string literals. @@ -35,13 +38,13 @@ package scala * To provide your own string interpolator, create an implicit class * which adds a method to `StringContext`. Here's an example: * {{{ - * implicit class JsonHelper(val sc: StringContext) extends AnyVal { + * implicit class JsonHelper(private val sc: StringContext) extends AnyVal { * def json(args: Any*): JSONObject = ... * } * val x: JSONObject = json"{ a: $a }" * }}} * - * Here the `JsonHelper` extenion class implicitly adds the `json` method to + * Here the `JsonHelper` extension class implicitly adds the `json` method to * `StringContext` which can be used for `json` string literals. * * @since 2.10.0 @@ -55,7 +58,7 @@ case class StringContext(parts: String*) { /** Checks that the length of the given argument `args` is one less than the number * of `parts` supplied to the enclosing `StringContext`. * @param `args` The arguments to be checked. - * @throws An `IllegalArgumentException` if this is not the case. + * @throws IllegalArgumentException if this is not the case. */ def checkLengths(args: Seq[Any]): Unit = if (parts.length != args.length + 1) @@ -82,10 +85,11 @@ case class StringContext(parts: String*) { * will print the string `1 + 1 = 2`. * * @param `args` The arguments to be inserted into the resulting string. - * @throws An `IllegalArgumentException` + * @throws IllegalArgumentException * if the number of `parts` in the enclosing `StringContext` does not exceed * the number of arguments `arg` by exactly 1. - * @throws A `StringContext.InvalidEscapeException` if a `parts` string contains a backslash (`\`) character + * @throws StringContext.InvalidEscapeException + * if a `parts` string contains a backslash (`\`) character * that does not start a valid escape sequence. */ def s(args: Any*): String = standardInterpolator(treatEscapes, args) @@ -101,16 +105,14 @@ case class StringContext(parts: String*) { * ''Note:'' Even when using the raw interpolator, Scala will preprocess unicode escapes. * For example: * {{{ - * scala> raw"\u005cu0025" + * scala> raw"\u005cu0023" * res0: String = # * }}} * * @param `args` The arguments to be inserted into the resulting string. - * @throws An `IllegalArgumentException` + * @throws IllegalArgumentException * if the number of `parts` in the enclosing `StringContext` does not exceed * the number of arguments `arg` by exactly 1. - * @throws A `StringContext.InvalidEscapeException` if a `parts` string contains a backslash (`\`) character - * that does not start a valid escape sequence. */ def raw(args: Any*): String = standardInterpolator(identity, args) @@ -118,7 +120,7 @@ case class StringContext(parts: String*) { checkLengths(args) val pi = parts.iterator val ai = args.iterator - val bldr = new java.lang.StringBuilder(process(pi.next())) + val bldr = new JLSBuilder(process(pi.next())) while (ai.hasNext) { bldr append ai.next bldr append process(pi.next()) @@ -143,10 +145,11 @@ case class StringContext(parts: String*) { * }}} * * @param `args` The arguments to be inserted into the resulting string. - * @throws An `IllegalArgumentException` + * @throws IllegalArgumentException * if the number of `parts` in the enclosing `StringContext` does not exceed * the number of arguments `arg` by exactly 1. 
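For the interpolators documented in this file, a small usage sketch (values are illustrative; the `f` result uses a locale-independent integer format):

{{{
val name = "world"
s"hello\n$name"     // escapes processed: "hello", a newline, then "world"
raw"hello\n$name"   // backslash kept: the 12 characters hello\nworld
f"item ${7}%03d"    // "item 007"
}}}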
- * @throws A `StringContext.InvalidEscapeException` if a `parts` string contains a backslash (`\`) character + * @throws StringContext.InvalidEscapeException + * if a `parts` string contains a backslash (`\`) character * that does not start a valid escape sequence. * * Note: The `f` method works by assembling a format string from all the `parts` strings and using @@ -162,7 +165,7 @@ case class StringContext(parts: String*) { */ // The implementation is hardwired to `scala.tools.reflect.MacroImplementations.macro_StringInterpolation_f` // Using the mechanism implemented in `scala.tools.reflect.FastTrack` - def f(args: Any*): String = macro ??? + def f[A >: Any](args: A*): String = macro ??? } object StringContext { @@ -172,8 +175,13 @@ object StringContext { * @param str The offending string * @param idx The index of the offending backslash character in `str`. */ - class InvalidEscapeException(str: String, @deprecatedName('idx) val index: Int) - extends IllegalArgumentException("invalid escape character at index "+index+" in \""+str+"\"") + class InvalidEscapeException(str: String, @deprecatedName('idx) val index: Int) extends IllegalArgumentException( + s"""invalid escape ${ + require(index >= 0 && index < str.length) + val ok = """[\b, \t, \n, \f, \r, \\, \", \']""" + if (index == str.length - 1) "at terminal" else s"'\\${str(index + 1)}' not one of $ok at" + } index $index in "$str". Use \\\\ for literal \\.""" + ) /** Expands standard Scala escape sequences in a string. * Escape sequences are: @@ -186,60 +194,60 @@ object StringContext { */ def treatEscapes(str: String): String = treatEscapes0(str, strict = false) + /** Treats escapes, but disallows octal escape sequences. */ def processEscapes(str: String): String = treatEscapes0(str, strict = true) private def treatEscapes0(str: String, strict: Boolean): String = { - lazy val bldr = new java.lang.StringBuilder val len = str.length - var start = 0 - var cur = 0 - var idx = 0 - def output(ch: Char) = { - bldr.append(str, start, cur) - bldr append ch - start = idx - } - while (idx < len) { - cur = idx - if (str(idx) == '\\') { - idx += 1 - if (idx >= len) throw new InvalidEscapeException(str, cur) - if ('0' <= str(idx) && str(idx) <= '7') { - if (strict) throw new InvalidEscapeException(str, cur) - val leadch = str(idx) - var oct = leadch - '0' - idx += 1 - if (idx < len && '0' <= str(idx) && str(idx) <= '7') { - oct = oct * 8 + str(idx) - '0' - idx += 1 - if (idx < len && leadch <= '3' && '0' <= str(idx) && str(idx) <= '7') { - oct = oct * 8 + str(idx) - '0' + // replace escapes with given first escape + def replace(first: Int): String = { + val b = new JLSBuilder + // append replacement starting at index `i`, with `next` backslash + @tailrec def loop(i: Int, next: Int): String = { + if (next >= 0) { + //require(str(next) == '\\') + if (next > i) b.append(str, i, next) + var idx = next + 1 + if (idx >= len) throw new InvalidEscapeException(str, next) + val c = str(idx) match { + case 'b' => '\b' + case 't' => '\t' + case 'n' => '\n' + case 'f' => '\f' + case 'r' => '\r' + case '"' => '"' + case '\'' => '\'' + case '\\' => '\\' + case o if '0' <= o && o <= '7' => + if (strict) throw new InvalidEscapeException(str, next) + val leadch = str(idx) + var oct = leadch - '0' idx += 1 - } + if (idx < len && '0' <= str(idx) && str(idx) <= '7') { + oct = oct * 8 + str(idx) - '0' + idx += 1 + if (idx < len && leadch <= '3' && '0' <= str(idx) && str(idx) <= '7') { + oct = oct * 8 + str(idx) - '0' + idx += 1 + } + } + idx -= 1 // retreat + 
oct.toChar + case _ => throw new InvalidEscapeException(str, next) } - output(oct.toChar) + idx += 1 // advance + b append c + loop(idx, str.indexOf('\\', idx)) } else { - val ch = str(idx) - idx += 1 - output { - ch match { - case 'b' => '\b' - case 't' => '\t' - case 'n' => '\n' - case 'f' => '\f' - case 'r' => '\r' - case '\"' => '\"' - case '\'' => '\'' - case '\\' => '\\' - case _ => throw new InvalidEscapeException(str, cur) - } - } + if (i < len) b.append(str, i, len) + b.toString } - } else { - idx += 1 } + loop(0, first) + } + str indexOf '\\' match { + case -1 => str + case i => replace(i) } - if (start == 0) str - else bldr.append(str, start, idx).toString } } diff --git a/src/library/scala/beans/BeanInfo.scala b/src/library/scala/beans/BeanInfo.scala index 799e93e71a87..d7f0a1618be7 100644 --- a/src/library/scala/beans/BeanInfo.scala +++ b/src/library/scala/beans/BeanInfo.scala @@ -17,4 +17,5 @@ package scala.beans * * @author Ross Judson (rjudson@managedobjects.com) */ +@deprecated(message = "the generation of BeanInfo classes is no longer supported", since = "2.12.0") class BeanInfo extends scala.annotation.Annotation diff --git a/src/library/scala/collection/GenMapLike.scala b/src/library/scala/collection/GenMapLike.scala index 4e7d35925106..bce974052247 100644 --- a/src/library/scala/collection/GenMapLike.scala +++ b/src/library/scala/collection/GenMapLike.scala @@ -102,7 +102,7 @@ trait GenMapLike[A, +B, +Repr] extends GenIterableLike[(A, B), Repr] with Equals */ def mapValues[C](f: B => C): GenMap[A, C] - /** Compares two maps structurally; i.e. checks if all mappings + /** Compares two maps structurally; i.e., checks if all mappings * contained in this map are also contained in the other map, * and vice versa. * diff --git a/src/library/scala/collection/GenSeqLike.scala b/src/library/scala/collection/GenSeqLike.scala index c3bad600721e..cf1de0c8e661 100644 --- a/src/library/scala/collection/GenSeqLike.scala +++ b/src/library/scala/collection/GenSeqLike.scala @@ -47,7 +47,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal * * @param idx The index to select. * @return the element of this $coll at index `idx`, where `0` indicates the first element. - * @throws `IndexOutOfBoundsException` if `idx` does not satisfy `0 <= idx < length`. + * @throws IndexOutOfBoundsException if `idx` does not satisfy `0 <= idx < length`. */ def apply(idx: Int): A @@ -397,7 +397,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal * @inheritdoc * * Another way to express this - * is that `xs union ys` computes the order-presevring multi-set union of `xs` and `ys`. + * is that `xs union ys` computes the order-preserving multi-set union of `xs` and `ys`. * `union` is hence a counter-part of `diff` and `intersect` which also work on multi-sets. * * $willNotTerminateInf diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala index ca098e57b9af..8b9d3e7a176d 100644 --- a/src/library/scala/collection/GenTraversableLike.scala +++ b/src/library/scala/collection/GenTraversableLike.scala @@ -63,7 +63,7 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with /** Selects the first element of this $coll. * $orderDependent * @return the first element of this $coll. - * @throws `NoSuchElementException` if the $coll is empty. + * @throws NoSuchElementException if the $coll is empty. 
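The escape-processing rewrite in `StringContext` above can be exercised directly through the public helpers; a small sketch of the documented behaviour (octal escapes remain accepted by `treatEscapes` but are rejected by the strict `processEscapes`):

{{{
import scala.StringContext.{treatEscapes, processEscapes}

treatEscapes("tab:\\there")   // "tab:" + TAB + "here" — \t is expanded
treatEscapes("\\101\\102")    // "AB" — octal escapes still accepted here
// processEscapes("\\101")    // would throw InvalidEscapeException (octal disallowed)
// treatEscapes("oops\\q")    // would throw InvalidEscapeException ('q' is not a valid escape)
}}}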
*/ def head: A @@ -83,7 +83,7 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with * $orderDependent * @return a $coll consisting of all elements of this $coll * except the first one. - * @throws `UnsupportedOperationException` if the $coll is empty. + * @throws UnsupportedOperationException if the $coll is empty. */ def tail: Repr @@ -105,7 +105,7 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with * $orderDependent * @return a $coll consisting of all elements of this $coll * except the last one. - * @throws `UnsupportedOperationException` if the $coll is empty. + * @throws UnsupportedOperationException if the $coll is empty. */ def init: Repr diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala index 01d179aeb6f0..8c7c754af869 100644 --- a/src/library/scala/collection/GenTraversableOnce.scala +++ b/src/library/scala/collection/GenTraversableOnce.scala @@ -268,7 +268,7 @@ trait GenTraversableOnce[+A] extends Any { * op(x_1, op(x_2, ..., op(x_{n-1}, x_n)...)) * }}} * where `x,,1,,, ..., x,,n,,` are the elements of this $coll. - * @throws `UnsupportedOperationException` if this $coll is empty. + * @throws UnsupportedOperationException if this $coll is empty. */ def reduceRight[B >: A](op: (A, B) => B): B @@ -506,7 +506,6 @@ trait GenTraversableOnce[+A] extends Any { def toIndexedSeq: immutable.IndexedSeq[A] /** Converts this $coll to a stream. - * $willNotTerminateInf * @return a stream containing all elements of this $coll. */ def toStream: Stream[A] diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala index ade04e4de884..a7e06b4d1ac0 100755 --- a/src/library/scala/collection/IndexedSeqOptimized.scala +++ b/src/library/scala/collection/IndexedSeqOptimized.scala @@ -141,10 +141,10 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] { def drop(n: Int): Repr = slice(n, length) override /*IterableLike*/ - def takeRight(n: Int): Repr = slice(length - n, length) + def takeRight(n: Int): Repr = slice(length - math.max(n, 0), length) override /*IterableLike*/ - def dropRight(n: Int): Repr = slice(0, length - n) + def dropRight(n: Int): Repr = slice(0, length - math.max(n, 0)) override /*TraversableLike*/ def splitAt(n: Int): (Repr, Repr) = (take(n), drop(n)) @@ -206,7 +206,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] { override /*SeqLike*/ def lastIndexWhere(p: A => Boolean, end: Int): Int = { - var i = end + var i = math.min(end, length - 1) while (i >= 0 && !p(this(i))) i -= 1 i } diff --git a/src/library/scala/collection/Iterable.scala b/src/library/scala/collection/Iterable.scala index a5ab8efd5c2d..afbffd36c694 100644 --- a/src/library/scala/collection/Iterable.scala +++ b/src/library/scala/collection/Iterable.scala @@ -38,7 +38,7 @@ trait Iterable[+A] extends Traversable[A] } /** $factoryInfo - * The current default implementation of a $Coll is a `Vector`. + * The current default implementation of a $Coll is a `List`. 
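The bound-clamping changes to `IndexedSeqOptimized` above are easiest to see on a collection that mixes in the trait, e.g. `ArrayBuffer`; a small sketch of the intended behaviour:

{{{
import scala.collection.mutable.ArrayBuffer

val xs = ArrayBuffer(1, 2, 3)
xs.takeRight(-1)              // ArrayBuffer() — a negative count now clamps to 0
xs.dropRight(-1)              // ArrayBuffer(1, 2, 3)
xs.lastIndexWhere(_ > 0, 10)  // 2 — the end index is clamped to the last valid position
}}}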
* @define coll iterable collection * @define Coll `Iterable` */ diff --git a/src/library/scala/collection/IterableProxy.scala b/src/library/scala/collection/IterableProxy.scala index 3a0e2ab1158e..97aa830c5a0b 100644 --- a/src/library/scala/collection/IterableProxy.scala +++ b/src/library/scala/collection/IterableProxy.scala @@ -16,4 +16,5 @@ package collection * @version 2.8 * @since 2.8 */ +@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.3") trait IterableProxy[+A] extends Iterable[A] with IterableProxyLike[A, Iterable[A]] diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala index 668190f700ce..c254ed748008 100644 --- a/src/library/scala/collection/IterableViewLike.scala +++ b/src/library/scala/collection/IterableViewLike.scala @@ -69,6 +69,10 @@ trait IterableViewLike[+A, trait Appended[B >: A] extends super.Appended[B] with Transformed[B] { def iterator = self.iterator ++ rest } + + trait Prepended[B >: A] extends super.Prepended[B] with Transformed[B] { + def iterator = fst.toIterator ++ self + } trait Filtered extends super.Filtered with Transformed[A] { def iterator = self.iterator filter pred @@ -110,6 +114,7 @@ trait IterableViewLike[+A, } with AbstractTransformed[(A1, B)] with ZippedAll[A1, B] protected override def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B] protected override def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B] + protected override def newPrepended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val fst = that } with AbstractTransformed[B] with Prepended[B] protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B] protected override def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B] protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered @@ -150,10 +155,10 @@ trait IterableViewLike[+A, sliding(size, 1) // we could inherit this, but that implies knowledge of the way the super class is implemented. override def dropRight(n: Int): This = - take(thisSeq.length - n) + take(thisSeq.length - math.max(n, 0)) override def takeRight(n: Int): This = - drop(thisSeq.length - n) + drop(thisSeq.length - math.max(n, 0)) override def stringPrefix = "IterableView" } diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index 01a0aa3b514f..34a025e5b86a 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -165,11 +165,11 @@ object Iterator { /** Avoid stack overflows when applying ++ to lots of iterators by * flattening the unevaluated iterators out into a vector of closures. 
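A sketch of the usage pattern the comment above is protecting: many chained `++` calls on iterators, which `ConcatIterator` flattens into a queue of closures instead of deeply nested iterators.

{{{
// Left-folding ++ over many iterators; traversal should not overflow the stack.
val chained = (1 to 10000).foldLeft(Iterator.empty: Iterator[Int])((it, i) => it ++ Iterator(i))
chained.sum  // 50005000
}}}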
*/ - private[scala] final class ConcatIterator[+A](initial: Vector[() => Iterator[A]]) extends Iterator[A] { - // current set to null when all iterators are exhausted - private[this] var current: Iterator[A] = Iterator.empty + private[scala] final class ConcatIterator[+A](private[this] var current: Iterator[A], initial: Vector[() => Iterator[A]]) extends Iterator[A] { + @deprecated def this(initial: Vector[() => Iterator[A]]) = this(Iterator.empty, initial) // for binary compatibility private[this] var queue: Vector[() => Iterator[A]] = initial // Advance current to the next non-empty iterator + // current is set to null when all iterators are exhausted private[this] def advance(): Boolean = { if (queue.isEmpty) { current = null @@ -182,19 +182,63 @@ object Iterator { } } def hasNext = (current ne null) && (current.hasNext || advance()) - def next() = if (hasNext) current.next else Iterator.empty.next + def next() = if (hasNext) current.next() else Iterator.empty.next() override def ++[B >: A](that: => GenTraversableOnce[B]): Iterator[B] = - new ConcatIterator(queue :+ (() => that.toIterator)) + new ConcatIterator(current, queue :+ (() => that.toIterator)) } private[scala] final class JoinIterator[+A](lhs: Iterator[A], that: => GenTraversableOnce[A]) extends Iterator[A] { private[this] lazy val rhs: Iterator[A] = that.toIterator def hasNext = lhs.hasNext || rhs.hasNext - def next = if (lhs.hasNext) lhs.next else rhs.next + def next() = if (lhs.hasNext) lhs.next() else rhs.next() override def ++[B >: A](that: => GenTraversableOnce[B]) = - new ConcatIterator(Vector(() => this, () => that.toIterator)) + new ConcatIterator(this, Vector(() => that.toIterator)) + } + + /** Creates a delegating iterator capped by a limit count. Negative limit means unbounded. + * Lazily skip to start on first evaluation. Avoids daisy-chained iterators due to slicing. + */ + private[scala] final class SliceIterator[A](val underlying: Iterator[A], start: Int, limit: Int) extends AbstractIterator[A] { + private var remaining = limit + private var dropping = start + @inline private def unbounded = remaining < 0 + private def skip(): Unit = + while (dropping > 0) { + if (underlying.hasNext) { + underlying.next() + dropping -= 1 + } else + dropping = 0 + } + def hasNext = { skip(); remaining != 0 && underlying.hasNext } + def next() = { + skip() + if (remaining > 0) { + remaining -= 1 + underlying.next() + } + else if (unbounded) underlying.next() + else empty.next() + } + override protected def sliceIterator(from: Int, until: Int): Iterator[A] = { + val lo = from max 0 + def adjustedBound = + if (unbounded) -1 + else 0 max (remaining - lo) + val rest = + if (until < 0) adjustedBound // respect current bound, if any + else if (until <= lo) 0 // empty + else if (unbounded) until - lo // now finite + else adjustedBound min (until - lo) // keep lesser bound + if (rest == 0) empty + else { + dropping += lo + remaining = rest + this + } + } } } @@ -307,11 +351,11 @@ trait Iterator[+A] extends TraversableOnce[A] { /** Selects first ''n'' values of this iterator. * * @param n the number of values to take - * @return an iterator producing only of the first `n` values of this iterator, or else the + * @return an iterator producing only the first `n` values of this iterator, or else the * whole iterator, if it produces fewer than `n` values. 
* @note Reuse: $consumesAndProducesIterator */ - def take(n: Int): Iterator[A] = slice(0, n) + def take(n: Int): Iterator[A] = sliceIterator(0, n max 0) /** Advances this iterator past the first ''n'' elements, or the length of the iterator, whichever is smaller. * @@ -320,34 +364,36 @@ trait Iterator[+A] extends TraversableOnce[A] { * it omits the first `n` values. * @note Reuse: $consumesAndProducesIterator */ - def drop(n: Int): Iterator[A] = slice(n, Int.MaxValue) + def drop(n: Int): Iterator[A] = { + var j = 0 + while (j < n && hasNext) { + next() + j += 1 + } + this + } /** Creates an iterator returning an interval of the values produced by this iterator. * * @param from the index of the first element in this iterator which forms part of the slice. - * @param until the index of the first element following the slice. + * If negative, the slice starts at zero. + * @param until the index of the first element following the slice. If negative, the slice is empty. * @return an iterator which advances this iterator past the first `from` elements using `drop`, * and then takes `until - from` elements, using `take`. * @note Reuse: $consumesAndProducesIterator */ - def slice(from: Int, until: Int): Iterator[A] = { + def slice(from: Int, until: Int): Iterator[A] = sliceIterator(from, until max 0) + + /** Creates an optionally bounded slice, unbounded if `until` is negative. */ + protected def sliceIterator(from: Int, until: Int): Iterator[A] = { val lo = from max 0 - var toDrop = lo - while (toDrop > 0 && self.hasNext) { - self.next() - toDrop -= 1 - } + val rest = + if (until < 0) -1 // unbounded + else if (until <= lo) 0 // empty + else until - lo // finite - new AbstractIterator[A] { - private var remaining = until - lo - def hasNext = remaining > 0 && self.hasNext - def next(): A = - if (remaining > 0) { - remaining -= 1 - self.next() - } - else empty.next() - } + if (rest == 0) empty + else new Iterator.SliceIterator(this, lo, rest) } /** Creates a new iterator that maps all produced values of this iterator @@ -472,7 +518,7 @@ trait Iterator[+A] extends TraversableOnce[A] { } } - /** Produces a collection containing cummulative results of applying the + /** Produces a collection containing cumulative results of applying the * operator going left to right. * * $willNotTerminateInf @@ -495,8 +541,8 @@ trait Iterator[+A] extends TraversableOnce[A] { } else Iterator.empty.next() } - /** Produces a collection containing cummulative results of applying the operator going right to left. - * The head of the collection is the last cummulative result. + /** Produces a collection containing cumulative results of applying the operator going right to left. + * The head of the collection is the last cumulative result. * * $willNotTerminateInf * $orderDependent @@ -922,11 +968,16 @@ trait Iterator[+A] extends TraversableOnce[A] { /** For reasons which remain to be determined, calling * self.take(n).toSeq cause an infinite loop, so we have * a slight variation on take for local usage. + * NB: self.take.toSeq is slice.toStream, lazily built on self, + * so a subsequent self.hasNext would not test self after the + * group was consumed. 
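A small sketch of the slicing semantics introduced above with `SliceIterator` (negative bounds as documented):

{{{
Iterator(1, 2, 3, 4, 5).slice(1, 3).toList   // List(2, 3)
Iterator(1, 2, 3).slice(-2, 2).toList        // List(1, 2) — a negative `from` starts at zero
Iterator(1, 2, 3).slice(2, -1).toList        // List() — a negative `until` gives an empty slice
Iterator(1, 2, 3).take(-1).toList            // List() — a negative count is treated as 0
}}}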
*/ private def takeDestructively(size: Int): Seq[A] = { val buf = new ArrayBuffer[A] var i = 0 - while (self.hasNext && i < size) { + // The order of terms in the following condition is important + // here as self.hasNext could be blocking + while (i < size && self.hasNext) { buf += self.next i += 1 } @@ -943,12 +994,10 @@ trait Iterator[+A] extends TraversableOnce[A] { // so the rest of the code can be oblivious val xs: Seq[B] = { val res = takeDestructively(count) - // extra checks so we don't calculate length unless there's reason - if (pad.isDefined && !self.hasNext) { - val shortBy = count - res.length - if (shortBy > 0) res ++ padding(shortBy) else res - } - else res + // was: extra checks so we don't calculate length unless there's reason + // but since we took the group eagerly, just use the fast length + val shortBy = count - res.length + if (shortBy > 0 && pad.isDefined) res ++ padding(shortBy) else res } lazy val len = xs.length lazy val incomplete = len < count @@ -1085,6 +1134,9 @@ trait Iterator[+A] extends TraversableOnce[A] { } /** Returns this iterator with patched values. + * Patching at negative indices is the same as patching starting at 0. + * Patching at indices at or larger than the length of the original iterator appends the patch to the end. + * If more values are replaced than actually exist, the excess is ignored. * * @param from The start index from which to patch * @param patchElems The iterator of patch values @@ -1093,18 +1145,33 @@ trait Iterator[+A] extends TraversableOnce[A] { */ def patch[B >: A](from: Int, patchElems: Iterator[B], replaced: Int): Iterator[B] = new AbstractIterator[B] { private var origElems = self - private var i = 0 - def hasNext: Boolean = - if (i < from) origElems.hasNext - else patchElems.hasNext || origElems.hasNext + private var i = (if (from > 0) from else 0) // Counts down, switch to patch on 0, -1 means use patch first + def hasNext: Boolean = { + if (i == 0) { + origElems = origElems drop replaced + i = -1 + } + origElems.hasNext || patchElems.hasNext + } def next(): B = { - // We have to do this *first* just in case from = 0. 
- if (i == from) origElems = origElems drop replaced - val result: B = - if (i < from || !patchElems.hasNext) origElems.next() - else patchElems.next() - i += 1 - result + if (i == 0) { + origElems = origElems drop replaced + i = -1 + } + if (i < 0) { + if (patchElems.hasNext) patchElems.next() + else origElems.next() + } + else { + if (origElems.hasNext) { + i -= 1 + origElems.next() + } + else { + i = -1 + patchElems.next() + } + } } } @@ -1127,9 +1194,8 @@ trait Iterator[+A] extends TraversableOnce[A] { * $willNotTerminateInf */ def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit = { - require(start >= 0 && (start < xs.length || xs.length == 0), s"start $start out of range ${xs.length}") var i = start - val end = start + math.min(len, xs.length - start) + val end = start + math.min(len, xs.length - start) while (i < end && hasNext) { xs(i) = next() i += 1 diff --git a/src/library/scala/collection/JavaConverters.scala b/src/library/scala/collection/JavaConverters.scala index a4fa58b13c8f..875f6e1c023d 100755 --- a/src/library/scala/collection/JavaConverters.scala +++ b/src/library/scala/collection/JavaConverters.scala @@ -37,8 +37,8 @@ import convert._ * val sl2 : scala.collection.mutable.Buffer[Int] = jl.asScala * assert(sl eq sl2) * }}} - * The following conversions also are supported, but the - * direction Scala to Java is done my a more specifically named method: + * The following conversions are also supported, but the + * direction from Scala to Java is done by the more specifically named methods: * `asJavaCollection`, `asJavaEnumeration`, `asJavaDictionary`. * * - `scala.collection.Iterable` <=> `java.util.Collection` diff --git a/src/library/scala/collection/LinearSeq.scala b/src/library/scala/collection/LinearSeq.scala index 1e4975a0a7a3..5a7bb5891e07 100644 --- a/src/library/scala/collection/LinearSeq.scala +++ b/src/library/scala/collection/LinearSeq.scala @@ -15,7 +15,14 @@ import generic._ import mutable.Builder /** A base trait for linear sequences. + * * $linearSeqInfo + * + * @define linearSeqInfo + * Linear sequences have reasonably efficient `head`, `tail`, and `isEmpty` methods. + * If these methods provide the fastest way to traverse the collection, a + * collection `Coll` that extends this trait should also extend + * `LinearSeqOptimized[A, Coll[A]]`. */ trait LinearSeq[+A] extends Seq[A] with GenericTraversableTemplate[A, LinearSeq] @@ -25,7 +32,7 @@ trait LinearSeq[+A] extends Seq[A] } /** $factoryInfo - * The current default implementation of a $Coll is a `Vector`. + * The current default implementation of a $Coll is a `List`. * @define coll linear sequence * @define Coll `LinearSeq` */ diff --git a/src/library/scala/collection/LinearSeqLike.scala b/src/library/scala/collection/LinearSeqLike.scala index ff7985bf0df1..96e2135fd1ec 100644 --- a/src/library/scala/collection/LinearSeqLike.scala +++ b/src/library/scala/collection/LinearSeqLike.scala @@ -14,22 +14,10 @@ import scala.annotation.tailrec /** A template trait for linear sequences of type `LinearSeq[A]`. * - * $linearSeqInfo - * - * This trait just implements `iterator` in terms of `isEmpty, ``head`, and `tail`. - * However, see `LinearSeqOptimized` for an implementation trait that overrides operations + * This trait just implements `iterator` and `corresponds` in terms of `isEmpty, ``head`, and `tail`. + * However, see `LinearSeqOptimized` for an implementation trait that overrides many more operations * to make them run faster under the assumption of fast linear access with `head` and `tail`. 
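The clamping behaviour documented for the rewritten `Iterator.patch` above, in a quick sketch:

{{{
Iterator(1, 2, 3).patch(1, Iterator(9), 1).toList    // List(1, 9, 3)
Iterator(1, 2, 3).patch(-5, Iterator(9), 1).toList   // List(9, 2, 3) — a negative `from` patches at 0
Iterator(1, 2, 3).patch(10, Iterator(9), 1).toList   // List(1, 2, 3, 9) — past the end, the patch is appended
}}}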
* - * @define linearSeqInfo - * Linear sequences are defined in terms of three abstract methods, which are assumed - * to have efficient implementations. These are: - * {{{ - * def isEmpty: Boolean - * def head: A - * def tail: Repr - * }}} - * Here, `A` is the type of the sequence elements and `Repr` is the type of the sequence itself. - * * Linear sequences do not add any new methods to `Seq`, but promise efficient implementations * of linear access patterns. * @author Martin Odersky @@ -58,12 +46,18 @@ trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]] extends SeqLike[A, Repr val result = these.head; these = these.tail; result } else Iterator.empty.next() - /** Have to clear `these` so the iterator is exhausted like - * it would be without the optimization. - */ override def toList: List[A] = { + /* Have to clear `these` so the iterator is exhausted like + * it would be without the optimization. + * + * Calling "newBuilder.result()" in toList method + * prevents original seq from garbage collection, + * so we use these.take(0) here. + * + * Check SI-8924 for details + */ val xs = these.toList - these = newBuilder.result() + these = these.take(0) xs } } diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala index 8635b090b96e..9c336e8e3171 100755 --- a/src/library/scala/collection/LinearSeqOptimized.scala +++ b/src/library/scala/collection/LinearSeqOptimized.scala @@ -13,10 +13,24 @@ import mutable.ListBuffer import immutable.List import scala.annotation.tailrec -/** A template trait for linear sequences of type `LinearSeq[A]` which optimizes - * the implementation of several methods under the assumption of fast linear access. +/** A template trait for linear sequences of type `LinearSeq[A]` which optimizes + * the implementation of various methods under the assumption of fast linear access. + * + * $linearSeqOptim + * + * @define linearSeqOptim + * Linear-optimized sequences implement most operations in in terms of three methods, + * which are assumed to have efficient implementations. These are: + * {{{ + * def isEmpty: Boolean + * def head: A + * def tail: Repr + * }}} + * Here, `A` is the type of the sequence elements and `Repr` is the type of the sequence itself. + * Note that default implementations are provided via inheritance, but these + * should be overridden for performance. + * * - * $linearSeqInfo */ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends LinearSeqLike[A, Repr] { self: Repr => @@ -30,7 +44,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea * * $willNotTerminateInf * - * Note: the execution of `length` may take time proportial to the length of the sequence. + * Note: the execution of `length` may take time proportional to the length of the sequence. */ def length: Int = { var these = self @@ -43,8 +57,8 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea } /** Selects an element by its index in the $coll. - * Note: the execution of `apply` may take time proportial to the index value. - * @throws `IndexOutOfBoundsException` if `idx` does not satisfy `0 <= idx < length`. + * Note: the execution of `apply` may take time proportional to the index value. + * @throws IndexOutOfBoundsException if `idx` does not satisfy `0 <= idx < length`. 
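As the notes above point out, `length` and `apply` on a linear sequence walk the list; a brief sketch of cheaper alternatives where only a bound is needed:

{{{
val xs = List.range(0, 100000)
xs.length                // O(n): traverses the entire list
xs.isEmpty               // O(1)
xs.lengthCompare(3) > 0  // true — stops after a few steps rather than a full traversal
}}}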
*/ def apply(n: Int): A = { val rest = drop(n) @@ -235,13 +249,16 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea override /*IterableLike*/ def sameElements[B >: A](that: GenIterable[B]): Boolean = that match { case that1: LinearSeq[_] => - var these = this - var those = that1 - while (!these.isEmpty && !those.isEmpty && these.head == those.head) { - these = these.tail - those = those.tail + // Probably immutable, so check reference identity first (it's quick anyway) + (this eq that1) || { + var these = this + var those = that1 + while (!these.isEmpty && !those.isEmpty && these.head == those.head) { + these = these.tail + those = those.tail + } + these.isEmpty && those.isEmpty } - these.isEmpty && those.isEmpty case _ => super.sameElements(that) } diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala index 5ec7d5c6155f..b474abc12a72 100644 --- a/src/library/scala/collection/MapLike.scala +++ b/src/library/scala/collection/MapLike.scala @@ -222,7 +222,7 @@ self => * but it might be overridden in subclasses. * * @param key the given key value for which a binding is missing. - * @throws `NoSuchElementException` + * @throws NoSuchElementException */ def default(key: A): B = throw new NoSuchElementException("key not found: " + key) @@ -230,11 +230,15 @@ self => protected class FilteredKeys(p: A => Boolean) extends AbstractMap[A, B] with DefaultMap[A, B] { override def foreach[C](f: ((A, B)) => C): Unit = for (kv <- self) if (p(kv._1)) f(kv) def iterator = self.iterator.filter(kv => p(kv._1)) - override def contains(key: A) = self.contains(key) && p(key) + override def contains(key: A) = p(key) && self.contains(key) def get(key: A) = if (!p(key)) None else self.get(key) } /** Filters this map by retaining only keys satisfying a predicate. + * + * '''Note''': the predicate must accept any key of type `A`, not just those already + * present in the map, as the predicate is tested before the underlying map is queried. + * * @param p the predicate used to test keys * @return an immutable map consisting only of those key value pairs of this map where the key satisfies * the predicate `p`. The resulting map wraps the original map without copying any elements. @@ -319,11 +323,20 @@ self => res } - /* Overridden for efficiency. 
*/ - override def toSeq: Seq[(A, B)] = toBuffer[(A, B)] + override def toSeq: Seq[(A, B)] = { + if (isEmpty) Vector.empty[(A, B)] + else { + // Default appropriate for immutable collections; mutable collections override this + val vb = Vector.newBuilder[(A, B)] + foreach(vb += _) + vb.result + } + } + override def toBuffer[C >: (A, B)]: mutable.Buffer[C] = { val result = new mutable.ArrayBuffer[C](size) - copyToBuffer(result) + // Faster to let the map iterate itself than to defer through copyToBuffer + foreach(result += _) result } diff --git a/src/library/scala/collection/MapProxy.scala b/src/library/scala/collection/MapProxy.scala index 941c1f5a4a43..26a7c710ee43 100644 --- a/src/library/scala/collection/MapProxy.scala +++ b/src/library/scala/collection/MapProxy.scala @@ -17,4 +17,5 @@ package collection * @version 1.0, 21/07/2003 * @since 1 */ +@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.3") trait MapProxy[A, +B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]] diff --git a/src/library/scala/collection/Searching.scala b/src/library/scala/collection/Searching.scala index fec4bbf5025a..b68124b3f8e0 100644 --- a/src/library/scala/collection/Searching.scala +++ b/src/library/scala/collection/Searching.scala @@ -54,7 +54,7 @@ object Searching { */ final def search[B >: A](elem: B)(implicit ord: Ordering[B]): SearchResult = coll match { - case _: IndexedSeq[A] => binarySearch(elem, -1, coll.length)(ord) + case _: IndexedSeq[A] => binarySearch(elem, 0, coll.length)(ord) case _ => linearSearch(coll.view, elem, 0)(ord) } @@ -81,18 +81,18 @@ object Searching { final def search[B >: A](elem: B, from: Int, to: Int) (implicit ord: Ordering[B]): SearchResult = coll match { - case _: IndexedSeq[A] => binarySearch(elem, from-1, to)(ord) + case _: IndexedSeq[A] => binarySearch(elem, from, to)(ord) case _ => linearSearch(coll.view(from, to), elem, from)(ord) } @tailrec private def binarySearch[B >: A](elem: B, from: Int, to: Int) (implicit ord: Ordering[B]): SearchResult = { - if ((to-from) == 1) InsertionPoint(from) else { - val idx = from+(to-from)/2 + if (to == from) InsertionPoint(from) else { + val idx = from+(to-from-1)/2 math.signum(ord.compare(elem, coll(idx))) match { case -1 => binarySearch(elem, from, idx)(ord) - case 1 => binarySearch(elem, idx, to)(ord) + case 1 => binarySearch(elem, idx + 1, to)(ord) case _ => Found(idx) } } @@ -105,7 +105,7 @@ object Searching { while (it.hasNext) { val cur = it.next() if (ord.equiv(elem, cur)) return Found(idx) - else if (ord.lt(elem, cur)) return InsertionPoint(idx-1) + else if (ord.lt(elem, cur)) return InsertionPoint(idx) idx += 1 } InsertionPoint(idx) diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala index fdfb1f2efca5..329273df5bc6 100644 --- a/src/library/scala/collection/SeqLike.scala +++ b/src/library/scala/collection/SeqLike.scala @@ -140,7 +140,15 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ if (isEmpty) Iterator(repr) else new PermutationsItr - /** Iterates over combinations. + /** Iterates over combinations. A _combination_ of length `n` is a subsequence of + * the original sequence, with the elements taken in order. Thus, `"xy"` and `"yy"` + * are both length-2 combinations of `"xyy"`, but `"yx"` is not. If there is + * more than one way to generate the same subsequence, only one will be returned. 
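A small sketch of the corrected `Searching` bounds above, assuming the standard `search` enrichment imported from `scala.collection.Searching`:

{{{
import scala.collection.Searching._

val xs = Vector(1, 3, 5, 7)
xs.search(5)   // Found(2)
xs.search(4)   // InsertionPoint(2)
xs.search(0)   // InsertionPoint(0) — the corrected bounds avoid the old off-by-one at the low end
}}}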
+ * + * For example, `"xyyy"` has three different ways to generate `"xy"` depending on + * whether the first, second, or third `"y"` is selected. However, since all are + * identical, only one will be chosen. Which of the three will be taken is an + * implementation detail that is not defined. * * @return An Iterator which traverses the possible n-element combinations of this $coll. * @example `"abbbc".combinations(2) = Iterator(ab, ac, bb, bc)` diff --git a/src/library/scala/collection/SeqViewLike.scala b/src/library/scala/collection/SeqViewLike.scala index 5e31ac4a5301..587ec133a5c7 100644 --- a/src/library/scala/collection/SeqViewLike.scala +++ b/src/library/scala/collection/SeqViewLike.scala @@ -55,7 +55,7 @@ trait SeqViewLike[+A, trait Sliced extends super.Sliced with Transformed[A] { def length = iterator.size def apply(idx: Int): A = - if (idx + from < until) self.apply(idx + from) + if (idx >= 0 && idx + from < until) self.apply(idx + from) else throw new IndexOutOfBoundsException(idx.toString) override def foreach[U](f: A => U) = iterator foreach f @@ -83,6 +83,7 @@ trait SeqViewLike[+A, } def length = index(self.length) def apply(idx: Int) = { + if (idx < 0 || idx >= self.length) throw new IndexOutOfBoundsException(idx.toString) val row = findRow(idx, 0, self.length - 1) mapping(self(row)).seq.toSeq(idx - index(row)) } @@ -95,6 +96,14 @@ trait SeqViewLike[+A, if (idx < self.length) self(idx) else restSeq(idx - self.length) } + trait Prepended[B >: A] extends super.Prepended[B] with Transformed[B] { + protected[this] lazy val fstSeq = fst.toSeq + def length: Int = fstSeq.length + self.length + def apply(idx: Int): B = + if (idx < fstSeq.length) fstSeq(idx) + else self.apply(idx - fstSeq.length) + } + trait Filtered extends super.Filtered with Transformed[A] { protected[this] lazy val index = { var len = 0 @@ -154,35 +163,36 @@ trait SeqViewLike[+A, } } + // Note--for this to work, must ensure 0 <= from and 0 <= replaced + // Must also take care to allow patching inside an infinite stream + // (patching in an infinite stream is not okay) trait Patched[B >: A] extends Transformed[B] { protected[this] val from: Int protected[this] val patch: GenSeq[B] protected[this] val replaced: Int private lazy val plen = patch.length override def iterator: Iterator[B] = self.iterator patch (from, patch.iterator, replaced) - def length: Int = self.length + plen - replaced - def apply(idx: Int): B = - if (idx < from) self.apply(idx) - else if (idx < from + plen) patch.apply(idx - from) + def length: Int = { + val len = self.length + val pre = math.min(from, len) + val post = math.max(0, len - pre - replaced) + pre + plen + post + } + def apply(idx: Int): B = { + val actualFrom = if (self.lengthCompare(from) < 0) self.length else from + if (idx < actualFrom) self.apply(idx) + else if (idx < actualFrom + plen) patch.apply(idx - actualFrom) else self.apply(idx - plen + replaced) + } final override protected[this] def viewIdentifier = "P" } - trait Prepended[B >: A] extends Transformed[B] { - protected[this] val fst: B - override def iterator: Iterator[B] = Iterator.single(fst) ++ self.iterator - def length: Int = 1 + self.length - def apply(idx: Int): B = - if (idx == 0) fst - else self.apply(idx - 1) - final override protected[this] def viewIdentifier = "A" - } - /** Boilerplate method, to override in each subclass * This method could be eliminated if Scala had virtual classes */ protected override def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with 
AbstractTransformed[B] with Forced[B] protected override def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B] + protected override def newPrepended[B >: A](that: GenTraversable[B]): Transformed[B] = new { protected[this] val fst = that } with AbstractTransformed[B] with Prepended[B] protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B] protected override def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B] protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered @@ -201,7 +211,6 @@ trait SeqViewLike[+A, val patch = _patch val replaced = _replaced } with AbstractTransformed[B] with Patched[B] - protected def newPrepended[B >: A](elem: B): Transformed[B] = new { protected[this] val fst = elem } with AbstractTransformed[B] with Prepended[B] // see comment in IterableViewLike. protected override def newTaken(n: Int): Transformed[A] = newSliced(SliceInterval(0, n)) @@ -210,7 +219,10 @@ trait SeqViewLike[+A, override def reverse: This = newReversed.asInstanceOf[This] override def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[This, B, That]): That = { - newPatched(from, patch, replaced).asInstanceOf[That] + // Be careful to not evaluate the entire sequence! Patch should work (slowly, perhaps) on infinite streams. + val nonNegFrom = math.max(0,from) + val nonNegRep = math.max(0,replaced) + newPatched(nonNegFrom, patch, nonNegRep).asInstanceOf[That] // was: val b = bf(repr) // if (b.isInstanceOf[NoBuilder[_]]) newPatched(from, patch, replaced).asInstanceOf[That] // else super.patch[B, That](from, patch, replaced)(bf) @@ -228,7 +240,7 @@ trait SeqViewLike[+A, } override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That = - newPrepended(elem).asInstanceOf[That] + newPrepended(elem :: Nil).asInstanceOf[That] override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That = ++(Iterator.single(elem))(bf) diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala index 0c5c7e0b29ce..80a344e6a88e 100644 --- a/src/library/scala/collection/SetLike.scala +++ b/src/library/scala/collection/SetLike.scala @@ -17,6 +17,9 @@ import parallel.ParSet /** A template trait for sets. * * $setNote + * '''Implementation note:''' + * This trait provides most of the operations of a `Set` independently of its representation. + * It is typically inherited by concrete implementations of sets. * $setTags * @since 2.8 * @@ -24,10 +27,6 @@ import parallel.ParSet * * A set is a collection that contains no duplicate elements. * - * '''Implementation note:''' - * This trait provides most of the operations of a `Set` independently of its representation. - * It is typically inherited by concrete implementations of sets. - * * To implement a concrete set, you need to provide implementations of the * following methods: * {{{ @@ -78,11 +77,20 @@ self => protected[this] override def parCombiner = ParSet.newCombiner[A] - /* Overridden for efficiency. 
*/ - override def toSeq: Seq[A] = toBuffer[A] + // Default collection type appropriate for immutable collections; mutable collections override this + override def toSeq: Seq[A] = { + if (isEmpty) Vector.empty[A] + else { + val vb = Vector.newBuilder[A] + foreach(vb += _) + vb.result + } + } + override def toBuffer[A1 >: A]: mutable.Buffer[A1] = { val result = new mutable.ArrayBuffer[A1](size) - copyToBuffer(result) + // Faster to let the map iterate itself than to defer through copyToBuffer + foreach(result += _) result } diff --git a/src/library/scala/collection/SetProxy.scala b/src/library/scala/collection/SetProxy.scala index f9f38f148a66..e17fb215b972 100644 --- a/src/library/scala/collection/SetProxy.scala +++ b/src/library/scala/collection/SetProxy.scala @@ -17,4 +17,5 @@ package collection * @author Martin Odersky * @version 2.0, 01/01/2007 */ +@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.3") trait SetProxy[A] extends Set[A] with SetProxyLike[A, Set[A]] diff --git a/src/library/scala/collection/Traversable.scala b/src/library/scala/collection/Traversable.scala index b53724c5683c..a35750a35f9f 100644 --- a/src/library/scala/collection/Traversable.scala +++ b/src/library/scala/collection/Traversable.scala @@ -87,7 +87,7 @@ trait Traversable[+A] extends TraversableLike[A, Traversable[A]] } /** $factoryInfo - * The current default implementation of a $Coll is a `Vector`. + * The current default implementation of a $Coll is a `List`. */ object Traversable extends TraversableFactory[Traversable] { self => diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala index b60ea86ab069..5a07874fd678 100644 --- a/src/library/scala/collection/TraversableLike.scala +++ b/src/library/scala/collection/TraversableLike.scala @@ -253,7 +253,7 @@ trait TraversableLike[+A, +Repr] extends Any b.result } - private def filterImpl(p: A => Boolean, isFlipped: Boolean): Repr = { + private[scala] def filterImpl(p: A => Boolean, isFlipped: Boolean): Repr = { val b = newBuilder for (x <- this) if (p(x) != isFlipped) b += x @@ -345,8 +345,8 @@ trait TraversableLike[+A, +Repr] extends Any * $mayNotTerminateInf * * @param p the predicate used to test elements. - * @return `true` if the given predicate `p` holds for all elements - * of this $coll, otherwise `false`. + * @return `true` if this $coll is empty, otherwise `true` if the given predicate `p` + * holds for all elements of this $coll, otherwise `false`. */ def forall(p: A => Boolean): Boolean = { var result = true @@ -362,8 +362,8 @@ trait TraversableLike[+A, +Repr] extends Any * $mayNotTerminateInf * * @param p the predicate used to test elements. - * @return `true` if the given predicate `p` holds for some of the - * elements of this $coll, otherwise `false`. + * @return `false` if this $coll is empty, otherwise `true` if the given predicate `p` + * holds for some of the elements of this $coll, otherwise `false` */ def exists(p: A => Boolean): Boolean = { var result = false @@ -419,7 +419,7 @@ trait TraversableLike[+A, +Repr] extends Any /** Selects the first element of this $coll. * $orderDependent * @return the first element of this $coll. - * @throws `NoSuchElementException` if the $coll is empty. + * @throws NoSuchElementException if the $coll is empty. 
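The sharpened `forall`/`exists` documentation above is easy to check on the empty collection:

{{{
List.empty[Int].forall(_ > 0)   // true — vacuously true for an empty collection
List.empty[Int].exists(_ > 0)   // false
List(1, -2).forall(_ > 0)       // false
List(1, -2).exists(_ > 0)       // true
}}}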
*/ def head: A = { var result: () => A = () => throw new NoSuchElementException @@ -473,7 +473,7 @@ trait TraversableLike[+A, +Repr] extends Any * $orderDependent * @return a $coll consisting of all elements of this $coll * except the last one. - * @throws `UnsupportedOperationException` if the $coll is empty. + * @throws UnsupportedOperationException if the $coll is empty. */ def init: Repr = { if (isEmpty) throw new UnsupportedOperationException("empty.init") diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala index 072fd3da44b3..2eab58009c2d 100644 --- a/src/library/scala/collection/TraversableOnce.scala +++ b/src/library/scala/collection/TraversableOnce.scala @@ -75,7 +75,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] { // at least indirectly. Currently, these are `ArrayOps` and `StringOps`. // It is also implemented in `TraversableOnce[A]`. /** A version of this collection with all - * of the operations implemented sequentially (i.e. in a single-threaded manner). + * of the operations implemented sequentially (i.e., in a single-threaded manner). * * This method returns a reference to this collection. In parallel collections, * it is redefined to return a sequential implementation of this collection. In @@ -85,10 +85,9 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] { */ def seq: TraversableOnce[A] - /** Presently these are abstract because the Traversable versions use - * breakable/break, and I wasn't sure enough of how that's supposed to - * function to consolidate them with the Iterator versions. - */ + // Presently these are abstract because the Traversable versions use + // breakable/break, and I wasn't sure enough of how that's supposed to + // function to consolidate them with the Iterator versions. def forall(p: A => Boolean): Boolean def exists(p: A => Boolean): Boolean def find(p: A => Boolean): Option[A] @@ -160,7 +159,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] { * op( op( ... op(x_1, x_2) ..., x_{n-1}), x_n) * }}} * where `x,,1,,, ..., x,,n,,` are the elements of this $coll. - * @throws `UnsupportedOperationException` if this $coll is empty. */ + * @throws UnsupportedOperationException if this $coll is empty. 
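A quick sketch of the `@throws` clause above, and the non-throwing alternative:

{{{
List(1, 2, 3).reduceLeft(_ + _)          // 6
List.empty[Int].reduceLeftOption(_ + _)  // None — plain reduceLeft would throw UnsupportedOperationException
}}}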
*/ def reduceLeft[B >: A](op: (B, A) => B): B = { if (isEmpty) throw new UnsupportedOperationException("empty.reduceLeft") diff --git a/src/library/scala/collection/TraversableProxy.scala b/src/library/scala/collection/TraversableProxy.scala index 65936da0e4ba..9eec685d101d 100644 --- a/src/library/scala/collection/TraversableProxy.scala +++ b/src/library/scala/collection/TraversableProxy.scala @@ -21,4 +21,5 @@ package collection * @version 2.8 * @since 2.8 */ +@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.3") trait TraversableProxy[+A] extends Traversable[A] with TraversableProxyLike[A, Traversable[A]] diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala index 5926c69ebf64..0901d749c33c 100644 --- a/src/library/scala/collection/TraversableViewLike.scala +++ b/src/library/scala/collection/TraversableViewLike.scala @@ -189,6 +189,15 @@ trait TraversableViewLike[+A, } final override protected[this] def viewIdentifier = "A" } + + trait Prepended[B >: A] extends Transformed[B] { + protected[this] val fst: GenTraversable[B] + def foreach[U](f: B => U) { + fst foreach f + self foreach f + } + final override protected[this] def viewIdentifier = "A" + } trait Filtered extends Transformed[A] { protected[this] val pred: A => Boolean @@ -222,11 +231,15 @@ trait TraversableViewLike[+A, final override protected[this] def viewIdentifier = "D" } - override def ++[B >: A, That](xs: GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That = { + override def ++[B >: A, That](xs: GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That = newAppended(xs.seq.toTraversable).asInstanceOf[That] -// was: if (bf.isInstanceOf[ByPassCanBuildFrom]) newAppended(that).asInstanceOf[That] -// else super.++[B, That](that)(bf) - } + + override def ++:[B >: A, That](xs: TraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That = + newPrepended(xs.seq.toTraversable).asInstanceOf[That] + + // Need second one because of optimization in TraversableLike + override def ++:[B >: A, That](xs: Traversable[B])(implicit bf: CanBuildFrom[This, B, That]): That = + newPrepended(xs).asInstanceOf[That] override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That = { newMapped(f).asInstanceOf[That] @@ -253,6 +266,7 @@ trait TraversableViewLike[+A, */ protected def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B] protected def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B] + protected def newPrepended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val fst = that } with AbstractTransformed[B] with Prepended[B] protected def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B] protected def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B] protected def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered diff --git a/src/library/scala/collection/concurrent/Map.scala b/src/library/scala/collection/concurrent/Map.scala index 02e5dd01f5c2..2eea15b8dcff 100644 --- a/src/library/scala/collection/concurrent/Map.scala +++ b/src/library/scala/collection/concurrent/Map.scala @@ -20,7 +20,7 @@ package 
collection.concurrent * @tparam A the key type of the map * @tparam B the value type of the map * - * @define Coll `ConcurrentMap` + * @define Coll `concurrent.Map` * @define coll concurrent map * @define concurrentmapinfo * This is a base trait for all Scala concurrent map implementations. It diff --git a/src/library/scala/collection/convert/DecorateAsScala.scala b/src/library/scala/collection/convert/DecorateAsScala.scala index c724831c5427..5448f5f91c8d 100644 --- a/src/library/scala/collection/convert/DecorateAsScala.scala +++ b/src/library/scala/collection/convert/DecorateAsScala.scala @@ -135,6 +135,12 @@ trait DecorateAsScala { * If the Java `Map` was previously obtained from an implicit or explicit * call of `asMap(scala.collection.mutable.Map)` then the original * Scala `Map` will be returned. + * + * If the wrapped map is synchronized (e.g. from `java.util.Collections.synchronizedMap`), + * it is your responsibility to wrap all + * non-atomic operations with `underlying.synchronized`. + * This includes `get`, as `java.util.Map`'s API does not allow for an + * atomic `get` when `null` values may be present. * * @param m The `Map` to be converted. * @return An object with an `asScala` method that returns a Scala mutable diff --git a/src/library/scala/collection/convert/WrapAsScala.scala b/src/library/scala/collection/convert/WrapAsScala.scala index d4ab451b0d28..ab151a677824 100644 --- a/src/library/scala/collection/convert/WrapAsScala.scala +++ b/src/library/scala/collection/convert/WrapAsScala.scala @@ -133,7 +133,13 @@ trait WrapAsScala { * If the Java `Map` was previously obtained from an implicit or * explicit call of `mapAsScalaMap(scala.collection.mutable.Map)` then * the original Scala Map will be returned. - * + * + * If the wrapped map is synchronized (e.g. from `java.util.Collections.synchronizedMap`), + * it is your responsibility to wrap all + * non-atomic operations with `underlying.synchronized`. + * This includes `get`, as `java.util.Map`'s API does not allow for an + * atomic `get` when `null` values may be present. + * * @param m The Map to be converted. * @return A Scala mutable Map view of the argument. */ diff --git a/src/library/scala/collection/convert/Wrappers.scala b/src/library/scala/collection/convert/Wrappers.scala index 14ae57c43a03..9f9732c62f4d 100644 --- a/src/library/scala/collection/convert/Wrappers.scala +++ b/src/library/scala/collection/convert/Wrappers.scala @@ -194,7 +194,7 @@ private[collection] trait Wrappers { def getKey = k def getValue = v def setValue(v1 : B) = self.put(k, v1) - override def hashCode = byteswap32(k.hashCode) + (byteswap32(v.hashCode) << 16) + override def hashCode = byteswap32(k.##) + (byteswap32(v.##) << 16) override def equals(other: Any) = other match { case e: ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue case _ => false @@ -288,6 +288,13 @@ private[collection] trait Wrappers { override def empty: Repr = null.asInstanceOf[Repr] } + /** Wraps a Java map as a Scala one. If the map is to support concurrent access, + * use [[JConcurrentMapWrapper]] instead. If the wrapped map is synchronized + * (e.g. from `java.util.Collections.synchronizedMap`), it is your responsibility + * to wrap all non-atomic operations with `underlying.synchronized`. + * This includes `get`, as `java.util.Map`'s API does not allow for an + * atomic `get` when `null` values may be present. 
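A sketch of the caveat documented above for synchronized Java maps viewed through `asScala` (the map and key are illustrative):

{{{
import java.util.{Collections, HashMap => JHashMap}
import scala.collection.JavaConverters._

val underlying = Collections.synchronizedMap(new JHashMap[String, String])
val m = underlying.asScala

// The wrapper cannot make `get` atomic when null values may be present,
// so guard non-atomic reads by synchronizing on the underlying map:
val v: Option[String] = underlying.synchronized { m.get("key") }
}}}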
+ */ case class JMapWrapper[A, B](underlying : ju.Map[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JMapWrapper[A, B]] { override def empty = JMapWrapper(new ju.HashMap[A, B]) } @@ -314,6 +321,10 @@ private[collection] trait Wrappers { def replace(k: A, oldval: B, newval: B) = underlying.replace(k, oldval, newval) } + /** Wraps a concurrent Java map as a Scala one. Single-element concurrent + * access is supported; multi-element operations such as maps and filters + * are not guaranteed to be atomic. + */ case class JConcurrentMapWrapper[A, B](underlying: juc.ConcurrentMap[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JConcurrentMapWrapper[A, B]] with concurrent.Map[A, B] { override def get(k: A) = { val v = underlying get k diff --git a/src/library/scala/collection/generic/GenericTraversableTemplate.scala b/src/library/scala/collection/generic/GenericTraversableTemplate.scala index cd48cd23f4b3..54455c531a35 100644 --- a/src/library/scala/collection/generic/GenericTraversableTemplate.scala +++ b/src/library/scala/collection/generic/GenericTraversableTemplate.scala @@ -25,7 +25,7 @@ import scala.language.higherKinds * @author Martin Odersky * @since 2.8 * @define coll collection - * @define Coll CC + * @define Coll Traversable */ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNewBuilder[A, CC[A] @uncheckedVariance] { @@ -45,7 +45,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew /** Selects the first element of this $coll. * * @return the first element of this $coll. - * @throws `NoSuchElementException` if the $coll is empty. + * @throws NoSuchElementException if the $coll is empty. */ def head: A @@ -202,7 +202,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew * element type of this $coll is a `Traversable`. * @return a two-dimensional $coll of ${coll}s which has as ''n''th row * the ''n''th column of this $coll. - * @throws `IllegalArgumentException` if all collections in this $coll + * @throws IllegalArgumentException if all collections in this $coll * are not of the same size. */ @migration("`transpose` throws an `IllegalArgumentException` if collections are not uniformly sized.", "2.9.0") diff --git a/src/library/scala/collection/generic/Sorted.scala b/src/library/scala/collection/generic/Sorted.scala index ab0d443a0385..a0b0e1318bde 100644 --- a/src/library/scala/collection/generic/Sorted.scala +++ b/src/library/scala/collection/generic/Sorted.scala @@ -62,7 +62,8 @@ trait Sorted[K, +This <: Sorted[K, This]] { /** Creates a ranged projection of this collection with both a lower-bound * and an upper-bound. * - * @param from The upper-bound (exclusive) of the ranged projection. + * @param from The lower-bound (inclusive) of the ranged projection. + * @param until The upper-bound (exclusive) of the ranged projection. */ def range(from: K, until: K): This = rangeImpl(Some(from), Some(until)) diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala index 726937efd963..49b4397cf2b2 100644 --- a/src/library/scala/collection/immutable/HashSet.scala +++ b/src/library/scala/collection/immutable/HashSet.scala @@ -162,6 +162,13 @@ class HashSet[A] extends AbstractSet[A] def - (e: A): HashSet[A] = nullToEmpty(removed0(e, computeHash(e), 0)) + /** Returns this $coll as an immutable set. + * + * A new set will not be built; lazy collections will stay lazy. 
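+   *
+   * A sketch of the intended behaviour (the set contents are illustrative):
+   * {{{
+   * val hs = scala.collection.immutable.HashSet(1, 2, 3)
+   * assert(hs.toSet eq hs)   // the same instance is returned; no copy is made
+   * }}}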
+ */ + @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0") + override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]] + override def filter(p: A => Boolean) = { val buffer = new Array[HashSet[A]](bufferSize(size)) nullToEmpty(filter0(p, false, 0, buffer, 0)) @@ -187,7 +194,7 @@ class HashSet[A] extends AbstractSet[A] protected def get0(key: A, hash: Int, level: Int): Boolean = false - def updated0(key: A, hash: Int, level: Int): HashSet[A] = + private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] = new HashSet.HashSet1(key, hash) protected def removed0(key: A, hash: Int, level: Int): HashSet[A] = this @@ -249,10 +256,10 @@ object HashSet extends ImmutableSetFactory[HashSet] { class HashSet1[A](private[HashSet] val key: A, private[HashSet] val hash: Int) extends LeafHashSet[A] { override def size = 1 - override def get0(key: A, hash: Int, level: Int): Boolean = + override protected def get0(key: A, hash: Int, level: Int): Boolean = (hash == this.hash && key == this.key) - override def subsetOf0(that: HashSet[A], level: Int) = { + override protected def subsetOf0(that: HashSet[A], level: Int) = { // check if that contains this.key // we use get0 with our key and hash at the correct level instead of calling contains, // which would not work since that might not be a top-level HashSet @@ -260,7 +267,7 @@ object HashSet extends ImmutableSetFactory[HashSet] { that.get0(key, hash, level) } - override def updated0(key: A, hash: Int, level: Int): HashSet[A] = + override private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] = if (hash == this.hash && key == this.key) this else { if (hash != this.hash) { @@ -305,7 +312,7 @@ object HashSet extends ImmutableSetFactory[HashSet] { override private[immutable] def diff0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = if (that.get0(key, hash, level)) null else this - override def removed0(key: A, hash: Int, level: Int): HashSet[A] = + override protected def removed0(key: A, hash: Int, level: Int): HashSet[A] = if (hash == this.hash && key == this.key) null else this override protected def filter0(p: A => Boolean, negate: Boolean, level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = @@ -319,10 +326,10 @@ object HashSet extends ImmutableSetFactory[HashSet] { override def size = ks.size - override def get0(key: A, hash: Int, level: Int): Boolean = + override protected def get0(key: A, hash: Int, level: Int): Boolean = if (hash == this.hash) ks.contains(key) else false - override def subsetOf0(that: HashSet[A], level: Int) = { + override protected def subsetOf0(that: HashSet[A], level: Int) = { // we have to check each element // we use get0 with our hash at the correct level instead of calling contains, // which would not work since that might not be a top-level HashSet @@ -330,11 +337,11 @@ object HashSet extends ImmutableSetFactory[HashSet] { ks.forall(key => that.get0(key, hash, level)) } - override def updated0(key: A, hash: Int, level: Int): HashSet[A] = + override private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] = if (hash == this.hash) new HashSetCollision1(hash, ks + key) else makeHashTrieSet(this.hash, this, hash, new HashSet1(key, hash), level) - override def union0(that: LeafHashSet[A], level: Int): HashSet[A] = that match { + override private[immutable] def union0(that: LeafHashSet[A], level: Int): HashSet[A] = that match { case that if that.hash != this.hash => 
// different hash code, so there is no need to investigate further. // Just create a branch node containing the two. @@ -367,7 +374,7 @@ object HashSet extends ImmutableSetFactory[HashSet] { } } - override def union0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = that match { + override private[immutable] def union0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = that match { case that: LeafHashSet[A] => // switch to the simpler Tree/Leaf implementation this.union0(that, level) @@ -399,7 +406,7 @@ object HashSet extends ImmutableSetFactory[HashSet] { // create a new HashSet1 with the hash we already know new HashSet1(ks1.head, hash) case _ => - // create a new HashSetCollison with the hash we already know and the new keys + // create a new HashSetCollision with the hash we already know and the new keys new HashSetCollision1(hash, ks1) } } @@ -419,12 +426,12 @@ object HashSet extends ImmutableSetFactory[HashSet] { // create a new HashSet1 with the hash we already know new HashSet1(ks1.head, hash) case _ => - // create a new HashSetCollison with the hash we already know and the new keys + // create a new HashSetCollision with the hash we already know and the new keys new HashSetCollision1(hash, ks1) } } - override def removed0(key: A, hash: Int, level: Int): HashSet[A] = + override protected def removed0(key: A, hash: Int, level: Int): HashSet[A] = if (hash == this.hash) { val ks1 = ks - key ks1.size match { @@ -438,7 +445,7 @@ object HashSet extends ImmutableSetFactory[HashSet] { // Should only have HSC1 if size > 1 this case _ => - // create a new HashSetCollison with the hash we already know and the new keys + // create a new HashSetCollision with the hash we already know and the new keys new HashSetCollision1(hash, ks1) } } else this @@ -521,7 +528,7 @@ object HashSet extends ImmutableSetFactory[HashSet] { override def size = size0 - override def get0(key: A, hash: Int, level: Int): Boolean = { + override protected def get0(key: A, hash: Int, level: Int): Boolean = { val index = (hash >>> level) & 0x1f val mask = (1 << index) if (bitmap == - 1) { @@ -533,7 +540,7 @@ object HashSet extends ImmutableSetFactory[HashSet] { false } - override def updated0(key: A, hash: Int, level: Int): HashSet[A] = { + override private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] = { val index = (hash >>> level) & 0x1f val mask = (1 << index) val offset = Integer.bitCount(bitmap & (mask-1)) @@ -835,7 +842,7 @@ object HashSet extends ImmutableSetFactory[HashSet] { case _ => this } - override def removed0(key: A, hash: Int, level: Int): HashSet[A] = { + override protected def removed0(key: A, hash: Int, level: Int): HashSet[A] = { val index = (hash >>> level) & 0x1f val mask = (1 << index) val offset = Integer.bitCount(bitmap & (mask-1)) @@ -872,7 +879,7 @@ object HashSet extends ImmutableSetFactory[HashSet] { } } - override def subsetOf0(that: HashSet[A], level: Int): Boolean = if (that eq this) true else that match { + override protected def subsetOf0(that: HashSet[A], level: Int): Boolean = if (that eq this) true else that match { case that: HashTrieSet[A] if this.size0 <= that.size0 => // create local mutable copies of members var abm = this.bitmap diff --git a/src/library/scala/collection/immutable/Iterable.scala b/src/library/scala/collection/immutable/Iterable.scala index 6e4eb1e45fa4..df322396d0c6 100644 --- a/src/library/scala/collection/immutable/Iterable.scala +++ 
b/src/library/scala/collection/immutable/Iterable.scala @@ -35,6 +35,7 @@ trait Iterable[+A] extends Traversable[A] } /** $factoryInfo + * The current default implementation of a $Coll is a `List`. * @define Coll `immutable.Iterable` * @define coll immutable iterable collection */ diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala index 930e13a9d363..89b4ee114579 100644 --- a/src/library/scala/collection/immutable/List.scala +++ b/src/library/scala/collection/immutable/List.scala @@ -13,7 +13,7 @@ package immutable import generic._ import mutable.{Builder, ListBuffer} import scala.annotation.tailrec -import java.io._ +import java.io.{ObjectOutputStream, ObjectInputStream} /** A class for immutable linked lists representing ordered collections * of elements of type. @@ -80,12 +80,13 @@ import java.io._ * @define mayNotTerminateInf * @define willNotTerminateInf */ +@SerialVersionUID(-6084104484083858598L) // value computed by serialver for 2.11.2, annotation added in 2.11.4 sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with Product with GenericTraversableTemplate[A, List] with LinearSeqOptimized[A, List[A]] - with Serializable { + with scala.Serializable { override def companion: GenericCompanion[List] = List import scala.collection.{Iterable, Traversable, Seq, IndexedSeq} @@ -190,11 +191,9 @@ sealed abstract class List[+A] extends AbstractSeq[A] // Overridden methods from IterableLike and SeqLike or overloaded variants of such methods - override def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = { - val b = bf(this) - if (b.isInstanceOf[ListBuffer[_]]) (this ::: that.seq.toList).asInstanceOf[That] + override def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = + if (bf eq List.ReusableCBF) (this ::: that.seq.toList).asInstanceOf[That] else super.++(that) - } override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[List[A], B, That]): That = bf match { case _: List.GenericCanBuildFrom[_] => (elem :: this).asInstanceOf[That] @@ -292,7 +291,6 @@ sealed abstract class List[+A] extends AbstractSeq[A] if (this eq Nil) Nil.asInstanceOf[That] else { var rest = this var h: ::[B] = null - var x: A = null.asInstanceOf[A] // Special case for first element do { val x: Any = pf.applyOrElse(rest.head, List.partialNotApplied) @@ -430,13 +428,14 @@ case object Nil extends List[Nothing] { } /** A non empty list characterized by a head and a tail. - * @param hd the first element of the list + * @param head the first element of the list * @param tl the list containing the remaining elements of this list after the first one. * @tparam B the type of the list elements. 
* @author Martin Odersky * @version 1.0, 15/07/2003 * @since 2.8 */ +@SerialVersionUID(509929039250432923L) // value computed by serialver for 2.11.2, annotation added in 2.11.4 final case class ::[B](override val head: B, private[scala] var tl: List[B]) extends List[B] { override def tail : List[B] = tl override def isEmpty: Boolean = false diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala index 7c40e84280c4..c5773338f544 100644 --- a/src/library/scala/collection/immutable/ListMap.scala +++ b/src/library/scala/collection/immutable/ListMap.scala @@ -29,7 +29,11 @@ object ListMap extends ImmutableMapFactory[ListMap] { new MapCanBuildFrom[A, B] def empty[A, B]: ListMap[A, B] = EmptyListMap.asInstanceOf[ListMap[A, B]] - private object EmptyListMap extends ListMap[Any, Nothing] { } + @SerialVersionUID(-8256686706655863282L) + private object EmptyListMap extends ListMap[Any, Nothing] { + override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key) + override def contains(key: Any) = false + } } /** This class implements immutable maps using a list-based data structure. @@ -159,7 +163,6 @@ extends AbstractMap[A, B] */ override def apply(k: A): B1 = apply0(this, k) - @tailrec private def apply0(cur: ListMap[A, B1], k: A): B1 = if (cur.isEmpty) throw new NoSuchElementException("key not found: "+k) else if (k == cur.key) cur.value @@ -176,7 +179,16 @@ extends AbstractMap[A, B] @tailrec private def get0(cur: ListMap[A, B1], k: A): Option[B1] = if (k == cur.key) Some(cur.value) else if (cur.next.nonEmpty) get0(cur.next, k) else None - + + + override def contains(key: A): Boolean = contains0(this, key) + + @tailrec private def contains0(cur: ListMap[A, B1], k: A): Boolean = + if (k == cur.key) true + else if (cur.next.nonEmpty) contains0(cur.next, k) + else false + + /** This method allows one to create a new map with an additional mapping * from `key` to `value`. If the map contains already a mapping for `key`, * it will be overridden by this function. @@ -186,6 +198,7 @@ extends AbstractMap[A, B] new m.Node[B2](k, v) } + /** Creates a new mapping without the given `key`. * If the map does not contain a mapping for the given key, the * method returns the same map. diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala index 1bb07eb02d5e..a6e6fba0a567 100644 --- a/src/library/scala/collection/immutable/ListSet.scala +++ b/src/library/scala/collection/immutable/ListSet.scala @@ -111,7 +111,7 @@ class ListSet[A] extends AbstractSet[A] /** Creates a new iterator over all elements contained in this set. * - * @throws Predef.NoSuchElementException + * @throws java.util.NoSuchElementException * @return the new iterator */ def iterator: Iterator[A] = new AbstractIterator[A] { @@ -127,17 +127,24 @@ class ListSet[A] extends AbstractSet[A] } /** - * @throws Predef.NoSuchElementException + * @throws java.util.NoSuchElementException */ override def head: A = throw new NoSuchElementException("Set has no elements") /** - * @throws Predef.NoSuchElementException + * @throws java.util.NoSuchElementException */ override def tail: ListSet[A] = throw new NoSuchElementException("Next of an empty set") override def stringPrefix = "ListSet" + /** Returns this $coll as an immutable set. + * + * A new set will not be built; lazy collections will stay lazy. 
+ */ + @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0") + override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]] + /** Represents an entry in the `ListSet`. */ protected class Node(override val head: A) extends ListSet[A] with Serializable { diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala index 5178d5a862cf..63ddcb18cfa0 100644 --- a/src/library/scala/collection/immutable/Map.scala +++ b/src/library/scala/collection/immutable/Map.scala @@ -94,6 +94,8 @@ object Map extends ImmutableMapFactory[Map] { private object EmptyMap extends AbstractMap[Any, Nothing] with Map[Any, Nothing] with Serializable { override def size: Int = 0 + override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key) + override def contains(key: Any) = false def get(key: Any): Option[Nothing] = None def iterator: Iterator[(Any, Nothing)] = Iterator.empty override def updated [B1] (key: Any, value: B1): Map[Any, B1] = new Map1(key, value) @@ -103,6 +105,8 @@ object Map extends ImmutableMapFactory[Map] { class Map1[A, +B](key1: A, value1: B) extends AbstractMap[A, B] with Map[A, B] with Serializable { override def size = 1 + override def apply(key: A) = if (key == key1) value1 else throw new NoSuchElementException("key not found: " + key) + override def contains(key: A) = key == key1 def get(key: A): Option[B] = if (key == key1) Some(value1) else None def iterator = Iterator((key1, value1)) @@ -119,6 +123,11 @@ object Map extends ImmutableMapFactory[Map] { class Map2[A, +B](key1: A, value1: B, key2: A, value2: B) extends AbstractMap[A, B] with Map[A, B] with Serializable { override def size = 2 + override def apply(key: A) = + if (key == key1) value1 + else if (key == key2) value2 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: A) = (key == key1) || (key == key2) def get(key: A): Option[B] = if (key == key1) Some(value1) else if (key == key2) Some(value2) @@ -140,6 +149,12 @@ object Map extends ImmutableMapFactory[Map] { class Map3[A, +B](key1: A, value1: B, key2: A, value2: B, key3: A, value3: B) extends AbstractMap[A, B] with Map[A, B] with Serializable { override def size = 3 + override def apply(key: A) = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: A) = (key == key1) || (key == key2) || (key == key3) def get(key: A): Option[B] = if (key == key1) Some(value1) else if (key == key2) Some(value2) @@ -164,6 +179,13 @@ object Map extends ImmutableMapFactory[Map] { class Map4[A, +B](key1: A, value1: B, key2: A, value2: B, key3: A, value3: B, key4: A, value4: B) extends AbstractMap[A, B] with Map[A, B] with Serializable { override def size = 4 + override def apply(key: A) = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else if (key == key4) value4 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: A) = (key == key1) || (key == key2) || (key == key3) || (key == key4) def get(key: A): Option[B] = if (key == key1) Some(value1) else if (key == key2) Some(value2) diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala index 3a64820be694..8910ee16b964 100644 --- a/src/library/scala/collection/immutable/PagedSeq.scala +++ 
b/src/library/scala/collection/immutable/PagedSeq.scala @@ -12,7 +12,7 @@ package scala package collection package immutable -import java.io._ +import java.io.{File, FileReader, Reader} import scala.util.matching.Regex import scala.reflect.ClassTag @@ -158,7 +158,7 @@ extends scala.collection.AbstractSeq[T] * @note Calling this method will force the entire sequence to be read. */ def length: Int = { - while (!latest.isLast) addMore() + while (!latest.isLast && latest.end < end) addMore() (latest.end min end) - start } @@ -175,7 +175,8 @@ extends scala.collection.AbstractSeq[T] */ override def isDefinedAt(index: Int) = index >= 0 && index < end - start && { - val p = page(index + start); index + start < p.end + val absidx = index + start + absidx >= 0 && absidx < page(absidx).end } /** The subsequence from index `start` up to `end -1` if `end` @@ -192,6 +193,9 @@ extends scala.collection.AbstractSeq[T] if (f.next eq null) f.addMore(more) f = f.next } + // Warning -- not refining `more` means that slices can freely request and obtain + // data outside of their slice. This is part of the design of PagedSeq + // (to read pages!) but can be surprising. new PagedSeq(more, f, s, e) } diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala index 264304db6881..98266716cc0d 100644 --- a/src/library/scala/collection/immutable/Queue.scala +++ b/src/library/scala/collection/immutable/Queue.scala @@ -53,7 +53,7 @@ class Queue[+A] protected(protected val in: List[A], protected val out: List[A]) * * @param n index of the element to return * @return the element at position `n` in this queue. - * @throws Predef.NoSuchElementException if the queue is too short. + * @throws java.util.NoSuchElementException if the queue is too short. */ override def apply(n: Int): A = { val len = out.length @@ -120,7 +120,7 @@ class Queue[+A] protected(protected val in: List[A], protected val out: List[A]) /** Returns a tuple with the first element in the queue, * and a new queue with this element removed. * - * @throws Predef.NoSuchElementException + * @throws java.util.NoSuchElementException * @return the first element of the queue. */ def dequeue: (A, Queue[A]) = out match { @@ -139,7 +139,7 @@ class Queue[+A] protected(protected val in: List[A], protected val out: List[A]) /** Returns the first element in the queue, or throws an error if there * is no element contained in the queue. * - * @throws Predef.NoSuchElementException + * @throws java.util.NoSuchElementException * @return the first element. */ def front: A = head diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala index 26ccd0980345..0b380517f8dc 100644 --- a/src/library/scala/collection/immutable/Range.scala +++ b/src/library/scala/collection/immutable/Range.scala @@ -33,7 +33,13 @@ import scala.collection.parallel.immutable.ParRange * `init`) are also permitted on overfull ranges. * * @param start the start of this range. - * @param end the exclusive end of the range. + * @param end the end of the range. For exclusive ranges, e.g. + * `Range(0,3)` or `(0 until 3)`, this is one + * step past the last one in the range. For inclusive + * ranges, e.g. `Range.inclusive(0,3)` or `(0 to 3)`, + * it may be in the range if it is not skipped by the step size. + * To find the last element inside a non-empty range, + use `last` instead. * @param step the step for the range. 
* * @author Martin Odersky @@ -196,7 +202,24 @@ extends scala.collection.AbstractSeq[Int] copy(locationAfterN(n), end, step) } ) - + + /** Creates a new range containing the elements starting at `from` up to but not including `until`. + * + * $doesNotUseBuilders + * + * @param from the element at which to start + * @param until the element at which to end (not included in the range) + * @return a new range consisting of a contiguous interval of values in the old range + */ + override def slice(from: Int, until: Int): Range = + if (from <= 0) take(until) + else if (until >= numRangeElements && numRangeElements >= 0) drop(from) + else { + val fromValue = locationAfterN(from) + if (from >= until) newEmptyRange(fromValue) + else new Range.Inclusive(fromValue, locationAfterN(until-1), step) + } + /** Creates a new range containing all the elements of this range except the last one. * * $doesNotUseBuilders @@ -364,15 +387,16 @@ extends scala.collection.AbstractSeq[Int] override def equals(other: Any) = other match { case x: Range => // Note: this must succeed for overfull ranges (length > Int.MaxValue) - (x canEqual this) && ( - isEmpty || // all empty sequences are equal - (start == x.start && { // Otherwise, must have same start - val l0 = last - (l0 == x.last && ( // And same end - start == l0 || step == x.step // And either the same step, or not take any steps - )) - }) - ) + (x canEqual this) && { + if (isEmpty) x.isEmpty // empty sequences are equal + else // this is non-empty... + x.nonEmpty && start == x.start && { // ...so other must contain something and have same start + val l0 = last + (l0 == x.last && ( // And same end + start == l0 || step == x.step // And either the same step, or not take any steps + )) + } + } case _ => super.equals(other) } diff --git a/src/library/scala/collection/immutable/Set.scala b/src/library/scala/collection/immutable/Set.scala index 0fbf7942d426..7725ad9ee337 100644 --- a/src/library/scala/collection/immutable/Set.scala +++ b/src/library/scala/collection/immutable/Set.scala @@ -35,12 +35,7 @@ trait Set[A] extends Iterable[A] override def companion: GenericCompanion[Set] = Set - /** Returns this $coll as an immutable map. - * - * A new map will not be built; lazy collections will stay lazy. - */ - @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0") - override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]] + override def toSet[B >: A]: Set[B] = to[({type l[a] = immutable.Set[B]})#l] // for bincompat; remove in dev override def seq: Set[A] = this protected override def parCombiner = ParSet.newCombiner[A] // if `immutable.SetLike` gets introduced, please move this there! 
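The type lambda in the binary-compatibility fallback above is only there to hand `to` a unary type constructor. A rough sketch of the copying behaviour it falls back to, using a hypothetical helper name:

    // roughly what the bincompat fallback does: build a fresh immutable set from the elements
    def copyToSet[A, B >: A](xs: scala.collection.Traversable[A]): scala.collection.immutable.Set[B] =
      xs.foldLeft(scala.collection.immutable.Set.empty[B])(_ + _)

The per-class `toSet` overrides in the hunks that follow avoid that copy by returning the set itself.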
@@ -62,6 +57,7 @@ object Set extends ImmutableSetFactory[Set] { def - (elem: Any): Set[Any] = this def iterator: Iterator[Any] = Iterator.empty override def foreach[U](f: Any => U): Unit = {} + override def toSet[B >: Any]: Set[B] = this.asInstanceOf[Set[B]] } private[collection] def emptyInstance: Set[Any] = EmptySet @@ -92,6 +88,8 @@ object Set extends ImmutableSetFactory[Set] { if (f(elem1)) Some(elem1) else None } + @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0") + override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]] } /** An optimized representation for immutable sets of size 2 */ @@ -123,6 +121,8 @@ object Set extends ImmutableSetFactory[Set] { else if (f(elem2)) Some(elem2) else None } + @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0") + override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]] } /** An optimized representation for immutable sets of size 3 */ @@ -156,6 +156,8 @@ object Set extends ImmutableSetFactory[Set] { else if (f(elem3)) Some(elem3) else None } + @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0") + override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]] } /** An optimized representation for immutable sets of size 4 */ @@ -191,6 +193,8 @@ object Set extends ImmutableSetFactory[Set] { else if (f(elem4)) Some(elem4) else None } + @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0") + override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]] } } diff --git a/src/library/scala/collection/immutable/Stack.scala b/src/library/scala/collection/immutable/Stack.scala index b77b16f23ff6..1c28093b2c51 100644 --- a/src/library/scala/collection/immutable/Stack.scala +++ b/src/library/scala/collection/immutable/Stack.scala @@ -95,7 +95,7 @@ class Stack[+A] protected (protected val elems: List[A]) /** Returns the top element of the stack. An error is signaled if * there is no element on the stack. * - * @throws Predef.NoSuchElementException + * @throws java.util.NoSuchElementException * @return the top element. */ def top: A = @@ -105,7 +105,7 @@ class Stack[+A] protected (protected val elems: List[A]) /** Removes the top element from the stack. * Note: should return `(A, Stack[A])` as for queues (mics) * - * @throws Predef.NoSuchElementException + * @throws java.util.NoSuchElementException * @return the new stack without the former top element. */ def pop: Stack[A] = diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala index 60de147477cf..b8193024602d 100644 --- a/src/library/scala/collection/immutable/Stream.scala +++ b/src/library/scala/collection/immutable/Stream.scala @@ -97,6 +97,14 @@ import scala.language.implicitConversions * If, on the other hand, there is nothing holding on to the head (e.g. we used * `def` to define the `Stream`) then once it is no longer being used directly, * it disappears. + * + * - Note that some operations, including [[drop]], [[dropWhile]], + * [[flatMap]] or [[collect]] may process a large number of intermediate + * elements before returning. These necessarily hold onto the head, since + * they are methods on `Stream`, and a stream holds its own head. For + * computations of this sort where memoization is not desired, use + * `Iterator` when possible. 
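+ *    A small sketch of the difference (the element counts are only illustrative):
+ *    {{{
+ *    // both expressions compute 1000001, but the Stream version memoizes -- and
+ *    // therefore retains -- every intermediate element for as long as `s` is live:
+ *    val s = Stream.from(1); s.drop(1000000).head
+ *    Iterator.from(1).drop(1000000).next()
+ *    }}}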
+ * * {{{ * // For example, let's build the natural numbers and do some silly iteration * // over them. @@ -168,6 +176,12 @@ import scala.language.implicitConversions * loop(1, 1) * } * }}} + * + * Note that `mkString` forces evaluation of a `Stream`, but `addString` does + * not. In both cases, a `Stream` that is or ends in a cycle + * (e.g. `lazy val s: Stream[Int] = 0 #:: s`) will convert additional trips + * through the cycle to `...`. Additionally, `addString` will display an + * un-memoized tail as `?`. * * @tparam A the type of the elements contained in this stream. * @@ -211,7 +225,7 @@ self => * }}} * * @return The first element of the `Stream`. - * @throws Predef.NoSuchElementException if the stream is empty. + * @throws java.util.NoSuchElementException if the stream is empty. */ def head: A @@ -222,7 +236,7 @@ self => * returns the lazy result. * * @return The tail of the `Stream`. - * @throws Predef.UnsupportedOperationException if the stream is empty. + * @throws UnsupportedOperationException if the stream is empty. */ def tail: Stream[A] @@ -245,12 +259,22 @@ self => * @note Often we use `Stream`s to represent an infinite set or series. If * that's the case for your particular `Stream` then this function will never * return and will probably crash the VM with an `OutOfMemory` exception. + * This function will not hang on a finite cycle, however. * * @return The fully realized `Stream`. */ def force: Stream[A] = { - var these = this - while (!these.isEmpty) these = these.tail + // Use standard 2x 1x iterator trick for cycle detection ("those" is slow one) + var these, those = this + if (!these.isEmpty) these = these.tail + while (those ne these) { + if (these.isEmpty) return this + these = these.tail + if (these.isEmpty) return this + these = these.tail + if (these eq those) return this + those = those.tail + } this } @@ -301,9 +325,24 @@ self => override def toStream: Stream[A] = this - override def hasDefiniteSize = { - def loop(s: Stream[A]): Boolean = s.isEmpty || s.tailDefined && loop(s.tail) - loop(this) + override def hasDefiniteSize: Boolean = isEmpty || { + if (!tailDefined) false + else { + // Two-iterator trick (2x & 1x speed) for cycle detection. + var those = this + var these = tail + while (those ne these) { + if (these.isEmpty) return true + if (!these.tailDefined) return false + these = these.tail + if (these.isEmpty) return true + if (!these.tailDefined) return false + these = these.tail + if (those eq these) return false + those = those.tail + } + false // Cycle detected + } } /** Create a new stream which contains all elements of this stream followed by @@ -460,8 +499,18 @@ self => ) else super.flatMap(f)(bf) + override private[scala] def filterImpl(p: A => Boolean, isFlipped: Boolean): Stream[A] = { + // optimization: drop leading prefix of elems for which f returns false + // var rest = this dropWhile (!p(_)) - forget DRY principle - GC can't collect otherwise + var rest = this + while (!rest.isEmpty && p(rest.head) == isFlipped) rest = rest.tail + // private utility func to avoid `this` on stack (would be needed for the lazy arg) + if (rest.nonEmpty) Stream.filteredTail(rest, p, isFlipped) + else Stream.Empty + } + /** Returns all the elements of this `Stream` that satisfy the predicate `p` - * in a new `Stream` - i.e. it is still a lazy data structure. The order of + * in a new `Stream` - i.e., it is still a lazy data structure. The order of * the elements is preserved * * @param p the predicate used to filter the stream. 
@@ -473,15 +522,7 @@ self => * // produces * }}} */ - override def filter(p: A => Boolean): Stream[A] = { - // optimization: drop leading prefix of elems for which f returns false - // var rest = this dropWhile (!p(_)) - forget DRY principle - GC can't collect otherwise - var rest = this - while (!rest.isEmpty && !p(rest.head)) rest = rest.tail - // private utility func to avoid `this` on stack (would be needed for the lazy arg) - if (rest.nonEmpty) Stream.filteredTail(rest, p) - else Stream.Empty - } + override def filter(p: A => Boolean): Stream[A] = filterImpl(p, isFlipped = false) // This override is only left in 2.11 because of binary compatibility, see PR #3925 override final def withFilter(p: A => Boolean): StreamWithFilter = new StreamWithFilter(p) @@ -680,7 +721,8 @@ self => * `end`. Inside, the string representations of defined elements (w.r.t. * the method `toString()`) are separated by the string `sep`. The method will * not force evaluation of undefined elements. A tail of such elements will be - * represented by a `"?"` instead. + * represented by a `"?"` instead. A cyclic stream is represented by a `"..."` + * at the point where the cycle repeats. * * @param b The [[collection.mutable.StringBuilder]] factory to which we need * to add the string elements. @@ -691,16 +733,81 @@ self => * resulting string. */ override def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = { - def loop(pre: String, these: Stream[A]) { - if (these.isEmpty) b append end - else { - b append pre append these.head - if (these.tailDefined) loop(sep, these.tail) - else b append sep append "?" append end + b append start + if (!isEmpty) { + b append head + var cursor = this + var n = 1 + if (cursor.tailDefined) { // If tailDefined, also !isEmpty + var scout = tail + if (scout.isEmpty) { + // Single element. Bail out early. + b append end + return b + } + if ((cursor ne scout) && scout.tailDefined) { + cursor = scout + scout = scout.tail + // Use 2x 1x iterator trick for cycle detection; slow iterator can add strings + while ((cursor ne scout) && scout.tailDefined) { + b append sep append cursor.head + n += 1 + cursor = cursor.tail + scout = scout.tail + if (scout.tailDefined) scout = scout.tail + } + } + if (!scout.tailDefined) { // Not a cycle, scout hit an end + while (cursor ne scout) { + b append sep append cursor.head + n += 1 + cursor = cursor.tail + } + } + else { + // Cycle. + // If we have a prefix of length P followed by a cycle of length C, + // the scout will be at position (P%C) in the cycle when the cursor + // enters it at P. They'll then collide when the scout advances another + // C - (P%C) ahead of the cursor. + // If we run the scout P farther, then it will be at the start of + // the cycle: (C - (P%C) + (P%C)) == C == 0. So if another runner + // starts at the beginning of the prefix, they'll collide exactly at + // the start of the loop. + var runner = this + var k = 0 + while (runner ne scout) { + runner = runner.tail + scout = scout.tail + k += 1 + } + // Now runner and scout are at the beginning of the cycle. Advance + // cursor, adding to string, until it hits; then we'll have covered + // everything once. If cursor is already at beginning, we'd better + // advance one first unless runner didn't go anywhere (in which case + // we've already looped once). 
+ if ((cursor eq scout) && (k > 0)) { + b append sep append cursor.head + n += 1 + cursor = cursor.tail + } + while (cursor ne scout) { + b append sep append cursor.head + n += 1 + cursor = cursor.tail + } + // Subtract prefix length from total length for cycle reporting. + // (Not currently used, but probably a good idea for the future.) + n -= k + } + } + if (!cursor.isEmpty) { + // Either undefined or cyclic; we can check with tailDefined + if (!cursor.tailDefined) b append sep append "?" + else b append sep append "..." } } - b append start - loop("", this) + b append end b } @@ -771,7 +878,7 @@ self => * @return A new `Stream` containing everything but the last element. If your * `Stream` represents an infinite series, this method will not return. * - * @throws `Predef.UnsupportedOperationException` if the stream is empty. + * @throws UnsupportedOperationException if the stream is empty. */ override def init: Stream[A] = if (isEmpty) super.init @@ -839,7 +946,7 @@ self => * * @param p the test predicate. * @return A new `Stream` representing the results of applying `p` to the - * oringal `Stream`. + * original `Stream`. * * @example {{{ * // Assume we have a Stream that takes the first 20 natural numbers @@ -1179,8 +1286,8 @@ object Stream extends SeqFactory[Stream] { else cons(start, range(start + step, end, step)) } - private[immutable] def filteredTail[A](stream: Stream[A], p: A => Boolean) = { - cons(stream.head, stream.tail filter p) + private[immutable] def filteredTail[A](stream: Stream[A], p: A => Boolean, isFlipped: Boolean) = { + cons(stream.head, stream.tail.filterImpl(p, isFlipped)) } private[immutable] def collectedTail[A, B, That](head: B, stream: Stream[A], pf: PartialFunction[A, B], bf: CanBuildFrom[Stream[A], B, That]) = { diff --git a/src/library/scala/collection/immutable/StreamViewLike.scala b/src/library/scala/collection/immutable/StreamViewLike.scala index c2eb85815d57..4d7eaeff2a21 100644 --- a/src/library/scala/collection/immutable/StreamViewLike.scala +++ b/src/library/scala/collection/immutable/StreamViewLike.scala @@ -53,6 +53,7 @@ extends SeqView[A, Coll] /** boilerplate */ protected override def newForced[B](xs: => scala.collection.GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B] protected override def newAppended[B >: A](that: scala.collection.GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B] + protected override def newPrepended[B >: A](that: scala.collection.GenTraversable[B]): Transformed[B] = new { protected[this] val fst = that } with AbstractTransformed[B] with Prepended[B] protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B] protected override def newFlatMapped[B](f: A => scala.collection.GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B] protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered @@ -67,7 +68,6 @@ extends SeqView[A, Coll] protected override def newPatched[B >: A](_from: Int, _patch: scala.collection.GenSeq[B], _replaced: Int): Transformed[B] = { new { val from = _from; val patch = _patch; val replaced = _replaced } with AbstractTransformed[B] with Patched[B] } - protected override def newPrepended[B >: A](elem: B): Transformed[B] = new { protected[this] val fst = elem } with AbstractTransformed[B] with Prepended[B] 
override def stringPrefix = "StreamView" } diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala index 8e1d950d00fc..f0daaf25a563 100644 --- a/src/library/scala/collection/immutable/StringLike.scala +++ b/src/library/scala/collection/immutable/StringLike.scala @@ -121,37 +121,43 @@ self => } /** Return all lines in this string in an iterator, excluding trailing line - * end characters, i.e. apply `.stripLineEnd` to all lines + * end characters, i.e., apply `.stripLineEnd` to all lines * returned by `linesWithSeparators`. */ def lines: Iterator[String] = linesWithSeparators map (line => new WrappedString(line).stripLineEnd) /** Return all lines in this string in an iterator, excluding trailing line - * end characters, i.e. apply `.stripLineEnd` to all lines + * end characters, i.e., apply `.stripLineEnd` to all lines * returned by `linesWithSeparators`. */ @deprecated("Use `lines` instead.","2.11.0") def linesIterator: Iterator[String] = linesWithSeparators map (line => new WrappedString(line).stripLineEnd) - /** Returns this string with first character converted to upper case */ + /** Returns this string with first character converted to upper case. + * If the first character of the string is capitalized, it is returned unchanged. + */ def capitalize: String = if (toString == null) null else if (toString.length == 0) "" + else if (toString.charAt(0).isUpper) toString else { val chars = toString.toCharArray chars(0) = chars(0).toUpper new String(chars) } - /** Returns this string with the given `prefix` stripped. */ + /** Returns this string with the given `prefix` stripped. If this string does not + * start with `prefix`, it is returned unchanged. + */ def stripPrefix(prefix: String) = if (toString.startsWith(prefix)) toString.substring(prefix.length) else toString /** Returns this string with the given `suffix` stripped. If this string does not - * end with `suffix`, it is returned unchanged. */ + * end with `suffix`, it is returned unchanged. + */ def stripSuffix(suffix: String) = if (toString.endsWith(suffix)) toString.substring(0, toString.length() - suffix.length) else toString @@ -224,31 +230,31 @@ self => def r(groupNames: String*): Regex = new Regex(toString, groupNames: _*) /** - * @throws `java.lang.IllegalArgumentException` - If the string does not contain a parsable boolean. + * @throws java.lang.IllegalArgumentException - If the string does not contain a parsable boolean. */ def toBoolean: Boolean = parseBoolean(toString) /** - * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable byte. + * @throws java.lang.NumberFormatException - If the string does not contain a parsable byte. */ def toByte: Byte = java.lang.Byte.parseByte(toString) /** - * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable short. + * @throws java.lang.NumberFormatException - If the string does not contain a parsable short. */ def toShort: Short = java.lang.Short.parseShort(toString) /** - * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable int. + * @throws java.lang.NumberFormatException - If the string does not contain a parsable int. */ def toInt: Int = java.lang.Integer.parseInt(toString) /** - * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable long. + * @throws java.lang.NumberFormatException - If the string does not contain a parsable long. 
*/ def toLong: Long = java.lang.Long.parseLong(toString) /** - * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable float. + * @throws java.lang.NumberFormatException - If the string does not contain a parsable float. */ def toFloat: Float = java.lang.Float.parseFloat(toString) /** - * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable double. + * @throws java.lang.NumberFormatException - If the string does not contain a parsable double. */ def toDouble: Double = java.lang.Double.parseDouble(toString) @@ -281,7 +287,7 @@ self => * understands. * * @param args the arguments used to instantiating the pattern. - * @throws `java.lang.IllegalArgumentException` + * @throws java.lang.IllegalArgumentException */ def format(args : Any*): String = java.lang.String.format(toString, args map unwrapArg: _*) @@ -298,7 +304,7 @@ self => * * @param l an instance of `java.util.Locale` * @param args the arguments used to instantiating the pattern. - * @throws `java.lang.IllegalArgumentException` + * @throws java.lang.IllegalArgumentException */ def formatLocal(l: java.util.Locale, args: Any*): String = java.lang.String.format(l, toString, args map unwrapArg: _*) diff --git a/src/library/scala/collection/immutable/Traversable.scala b/src/library/scala/collection/immutable/Traversable.scala index 775d635faefa..5fc0607a0072 100644 --- a/src/library/scala/collection/immutable/Traversable.scala +++ b/src/library/scala/collection/immutable/Traversable.scala @@ -29,7 +29,7 @@ trait Traversable[+A] extends scala.collection.Traversable[A] } /** $factoryInfo - * The current default implementation of a $Coll is a `Vector`. + * The current default implementation of a $Coll is a `List`. * @define coll immutable traversable collection * @define Coll `immutable.Traversable` */ diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index 8cc99a53e6a5..662075cd9369 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -101,8 +101,8 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi else new TreeMap(RB.slice(tree, from, until)) } - override def dropRight(n: Int) = take(size - n) - override def takeRight(n: Int) = drop(size - n) + override def dropRight(n: Int) = take(size - math.max(n, 0)) + override def takeRight(n: Int) = drop(size - math.max(n, 0)) override def splitAt(n: Int) = (take(n), drop(n)) private[this] def countWhile(p: ((A, B)) => Boolean): Int = { diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 681dbbd1a81d..7378211db085 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -87,8 +87,8 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin else newSet(RB.slice(tree, from, until)) } - override def dropRight(n: Int) = take(size - n) - override def takeRight(n: Int) = drop(size - n) + override def dropRight(n: Int) = take(size - math.max(n, 0)) + override def takeRight(n: Int) = drop(size - math.max(n, 0)) override def splitAt(n: Int) = (take(n), drop(n)) private[this] def countWhile(p: A => Boolean): Int = { diff --git a/src/library/scala/collection/mutable/AVLTree.scala b/src/library/scala/collection/mutable/AVLTree.scala index de09bb2040bd..cc2acb74d41c 100644 --- 
a/src/library/scala/collection/mutable/AVLTree.scala +++ b/src/library/scala/collection/mutable/AVLTree.scala @@ -14,8 +14,8 @@ package mutable * An immutable AVL Tree implementation formerly used by mutable.TreeSet * * @author Lucien Pereira - * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11.0") */ +@deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11.2") private[mutable] sealed trait AVLTree[+A] extends Serializable { def balance: Int diff --git a/src/library/scala/collection/mutable/AnyRefMap.scala b/src/library/scala/collection/mutable/AnyRefMap.scala index 47fb66744e16..ed6ca1939d00 100644 --- a/src/library/scala/collection/mutable/AnyRefMap.scala +++ b/src/library/scala/collection/mutable/AnyRefMap.scala @@ -224,7 +224,7 @@ extends AbstractMap[K, V] override def put(key: K, value: V): Option[V] = { val h = hashOf(key) val k = key - var i = seekEntryOrOpen(h, k) + val i = seekEntryOrOpen(h, k) if (i < 0) { val j = i & IndexMask _hashes(j) = h @@ -251,7 +251,7 @@ extends AbstractMap[K, V] override def update(key: K, value: V): Unit = { val h = hashOf(key) val k = key - var i = seekEntryOrOpen(h, k) + val i = seekEntryOrOpen(h, k) if (i < 0) { val j = i & IndexMask _hashes(j) = h @@ -335,6 +335,24 @@ extends AbstractMap[K, V] arm } + override def +[V1 >: V](kv: (K, V1)): AnyRefMap[K, V1] = { + val arm = clone().asInstanceOf[AnyRefMap[K, V1]] + arm += kv + arm + } + + override def ++[V1 >: V](xs: GenTraversableOnce[(K, V1)]): AnyRefMap[K, V1] = { + val arm = clone().asInstanceOf[AnyRefMap[K, V1]] + xs.foreach(kv => arm += kv) + arm + } + + override def updated[V1 >: V](key: K, value: V1): AnyRefMap[K, V1] = { + val arm = clone().asInstanceOf[AnyRefMap[K, V1]] + arm += (key, value) + arm + } + private[this] def foreachElement[A,B](elems: Array[AnyRef], f: A => B) { var i,j = 0 while (i < _hashes.length & j < _size) { diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala index 2d43b352c5b3..011fd415ee4b 100644 --- a/src/library/scala/collection/mutable/ArrayBuffer.scala +++ b/src/library/scala/collection/mutable/ArrayBuffer.scala @@ -30,8 +30,8 @@ import parallel.mutable.ParArray * * @tparam A the type of this arraybuffer's elements. * - * @define Coll `ArrayBuffer` - * @define coll arraybuffer + * @define Coll `mutable.ArrayBuffer` + * @define coll array buffer * @define thatinfo the class of the returned collection. In the standard library configuration, * `That` is always `ArrayBuffer[B]` because an implicit of type `CanBuildFrom[ArrayBuffer, B, ArrayBuffer[B]]` * is defined in object `ArrayBuffer`. @@ -128,21 +128,22 @@ class ArrayBuffer[A](override protected val initialSize: Int) override def ++=:(xs: TraversableOnce[A]): this.type = { insertAll(0, xs.toTraversable); this } /** Inserts new elements at the index `n`. Opposed to method - * `update`, this method will not replace an element with a + * `update`, this method will not replace an element with a new * one. Instead, it will insert a new element at index `n`. * * @param n the index where a new element will be inserted. * @param seq the traversable object providing all elements to insert. - * @throws Predef.IndexOutOfBoundsException if `n` is out of bounds. 
+ * @throws IndexOutOfBoundsException if `n` is out of bounds. */ def insertAll(n: Int, seq: Traversable[A]) { if (n < 0 || n > size0) throw new IndexOutOfBoundsException(n.toString) - val xs = seq.toList - val len = xs.length - ensureSize(size0 + len) + val len = seq.size + val newSize = size0 + len + ensureSize(newSize) + copy(n, n + len, size0 - n) - xs.copyToArray(array.asInstanceOf[scala.Array[Any]], n) - size0 += len + seq.copyToArray(array.asInstanceOf[Array[Any]], n) + size0 = newSize } /** Removes the element on a given index position. It takes time linear in @@ -150,7 +151,7 @@ class ArrayBuffer[A](override protected val initialSize: Int) * * @param n the index which refers to the first element to delete. * @param count the number of elements to delete - * @throws Predef.IndexOutOfBoundsException if `n` is out of bounds. + * @throws IndexOutOfBoundsException if `n` is out of bounds. */ override def remove(n: Int, count: Int) { require(count >= 0, "removing negative number of elements") diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala index 00491ef20ebb..2bc41b580221 100644 --- a/src/library/scala/collection/mutable/ArrayOps.scala +++ b/src/library/scala/collection/mutable/ArrayOps.scala @@ -40,9 +40,8 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza arrayElementClass(repr.getClass) override def copyToArray[U >: T](xs: Array[U], start: Int, len: Int) { - var l = math.min(len, repr.length) - if (xs.length - start < l) l = xs.length - start max 0 - Array.copy(repr, 0, xs, start, l) + val l = len min repr.length min (xs.length - start) + if (l > 0) Array.copy(repr, 0, xs, start, l) } override def toArray[U >: T : ClassTag]: Array[U] = { diff --git a/src/library/scala/collection/mutable/ArraySeq.scala b/src/library/scala/collection/mutable/ArraySeq.scala index 577a838315ad..5a50f4fb2733 100644 --- a/src/library/scala/collection/mutable/ArraySeq.scala +++ b/src/library/scala/collection/mutable/ArraySeq.scala @@ -87,7 +87,7 @@ extends AbstractSeq[A] */ override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) { val len1 = len min (xs.length - start) min length - Array.copy(array, 0, xs, start, len1) + if (len1 > 0) Array.copy(array, 0, xs, start, len1) } override def clone(): ArraySeq[A] = { diff --git a/src/library/scala/collection/mutable/BitSet.scala b/src/library/scala/collection/mutable/BitSet.scala index 43d23acc1a7c..faa415531727 100644 --- a/src/library/scala/collection/mutable/BitSet.scala +++ b/src/library/scala/collection/mutable/BitSet.scala @@ -110,7 +110,7 @@ class BitSet(protected final var elems: Array[Long]) extends AbstractSet[Int] * @return the bitset itself. */ def |= (other: BitSet): this.type = { - ensureCapacity(other.nwords) + ensureCapacity(other.nwords - 1) for (i <- 0 until other.nwords) elems(i) = elems(i) | other.word(i) this @@ -121,7 +121,7 @@ class BitSet(protected final var elems: Array[Long]) extends AbstractSet[Int] * @return the bitset itself. */ def &= (other: BitSet): this.type = { - ensureCapacity(other.nwords) + ensureCapacity(other.nwords - 1) for (i <- 0 until other.nwords) elems(i) = elems(i) & other.word(i) this @@ -132,7 +132,7 @@ class BitSet(protected final var elems: Array[Long]) extends AbstractSet[Int] * @return the bitset itself. 
*/ def ^= (other: BitSet): this.type = { - ensureCapacity(other.nwords) + ensureCapacity(other.nwords - 1) for (i <- 0 until other.nwords) elems(i) = elems(i) ^ other.word(i) this @@ -143,7 +143,7 @@ class BitSet(protected final var elems: Array[Long]) extends AbstractSet[Int] * @return the bitset itself. */ def &~= (other: BitSet): this.type = { - ensureCapacity(other.nwords) + ensureCapacity(other.nwords - 1) for (i <- 0 until other.nwords) elems(i) = elems(i) & ~other.word(i) this diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala index 3c57387c0356..8d2453862008 100644 --- a/src/library/scala/collection/mutable/BufferLike.scala +++ b/src/library/scala/collection/mutable/BufferLike.scala @@ -211,13 +211,6 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]] */ override def stringPrefix: String = "Buffer" - /** Returns the current evolving(!) state of this buffer as a read-only sequence. - * - * @return A sequence that forwards to this buffer for all its operations. - */ - @deprecated("The returned sequence changes as this buffer is mutated. For an immutable copy, use, e.g., toList.", "2.11.0") - def readOnly: scala.collection.Seq[A] = toSeq - /** Creates a new collection containing both the elements of this collection and the provided * traversable object. * diff --git a/src/library/scala/collection/mutable/BufferProxy.scala b/src/library/scala/collection/mutable/BufferProxy.scala index d9632cce91be..2d52831d37ac 100644 --- a/src/library/scala/collection/mutable/BufferProxy.scala +++ b/src/library/scala/collection/mutable/BufferProxy.scala @@ -43,8 +43,6 @@ trait BufferProxy[A] extends Buffer[A] with Proxy { */ def +=(elem: A): this.type = { self.+=(elem); this } - override def readOnly = self.readOnly - /** Appends a number of elements provided by a traversable object. * * @param xs the traversable object. 
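Both `BufferLike.readOnly` and its `BufferProxy` forwarder are removed above; a small sketch of the replacement suggested by the old deprecation message (the buffer contents are illustrative):

    // take an immutable snapshot instead of the removed read-only view:
    val buf = scala.collection.mutable.ArrayBuffer(1, 2, 3)
    val snapshot: List[Int] = buf.toList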
diff --git a/src/library/scala/collection/mutable/DoubleLinkedList.scala b/src/library/scala/collection/mutable/DoubleLinkedList.scala index 671b79f8c25e..fd95e74fbcb2 100644 --- a/src/library/scala/collection/mutable/DoubleLinkedList.scala +++ b/src/library/scala/collection/mutable/DoubleLinkedList.scala @@ -41,7 +41,7 @@ import generic._ * @define mayNotTerminateInf * @define willNotTerminateInf */ -@deprecated("Low-level linked lists are deprecated due to idiosyncracies in interface and incomplete features.", "2.11.0") +@deprecated("Low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features.", "2.11.0") @SerialVersionUID(-8144992287952814767L) class DoubleLinkedList[A]() extends AbstractSeq[A] with LinearSeq[A] diff --git a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala index a43fe34c9995..aafe34f50a60 100644 --- a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala +++ b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala @@ -56,10 +56,10 @@ import scala.annotation.migration * @define Coll `DoubleLinkedList` * @define coll double linked list */ -@deprecated("Low-level linked lists are deprecated due to idiosyncracies in interface and incomplete features.", "2.11.0") +@deprecated("Low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features.", "2.11.0") trait DoubleLinkedListLike[A, This <: Seq[A] with DoubleLinkedListLike[A, This]] extends SeqLike[A, This] with LinkedListLike[A, This] { self => - /** A reference to the node in the linked list preceeding the current node. */ + /** A reference to the node in the linked list preceding the current node. */ var prev: This = _ // returns that list if this list is empty diff --git a/src/library/scala/collection/mutable/IndexedSeqView.scala b/src/library/scala/collection/mutable/IndexedSeqView.scala index 31a474996030..7acdeeff1807 100644 --- a/src/library/scala/collection/mutable/IndexedSeqView.scala +++ b/src/library/scala/collection/mutable/IndexedSeqView.scala @@ -50,7 +50,7 @@ self => trait Sliced extends super.Sliced with Transformed[A] { override def length = endpoints.width def update(idx: Int, elem: A) = - if (idx + from < until) self.update(idx + from, elem) + if (idx >= 0 && idx + from < until) self.update(idx + from, elem) else throw new IndexOutOfBoundsException(idx.toString) } diff --git a/src/library/scala/collection/mutable/LinkedList.scala b/src/library/scala/collection/mutable/LinkedList.scala index 092698ac0b1c..b3500367af1a 100644 --- a/src/library/scala/collection/mutable/LinkedList.scala +++ b/src/library/scala/collection/mutable/LinkedList.scala @@ -76,7 +76,7 @@ import generic._ * }}} */ @SerialVersionUID(-7308240733518833071L) -@deprecated("Low-level linked lists are deprecated due to idiosyncracies in interface and incomplete features.", "2.11.0") +@deprecated("Low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features.", "2.11.0") class LinkedList[A]() extends AbstractSeq[A] with LinearSeq[A] with GenericTraversableTemplate[A, LinkedList] diff --git a/src/library/scala/collection/mutable/LinkedListLike.scala b/src/library/scala/collection/mutable/LinkedListLike.scala index 987b83d23bee..a9d385bc5be6 100644 --- a/src/library/scala/collection/mutable/LinkedListLike.scala +++ b/src/library/scala/collection/mutable/LinkedListLike.scala @@ -55,7 +55,7 @@ import scala.annotation.tailrec * * }}} */ 
-@deprecated("Low-level linked lists are deprecated due to idiosyncracies in interface and incomplete features.", "2.11.0") +@deprecated("Low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features.", "2.11.0") trait LinkedListLike[A, This <: Seq[A] with LinkedListLike[A, This]] extends SeqLike[A, This] { self => var elem: A = _ diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala index 5e838d0d88bc..1906c47f618c 100644 --- a/src/library/scala/collection/mutable/ListBuffer.scala +++ b/src/library/scala/collection/mutable/ListBuffer.scala @@ -12,7 +12,7 @@ package mutable import generic._ import immutable.{List, Nil, ::} -import java.io._ +import java.io.{ObjectOutputStream, ObjectInputStream} import scala.annotation.migration /** A `Buffer` implementation back up by a list. It provides constant time @@ -132,7 +132,7 @@ final class ListBuffer[A] * * @param n the index of the element to replace. * @param x the new element. - * @throws Predef.IndexOutOfBoundsException if `n` is out of bounds. + * @throws IndexOutOfBoundsException if `n` is out of bounds. */ def update(n: Int, x: A) { // We check the bounds early, so that we don't trigger copying. @@ -217,7 +217,7 @@ final class ListBuffer[A] * * @param n the index where a new element will be inserted. * @param seq the iterable object providing all elements to insert. - * @throws Predef.IndexOutOfBoundsException if `n` is out of bounds. + * @throws IndexOutOfBoundsException if `n` is out of bounds. */ def insertAll(n: Int, seq: Traversable[A]) { // We check the bounds early, so that we don't trigger copying. @@ -330,7 +330,7 @@ final class ListBuffer[A] * @param n the index which refers to the element to delete. * @return n the element that was formerly at position `n`. * @note an element must exists at position `n`. - * @throws Predef.IndexOutOfBoundsException if `n` is out of bounds. + * @throws IndexOutOfBoundsException if `n` is out of bounds. */ def remove(n: Int): A = { if (n < 0 || n >= len) throw new IndexOutOfBoundsException(n.toString()) @@ -408,9 +408,6 @@ final class ListBuffer[A] } } - @deprecated("The result of this method will change along with this buffer, which is often not what's expected.", "2.11.0") - override def readOnly: List[A] = start - // Private methods /** Copy contents of this buffer */ @@ -426,7 +423,7 @@ final class ListBuffer[A] } override def equals(that: Any): Boolean = that match { - case that: ListBuffer[_] => this.readOnly equals that.readOnly + case that: ListBuffer[_] => this.start equals that.start case _ => super.equals(that) } diff --git a/src/library/scala/collection/mutable/LongMap.scala b/src/library/scala/collection/mutable/LongMap.scala index 984ae6f7ccb8..1eb12d817c41 100644 --- a/src/library/scala/collection/mutable/LongMap.scala +++ b/src/library/scala/collection/mutable/LongMap.scala @@ -19,7 +19,7 @@ import generic.CanBuildFrom * on a map that will no longer have elements removed but will be * used heavily may save both time and storage space. * - * This map is not indended to contain more than 2^29 entries (approximately + * This map is not intended to contain more than 2^29 entries (approximately * 500 million). The maximum capacity is 2^30, but performance will degrade * rapidly as 2^30 is approached. 
* @@ -81,7 +81,7 @@ extends AbstractMap[Long, V] private def toIndex(k: Long): Int = { // Part of the MurmurHash3 32 bit finalizer val h = ((k ^ (k >>> 32)) & 0xFFFFFFFFL).toInt - var x = (h ^ (h >>> 16)) * 0x85EBCA6B + val x = (h ^ (h >>> 16)) * 0x85EBCA6B (x ^ (x >>> 13)) & mask } @@ -311,7 +311,7 @@ extends AbstractMap[Long, V] } } else { - var i = seekEntryOrOpen(key) + val i = seekEntryOrOpen(key) if (i < 0) { val j = i & IndexMask _keys(j) = key @@ -388,12 +388,14 @@ extends AbstractMap[Long, V] nextPair = anotherPair anotherPair = null } - nextPair = null + else nextPair = null ans } } override def foreach[A](f: ((Long,V)) => A) { + if ((extraKeys & 1) == 1) f((0L, zeroValue.asInstanceOf[V])) + if ((extraKeys & 2) == 2) f((Long.MinValue, minValue.asInstanceOf[V])) var i,j = 0 while (i < _keys.length & j < _size) { val k = _keys(i) @@ -403,8 +405,6 @@ extends AbstractMap[Long, V] } i += 1 } - if ((extraKeys & 1) == 1) f((0L, zeroValue.asInstanceOf[V])) - if ((extraKeys & 2) == 2) f((Long.MinValue, minValue.asInstanceOf[V])) } override def clone(): LongMap[V] = { @@ -415,8 +415,28 @@ extends AbstractMap[Long, V] lm } + override def +[V1 >: V](kv: (Long, V1)): LongMap[V1] = { + val lm = clone().asInstanceOf[LongMap[V1]] + lm += kv + lm + } + + override def ++[V1 >: V](xs: GenTraversableOnce[(Long, V1)]): LongMap[V1] = { + val lm = clone().asInstanceOf[LongMap[V1]] + xs.foreach(kv => lm += kv) + lm + } + + override def updated[V1 >: V](key: Long, value: V1): LongMap[V1] = { + val lm = clone().asInstanceOf[LongMap[V1]] + lm += (key, value) + lm + } + /** Applies a function to all keys of this map. */ def foreachKey[A](f: Long => A) { + if ((extraKeys & 1) == 1) f(0L) + if ((extraKeys & 2) == 2) f(Long.MinValue) var i,j = 0 while (i < _keys.length & j < _size) { val k = _keys(i) @@ -426,12 +446,12 @@ extends AbstractMap[Long, V] } i += 1 } - if ((extraKeys & 1) == 1) f(0L) - if ((extraKeys & 2) == 2) f(Long.MinValue) } /** Applies a function to all values of this map. */ def foreachValue[A](f: V => A) { + if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V]) + if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V]) var i,j = 0 while (i < _keys.length & j < _size) { val k = _keys(i) @@ -441,8 +461,6 @@ extends AbstractMap[Long, V] } i += 1 } - if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V]) - if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V]) } /** Creates a new `LongMap` with different values. @@ -450,6 +468,8 @@ extends AbstractMap[Long, V] * collection immediately. */ def mapValuesNow[V1](f: V => V1): LongMap[V1] = { + val zv = if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null + val mv = if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null val lm = new LongMap[V1](LongMap.exceptionDefault, 1, false) val kz = java.util.Arrays.copyOf(_keys, _keys.length) val vz = new Array[AnyRef](_values.length) @@ -462,8 +482,6 @@ extends AbstractMap[Long, V] } i += 1 } - val zv = if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null - val mv = if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null lm.initializeTo(mask, extraKeys, zv, mv, _size, _vacant, kz, vz) lm } @@ -472,6 +490,8 @@ extends AbstractMap[Long, V] * Note: the default, if any, is not transformed. 
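The LongMap hunks above add non-destructive `+`, `++` and `updated`, and move the handling of the two specially stored keys (`0L` and `Long.MinValue`) to the front of `foreach`/`foreachKey`/`foreachValue`. A brief usage sketch under those additions (object name and values are illustrative):

```scala
import scala.collection.mutable.LongMap

object LongMapOpsDemo extends App {
  val m = LongMap(0L -> "zero", 1L -> "one")   // 0L is one of the two specially stored keys

  val m2 = m + (2L -> "two")                   // new map; m itself is unchanged
  val m3 = m ++ Seq(3L -> "three", 4L -> "four")
  val m4 = m.updated(1L, "uno")

  // foreach visits the two special keys (0L, Long.MinValue) first, then the open-addressed entries.
  m.foreach { case (k, v) => println(s"$k -> $v") }
  println(s"${m2.size} ${m3.size} ${m4.size}")
}
```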
*/ def transformValues(f: V => V): this.type = { + if ((extraKeys & 1) == 1) zeroValue = f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef] + if ((extraKeys & 2) == 2) minValue = f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef] var i,j = 0 while (i < _keys.length & j < _size) { val k = _keys(i) @@ -481,26 +501,8 @@ extends AbstractMap[Long, V] } i += 1 } - if ((extraKeys & 1) == 1) zeroValue = f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef] - if ((extraKeys & 2) == 2) minValue = f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef] this } - - /* - override def toString = { - val sb = new StringBuilder("LongMap(") - var n = 0 - foreach{ case (k,v) => - if (n > 0) sb ++= ", " - sb ++= k.toString - sb ++= " -> " - sb ++= v.toString - n += 1 - } - sb += ')' - sb.result - } - */ } object LongMap { @@ -557,7 +559,7 @@ object LongMap { /** Creates a new `LongMap` from keys and values. * Equivalent to but more efficient than `LongMap((keys zip values): _*)`. */ - def fromZip[V](keys: Iterable[Long], values: Iterable[V]): LongMap[V] = { + def fromZip[V](keys: collection.Iterable[Long], values: collection.Iterable[V]): LongMap[V] = { val sz = math.min(keys.size, values.size) val lm = new LongMap[V](sz * 2) val ki = keys.iterator diff --git a/src/library/scala/collection/mutable/MapLike.scala b/src/library/scala/collection/mutable/MapLike.scala index 6230fc23aa19..42000e59188b 100644 --- a/src/library/scala/collection/mutable/MapLike.scala +++ b/src/library/scala/collection/mutable/MapLike.scala @@ -18,6 +18,8 @@ import scala.collection.parallel.mutable.ParMap /** A template trait for mutable maps. * $mapNote * $mapTags + * @define Coll `mutable.Map` + * @define coll mutable map * @since 2.8 * * @define mapNote @@ -59,6 +61,18 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]] override protected[this] def newBuilder: Builder[(A, B), This] = empty protected[this] override def parCombiner = ParMap.newCombiner[A, B] + + /** Converts this $coll to a sequence. + * + * ```Note```: assumes a fast `size` method. Subclasses should override if this is not true. + */ + override def toSeq: collection.Seq[(A, B)] = { + // ArrayBuffer for efficiency, preallocated to the right size. + val result = new ArrayBuffer[(A, B)](size) + foreach(result += _) + result + } + /** Adds a new key/value pair to this map and optionally returns previously bound value. * If the map already contains a @@ -131,7 +145,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]] /** Creates a new map containing the key/value mappings provided by the specified traversable object * and all the key/value mappings of this map. * - * Note that existing mappings from this map with the same key as those in `xs` will be overriden. + * Note that existing mappings from this map with the same key as those in `xs` will be overridden. * * @param xs the traversable object. * @return a new map containing mappings of this map and those provided by `xs`. diff --git a/src/library/scala/collection/mutable/MultiMap.scala b/src/library/scala/collection/mutable/MultiMap.scala index 78dfc35268e9..ac2ebf31d8bd 100644 --- a/src/library/scala/collection/mutable/MultiMap.scala +++ b/src/library/scala/collection/mutable/MultiMap.scala @@ -65,10 +65,9 @@ trait MultiMap[A, B] extends Map[A, Set[B]] { */ protected def makeSet: Set[B] = new HashSet[B] - /** Assigns the specified `value` to a specified `key`, replacing - * the existing value assigned to that `key` if it is equal to - * the specified value. 
Otherwise, simply adds another binding to - * the `key`. + /** Assigns the specified `value` to a specified `key`. If the key + * already has a binding equal to `value`, nothing is changed; + * otherwise a new binding is added for that `key`. * * @param key The key to which to bind the new value. * @param value The value to bind to the key. diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala index a0d3ee0ef098..b852a4747b0c 100644 --- a/src/library/scala/collection/mutable/MutableList.scala +++ b/src/library/scala/collection/mutable/MutableList.scala @@ -22,6 +22,8 @@ import immutable.{List, Nil} * @author Martin Odersky * @version 2.8 * @since 1 + * @define Coll `mutable.MutableList` + * @define coll mutable list * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#mutable_lists "Scala's Collection Library overview"]] * section on `Mutable Lists` for more information. */ diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala index aade2ed6fbaf..24f5761cf50b 100644 --- a/src/library/scala/collection/mutable/OpenHashMap.scala +++ b/src/library/scala/collection/mutable/OpenHashMap.scala @@ -31,7 +31,7 @@ object OpenHashMap { /** A mutable hash map based on an open hashing scheme. The precise scheme is * undefined, but it should make a reasonable effort to ensure that an insert - * with consecutive hash codes is not unneccessarily penalised. In particular, + * with consecutive hash codes is not unnecessarily penalised. In particular, * mappings of consecutive integer keys should work without significant * performance loss. * diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala index b949bec48a43..d3c4161e3bf0 100644 --- a/src/library/scala/collection/mutable/PriorityQueue.scala +++ b/src/library/scala/collection/mutable/PriorityQueue.scala @@ -16,6 +16,11 @@ import generic._ * To prioritize elements of type A there must be an implicit * Ordering[A] available at creation. * + * Only the `dequeue` and `dequeueAll` methods will return elements in priority + * order (while removing elements from the heap). Standard collection methods + * including `drop` and `iterator` will remove or traverse the heap in whichever + * order seems most convenient. + * * @tparam A type of the elements in this priority queue. * @param ord implicit ordering used to compare the elements of type `A`. * * @@ -121,7 +126,7 @@ class PriorityQueue[A](implicit val ord: Ordering[A]) /** Returns the element with the highest priority in the queue, * and removes this element from the queue. * - * @throws Predef.NoSuchElementException + * @throws java.util.NoSuchElementException * @return the element with the highest priority. */ def dequeue(): A = @@ -242,13 +247,6 @@ class PriorityQueue[A](implicit val ord: Ordering[A]) * @return a priority queue with the same elements.
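The new PriorityQueue note above is worth illustrating: only `dequeue`/`dequeueAll` drain the heap in priority order, while `iterator`, `drop` and similar methods expose whatever the internal heap layout happens to be. A quick sketch (the iterator order shown is unspecified by design):

```scala
import scala.collection.mutable.PriorityQueue

object PQOrderDemo extends App {
  val pq = PriorityQueue(3, 1, 4, 1, 5, 9, 2, 6)

  // dequeue (and dequeueAll) remove elements highest-priority first.
  val drained = pq.clone()
  while (drained.nonEmpty) print(drained.dequeue() + " ")   // 9 6 5 4 3 2 1 1
  println()

  // iterator, drop, toList, ... walk the backing heap array instead,
  // so their order is not the priority order.
  println(pq.iterator.toList)
}
```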
*/ override def clone(): PriorityQueue[A] = new PriorityQueue[A] ++= this.iterator - - // def printstate() { - // println("-----------------------") - // println("Size: " + resarr.p_size0) - // println("Internal array: " + resarr.p_array.toList) - // println(toString) - // } } diff --git a/src/library/scala/collection/mutable/Queue.scala b/src/library/scala/collection/mutable/Queue.scala index 7c890fe309ad..03d387a535b6 100644 --- a/src/library/scala/collection/mutable/Queue.scala +++ b/src/library/scala/collection/mutable/Queue.scala @@ -58,7 +58,7 @@ extends MutableList[A] /** Returns the first element in the queue, and removes this element * from the queue. * - * @throws Predef.NoSuchElementException + * @throws java.util.NoSuchElementException * @return the first element of the queue. */ def dequeue(): A = diff --git a/src/library/scala/collection/mutable/ResizableArray.scala b/src/library/scala/collection/mutable/ResizableArray.scala index c3047522e2cb..85a299216edd 100644 --- a/src/library/scala/collection/mutable/ResizableArray.scala +++ b/src/library/scala/collection/mutable/ResizableArray.scala @@ -74,7 +74,7 @@ trait ResizableArray[A] extends IndexedSeq[A] */ override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) { val len1 = len min (xs.length - start) min length - Array.copy(array, 0, xs, start, len1) + if (len1 > 0) Array.copy(array, 0, xs, start, len1) } //########################################################################## diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala index d749167870eb..40a5c93064e3 100644 --- a/src/library/scala/collection/mutable/SetLike.scala +++ b/src/library/scala/collection/mutable/SetLike.scala @@ -16,19 +16,20 @@ import scala.annotation.migration import parallel.mutable.ParSet /** A template trait for mutable sets of type `mutable.Set[A]`. + * + * This trait provides most of the operations of a `mutable.Set` independently of its representation. + * It is typically inherited by concrete implementations of sets. + * + * $setNote + * * @tparam A the type of the elements of the set * @tparam This the type of the set itself. * - * $setnote - * * @author Martin Odersky * @version 2.8 * @since 2.8 * - * @define setnote - * @note - * This trait provides most of the operations of a `mutable.Set` independently of its representation. - * It is typically inherited by concrete implementations of sets. + * @define setNote * * To implement a concrete mutable set, you need to provide implementations * of the following methods: @@ -36,13 +37,13 @@ import parallel.mutable.ParSet * def contains(elem: A): Boolean * def iterator: Iterator[A] * def += (elem: A): this.type - * def -= (elem: A): this.type + * def -= (elem: A): this.type * }}} * If you wish that methods like `take`, * `drop`, `filter` return the same kind of set, * you should also override: * {{{ - * def empty: This + * def empty: This * }}} * It is also good idea to override methods `foreach` and * `size` for efficiency. @@ -71,6 +72,17 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]] protected[this] override def parCombiner = ParSet.newCombiner[A] + /** Converts this $coll to a sequence. + * + * ```Note```: assumes a fast `size` method. Subclasses should override if this is not true. + */ + override def toSeq: collection.Seq[A] = { + // ArrayBuffer for efficiency, preallocated to the right size. + val result = new ArrayBuffer[A](size) + foreach(result += _) + result + } + /** Adds an element to this $coll. 
* * @param elem the element to be added @@ -207,7 +219,7 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]] /** Send a message to this scriptable object. * * @param cmd the message to send. - * @throws `Predef.UnsupportedOperationException` + * @throws UnsupportedOperationException * if the message was not understood. */ @deprecated("Scripting is deprecated.", "2.11.0") diff --git a/src/library/scala/collection/mutable/Stack.scala b/src/library/scala/collection/mutable/Stack.scala index 53b6c5993907..1a92f23b7b19 100644 --- a/src/library/scala/collection/mutable/Stack.scala +++ b/src/library/scala/collection/mutable/Stack.scala @@ -125,7 +125,7 @@ extends AbstractSeq[A] * the element from the stack. An error is signaled if there is no * element on the stack. * - * @throws Predef.NoSuchElementException + * @throws java.util.NoSuchElementException * @return the top element */ def top: A = @@ -133,7 +133,7 @@ extends AbstractSeq[A] /** Removes the top element from the stack. * - * @throws Predef.NoSuchElementException + * @throws java.util.NoSuchElementException * @return the top element */ def pop(): A = { diff --git a/src/library/scala/collection/mutable/StringBuilder.scala b/src/library/scala/collection/mutable/StringBuilder.scala index 498e9e461e58..c56d40786e4c 100644 --- a/src/library/scala/collection/mutable/StringBuilder.scala +++ b/src/library/scala/collection/mutable/StringBuilder.scala @@ -22,6 +22,8 @@ import immutable.StringLike * @author Martin Odersky * @version 2.8 * @since 2.7 + * @define Coll `mutable.IndexedSeq` + * @define coll string builder * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html# "Scala's Collection Library overview"]] * section on `StringBuilders` for more information. 
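The `toSeq` overrides added to `mutable.MapLike` and `mutable.SetLike` above copy the elements into a preallocated `ArrayBuffer`, so the returned sequence is a snapshot of the current contents rather than something backed by the live collection. A small sketch of the resulting behaviour (object name and values are illustrative):

```scala
import scala.collection.mutable

object MutableToSeqDemo extends App {
  val set = mutable.HashSet(1, 2, 3)

  // With the override above, toSeq copies the current elements into an ArrayBuffer.
  val snapshot = set.toSeq
  set += 4

  println(snapshot.sorted)   // 1, 2, 3 -- the later insertion is not visible
  println(set.toSeq.sorted)  // 1, 2, 3, 4
}
```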
*/ diff --git a/src/library/scala/collection/mutable/UnrolledBuffer.scala b/src/library/scala/collection/mutable/UnrolledBuffer.scala index 1f89199bdcd9..693c47d86ecd 100644 --- a/src/library/scala/collection/mutable/UnrolledBuffer.scala +++ b/src/library/scala/collection/mutable/UnrolledBuffer.scala @@ -300,27 +300,33 @@ object UnrolledBuffer extends ClassTagTraversableFactory[UnrolledBuffer] { if (next eq null) true else false // checks if last node was thrown out } else false - @tailrec final def insertAll(idx: Int, t: scala.collection.Traversable[T], buffer: UnrolledBuffer[T]): Unit = if (idx < size) { - // divide this node at the appropriate position and insert all into head - // update new next - val newnextnode = new Unrolled[T](0, new Array(array.length), null, buff) - Array.copy(array, idx, newnextnode.array, 0, size - idx) - newnextnode.size = size - idx - newnextnode.next = next - - // update this - nullout(idx, size) - size = idx - next = null - - // insert everything from iterable to this - var curr = this - for (elem <- t) curr = curr append elem - curr.next = newnextnode - - // try to merge the last node of this with the newnextnode - if (curr.tryMergeWithNext()) buffer.lastPtr = curr - } else insertAll(idx - size, t, buffer) + @tailrec final def insertAll(idx: Int, t: scala.collection.Traversable[T], buffer: UnrolledBuffer[T]): Unit = { + if (idx < size) { + // divide this node at the appropriate position and insert all into head + // update new next + val newnextnode = new Unrolled[T](0, new Array(array.length), null, buff) + Array.copy(array, idx, newnextnode.array, 0, size - idx) + newnextnode.size = size - idx + newnextnode.next = next + + // update this + nullout(idx, size) + size = idx + next = null + + // insert everything from iterable to this + var curr = this + for (elem <- t) curr = curr append elem + curr.next = newnextnode + + // try to merge the last node of this with the newnextnode + if (curr.tryMergeWithNext()) buffer.lastPtr = curr + } + else if (idx == size) { + var curr = this + for (elem <- t) curr = curr append elem + } else insertAll(idx - size, t, buffer) + } private def nullout(from: Int, until: Int) { var idx = from while (idx < until) { diff --git a/src/library/scala/collection/package.scala b/src/library/scala/collection/package.scala index 26b061b2a5d6..6a2b6de75a85 100644 --- a/src/library/scala/collection/package.scala +++ b/src/library/scala/collection/package.scala @@ -18,7 +18,7 @@ package scala * * == Using Collections == * - * It is convienient to treat all collections as either + * It is convenient to treat all collections as either * a [[scala.collection.Traversable]] or [[scala.collection.Iterable]], as * these traits define the vast majority of operations * on a collection. 
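The restructured `insertAll` above adds an explicit `idx == size` branch, so inserting exactly at the end of an `UnrolledBuffer` appends the elements. A minimal sketch (values are illustrative):

```scala
import scala.collection.mutable.UnrolledBuffer

object UnrolledInsertDemo extends App {
  val buf = UnrolledBuffer(1, 2, 3)

  buf.insertAll(1, Seq(10, 11))      // insert in the middle: splits the node as before
  buf.insertAll(buf.size, Seq(99))   // insert at the end: handled by the new idx == size branch

  println(buf)   // expected: UnrolledBuffer(1, 10, 11, 2, 3, 99)
}
```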
diff --git a/src/library/scala/collection/parallel/ParIterable.scala b/src/library/scala/collection/parallel/ParIterable.scala index 2ceeb18eefe5..a5ba8c49adec 100644 --- a/src/library/scala/collection/parallel/ParIterable.scala +++ b/src/library/scala/collection/parallel/ParIterable.scala @@ -23,9 +23,6 @@ import scala.collection.parallel.mutable.ParArrayCombiner * * @author Aleksandar Prokopec * @since 2.9 - * - * @define Coll `ParIterable` - * @define coll parallel iterable */ trait ParIterable[+T] extends GenIterable[T] diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala index 445edd23cb0d..016255dca4f9 100644 --- a/src/library/scala/collection/parallel/ParIterableLike.scala +++ b/src/library/scala/collection/parallel/ParIterableLike.scala @@ -150,7 +150,8 @@ import scala.collection.parallel.ParallelCollectionImplicits._ * @define indexsignalling * This method will use `indexFlag` signalling capabilities. This means * that splitters may set and read the `indexFlag` state. - * + * @define Coll `ParIterable` + * @define coll parallel iterable */ trait ParIterableLike[+T, +Repr <: ParIterable[T], +Sequential <: Iterable[T] with IterableLike[T, Sequential]] extends GenIterableLike[T, Repr] @@ -743,7 +744,7 @@ self: ParIterableLike[T, Repr, Sequential] => * The index flag is initially set to maximum integer value. * * @param pred the predicate used to test the elements - * @return the longest prefix of this $coll of elements that satisy the predicate `pred` + * @return the longest prefix of this $coll of elements that satisfy the predicate `pred` */ def takeWhile(pred: T => Boolean): Repr = { val cbf = combinerFactory diff --git a/src/library/scala/collection/parallel/ParMapLike.scala b/src/library/scala/collection/parallel/ParMapLike.scala index d2b15c727a9a..ee1334ba5526 100644 --- a/src/library/scala/collection/parallel/ParMapLike.scala +++ b/src/library/scala/collection/parallel/ParMapLike.scala @@ -24,6 +24,8 @@ import scala.collection.generic.Signalling * * @tparam K the key type of the map * @tparam V the value type of the map + * @define Coll `ParMap` + * @define coll parallel map * * @author Aleksandar Prokopec * @since 2.9 diff --git a/src/library/scala/collection/parallel/ParSetLike.scala b/src/library/scala/collection/parallel/ParSetLike.scala index 4e9a2e5751c5..4feda5ff07fe 100644 --- a/src/library/scala/collection/parallel/ParSetLike.scala +++ b/src/library/scala/collection/parallel/ParSetLike.scala @@ -20,6 +20,8 @@ import scala.collection.Set * $sideeffects * * @tparam T the element type of the set + * @define Coll `ParSet` + * @define coll parallel set * * @author Aleksandar Prokopec * @since 2.9 diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala index 5f2ceac0e0d8..7bb278b03820 100644 --- a/src/library/scala/collection/parallel/RemainsIterator.scala +++ b/src/library/scala/collection/parallel/RemainsIterator.scala @@ -456,6 +456,15 @@ self => } it } + /** Drop implemented as simple eager consumption. 
*/ + override def drop(n: Int): IterableSplitter[T] = { + var i = 0 + while (i < n && hasNext) { + next() + i += 1 + } + this + } override def take(n: Int): IterableSplitter[T] = newTaken(n) override def slice(from1: Int, until1: Int): IterableSplitter[T] = newSliceInternal(newTaken(until1), from1) diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala index 65a632470e83..3a1ec7fff82f 100644 --- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala +++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala @@ -197,7 +197,7 @@ extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetC while (i < chunksz) { val v = chunkarr(i).asInstanceOf[T] val hc = trie.computeHash(v) - trie = trie.updated0(v, hc, rootbits) + trie = trie.updated0(v, hc, rootbits) // internal API, private[collection] i += 1 } i = 0 diff --git a/src/library/scala/collection/parallel/mutable/ParMapLike.scala b/src/library/scala/collection/parallel/mutable/ParMapLike.scala index 42027f5bac10..5d99394a50cd 100644 --- a/src/library/scala/collection/parallel/mutable/ParMapLike.scala +++ b/src/library/scala/collection/parallel/mutable/ParMapLike.scala @@ -22,6 +22,8 @@ import scala.collection.generic.Shrinkable * * @tparam K the key type of the map * @tparam V the value type of the map + * @define Coll `ParMap` + * @define coll parallel map * * @author Aleksandar Prokopec * @since 2.9 diff --git a/src/library/scala/collection/parallel/mutable/ParSet.scala b/src/library/scala/collection/parallel/mutable/ParSet.scala index 9367f1424d56..4e2d3e0e4cd5 100644 --- a/src/library/scala/collection/parallel/mutable/ParSet.scala +++ b/src/library/scala/collection/parallel/mutable/ParSet.scala @@ -13,9 +13,6 @@ import scala.collection.generic._ import scala.collection.parallel.Combiner /** A mutable variant of `ParSet`. - * - * @define Coll `mutable.ParSet` - * @define coll mutable parallel set * * @author Aleksandar Prokopec */ diff --git a/src/library/scala/collection/parallel/mutable/ParSetLike.scala b/src/library/scala/collection/parallel/mutable/ParSetLike.scala index 13af5ed64939..08aa3b024bcb 100644 --- a/src/library/scala/collection/parallel/mutable/ParSetLike.scala +++ b/src/library/scala/collection/parallel/mutable/ParSetLike.scala @@ -21,6 +21,8 @@ import scala.collection.generic.Shrinkable * $sideeffects * * @tparam T the element type of the set + * @define Coll `mutable.ParSet` + * @define coll mutable parallel set * * @author Aleksandar Prokopec * @since 2.9 diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala index 91c54fa8f1ce..d77dcb0658e2 100644 --- a/src/library/scala/collection/parallel/package.scala +++ b/src/library/scala/collection/parallel/package.scala @@ -206,7 +206,7 @@ package parallel { * Methods `beforeCombine` and `afterCombine` are called before and after * combining the buckets, respectively, given that the argument to `combine` * is not `this` (as required by the `combine` contract). - * They can be overriden in subclasses to provide custom behaviour by modifying + * They can be overridden in subclasses to provide custom behaviour by modifying * the receiver (which will be the return value). 
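`IterableSplitter.drop` above is implemented by eagerly consuming `n` elements from the splitter itself and returning the same splitter. The same idea in isolation, as a plain-iterator sketch (the helper name is made up for illustration):

```scala
object EagerDropSketch extends App {
  // Same shape as the splitter override above: consume up to n elements, return the iterator itself.
  def eagerDrop[T](it: Iterator[T], n: Int): Iterator[T] = {
    var i = 0
    while (i < n && it.hasNext) { it.next(); i += 1 }
    it
  }

  println(eagerDrop(Iterator(1, 2, 3, 4, 5), 2).toList)   // List(3, 4, 5)
}
```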
*/ private[parallel] abstract class BucketCombiner[-Elem, +To, Buck, +CombinerType <: BucketCombiner[Elem, To, Buck, CombinerType]] diff --git a/src/library/scala/compat/Platform.scala b/src/library/scala/compat/Platform.scala index 875d811b9b26..4c82d6e15be5 100644 --- a/src/library/scala/compat/Platform.scala +++ b/src/library/scala/compat/Platform.scala @@ -70,9 +70,9 @@ object Platform { * @param elemClass the `Class` object of the component type of the array * @param length the length of the new array. * @return an array of the given component type as an `AnyRef`. - * @throws `java.lang.NullPointerException` If `elemClass` is `null`. - * @throws `java.lang.IllegalArgumentException` if componentType is [[scala.Unit]] or `java.lang.Void.TYPE` - * @throws `java.lang.NegativeArraySizeException` if the specified length is negative + * @throws java.lang.NullPointerException If `elemClass` is `null`. + * @throws java.lang.IllegalArgumentException if componentType is [[scala.Unit]] or `java.lang.Void.TYPE` + * @throws java.lang.NegativeArraySizeException if the specified length is negative */ @inline def createArray(elemClass: Class[_], length: Int): AnyRef = @@ -80,7 +80,7 @@ object Platform { /** Assigns the value of 0 to each element in the array. * @param arr A non-null Array[Int]. - * @throws `java.lang.NullPointerException` If `arr` is `null`. + * @throws java.lang.NullPointerException If `arr` is `null`. */ @inline def arrayclear(arr: Array[Int]) { java.util.Arrays.fill(arr, 0) } @@ -92,9 +92,9 @@ object Platform { * * @param name the fully qualified name of the desired class. * @return the `Class` object for the class with the specified name. - * @throws `java.lang.LinkageError` if the linkage fails - * @throws `java.lang.ExceptionInInitializerError` if the initialization provoked by this method fails - * @throws `java.lang.ClassNotFoundException` if the class cannot be located + * @throws java.lang.LinkageError if the linkage fails + * @throws java.lang.ExceptionInInitializerError if the initialization provoked by this method fails + * @throws java.lang.ClassNotFoundException if the class cannot be located * @example {{{ * val a = scala.compat.Platform.getClassForName("java.lang.Integer") // returns the Class[_] for java.lang.Integer * }}} diff --git a/src/library/scala/concurrent/BlockContext.scala b/src/library/scala/concurrent/BlockContext.scala index 747cc393c3ef..2b8ed4c7caa9 100644 --- a/src/library/scala/concurrent/BlockContext.scala +++ b/src/library/scala/concurrent/BlockContext.scala @@ -41,7 +41,7 @@ package scala.concurrent trait BlockContext { /** Used internally by the framework; - * Designates (and eventually executes) a thunk which potentially blocks the calling `Thread`. + * Designates (and eventually executes) a thunk which potentially blocks the calling `java.lang.Thread`. * * Clients must use `scala.concurrent.blocking` or `scala.concurrent.Await` instead. */ @@ -53,9 +53,16 @@ object BlockContext { override def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T = thunk } + /** + * @return the `BlockContext` that will be used if no other is found. + **/ + def defaultBlockContext: BlockContext = DefaultBlockContext + private val contextLocal = new ThreadLocal[BlockContext]() - /** Obtain the current thread's current `BlockContext`. 
*/ + /** + @return the `BlockContext` that would be used for the current `java.lang.Thread` at this point + **/ def current: BlockContext = contextLocal.get match { case null => Thread.currentThread match { case ctx: BlockContext => ctx @@ -64,7 +71,9 @@ object BlockContext { case some => some } - /** Pushes a current `BlockContext` while executing `body`. */ + /** + * Installs a current `BlockContext` around executing `body`. + **/ def withBlockContext[T](blockContext: BlockContext)(body: => T): T = { val old = contextLocal.get // can be null try { diff --git a/src/library/scala/concurrent/Channel.scala b/src/library/scala/concurrent/Channel.scala index 067244bd1ce4..89ad7d8c0e93 100644 --- a/src/library/scala/concurrent/Channel.scala +++ b/src/library/scala/concurrent/Channel.scala @@ -10,8 +10,10 @@ package scala.concurrent -/** This class ... +/** This class provides a simple FIFO queue of data objects, + * which are read by one or more reader threads. * + * @tparam A type of data exchanged * @author Martin Odersky * @version 1.0, 10/03/2003 */ @@ -20,11 +22,14 @@ class Channel[A] { var elem: A = _ var next: LinkedList[A] = null } - private var written = new LinkedList[A] // FIFO buffer, realized through + private var written = new LinkedList[A] // FIFO queue, realized through private var lastWritten = written // aliasing of a linked list private var nreaders = 0 - /** + /** Append a value to the FIFO queue to be read by `read`. + * This operation is nonblocking and can be executed by any thread. + * + * @param x object to enqueue to this channel */ def write(x: A) = synchronized { lastWritten.elem = x @@ -33,6 +38,11 @@ class Channel[A] { if (nreaders > 0) notify() } + /** Retrieve the next waiting object from the FIFO queue, + * blocking if necessary until an object is available. + * + * @return next object dequeued from this channel + */ def read: A = synchronized { while (written.next == null) { try { @@ -45,5 +55,4 @@ class Channel[A] { written = written.next x } - } diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala index a55432fd712c..df2d68c9c699 100644 --- a/src/library/scala/concurrent/ExecutionContext.scala +++ b/src/library/scala/concurrent/ExecutionContext.scala @@ -14,8 +14,13 @@ import scala.annotation.implicitNotFound import scala.util.Try /** - * An `ExecutionContext` can execute program logic, typically but not - * necessarily on a thread pool. + * An `ExecutionContext` can execute program logic asynchronously, + * typically but not necessarily on a thread pool. + * + * A general purpose `ExecutionContext` must be asynchronous in executing + * any `Runnable` that is passed into its `execute`-method. A special purpose + * `ExecutionContext` may be synchronous but must only be passed to code that + * is explicitly safe to be run using a synchronously executing `ExecutionContext`. * * APIs such as `Future.onComplete` require you to provide a callback * and an implicit `ExecutionContext`. The implicit `ExecutionContext` @@ -56,28 +61,46 @@ or import scala.concurrent.ExecutionContext.Implicits.global.""") trait ExecutionContext { /** Runs a block of code on this execution context. + * + * @param runnable the task to execute */ def execute(runnable: Runnable): Unit /** Reports that an asynchronous computation failed. + * + * @param cause the cause of the failure */ def reportFailure(@deprecatedName('t) cause: Throwable): Unit /** Prepares for the execution of a task. 
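The new `Channel` scaladoc above describes a blocking FIFO shared between writer and reader threads. A minimal producer/consumer sketch of that contract (thread setup and values are illustrative):

```scala
import scala.concurrent.Channel

object ChannelDemo extends App {
  val ch = new Channel[Int]

  val reader = new Thread(new Runnable {
    def run(): Unit = println("read: " + ch.read)   // blocks until a value is written
  })
  reader.start()

  ch.write(42)   // non-blocking; wakes up the waiting reader
  reader.join()
}
```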
Returns the prepared - * execution context. A valid implementation of `prepare` is one - * that simply returns `this`. - */ + * execution context. The recommended implementation of + * `prepare` is to return `this`. + * + * This method should no longer be overridden or called. It was + * originally expected that `prepare` would be called by + * all libraries that consume ExecutionContexts, in order to + * capture thread local context. However, this usage has proven + * difficult to implement in practice and instead it is + * now better to avoid using `prepare` entirely. + * + * Instead, if an `ExecutionContext` needs to capture thread + * local context, it should capture that context when it is + * constructed, so that it doesn't need any additional + * preparation later. + */ + @deprecated("Preparation of ExecutionContexts will be removed.", "2.12") def prepare(): ExecutionContext = this - } /** - * Union interface since Java does not support union types + * An [[ExecutionContext]] that is also a + * Java [[http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Executor.html Executor]]. */ trait ExecutionContextExecutor extends ExecutionContext with Executor /** - * Union interface since Java does not support union types + * An [[ExecutionContext]] that is also a + * Java [[http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html ExecutorService]]. */ trait ExecutionContextExecutorService extends ExecutionContextExecutor with ExecutorService @@ -86,38 +109,72 @@ trait ExecutionContextExecutorService extends ExecutionContextExecutor with Exec */ object ExecutionContext { /** - * This is the explicit global ExecutionContext, - * call this when you want to provide the global ExecutionContext explicitly + * The explicit global `ExecutionContext`. Invoke `global` when you want to provide the global + * `ExecutionContext` explicitly. + * + * The default `ExecutionContext` implementation is backed by a work-stealing thread pool. By default, + * the thread pool uses a target number of worker threads equal to the number of + * [[https://docs.oracle.com/javase/8/docs/api/java/lang/Runtime.html#availableProcessors-- available processors]]. + * + * @return the global `ExecutionContext` */ - def global: ExecutionContextExecutor = Implicits.global + def global: ExecutionContextExecutor = Implicits.global.asInstanceOf[ExecutionContextExecutor] object Implicits { /** - * This is the implicit global ExecutionContext, - * import this when you want to provide the global ExecutionContext implicitly + * The implicit global `ExecutionContext`. Import `global` when you want to provide the global + * `ExecutionContext` implicitly. + * + * The default `ExecutionContext` implementation is backed by a work-stealing thread pool. By default, + * the thread pool uses a target number of worker threads equal to the number of + * [[https://docs.oracle.com/javase/8/docs/api/java/lang/Runtime.html#availableProcessors-- available processors]]. */ - implicit lazy val global: ExecutionContextExecutor = impl.ExecutionContextImpl.fromExecutor(null: Executor) + implicit lazy val global: ExecutionContext = impl.ExecutionContextImpl.fromExecutor(null: Executor) } /** Creates an `ExecutionContext` from the given `ExecutorService`. + * + * @param e the `ExecutorService` to use. If `null`, a new `ExecutorService` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]]. 
+ * @param reporter a function for error reporting + * @return the `ExecutionContext` using the given `ExecutorService` */ def fromExecutorService(e: ExecutorService, reporter: Throwable => Unit): ExecutionContextExecutorService = impl.ExecutionContextImpl.fromExecutorService(e, reporter) - /** Creates an `ExecutionContext` from the given `ExecutorService` with the default Reporter. + /** Creates an `ExecutionContext` from the given `ExecutorService` with the [[scala.concurrent.ExecutionContext$.defaultReporter default reporter]]. + * + * If it is guaranteed that none of the executed tasks are blocking, a single-threaded `ExecutorService` + * can be used to create an `ExecutionContext` as follows: + * + * {{{ + * import java.util.concurrent.Executors + * val ec = ExecutionContext.fromExecutorService(Executors.newSingleThreadExecutor()) + * }}} + * + * @param e the `ExecutorService` to use. If `null`, a new `ExecutorService` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]]. + * @return the `ExecutionContext` using the given `ExecutorService` */ def fromExecutorService(e: ExecutorService): ExecutionContextExecutorService = fromExecutorService(e, defaultReporter) /** Creates an `ExecutionContext` from the given `Executor`. + * + * @param e the `Executor` to use. If `null`, a new `Executor` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]]. + * @param reporter a function for error reporting + * @return the `ExecutionContext` using the given `Executor` */ def fromExecutor(e: Executor, reporter: Throwable => Unit): ExecutionContextExecutor = impl.ExecutionContextImpl.fromExecutor(e, reporter) - /** Creates an `ExecutionContext` from the given `Executor` with the default Reporter. + /** Creates an `ExecutionContext` from the given `Executor` with the [[scala.concurrent.ExecutionContext$.defaultReporter default reporter]]. + * + * @param e the `Executor` to use. If `null`, a new `Executor` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]]. + * @return the `ExecutionContext` using the given `Executor` */ def fromExecutor(e: Executor): ExecutionContextExecutor = fromExecutor(e, defaultReporter) - /** The default reporter simply prints the stack trace of the `Throwable` to System.err. + /** The default reporter simply prints the stack trace of the `Throwable` to [[http://docs.oracle.com/javase/8/docs/api/java/lang/System.html#err System.err]]. 
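Tying the `fromExecutor`/`fromExecutorService` documentation above together, a small sketch of wrapping a JDK pool with a custom reporter (pool size and the message format are illustrative):

```scala
import java.util.concurrent.Executors
import scala.concurrent.{ ExecutionContext, ExecutionContextExecutorService }

object CustomEcSketch extends App {
  val pool = Executors.newFixedThreadPool(2)

  // The second argument is the reporter documented above; defaultReporter just prints the stack trace.
  val ec: ExecutionContextExecutorService =
    ExecutionContext.fromExecutorService(pool, t => Console.err.println("task failed: " + t))

  ec.execute(new Runnable { def run(): Unit = println("running on the wrapped pool") })

  ec.shutdown()   // ExecutionContextExecutorService is also a java.util.concurrent.ExecutorService
}
```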
+ * + * @return the function for error reporting */ def defaultReporter: Throwable => Unit = _.printStackTrace() } diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala index 4ed068733405..6304f35da941 100644 --- a/src/library/scala/concurrent/Future.scala +++ b/src/library/scala/concurrent/Future.scala @@ -10,26 +10,22 @@ package scala.concurrent import scala.language.higherKinds -import java.util.concurrent.{ ConcurrentLinkedQueue, TimeUnit, Callable } +import java.util.concurrent.{ CountDownLatch, TimeUnit, Callable } import java.util.concurrent.TimeUnit.{ NANOSECONDS => NANOS, MILLISECONDS ⇒ MILLIS } -import java.lang.{ Iterable => JIterable } -import java.util.{ LinkedList => JLinkedList } -import java.util.concurrent.atomic.{ AtomicReferenceFieldUpdater, AtomicInteger, AtomicLong, AtomicBoolean } +import java.util.concurrent.atomic.AtomicInteger import scala.util.control.NonFatal -import scala.Option import scala.util.{Try, Success, Failure} - +import scala.concurrent.duration._ import scala.annotation.tailrec import scala.collection.mutable.Builder import scala.collection.generic.CanBuildFrom import scala.reflect.ClassTag - /** The trait that represents futures. * - * Asynchronous computations that yield futures are created with the `Future` call: + * Asynchronous computations that yield futures are created with the `Future.apply` call: * * {{{ * val s = "Hello" @@ -60,6 +56,10 @@ import scala.reflect.ClassTag * If a future is failed with a `scala.runtime.NonLocalReturnControl`, * it is completed with a value from that throwable instead. * + * @define swallowsExceptions + * Since this method executes asynchronously and does not produce a return value, + * any non-fatal exceptions thrown will be reported to the `ExecutionContext`. + * * @define nonDeterministic * Note: using this method yields nondeterministic dataflow programs. * @@ -91,34 +91,29 @@ import scala.reflect.ClassTag * `execute()` either immediately or asynchronously. */ trait Future[+T] extends Awaitable[T] { - - // The executor within the lexical scope - // of the Future trait. Note that this will - // (modulo bugs) _never_ execute a callback - // other than those below in this same file. - // - // See the documentation on `InternalCallbackExecutor` for more details. - private def internalExecutor = Future.InternalCallbackExecutor + import Future.{ InternalCallbackExecutor => internalExecutor } /* Callbacks */ - /** When this future is completed successfully (i.e. with a value), + /** When this future is completed successfully (i.e., with a value), * apply the provided partial function to the value if the partial function * is defined at that value. * * If the future has already been completed with a value, * this will either be applied immediately or be scheduled asynchronously. * + * $swallowsExceptions * $multipleCallbacks * $callbackInContext */ + @deprecated("use `foreach` or `onComplete` instead (keep in mind that they take total rather than partial functions)", "2.12") def onSuccess[U](pf: PartialFunction[T, U])(implicit executor: ExecutionContext): Unit = onComplete { case Success(v) => pf.applyOrElse[T, Any](v, Predef.conforms[T]) // Exploiting the cached function to avoid MatchError case _ => } - /** When this future is completed with a failure (i.e. with a throwable), + /** When this future is completed with a failure (i.e., with a throwable), * apply the provided callback to the throwable. 
* * $caughtThrowables @@ -128,9 +123,11 @@ trait Future[+T] extends Awaitable[T] { * * Will not be called in case that the future is completed with a value. * + * $swallowsExceptions * $multipleCallbacks * $callbackInContext */ + @deprecated("use `onComplete` or `failed.foreach` instead (keep in mind that they take total rather than partial functions)", "2.12") def onFailure[U](@deprecatedName('callback) pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = onComplete { case Failure(t) => pf.applyOrElse[Throwable, Any](t, Predef.conforms[Throwable]) // Exploiting the cached function to avoid MatchError @@ -143,8 +140,12 @@ trait Future[+T] extends Awaitable[T] { * If the future has already been completed, * this will either be applied immediately or be scheduled asynchronously. * + * $swallowsExceptions * $multipleCallbacks * $callbackInContext + * + * @tparam U only used to accept any return type of the given callback function + * @param f the function to be executed when this `Future` completes */ def onComplete[U](@deprecatedName('func) f: Try[T] => U)(implicit executor: ExecutionContext): Unit @@ -160,46 +161,47 @@ trait Future[+T] extends Awaitable[T] { */ def isCompleted: Boolean - /** The value of this `Future`. + /** The current value of this `Future`. + * + * $nonDeterministic * * If the future is not completed the returned value will be `None`. * If the future is completed the value will be `Some(Success(t))` * if it contains a valid result, or `Some(Failure(error))` if it contains * an exception. + * + * @return `None` if the `Future` wasn't completed, `Some` if it was. */ def value: Option[Try[T]] /* Projections */ - /** Returns a failed projection of this future. - * - * The failed projection is a future holding a value of type `Throwable`. + /** The returned `Future` will be successfully completed with the `Throwable` of the original `Future` + * if the original `Future` fails. * - * It is completed with a value which is the throwable of the original future - * in case the original future is failed. + * If the original `Future` is successful, the returned `Future` is failed with a `NoSuchElementException`. * - * It is failed with a `NoSuchElementException` if the original future is completed successfully. - * - * Blocking on this future returns a value if the original future is completed with an exception - * and throws a corresponding exception if the original future fails. + * @return a failed projection of this `Future`. */ - def failed: Future[Throwable] = { - implicit val ec = internalExecutor - val p = Promise[Throwable]() - onComplete { - case Failure(t) => p success t - case Success(v) => p failure (new NoSuchElementException("Future.failed not completed with a throwable.")) - } - p.future - } + def failed: Future[Throwable] = + transform({ + case Failure(t) => Success(t) + case Success(v) => Failure(new NoSuchElementException("Future.failed not completed with a throwable.")) + })(internalExecutor) /* Monadic operations */ /** Asynchronously processes the value in the future once the value becomes available. * - * Will not be called if the future fails. + * WARNING: Will not be called if this future is never completed or if it is completed with a failure. + * + * $swallowsExceptions + * + * @tparam U only used to accept any return type of the given callback function + * @param f the function which will be executed if this `Future` completes with a result, + * the return value of `f` will be discarded. 
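The deprecations above steer callers from the partial-function callbacks (`onSuccess`, `onFailure`) to the total ones. A short sketch of the replacements named in the deprecation messages (values and the crude sleep are illustrative only):

```scala
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{ Success, Failure }

object CallbackMigration extends App {
  val f = Future(6 / 2)

  // instead of f.onSuccess { case v => ... }
  f.foreach(v => println("value: " + v))

  // instead of f.onFailure { case t => ... }
  f.failed.foreach(t => println("failed: " + t))

  // or handle both outcomes at once
  f.onComplete {
    case Success(v) => println("ok: " + v)
    case Failure(t) => println("boom: " + t)
  }

  Thread.sleep(200)   // crude wait so the asynchronous callbacks get a chance to run before exit
}
```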
*/ def foreach[U](f: T => U)(implicit executor: ExecutionContext): Unit = onComplete { _ foreach f } @@ -208,33 +210,49 @@ trait Future[+T] extends Awaitable[T] { * exception thrown when 's' or 'f' is applied, that exception will be propagated * to the resulting future. * - * @param s function that transforms a successful result of the receiver into a - * successful result of the returned future - * @param f function that transforms a failure of the receiver into a failure of - * the returned future - * @return a future that will be completed with the transformed value - */ - def transform[S](s: T => S, f: Throwable => Throwable)(implicit executor: ExecutionContext): Future[S] = { - val p = Promise[S]() - // transform on Try has the wrong shape for us here - onComplete { - case Success(r) => p complete Try(s(r)) - case Failure(t) => p complete Try(throw f(t)) // will throw fatal errors! + * @tparam S the type of the returned `Future` + * @param s function that transforms a successful result of the receiver into a successful result of the returned future + * @param f function that transforms a failure of the receiver into a failure of the returned future + * @return a `Future` that will be completed with the transformed value + */ + def transform[S](s: T => S, f: Throwable => Throwable)(implicit executor: ExecutionContext): Future[S] = + transform { + case Success(r) => Try(s(r)) + case Failure(t) => Try(throw f(t)) // will throw fatal errors! } - p.future - } + + /** Creates a new Future by applying the specified function to the result + * of this Future. If there is any non-fatal exception thrown when 'f' + * is applied then that exception will be propagated to the resulting future. + * + * @tparam S the type of the returned `Future` + * @param f function that transforms the result of this future + * @return a `Future` that will be completed with the transformed value + */ + def transform[S](f: Try[T] => Try[S])(implicit executor: ExecutionContext): Future[S] + + /** Creates a new Future by applying the specified function, which produces a Future, to the result + * of this Future. If there is any non-fatal exception thrown when 'f' + * is applied then that exception will be propagated to the resulting future. + * + * @tparam S the type of the returned `Future` + * @param f function that transforms the result of this future + * @return a `Future` that will be completed with the transformed value + */ + def transformWith[S](f: Try[T] => Future[S])(implicit executor: ExecutionContext): Future[S] + /** Creates a new future by applying a function to the successful result of * this future. If this future is completed with an exception then the new * future will also contain this exception. * * $forComprehensionExamples + * + * @tparam S the type of the returned `Future` + * @param f the function which will be applied to the successful result of this `Future` + * @return a `Future` which will be completed with the result of the application of the function */ - def map[S](f: T => S)(implicit executor: ExecutionContext): Future[S] = { // transform(f, identity) - val p = Promise[S]() - onComplete { v => p complete (v map f) } - p.future - } + def map[S](f: T => S)(implicit executor: ExecutionContext): Future[S] = transform(_.map(f)) /** Creates a new future by applying a function to the successful result of * this future, and returns the result of the function as the new future. @@ -242,21 +260,23 @@ trait Future[+T] extends Awaitable[T] { * also contain this exception. 
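With the API added above, `map`, `flatMap`, `recover` and friends are expressed through the new `Try`-based `transform` and `transformWith`; both can also be used directly, for example to turn a failure into a value or into another future. A sketch assuming the 2.12-style API introduced here (values are illustrative):

```scala
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{ Success, Failure }

object TransformSketch extends App {
  val failing = Future(6 / 0)

  // transform: rewrite the Try outcome in one step.
  val recovered: Future[Int] = failing.transform {
    case Failure(_: ArithmeticException) => Success(0)
    case other                           => other
  }

  // transformWith: chain into another Future based on the outcome.
  val chained: Future[String] = failing.transformWith {
    case Success(n) => Future.successful("got " + n)
    case Failure(_) => Future.successful("fell back")
  }

  recovered.foreach(println)   // 0
  chained.foreach(println)     // fell back
  Thread.sleep(200)            // crude wait for the asynchronous callbacks
}
```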
* * $forComprehensionExamples + * + * @tparam S the type of the returned `Future` + * @param f the function which will be applied to the successful result of this `Future` + * @return a `Future` which will be completed with the result of the application of the function */ - def flatMap[S](f: T => Future[S])(implicit executor: ExecutionContext): Future[S] = { - import impl.Promise.DefaultPromise - val p = new DefaultPromise[S]() - onComplete { - case f: Failure[_] => p complete f.asInstanceOf[Failure[S]] - case Success(v) => try f(v) match { - // If possible, link DefaultPromises to avoid space leaks - case dp: DefaultPromise[_] => dp.asInstanceOf[DefaultPromise[S]].linkRootOf(p) - case fut => fut.onComplete(p.complete)(internalExecutor) - } catch { case NonFatal(t) => p failure t } - } - p.future + def flatMap[S](f: T => Future[S])(implicit executor: ExecutionContext): Future[S] = transformWith { + case Success(s) => f(s) + case Failure(_) => this.asInstanceOf[Future[S]] } + /** Creates a new future with one level of nesting flattened, this method is equivalent + * to `flatMap(identity)`. + * + * @tparam S the type of the returned `Future` + */ + def flatten[S](implicit ev: T <:< Future[S]): Future[S] = flatMap(ev)(internalExecutor) + /** Creates a new future by filtering the value of the current future with a predicate. * * If the current future contains a value which satisfies the predicate, the new future will also hold that value. @@ -269,14 +289,15 @@ trait Future[+T] extends Awaitable[T] { * val f = Future { 5 } * val g = f filter { _ % 2 == 1 } * val h = f filter { _ % 2 == 0 } - * Await.result(g, Duration.Zero) // evaluates to 5 + * g foreach println // Eventually prints 5 * Await.result(h, Duration.Zero) // throw a NoSuchElementException * }}} + * + * @param p the predicate to apply to the successful result of this `Future` + * @return a `Future` which will hold the successful result of this `Future` if it matches the predicate or a `NoSuchElementException` */ def filter(@deprecatedName('pred) p: T => Boolean)(implicit executor: ExecutionContext): Future[T] = - map { - r => if (p(r)) r else throw new NoSuchElementException("Future.filter predicate is not satisfied") - } + map { r => if (p(r)) r else throw new NoSuchElementException("Future.filter predicate is not satisfied") } /** Used by for-comprehensions. 
*/ @@ -298,9 +319,13 @@ trait Future[+T] extends Awaitable[T] { * val h = f collect { * case x if x > 0 => x * 2 * } - * Await.result(g, Duration.Zero) // evaluates to 5 + * g foreach println // Eventually prints 5 * Await.result(h, Duration.Zero) // throw a NoSuchElementException * }}} + * + * @tparam S the type of the returned `Future` + *  @param pf the `PartialFunction` to apply to the successful result of this `Future` + * @return a `Future` holding the result of application of the `PartialFunction` or a `NoSuchElementException` */ def collect[S](pf: PartialFunction[T, S])(implicit executor: ExecutionContext): Future[S] = map { @@ -318,12 +343,13 @@ trait Future[+T] extends Awaitable[T] { * Future (6 / 0) recover { case e: NotFoundException => 0 } // result: exception * Future (6 / 2) recover { case e: ArithmeticException => 0 } // result: 3 * }}} + * + * @tparam U the type of the returned `Future` + * @param pf the `PartialFunction` to apply if this `Future` fails + * @return a `Future` with the successful value of this `Future` or the result of the `PartialFunction` */ - def recover[U >: T](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = { - val p = Promise[U]() - onComplete { v => p complete (v recover pf) } - p.future - } + def recover[U >: T](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = + transform { _ recover pf } /** Creates a new future that will handle any matching throwable that this * future might contain by assigning it a value of another future. @@ -337,15 +363,16 @@ trait Future[+T] extends Awaitable[T] { * val f = Future { Int.MaxValue } * Future (6 / 0) recoverWith { case e: ArithmeticException => f } // result: Int.MaxValue * }}} + * + * @tparam U the type of the returned `Future` + * @param pf the `PartialFunction` to apply if this `Future` fails + * @return a `Future` with the successful value of this `Future` or the outcome of the `Future` returned by the `PartialFunction` */ - def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = { - val p = Promise[U]() - onComplete { - case Failure(t) => try pf.applyOrElse(t, (_: Throwable) => this).onComplete(p.complete)(internalExecutor) catch { case NonFatal(t) => p failure t } - case other => p complete other + def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = + transformWith { + case Failure(t) => pf.applyOrElse(t, (_: Throwable) => this) + case Success(_) => this } - p.future - } /** Zips the values of `this` and `that` future, and creates * a new future holding the tuple of their results. @@ -354,17 +381,35 @@ trait Future[+T] extends Awaitable[T] { * with the throwable stored in `this`. * Otherwise, if `that` future fails, the resulting future is failed * with the throwable stored in `that`. 
+ * + * @tparam U the type of the other `Future` + * @param that the other `Future` + * @return a `Future` with the results of both futures or the failure of the first of them that failed */ def zip[U](that: Future[U]): Future[(T, U)] = { implicit val ec = internalExecutor - val p = Promise[(T, U)]() - onComplete { - case f: Failure[_] => p complete f.asInstanceOf[Failure[(T, U)]] - case Success(s) => that onComplete { c => p.complete(c map { s2 => (s, s2) }) } - } - p.future + flatMap { r1 => that.map(r2 => (r1, r2)) } } + /** Zips the values of `this` and `that` future using a function `f`, + * and creates a new future holding the result. + * + * If `this` future fails, the resulting future is failed + * with the throwable stored in `this`. + * Otherwise, if `that` future fails, the resulting future is failed + * with the throwable stored in `that`. + * If the application of `f` throws a throwable, the resulting future + * is failed with that throwable if it is non-fatal. + * + * @tparam U the type of the other `Future` + * @tparam R the type of the resulting `Future` + * @param that the other `Future` + * @param f the function to apply to the results of `this` and `that` + * @return a `Future` with the result of the application of `f` to the results of `this` and `that` + */ + def zipWith[U, R](that: Future[U])(f: (T, U) => R)(implicit executor: ExecutionContext): Future[R] = + flatMap(r1 => that.map(r2 => f(r1, r2)))(internalExecutor) + /** Creates a new future which holds the result of this future if it was completed successfully, or, if not, * the result of the `that` future if `that` is completed successfully. * If both futures are failed, the resulting future holds the throwable object of the first future. @@ -376,24 +421,26 @@ trait Future[+T] extends Awaitable[T] { * val f = Future { sys.error("failed") } * val g = Future { 5 } * val h = f fallbackTo g - * Await.result(h, Duration.Zero) // evaluates to 5 + * h foreach println // Eventually prints 5 * }}} + * + * @tparam U the type of the other `Future` and the resulting `Future` + * @param that the `Future` whose result we want to use if this `Future` fails. + * @return a `Future` with the successful result of this or that `Future` or the failure of this `Future` if both fail */ - def fallbackTo[U >: T](that: Future[U]): Future[U] = { - implicit val ec = internalExecutor - val p = Promise[U]() - onComplete { - case s @ Success(_) => p complete s - case f @ Failure(_) => that onComplete { - case s2 @ Success(_) => p complete s2 - case _ => p complete f // Use the first failure as the failure - } + def fallbackTo[U >: T](that: Future[U]): Future[U] = + if (this eq that) this + else { + implicit val ec = internalExecutor + recoverWith { case _ => that } recoverWith { case _ => this } } - p.future - } /** Creates a new `Future[S]` which is completed with this `Future`'s result if * that conforms to `S`'s erased type or a `ClassCastException` otherwise. 
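`zipWith` (added above) combines two futures with a function, avoiding the intermediate tuple that `zip` produces, and the reworked `fallbackTo` only consults `that` when `this` fails. A brief sketch (values are illustrative):

```scala
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

object ZipWithSketch extends App {
  val a = Future(21)
  val b = Future(2)

  val product: Future[Int]        = a.zipWith(b)(_ * _)   // eventually 42, no (Int, Int) tuple in between
  val paired:  Future[(Int, Int)] = a zip b               // the tuple-producing variant

  val risky    = Future[Int](sys.error("boom"))
  val fallback = risky fallbackTo Future.successful(0)    // eventually 0, because `risky` fails

  product.foreach(println)
  fallback.foreach(println)
  Thread.sleep(200)   // crude wait for the asynchronous callbacks
}
```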
+ * + * @tparam S the type of the returned `Future` + * @param tag the `ClassTag` which will be used to cast the result of this `Future` + * @return a `Future` holding the casted result of this `Future` or a `ClassCastException` otherwise */ def mapTo[S](implicit tag: ClassTag[S]): Future[S] = { implicit val ec = internalExecutor @@ -427,15 +474,19 @@ trait Future[+T] extends Awaitable[T] { * case Success(v) => println(v) * } * }}} + * + * @tparam U only used to accept any return type of the given `PartialFunction` + * @param pf a `PartialFunction` which will be conditionally applied to the outcome of this `Future` + * @return a `Future` which will be completed with the exact same outcome as this `Future` but after the `PartialFunction` has been executed. */ - def andThen[U](pf: PartialFunction[Try[T], U])(implicit executor: ExecutionContext): Future[T] = { - val p = Promise[T]() - onComplete { - case r => try pf.applyOrElse[Try[T], Any](r, Predef.conforms[Try[T]]) finally p complete r - } - p.future - } + def andThen[U](pf: PartialFunction[Try[T], U])(implicit executor: ExecutionContext): Future[T] = + transform { + result => + try pf.applyOrElse[Try[T], Any](result, Predef.conforms[Try[T]]) + catch { case NonFatal(t) => executor reportFailure t } + result + } } @@ -459,40 +510,102 @@ object Future { classOf[Unit] -> classOf[scala.runtime.BoxedUnit] ) + /** A Future which is never completed. + */ + final object never extends Future[Nothing] { + + @throws(classOf[TimeoutException]) + @throws(classOf[InterruptedException]) + override def ready(atMost: Duration)(implicit permit: CanAwait): this.type = { + atMost match { + case e if e eq Duration.Undefined => throw new IllegalArgumentException("cannot wait for Undefined period") + case Duration.Inf => new CountDownLatch(1).await() + case Duration.MinusInf => // Drop out + case f: FiniteDuration => + if (f > Duration.Zero) new CountDownLatch(1).await(f.toNanos, TimeUnit.NANOSECONDS) + } + throw new TimeoutException(s"Future timed out after [$atMost]") + } + + @throws(classOf[Exception]) + override def result(atMost: Duration)(implicit permit: CanAwait): Nothing = { + ready(atMost) + throw new TimeoutException(s"Future timed out after [$atMost]") + } + + override def onSuccess[U](pf: PartialFunction[Nothing, U])(implicit executor: ExecutionContext): Unit = () + override def onFailure[U](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = () + override def onComplete[U](f: Try[Nothing] => U)(implicit executor: ExecutionContext): Unit = () + override def isCompleted: Boolean = false + override def value: Option[Try[Nothing]] = None + override def failed: Future[Throwable] = this + override def foreach[U](f: Nothing => U)(implicit executor: ExecutionContext): Unit = () + override def transform[S](s: Nothing => S, f: Throwable => Throwable)(implicit executor: ExecutionContext): Future[S] = this + override def transform[S](f: Try[Nothing] => Try[S])(implicit executor: ExecutionContext): Future[S] = this + override def transformWith[S](f: Try[Nothing] => Future[S])(implicit executor: ExecutionContext): Future[S] = this + override def map[S](f: Nothing => S)(implicit executor: ExecutionContext): Future[S] = this + override def flatMap[S](f: Nothing => Future[S])(implicit executor: ExecutionContext): Future[S] = this + override def flatten[S](implicit ev: Nothing <:< Future[S]): Future[S] = this + override def filter(p: Nothing => Boolean)(implicit executor: ExecutionContext): Future[Nothing] = this + override def 
collect[S](pf: PartialFunction[Nothing, S])(implicit executor: ExecutionContext): Future[S] = this + override def recover[U >: Nothing](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = this + override def recoverWith[U >: Nothing](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = this + override def zip[U](that: Future[U]): Future[(Nothing, U)] = this + override def zipWith[U, R](that: Future[U])(f: (Nothing, U) => R)(implicit executor: ExecutionContext): Future[R] = this + override def fallbackTo[U >: Nothing](that: Future[U]): Future[U] = this + override def mapTo[S](implicit tag: ClassTag[S]): Future[S] = this + override def andThen[U](pf: PartialFunction[Try[Nothing], U])(implicit executor: ExecutionContext): Future[Nothing] = this + + override def toString: String = "Future()" + } + + /** A Future which is always completed with the Unit value. + */ + val unit: Future[Unit] = successful(()) + /** Creates an already completed Future with the specified exception. * - * @tparam T the type of the value in the future - * @return the newly created `Future` object + * @tparam T the type of the value in the future + * @param exception the non-null instance of `Throwable` + * @return the newly created `Future` instance */ def failed[T](exception: Throwable): Future[T] = Promise.failed(exception).future /** Creates an already completed Future with the specified result. * * @tparam T the type of the value in the future - * @return the newly created `Future` object + * @param result the given successful value + * @return the newly created `Future` instance */ def successful[T](result: T): Future[T] = Promise.successful(result).future /** Creates an already completed Future with the specified result or exception. * - * @tparam T the type of the value in the promise - * @return the newly created `Future` object + * @tparam T the type of the value in the `Future` + * @param result the result of the returned `Future` instance + * @return the newly created `Future` instance */ def fromTry[T](result: Try[T]): Future[T] = Promise.fromTry(result).future - /** Starts an asynchronous computation and returns a `Future` object with the result of that computation. + /** Starts an asynchronous computation and returns a `Future` instance with the result of that computation. * * The result becomes available once the asynchronous computation is completed. * - * @tparam T the type of the result - * @param body the asychronous computation + * @tparam T the type of the result + * @param body the asynchronous computation * @param executor the execution context on which the future is run - * @return the `Future` holding the result of the computation + * @return the `Future` holding the result of the computation */ - def apply[T](body: =>T)(implicit @deprecatedName('execctx) executor: ExecutionContext): Future[T] = impl.Future(body) + def apply[T](body: =>T)(implicit @deprecatedName('execctx) executor: ExecutionContext): Future[T] = + unit.map(_ => body) - /** Simple version of `Future.traverse`. Transforms a `TraversableOnce[Future[A]]` into a `Future[TraversableOnce[A]]`. - * Useful for reducing many `Future`s into a single `Future`. + /** Simple version of `Future.traverse`. Asynchronously and non-blockingly transforms a `TraversableOnce[Future[A]]` + * into a `Future[TraversableOnce[A]]`. Useful for reducing many `Future`s into a single `Future`. 
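Future.never and Future.unit above give the companion object two cheap building blocks, and Future.apply is now just a map over the pre-completed unit. A brief illustrative sketch (not part of the patch):

    import scala.concurrent.Future
    import scala.concurrent.ExecutionContext.Implicits.global

    Future.unit.foreach(_ => println("already done"))  // unit is already completed with ()

    // Future.never never completes: Await.ready(Future.never, someTimeout) always
    // ends in a TimeoutException instead of producing a value.

    val fortyTwo: Future[Int] = Future(40 + 2)          // now implemented as unit.map(_ => 40 + 2)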
+ * + * @tparam A the type of the value inside the Futures + * @tparam M the type of the `TraversableOnce` of Futures + * @param in the `TraversableOnce` of Futures which will be sequenced + * @return the `Future` of the `TraversableOnce` of results */ def sequence[A, M[X] <: TraversableOnce[X]](in: M[Future[A]])(implicit cbf: CanBuildFrom[M[Future[A]], A, M[A]], executor: ExecutionContext): Future[M[A]] = { in.foldLeft(successful(cbf(in))) { @@ -500,7 +613,12 @@ object Future { } map (_.result()) } - /** Returns a new `Future` to the result of the first future in the list that is completed. + /** Asynchronously and non-blockingly returns a new `Future` to the result of the first future + * in the list that is completed. This means no matter if it is completed as a success or as a failure. + * + * @tparam T the type of the value in the future + * @param futures the `TraversableOnce` of Futures in which to find the first completed + * @return the `Future` holding the result of the future that is first to be completed */ def firstCompletedOf[T](futures: TraversableOnce[Future[T]])(implicit executor: ExecutionContext): Future[T] = { val p = Promise[T]() @@ -509,8 +627,15 @@ object Future { p.future } - /** Returns a `Future` that will hold the optional result of the first `Future` with a result that matches the predicate. + /** Asynchronously and non-blockingly returns a `Future` that will hold the optional result + * of the first `Future` with a result that matches the predicate. + * + * @tparam T the type of the value in the future + * @param futures the `TraversableOnce` of Futures to search + * @param p the predicate which indicates if it's a match + * @return the `Future` holding the optional result of the search */ + @deprecated("Use the overloaded version of this method that takes a scala.collection.immutable.Iterable instead", "2.12") def find[T](@deprecatedName('futurestravonce) futures: TraversableOnce[Future[T]])(@deprecatedName('predicate) p: T => Boolean)(implicit executor: ExecutionContext): Future[Option[T]] = { val futuresBuffer = futures.toBuffer if (futuresBuffer.isEmpty) successful[Option[T]](None) @@ -534,40 +659,127 @@ object Future { } } - /** A non-blocking fold over the specified futures, with the start value of the given zero. + + /** Asynchronously and non-blockingly returns a `Future` that will hold the optional result + * of the first `Future` with a result that matches the predicate, failed `Future`s will be ignored. + * + * @tparam T the type of the value in the future + * @param futures the `scala.collection.immutable.Iterable` of Futures to search + * @param p the predicate which indicates if it's a match + * @return the `Future` holding the optional result of the search + */ + def find[T](futures: scala.collection.immutable.Iterable[Future[T]])(p: T => Boolean)(implicit executor: ExecutionContext): Future[Option[T]] = { + def searchNext(i: Iterator[Future[T]]): Future[Option[T]] = + if (!i.hasNext) successful[Option[T]](None) + else { + i.next().transformWith { + case Success(r) if p(r) => successful(Some(r)) + case other => searchNext(i) + } + } + searchNext(futures.iterator) + } + + /** A non-blocking, asynchronous left fold over the specified futures, + * with the start value of the given zero. + * The fold is performed asynchronously in left-to-right order as the futures become completed. + * The result will be the first failure of any of the futures, or any failure in the actual fold, + * or the result of the fold. 
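The new find above walks the futures sequentially with transformWith, so futures that fail are skipped rather than failing the whole search. A usage sketch (inputs are arbitrary; not part of the patch):

    import scala.concurrent.Future
    import scala.concurrent.ExecutionContext.Implicits.global

    val futures = List(Future(1), Future[Int](sys.error("boom")), Future(3))

    // The failed future is ignored; the first successful result matching the
    // predicate is returned, here eventually Some(3).
    val found: Future[Option[Int]] = Future.find(futures)(_ > 1)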
+ * + * Example: + * {{{ + * val futureSum = Future.foldLeft(futures)(0)(_ + _) + * }}} + * + * @tparam T the type of the value of the input Futures + * @tparam R the type of the value of the returned `Future` + * @param futures the `scala.collection.immutable.Iterable` of Futures to be folded + * @param zero the start value of the fold + * @param op the fold operation to be applied to the zero and futures + * @return the `Future` holding the result of the fold + */ + def foldLeft[T, R](futures: scala.collection.immutable.Iterable[Future[T]])(zero: R)(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = + foldNext(futures.iterator, zero, op) + + private[this] def foldNext[T, R](i: Iterator[Future[T]], prevValue: R, op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = + if (!i.hasNext) successful(prevValue) + else i.next().flatMap { value => foldNext(i, op(prevValue, value), op) } + + /** A non-blocking, asynchronous fold over the specified futures, with the start value of the given zero. * The fold is performed on the thread where the last future is completed, * the result will be the first failure of any of the futures, or any failure in the actual fold, * or the result of the fold. * * Example: * {{{ - * val result = Await.result(Future.fold(futures)(0)(_ + _), 5 seconds) + * val futureSum = Future.fold(futures)(0)(_ + _) * }}} + * + * @tparam T the type of the value of the input Futures + * @tparam R the type of the value of the returned `Future` + * @param futures the `TraversableOnce` of Futures to be folded + * @param zero the start value of the fold + * @param op the fold operation to be applied to the zero and futures + * @return the `Future` holding the result of the fold */ + @deprecated("Use Future.foldLeft instead", "2.12") def fold[T, R](futures: TraversableOnce[Future[T]])(zero: R)(@deprecatedName('foldFun) op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = { if (futures.isEmpty) successful(zero) else sequence(futures).map(_.foldLeft(zero)(op)) } - /** Initiates a fold over the supplied futures where the fold-zero is the result value of the `Future` that's completed first. + /** Initiates a non-blocking, asynchronous, fold over the supplied futures + * where the fold-zero is the result value of the `Future` that's completed first. * * Example: * {{{ - * val result = Await.result(Future.reduce(futures)(_ + _), 5 seconds) + * val futureSum = Future.reduce(futures)(_ + _) * }}} + * @tparam T the type of the value of the input Futures + * @tparam R the type of the value of the returned `Future` + * @param futures the `TraversableOnce` of Futures to be reduced + * @param op the reduce operation which is applied to the results of the futures + * @return the `Future` holding the result of the reduce */ + @deprecated("Use Future.reduceLeft instead", "2.12") def reduce[T, R >: T](futures: TraversableOnce[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = { if (futures.isEmpty) failed(new NoSuchElementException("reduce attempted on empty collection")) else sequence(futures).map(_ reduceLeft op) } - /** Transforms a `TraversableOnce[A]` into a `Future[TraversableOnce[B]]` using the provided function `A => Future[B]`. + /** Initiates a non-blocking, asynchronous, left reduction over the supplied futures + * where the zero is the result value of the first `Future`. 
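For contrast with the deprecated fold and reduce, which go through sequence, the new foldLeft above and reduceLeft below consume the futures strictly left to right as they complete. A short sketch (values arbitrary; not part of the patch):

    import scala.concurrent.Future
    import scala.concurrent.ExecutionContext.Implicits.global

    val futures = List(Future(1), Future(2), Future(3))

    val sum: Future[Int] = Future.foldLeft(futures)(0)(_ + _)   // eventually 6
    val max: Future[Int] = Future.reduceLeft(futures)(_ max _)  // eventually 3

    // reduceLeft has no zero value, so an empty input yields a Future failed
    // with a NoSuchElementException rather than a default result.
    val empty: Future[Int] = Future.reduceLeft(List.empty[Future[Int]])(_ + _)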
+ * + * Example: + * {{{ + * val futureSum = Future.reduceLeft(futures)(_ + _) + * }}} + * @tparam T the type of the value of the input Futures + * @tparam R the type of the value of the returned `Future` + * @param futures the `scala.collection.immutable.Iterable` of Futures to be reduced + * @param op the reduce operation which is applied to the results of the futures + * @return the `Future` holding the result of the reduce + */ + def reduceLeft[T, R >: T](futures: scala.collection.immutable.Iterable[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = { + val i = futures.iterator + if (!i.hasNext) failed(new NoSuchElementException("reduceLeft attempted on empty collection")) + else i.next() flatMap { v => foldNext(i, v, op) } + } + + /** Asynchronously and non-blockingly transforms a `TraversableOnce[A]` into a `Future[TraversableOnce[B]]` + * using the provided function `A => Future[B]`. * This is useful for performing a parallel map. For example, to apply a function to all items of a list * in parallel: * * {{{ * val myFutureList = Future.traverse(myList)(x => Future(myFunc(x))) * }}} + * @tparam A the type of the value inside the Futures in the `TraversableOnce` + * @tparam B the type of the value of the returned `Future` + * @tparam M the type of the `TraversableOnce` of Futures + * @param in the `TraversableOnce` of Futures which will be sequenced + * @param fn the function to apply to the `TraversableOnce` of Futures to produce the results + * @return the `Future` of the `TraversableOnce` of results */ def traverse[A, B, M[X] <: TraversableOnce[X]](in: M[A])(fn: A => Future[B])(implicit cbf: CanBuildFrom[M[A], B, M[B]], executor: ExecutionContext): Future[M[B]] = in.foldLeft(successful(cbf(in))) { (fr, a) => @@ -575,6 +787,7 @@ object Future { for (r <- fr; b <- fb) yield (r += b) }.map(_.result()) + // This is used to run callbacks which are internal // to scala.concurrent; our own callbacks are only // ever used to eventually run another callback, diff --git a/src/library/scala/concurrent/Lock.scala b/src/library/scala/concurrent/Lock.scala index 1c00c0e91f1d..8d18da2d387f 100644 --- a/src/library/scala/concurrent/Lock.scala +++ b/src/library/scala/concurrent/Lock.scala @@ -14,8 +14,8 @@ package scala.concurrent * * @author Martin Odersky * @version 1.0, 10/03/2003 - * @deprecated("Use java.util.concurrent.locks.Lock", "2.11.0") */ +@deprecated("Use java.util.concurrent.locks.Lock", "2.11.2") class Lock { var available = true diff --git a/src/library/scala/concurrent/Promise.scala b/src/library/scala/concurrent/Promise.scala index eb8044ed3bfb..dc4376eba4e4 100644 --- a/src/library/scala/concurrent/Promise.scala +++ b/src/library/scala/concurrent/Promise.scala @@ -26,12 +26,6 @@ import scala.util.{ Try, Success, Failure } * Note: Using this method may result in non-deterministic concurrent programs. */ trait Promise[T] { - - // used for internal callbacks defined in - // the lexical scope of this trait; - // _never_ for application callbacks. - private implicit def internalExecutor: ExecutionContext = Future.InternalCallbackExecutor - /** Future containing the value of this promise. 
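traverse above is the asynchronous counterpart of mapping over a collection. A usage sketch, where lookupLength stands in for arbitrary per-element work (illustrative only, mirroring the myFunc of the scaladoc):

    import scala.concurrent.Future
    import scala.concurrent.ExecutionContext.Implicits.global

    def lookupLength(s: String): Future[Int] = Future(s.length)  // hypothetical per-element work

    val words = List("non", "blocking", "traverse")
    val lengths: Future[List[Int]] = Future.traverse(words)(lookupLength)  // eventually List(3, 8, 8)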
*/ def future: Future[T] @@ -67,7 +61,9 @@ trait Promise[T] { * @return This promise */ final def completeWith(other: Future[T]): this.type = { - other onComplete { this complete _ } + if (other ne this.future) { // this completeWith this doesn't make much sense + other.onComplete(this complete _)(Future.InternalCallbackExecutor) + } this } @@ -76,7 +72,9 @@ trait Promise[T] { * @return This promise */ final def tryCompleteWith(other: Future[T]): this.type = { - other onComplete { this tryComplete _ } + if (other ne this.future) { // this tryCompleteWith this doesn't make much sense + other.onComplete(this tryComplete _)(Future.InternalCallbackExecutor) + } this } @@ -142,5 +140,5 @@ object Promise { * @tparam T the type of the value in the promise * @return the newly created `Promise` object */ - def fromTry[T](result: Try[T]): Promise[T] = new impl.Promise.KeptPromise[T](result) + def fromTry[T](result: Try[T]): Promise[T] = impl.Promise.KeptPromise[T](result) } diff --git a/src/library/scala/concurrent/SyncVar.scala b/src/library/scala/concurrent/SyncVar.scala index d5dc3d7e3fdf..9634f6d90007 100644 --- a/src/library/scala/concurrent/SyncVar.scala +++ b/src/library/scala/concurrent/SyncVar.scala @@ -13,6 +13,7 @@ import java.util.concurrent.TimeUnit /** A class to provide safe concurrent access to a mutable cell. * All methods are synchronized. * + * @tparam A type of the contained value * @author Martin Odersky * @version 1.0, 10/03/2003 */ @@ -20,6 +21,12 @@ class SyncVar[A] { private var isDefined: Boolean = false private var value: Option[A] = None + /** + * Waits for this SyncVar to become defined and returns + * the result, without modifying the stored value. + * + * @return value that is held in this container + */ def get: A = synchronized { while (!isDefined) wait() value.get @@ -57,8 +64,12 @@ class SyncVar[A] { value } - /** Waits for this SyncVar to become defined and returns - * the result */ + /** + * Waits for this SyncVar to become defined and returns + * the result, unsetting the stored value before returning. + * + * @return value that was held in this container + */ def take(): A = synchronized { try get finally unsetVal() @@ -82,7 +93,7 @@ class SyncVar[A] { // [Heather] the reason why: it doesn't take into consideration // whether or not the SyncVar is already defined. So, set has been // deprecated in order to eventually be able to make "setting" private - @deprecated("Use `put` instead, as `set` is potentionally error-prone", "2.10.0") + @deprecated("Use `put` instead, as `set` is potentially error-prone", "2.10.0") // NOTE: Used by SBT 0.13.0-M2 and below def set(x: A): Unit = setVal(x) @@ -102,7 +113,7 @@ class SyncVar[A] { // [Heather] the reason why: it doesn't take into consideration // whether or not the SyncVar is already defined. 
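The expanded SyncVar documentation above distinguishes get, which leaves the value in place, from take(), which clears it. A minimal sketch (not part of the patch):

    import scala.concurrent.SyncVar

    val cell = new SyncVar[Int]
    cell.put(1)
    cell.get     // 1; the value stays defined
    cell.take()  // 1; the value is removed, so another take() would block until the next put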
So, unset has been // deprecated in order to eventually be able to make "unsetting" private - @deprecated("Use `take` instead, as `unset` is potentionally error-prone", "2.10.0") + @deprecated("Use `take` instead, as `unset` is potentially error-prone", "2.10.0") // NOTE: Used by SBT 0.13.0-M2 and below def unset(): Unit = synchronized { isDefined = false @@ -129,4 +140,3 @@ class SyncVar[A] { } } - diff --git a/src/library/scala/concurrent/duration/Deadline.scala b/src/library/scala/concurrent/duration/Deadline.scala index 61cbe4753072..a25a4786027f 100644 --- a/src/library/scala/concurrent/duration/Deadline.scala +++ b/src/library/scala/concurrent/duration/Deadline.scala @@ -25,15 +25,15 @@ package scala.concurrent.duration */ case class Deadline private (time: FiniteDuration) extends Ordered[Deadline] { /** - * Return a deadline advanced (i.e. moved into the future) by the given duration. + * Return a deadline advanced (i.e., moved into the future) by the given duration. */ def +(other: FiniteDuration): Deadline = copy(time = time + other) /** - * Return a deadline moved backwards (i.e. towards the past) by the given duration. + * Return a deadline moved backwards (i.e., towards the past) by the given duration. */ def -(other: FiniteDuration): Deadline = copy(time = time - other) /** - * Calculate time difference between this and the other deadline, where the result is directed (i.e. may be negative). + * Calculate time difference between this and the other deadline, where the result is directed (i.e., may be negative). */ def -(other: Deadline): FiniteDuration = time - other.time /** diff --git a/src/library/scala/concurrent/duration/Duration.scala b/src/library/scala/concurrent/duration/Duration.scala index 1b50b7fa5672..2eded9f06090 100644 --- a/src/library/scala/concurrent/duration/Duration.scala +++ b/src/library/scala/concurrent/duration/Duration.scala @@ -621,7 +621,7 @@ final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duratio } def -(other: Duration) = other match { case x: FiniteDuration => add(-x.length, x.unit) - case _ => other + case _ => -other } def *(factor: Double) = diff --git a/src/library/scala/concurrent/impl/AbstractPromise.java b/src/library/scala/concurrent/impl/AbstractPromise.java deleted file mode 100644 index b8165b6cdea9..000000000000 --- a/src/library/scala/concurrent/impl/AbstractPromise.java +++ /dev/null @@ -1,40 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.concurrent.impl; - - -import scala.concurrent.util.Unsafe; -import java.util.concurrent.atomic.AtomicReferenceFieldUpdater; - - - -abstract class AbstractPromise { - private volatile Object _ref; - - final static long _refoffset; - - static { - try { - _refoffset = Unsafe.instance.objectFieldOffset(AbstractPromise.class.getDeclaredField("_ref")); - } catch (Throwable t) { - throw new ExceptionInInitializerError(t); - } - } - - protected final boolean updateState(Object oldState, Object newState) { - return Unsafe.instance.compareAndSwapObject(this, _refoffset, oldState, newState); - } - - protected final Object getState() { - return _ref; - } - - protected final static AtomicReferenceFieldUpdater updater = - AtomicReferenceFieldUpdater.newUpdater(AbstractPromise.class, Object.class, "_ref"); -} \ No newline at end of file diff --git 
a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala index 479720287ca6..0c7f98ce5aeb 100644 --- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala +++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala @@ -11,52 +11,88 @@ package scala.concurrent.impl import java.util.concurrent.{ LinkedBlockingQueue, Callable, Executor, ExecutorService, Executors, ThreadFactory, TimeUnit, ThreadPoolExecutor } +import java.util.concurrent.atomic.AtomicInteger import java.util.Collection import scala.concurrent.forkjoin._ import scala.concurrent.{ BlockContext, ExecutionContext, Awaitable, CanAwait, ExecutionContextExecutor, ExecutionContextExecutorService } import scala.util.control.NonFatal +import scala.annotation.tailrec +private[scala] class ExecutionContextImpl private[impl] (val executor: Executor, val reporter: Throwable => Unit) extends ExecutionContextExecutor { + require(executor ne null, "Executor must not be null") + override def execute(runnable: Runnable) = executor execute runnable + override def reportFailure(t: Throwable) = reporter(t) +} -private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter: Throwable => Unit) extends ExecutionContextExecutor { - // Placed here since the creation of the executor needs to read this val - private[this] val uncaughtExceptionHandler: Thread.UncaughtExceptionHandler = new Thread.UncaughtExceptionHandler { - def uncaughtException(thread: Thread, cause: Throwable): Unit = reporter(cause) - } - val executor: Executor = es match { - case null => createExecutorService - case some => some - } +private[concurrent] object ExecutionContextImpl { // Implement BlockContext on FJP threads - class DefaultThreadFactory(daemonic: Boolean) extends ThreadFactory with ForkJoinPool.ForkJoinWorkerThreadFactory { + final class DefaultThreadFactory( + daemonic: Boolean, + maxThreads: Int, + prefix: String, + uncaught: Thread.UncaughtExceptionHandler) extends ThreadFactory with ForkJoinPool.ForkJoinWorkerThreadFactory { + + require(prefix ne null, "DefaultThreadFactory.prefix must be non null") + require(maxThreads > 0, "DefaultThreadFactory.maxThreads must be greater than 0") + + private final val currentNumberOfThreads = new AtomicInteger(0) + + @tailrec private final def reserveThread(): Boolean = currentNumberOfThreads.get() match { + case `maxThreads` | Int.`MaxValue` => false + case other => currentNumberOfThreads.compareAndSet(other, other + 1) || reserveThread() + } + + @tailrec private final def deregisterThread(): Boolean = currentNumberOfThreads.get() match { + case 0 => false + case other => currentNumberOfThreads.compareAndSet(other, other - 1) || deregisterThread() + } + def wire[T <: Thread](thread: T): T = { thread.setDaemon(daemonic) - thread.setUncaughtExceptionHandler(uncaughtExceptionHandler) + thread.setUncaughtExceptionHandler(uncaught) + thread.setName(prefix + "-" + thread.getId()) thread } - def newThread(runnable: Runnable): Thread = wire(new Thread(runnable)) - - def newThread(fjp: ForkJoinPool): ForkJoinWorkerThread = wire(new ForkJoinWorkerThread(fjp) with BlockContext { - override def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T = { - var result: T = null.asInstanceOf[T] - ForkJoinPool.managedBlock(new ForkJoinPool.ManagedBlocker { - @volatile var isdone = false - override def block(): Boolean = { - result = try thunk finally { isdone = true } - true + // As per ThreadFactory contract newThread should return `null` if 
cannot create new thread. + def newThread(runnable: Runnable): Thread = + if (reserveThread()) + wire(new Thread(new Runnable { + // We have to decrement the current thread count when the thread exits + override def run() = try runnable.run() finally deregisterThread() + })) else null + + def newThread(fjp: ForkJoinPool): ForkJoinWorkerThread = + if (reserveThread()) { + wire(new ForkJoinWorkerThread(fjp) with BlockContext { + // We have to decrement the current thread count when the thread exits + final override def onTermination(exception: Throwable): Unit = deregisterThread() + final override def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T = { + var result: T = null.asInstanceOf[T] + ForkJoinPool.managedBlock(new ForkJoinPool.ManagedBlocker { + @volatile var isdone = false + override def block(): Boolean = { + result = try { + // When we block, switch out the BlockContext temporarily so that nested blocking does not created N new Threads + BlockContext.withBlockContext(BlockContext.defaultBlockContext) { thunk } + } finally { + isdone = true + } + + true + } + override def isReleasable = isdone + }) + result } - override def isReleasable = isdone }) - result - } - }) + } else null } - def createExecutorService: ExecutorService = { - + def createDefaultExecutorService(reporter: Throwable => Unit): ExecutorService = { def getInt(name: String, default: String) = (try System.getProperty(name, default) catch { case e: SecurityException => default }) match { @@ -65,20 +101,42 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter: } def range(floor: Int, desired: Int, ceiling: Int) = scala.math.min(scala.math.max(floor, desired), ceiling) + val numThreads = getInt("scala.concurrent.context.numThreads", "x1") + // The hard limit on the number of active threads that the thread factory will produce + // SI-8955 Deadlocks can happen if maxNoOfThreads is too low, although we're currently not sure + // about what the exact threshhold is. numThreads + 256 is conservatively high. + val maxNoOfThreads = getInt("scala.concurrent.context.maxThreads", "x1") val desiredParallelism = range( getInt("scala.concurrent.context.minThreads", "1"), - getInt("scala.concurrent.context.numThreads", "x1"), - getInt("scala.concurrent.context.maxThreads", "x1")) + numThreads, + maxNoOfThreads) + + // The thread factory must provide additional threads to support managed blocking. 
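The thread factory above enforces its cap with a compare-and-set loop over an AtomicInteger, while the pool size itself is read from the scala.concurrent.context.* system properties. The same bounded-reservation idea in isolation (ThreadBudget and its method names are made up for illustration, not the patch's code):

    import java.util.concurrent.atomic.AtomicInteger
    import scala.annotation.tailrec

    final class ThreadBudget(maxThreads: Int) {
      private val current = new AtomicInteger(0)

      // true if a new thread may be started; retries the CAS until it wins or the cap is hit
      @tailrec final def reserve(): Boolean = current.get() match {
        case `maxThreads` | Int.MaxValue => false
        case n => current.compareAndSet(n, n + 1) || reserve()
      }

      // called when a thread terminates, giving its slot back
      @tailrec final def release(): Boolean = current.get() match {
        case 0 => false
        case n => current.compareAndSet(n, n - 1) || release()
      }
    }

The defaults shown here amount to minThreads = 1, numThreads and maxThreads given in "xN" multiplier notation (interpreted relative to the available processors), plus maxExtraThreads = 256 of headroom for managed blocking.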
+ val maxExtraThreads = getInt("scala.concurrent.context.maxExtraThreads", "256") + + val uncaughtExceptionHandler: Thread.UncaughtExceptionHandler = new Thread.UncaughtExceptionHandler { + override def uncaughtException(thread: Thread, cause: Throwable): Unit = reporter(cause) + } - val threadFactory = new DefaultThreadFactory(daemonic = true) + val threadFactory = new ExecutionContextImpl.DefaultThreadFactory(daemonic = true, + maxThreads = maxNoOfThreads + maxExtraThreads, + prefix = "scala-execution-context-global", + uncaught = uncaughtExceptionHandler) try { - new ForkJoinPool( - desiredParallelism, - threadFactory, - uncaughtExceptionHandler, - true) // Async all the way baby + new ForkJoinPool(desiredParallelism, threadFactory, uncaughtExceptionHandler, true) { + override def execute(runnable: Runnable): Unit = { + val fjt: ForkJoinTask[_] = runnable match { + case t: ForkJoinTask[_] => t + case r => new ExecutionContextImpl.AdaptedForkJoinTask(r) + } + Thread.currentThread match { + case fjw: ForkJoinWorkerThread if fjw.getPool eq this => fjt.fork() + case _ => super.execute(fjt) + } + } + } } catch { case NonFatal(t) => System.err.println("Failed to create ForkJoinPool for the default ExecutionContext, falling back to ThreadPoolExecutor") @@ -96,56 +154,42 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter: } } - def execute(runnable: Runnable): Unit = executor match { - case fj: ForkJoinPool => - val fjt: ForkJoinTask[_] = runnable match { - case t: ForkJoinTask[_] => t - case r => new ExecutionContextImpl.AdaptedForkJoinTask(r) - } - Thread.currentThread match { - case fjw: ForkJoinWorkerThread if fjw.getPool eq fj => fjt.fork() - case _ => fj execute fjt - } - case generic => generic execute runnable - } - - def reportFailure(t: Throwable) = reporter(t) -} - - -private[concurrent] object ExecutionContextImpl { - final class AdaptedForkJoinTask(runnable: Runnable) extends ForkJoinTask[Unit] { - final override def setRawResult(u: Unit): Unit = () - final override def getRawResult(): Unit = () - final override def exec(): Boolean = try { runnable.run(); true } catch { - case anything: Throwable ⇒ - val t = Thread.currentThread - t.getUncaughtExceptionHandler match { - case null ⇒ - case some ⇒ some.uncaughtException(t, anything) - } - throw anything - } + final override def setRawResult(u: Unit): Unit = () + final override def getRawResult(): Unit = () + final override def exec(): Boolean = try { runnable.run(); true } catch { + case anything: Throwable => + val t = Thread.currentThread + t.getUncaughtExceptionHandler match { + case null => + case some => some.uncaughtException(t, anything) } + throw anything + } + } - def fromExecutor(e: Executor, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl = new ExecutionContextImpl(e, reporter) - def fromExecutorService(es: ExecutorService, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl with ExecutionContextExecutorService = - new ExecutionContextImpl(es, reporter) with ExecutionContextExecutorService { - final def asExecutorService: ExecutorService = executor.asInstanceOf[ExecutorService] - override def execute(command: Runnable) = executor.execute(command) - override def shutdown() { asExecutorService.shutdown() } - override def shutdownNow() = asExecutorService.shutdownNow() - override def isShutdown = asExecutorService.isShutdown - override def isTerminated = asExecutorService.isTerminated - override def awaitTermination(l: 
Long, timeUnit: TimeUnit) = asExecutorService.awaitTermination(l, timeUnit) - override def submit[T](callable: Callable[T]) = asExecutorService.submit(callable) - override def submit[T](runnable: Runnable, t: T) = asExecutorService.submit(runnable, t) - override def submit(runnable: Runnable) = asExecutorService.submit(runnable) - override def invokeAll[T](callables: Collection[_ <: Callable[T]]) = asExecutorService.invokeAll(callables) - override def invokeAll[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = asExecutorService.invokeAll(callables, l, timeUnit) - override def invokeAny[T](callables: Collection[_ <: Callable[T]]) = asExecutorService.invokeAny(callables) - override def invokeAny[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = asExecutorService.invokeAny(callables, l, timeUnit) + def fromExecutor(e: Executor, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl = + new ExecutionContextImpl(Option(e).getOrElse(createDefaultExecutorService(reporter)), reporter) + + def fromExecutorService(es: ExecutorService, reporter: Throwable => Unit = ExecutionContext.defaultReporter): + ExecutionContextImpl with ExecutionContextExecutorService = { + new ExecutionContextImpl(Option(es).getOrElse(createDefaultExecutorService(reporter)), reporter) + with ExecutionContextExecutorService { + final def asExecutorService: ExecutorService = executor.asInstanceOf[ExecutorService] + override def execute(command: Runnable) = executor.execute(command) + override def shutdown() { asExecutorService.shutdown() } + override def shutdownNow() = asExecutorService.shutdownNow() + override def isShutdown = asExecutorService.isShutdown + override def isTerminated = asExecutorService.isTerminated + override def awaitTermination(l: Long, timeUnit: TimeUnit) = asExecutorService.awaitTermination(l, timeUnit) + override def submit[T](callable: Callable[T]) = asExecutorService.submit(callable) + override def submit[T](runnable: Runnable, t: T) = asExecutorService.submit(runnable, t) + override def submit(runnable: Runnable) = asExecutorService.submit(runnable) + override def invokeAll[T](callables: Collection[_ <: Callable[T]]) = asExecutorService.invokeAll(callables) + override def invokeAll[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = asExecutorService.invokeAll(callables, l, timeUnit) + override def invokeAny[T](callables: Collection[_ <: Callable[T]]) = asExecutorService.invokeAny(callables) + override def invokeAny[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = asExecutorService.invokeAny(callables, l, timeUnit) + } } } diff --git a/src/library/scala/concurrent/impl/Future.scala b/src/library/scala/concurrent/impl/Future.scala deleted file mode 100644 index 042d32c23491..000000000000 --- a/src/library/scala/concurrent/impl/Future.scala +++ /dev/null @@ -1,34 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.concurrent.impl - - - -import scala.concurrent.ExecutionContext -import scala.util.control.NonFatal -import scala.util.{ Success, Failure } - - -private[concurrent] object Future { - class PromiseCompletingRunnable[T](body: => T) extends Runnable { - val promise = new Promise.DefaultPromise[T]() - - override def run() = { - promise complete { - try Success(body) catch { case 
NonFatal(e) => Failure(e) } - } - } - } - - def apply[T](body: =>T)(implicit executor: ExecutionContext): scala.concurrent.Future[T] = { - val runnable = new PromiseCompletingRunnable(body) - executor.prepare.execute(runnable) - runnable.promise.future - } -} diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala index b15601058ea3..078ad45be9e2 100644 --- a/src/library/scala/concurrent/impl/Promise.scala +++ b/src/library/scala/concurrent/impl/Promise.scala @@ -16,14 +16,42 @@ import scala.util.control.NonFatal import scala.util.{ Try, Success, Failure } import java.io.ObjectInputStream import java.util.concurrent.locks.AbstractQueuedSynchronizer +import java.util.concurrent.atomic.AtomicReference private[concurrent] trait Promise[T] extends scala.concurrent.Promise[T] with scala.concurrent.Future[T] { def future: this.type = this + + import scala.concurrent.Future + import scala.concurrent.impl.Promise.DefaultPromise + + override def transform[S](f: Try[T] => Try[S])(implicit executor: ExecutionContext): Future[S] = { + val p = new DefaultPromise[S]() + onComplete { result => p.complete(try f(result) catch { case NonFatal(t) => Failure(t) }) } + p.future + } + + // If possible, link DefaultPromises to avoid space leaks + override def transformWith[S](f: Try[T] => Future[S])(implicit executor: ExecutionContext): Future[S] = { + val p = new DefaultPromise[S]() + onComplete { + v => try f(v) match { + case fut if fut eq this => p complete v.asInstanceOf[Try[S]] + case dp: DefaultPromise[_] => dp.asInstanceOf[DefaultPromise[S]].linkRootOf(p) + case fut => p completeWith fut + } catch { case NonFatal(t) => p failure t } + } + p.future + } + + override def toString: String = value match { + case Some(result) => "Future("+result+")" + case None => "Future()" + } } /* Precondition: `executor` is prepared, i.e., `executor` has been returned from invocation of `prepare` on some other `ExecutionContext`. */ -private class CallbackRunnable[T](val executor: ExecutionContext, val onComplete: Try[T] => Any) extends Runnable with OnCompleteRunnable { +private final class CallbackRunnable[T](val executor: ExecutionContext, val onComplete: Try[T] => Any) extends Runnable with OnCompleteRunnable { // must be filled in before running it var value: Try[T] = null @@ -89,7 +117,7 @@ private[concurrent] object Promise { * incomplete, or as complete with the same result value. * * A DefaultPromise stores its state entirely in the AnyRef cell exposed by - * AbstractPromise. The type of object stored in the cell fully describes the + * AtomicReference. The type of object stored in the cell fully describes the * current state of the promise. * * 1. List[CallbackRunnable] - The promise is incomplete and has zero or more callbacks @@ -150,8 +178,7 @@ private[concurrent] object Promise { * DefaultPromises, and `linkedRootOf` is currently only designed to be called * by Future.flatMap. */ - class DefaultPromise[T] extends AbstractPromise with Promise[T] { self => - updateState(null, Nil) // The promise is incomplete and has no callbacks + final class DefaultPromise[T] extends AtomicReference[AnyRef](Nil) with Promise[T] { /** Get the root promise for this promise, compressing the link chain to that * promise if necessary. @@ -167,14 +194,23 @@ private[concurrent] object Promise { * be garbage collected. Also, subsequent calls to this method should be * faster as the link chain will be shorter. 
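transform and transformWith now act as the primitives of the promise implementation, with transformWith additionally linking DefaultPromises to avoid space leaks. As a hedged illustration using only the public API, the familiar combinators can be written on top of them; myMap and myFlatMap are made-up names, not the library's own code:

    import scala.concurrent.{ExecutionContext, Future}
    import scala.util.{Failure, Success}

    def myMap[T, S](f: Future[T])(g: T => S)(implicit ec: ExecutionContext): Future[S] =
      f.transform(_.map(g))                 // Try#map leaves a Failure untouched

    def myFlatMap[T, S](f: Future[T])(g: T => Future[S])(implicit ec: ExecutionContext): Future[S] =
      f.transformWith {
        case Success(v) => g(v)
        case Failure(t) => Future.failed(t) // the library can instead reuse the already-failed future
      }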
*/ - @tailrec - private def compressedRoot(): DefaultPromise[T] = { - getState match { - case linked: DefaultPromise[_] => - val target = linked.asInstanceOf[DefaultPromise[T]].root - if (linked eq target) target else if (updateState(linked, target)) target else compressedRoot() + private def compressedRoot(): DefaultPromise[T] = + get() match { + case linked: DefaultPromise[_] => compressedRoot(linked) case _ => this } + + @tailrec + private[this] final def compressedRoot(linked: DefaultPromise[_]): DefaultPromise[T] = { + val target = linked.asInstanceOf[DefaultPromise[T]].root + if (linked eq target) target + else if (compareAndSet(linked, target)) target + else { + get() match { + case newLinked: DefaultPromise[_] => compressedRoot(newLinked) + case _ => this + } + } } /** Get the promise at the root of the chain of linked promises. Used by `compressedRoot()`. @@ -182,18 +218,16 @@ private[concurrent] object Promise { * to compress the link chain whenever possible. */ @tailrec - private def root: DefaultPromise[T] = { - getState match { + private def root: DefaultPromise[T] = + get() match { case linked: DefaultPromise[_] => linked.asInstanceOf[DefaultPromise[T]].root case _ => this } - } /** Try waiting for this promise to be completed. */ protected final def tryAwait(atMost: Duration): Boolean = if (!isCompleted) { import Duration.Undefined - import scala.concurrent.Future.InternalCallbackExecutor atMost match { case e if e eq Undefined => throw new IllegalArgumentException("cannot wait for Undefined period") case Duration.Inf => @@ -225,18 +259,18 @@ private[concurrent] object Promise { def value: Option[Try[T]] = value0 @tailrec - private def value0: Option[Try[T]] = getState match { + private def value0: Option[Try[T]] = get() match { case c: Try[_] => Some(c.asInstanceOf[Try[T]]) - case _: DefaultPromise[_] => compressedRoot().value0 + case dp: DefaultPromise[_] => compressedRoot(dp).value0 case _ => None } override def isCompleted: Boolean = isCompleted0 @tailrec - private def isCompleted0: Boolean = getState match { + private def isCompleted0: Boolean = get() match { case _: Try[_] => true - case _: DefaultPromise[_] => compressedRoot().isCompleted0 + case dp: DefaultPromise[_] => compressedRoot(dp).isCompleted0 case _ => false } @@ -254,21 +288,17 @@ private[concurrent] object Promise { */ @tailrec private def tryCompleteAndGetListeners(v: Try[T]): List[CallbackRunnable[T]] = { - getState match { + get() match { case raw: List[_] => val cur = raw.asInstanceOf[List[CallbackRunnable[T]]] - if (updateState(cur, v)) cur else tryCompleteAndGetListeners(v) - case _: DefaultPromise[_] => - compressedRoot().tryCompleteAndGetListeners(v) + if (compareAndSet(cur, v)) cur else tryCompleteAndGetListeners(v) + case dp: DefaultPromise[_] => compressedRoot(dp).tryCompleteAndGetListeners(v) case _ => null } } - def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = { - val preparedEC = executor.prepare() - val runnable = new CallbackRunnable[T](preparedEC, func) - dispatchOrAddCallback(runnable) - } + def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = + dispatchOrAddCallback(new CallbackRunnable[T](executor.prepare(), func)) /** Tries to add the callback, if already completed, it dispatches the callback to be executed. 
* Used by `onComplete()` to add callbacks to a promise and by `link()` to transfer callbacks @@ -276,15 +306,16 @@ private[concurrent] object Promise { */ @tailrec private def dispatchOrAddCallback(runnable: CallbackRunnable[T]): Unit = { - getState match { + get() match { case r: Try[_] => runnable.executeWithValue(r.asInstanceOf[Try[T]]) - case _: DefaultPromise[_] => compressedRoot().dispatchOrAddCallback(runnable) - case listeners: List[_] => if (updateState(listeners, runnable :: listeners)) () else dispatchOrAddCallback(runnable) + case dp: DefaultPromise[_] => compressedRoot(dp).dispatchOrAddCallback(runnable) + case listeners: List[_] => if (compareAndSet(listeners, runnable :: listeners)) () + else dispatchOrAddCallback(runnable) } } /** Link this promise to the root of another promise using `link()`. Should only be - * be called by Future.flatMap. + * be called by transformWith. */ protected[concurrent] final def linkRootOf(target: DefaultPromise[T]): Unit = link(target.compressedRoot()) @@ -299,18 +330,17 @@ private[concurrent] object Promise { */ @tailrec private def link(target: DefaultPromise[T]): Unit = if (this ne target) { - getState match { + get() match { case r: Try[_] => - if (!target.tryComplete(r.asInstanceOf[Try[T]])) { - // Currently linking is done from Future.flatMap, which should ensure only - // one promise can be completed. Therefore this situation is unexpected. + if (!target.tryComplete(r.asInstanceOf[Try[T]])) throw new IllegalStateException("Cannot link completed promises together") - } - case _: DefaultPromise[_] => - compressedRoot().link(target) - case listeners: List[_] => if (updateState(listeners, target)) { - if (!listeners.isEmpty) listeners.asInstanceOf[List[CallbackRunnable[T]]].foreach(target.dispatchOrAddCallback(_)) - } else link(target) + case dp: DefaultPromise[_] => + compressedRoot(dp).link(target) + case listeners: List[_] if compareAndSet(listeners, target) => + if (listeners.nonEmpty) + listeners.asInstanceOf[List[CallbackRunnable[T]]].foreach(target.dispatchOrAddCallback(_)) + case _ => + link(target) } } } @@ -319,23 +349,58 @@ private[concurrent] object Promise { * * Useful in Future-composition when a value to contribute is already available. 
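Already-completed futures get dedicated kept-promise implementations (the Successful and Failed specialisations just below), so combinators that cannot change the outcome can simply return the same instance. A sketch of what that looks like through the public API (illustrative only):

    import scala.concurrent.Future
    import scala.concurrent.ExecutionContext.Implicits.global

    val failed = Future.failed[Int](new IllegalStateException("boom"))

    failed.map(_ + 1)     // cannot succeed, so the same failed future can be returned
    failed.filter(_ > 0)  // likewise

    failed.recover { case _: IllegalStateException => 0 }  // this one can succeed, so a new future is built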
*/ - final class KeptPromise[T](suppliedValue: Try[T]) extends Promise[T] { + object KeptPromise { + import scala.concurrent.Future + import scala.reflect.ClassTag + + private[this] sealed trait Kept[T] extends Promise[T] { + def result: Try[T] + + override def value: Option[Try[T]] = Some(result) - val value = Some(resolveTry(suppliedValue)) + override def isCompleted: Boolean = true - override def isCompleted: Boolean = true + override def tryComplete(value: Try[T]): Boolean = false - def tryComplete(value: Try[T]): Boolean = false + override def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = + (new CallbackRunnable(executor.prepare(), func)).executeWithValue(result) - def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = { - val completedAs = value.get - val preparedEC = executor.prepare() - (new CallbackRunnable(preparedEC, func)).executeWithValue(completedAs) + override def ready(atMost: Duration)(implicit permit: CanAwait): this.type = this + + override def result(atMost: Duration)(implicit permit: CanAwait): T = result.get } - def ready(atMost: Duration)(implicit permit: CanAwait): this.type = this + private[this] final class Successful[T](val result: Success[T]) extends Kept[T] { + override def onFailure[U](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = () + override def failed: Future[Throwable] = KeptPromise(Failure(new NoSuchElementException("Future.failed not completed with a throwable."))).future + override def recover[U >: T](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = this + override def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = this + override def fallbackTo[U >: T](that: Future[U]): Future[U] = this + } - def result(atMost: Duration)(implicit permit: CanAwait): T = value.get.get + private[this] final class Failed[T](val result: Failure[T]) extends Kept[T] { + private[this] final def thisAs[S]: Future[S] = future.asInstanceOf[Future[S]] + + override def onSuccess[U](pf: PartialFunction[T, U])(implicit executor: ExecutionContext): Unit = () + override def failed: Future[Throwable] = thisAs[Throwable] + override def foreach[U](f: T => U)(implicit executor: ExecutionContext): Unit = () + override def map[S](f: T => S)(implicit executor: ExecutionContext): Future[S] = thisAs[S] + override def flatMap[S](f: T => Future[S])(implicit executor: ExecutionContext): Future[S] = thisAs[S] + override def flatten[S](implicit ev: T <:< Future[S]): Future[S] = thisAs[S] + override def filter(p: T => Boolean)(implicit executor: ExecutionContext): Future[T] = this + override def collect[S](pf: PartialFunction[T, S])(implicit executor: ExecutionContext): Future[S] = thisAs[S] + override def zip[U](that: Future[U]): Future[(T, U)] = thisAs[(T,U)] + override def zipWith[U, R](that: Future[U])(f: (T, U) => R)(implicit executor: ExecutionContext): Future[R] = thisAs[R] + override def fallbackTo[U >: T](that: Future[U]): Future[U] = + if (this eq that) this else that.recoverWith({ case _ => this })(InternalCallbackExecutor) + override def mapTo[S](implicit tag: ClassTag[S]): Future[S] = thisAs[S] + } + + def apply[T](result: Try[T]): scala.concurrent.Promise[T] = + resolveTry(result) match { + case s @ Success(_) => new Successful(s) + case f @ Failure(_) => new Failed(f) + } } } diff --git a/src/library/scala/concurrent/package.scala b/src/library/scala/concurrent/package.scala index 
cc1350f5a957..4843d28679e0 100644 --- a/src/library/scala/concurrent/package.scala +++ b/src/library/scala/concurrent/package.scala @@ -47,14 +47,19 @@ package object concurrent { * Blocking on an [[Awaitable]] should be done using [[Await.result]] instead of `blocking`. * * @param body A piece of code which contains potentially blocking or long running calls. - * @throws `CancellationException` if the computation was cancelled - * @throws `InterruptedException` in the case that a wait within the blocking `body` was interrupted + * @throws CancellationException if the computation was cancelled + * @throws InterruptedException in the case that a wait within the blocking `body` was interrupted */ @throws(classOf[Exception]) def blocking[T](body: =>T): T = BlockContext.current.blockOn(body)(scala.concurrent.AwaitPermission) } package concurrent { + /** + * This marker trait is used by [[Await]] to ensure that [[Awaitable.ready]] and [[Awaitable.result]] + * are not directly called by user code. An implicit instance of this trait is only available when + * user code is currently calling the methods on [[Await]]. + */ @implicitNotFound("Don't call `Awaitable` methods directly, use the `Await` object.") sealed trait CanAwait diff --git a/src/library/scala/io/BufferedSource.scala b/src/library/scala/io/BufferedSource.scala index 1c87a1f42166..52fa525b2425 100644 --- a/src/library/scala/io/BufferedSource.scala +++ b/src/library/scala/io/BufferedSource.scala @@ -93,7 +93,7 @@ class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val cod val buf = new Array[Char](bufferSize) var n = 0 while (n != -1) { - n = charReader.read(buf) + n = allReader.read(buf) if (n>0) sb.appendAll(buf, 0, n) } sb.result diff --git a/src/library/scala/io/Source.scala b/src/library/scala/io/Source.scala index 74c3e0683983..e38c197196d0 100644 --- a/src/library/scala/io/Source.scala +++ b/src/library/scala/io/Source.scala @@ -10,7 +10,7 @@ package scala package io import scala.collection.AbstractIterator -import java.io.{ FileInputStream, InputStream, PrintStream, File => JFile } +import java.io.{ FileInputStream, InputStream, PrintStream, File => JFile, Closeable } import java.net.{ URI, URL } /** This object provides convenience methods to create an iterable @@ -169,14 +169,25 @@ object Source { createBufferedSource(is, reset = () => fromInputStream(is)(codec), close = () => is.close())(codec) } -/** The class `Source` implements an iterable representation of source data. - * Calling method `reset` returns an identical, resetted source, where - * possible. +/** An iterable representation of source data. + * It may be reset with the optional `reset` method. + * + * Subclasses must supply [[scala.io.Source@iter the underlying iterator]]. + * + * Error handling may be customized by overriding the [[scala.io.Source@report report]] method. + * + * The [[scala.io.Source@ch current input]] and [[scala.io.Source@pos position]], + * as well as the [[scala.io.Source@next next character]] methods delegate to + * [[scala.io.Source$Positioner the positioner]]. + * + * The default positioner encodes line and column numbers in the position passed to `report`. + * This behavior can be changed by supplying a + * [[scala.io.Source@withPositioning(pos:Source.this.Positioner):Source.this.type custom positioner]]. 
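blocking hands the wrapped call to the current BlockContext, which on the default pool above can compensate, for instance by starting one of the extra threads reserved for managed blocking. A minimal usage sketch (not part of the patch):

    import scala.concurrent.{Future, blocking}
    import scala.concurrent.ExecutionContext.Implicits.global

    val slow: Future[Int] = Future {
      blocking {
        Thread.sleep(1000)  // stands in for blocking I/O
        42
      }
    }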
* * @author Burak Emir * @version 1.0 */ -abstract class Source extends Iterator[Char] { +abstract class Source extends Iterator[Char] with Closeable { /** the actual iterator */ protected val iter: Iterator[Char] diff --git a/src/library/scala/io/StdIn.scala b/src/library/scala/io/StdIn.scala index 64836ecd6eae..0f9656436b8f 100644 --- a/src/library/scala/io/StdIn.scala +++ b/src/library/scala/io/StdIn.scala @@ -4,7 +4,7 @@ package io import java.text.MessageFormat /** private[scala] because this is not functionality we should be providing - * in the standard library, at least not in this idiosyncractic form. + * in the standard library, at least not in this idiosyncratic form. * Factored into trait because it is better code structure regardless. */ private[scala] trait StdIn { diff --git a/src/library/scala/language.scala b/src/library/scala/language.scala index c638f531bb9c..2eb5514a1804 100644 --- a/src/library/scala/language.scala +++ b/src/library/scala/language.scala @@ -1,3 +1,13 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2015, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + + + package scala /** diff --git a/src/library/scala/languageFeature.scala b/src/library/scala/languageFeature.scala index 1f411c412a11..51118b43be30 100644 --- a/src/library/scala/languageFeature.scala +++ b/src/library/scala/languageFeature.scala @@ -1,3 +1,13 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2015, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + + + package scala import scala.annotation.meta diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala index bcbed645a750..cf95f945ba33 100644 --- a/src/library/scala/math/BigDecimal.scala +++ b/src/library/scala/math/BigDecimal.scala @@ -364,7 +364,7 @@ object BigDecimal { * to a decimal text representation, and build a `BigDecimal` based on that. * `BigDecimal.binary` will expand the binary fraction to the requested or default * precision. `BigDecimal.exact` will expand the binary fraction to the - * full number of digits, thus producing the exact decimal value corrsponding to + * full number of digits, thus producing the exact decimal value corresponding to * the binary fraction of that floating-point number. `BigDecimal` equality * matches the decimal expansion of `Double`: `BigDecimal.decimal(0.1) == 0.1`. * Note that since `0.1f != 0.1`, the same is not true for `Float`. Instead, @@ -417,7 +417,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable { private final def computeHashCode(): Unit = { computedHashCode = if (isWhole && (precision - scale) < BigDecimal.maximumHashScale) toBigInt.hashCode - else if (isValidDouble) doubleValue.## + else if (isDecimalDouble) doubleValue.## else { val temp = bigDecimal.stripTrailingZeros scala.util.hashing.MurmurHash3.mixLast( temp.scaleByPowerOfTen(temp.scale).toBigInteger.hashCode, temp.scale ) @@ -477,7 +477,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable { * `isExactDouble`, `isBinaryDouble`, or `isDecimalDouble`, depending on the intended meaning. * By default, `decimal` creation is used, so `isDecimalDouble` is probably what you want. */ - @deprecated("Validity has two distinct meanings. 
Use `isExactBinaryDouble` or `equivalentToDouble` instead.", "2.11") + @deprecated("Validity has distinct meanings. Use `isExactDouble`, `isBinaryDouble`, or `isDecimalDouble` instead.", "2.11") def isValidDouble = { val d = toDouble !d.isInfinity && bigDecimal.compareTo(new BigDec(d)) == 0 @@ -617,10 +617,10 @@ extends ScalaNumber with ScalaNumericConversions with Serializable { */ def abs: BigDecimal = if (signum < 0) unary_- else this - /** Returns the sign of this BigDecimal, i.e. + /** Returns the sign of this BigDecimal; * -1 if it is less than 0, - * +1 if it is greater than 0 - * 0 if it is equal to 0 + * +1 if it is greater than 0, + * 0 if it is equal to 0. */ def signum: Int = this.bigDecimal.signum() diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala index 689fc0c3e1a5..abc7371d9fb0 100644 --- a/src/library/scala/math/BigInt.scala +++ b/src/library/scala/math/BigInt.scala @@ -282,10 +282,10 @@ final class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNum */ def abs: BigInt = new BigInt(this.bigInteger.abs()) - /** Returns the sign of this BigInt, i.e. + /** Returns the sign of this BigInt; * -1 if it is less than 0, - * +1 if it is greater than 0 - * 0 if it is equal to 0 + * +1 if it is greater than 0, + * 0 if it is equal to 0. */ def signum: Int = this.bigInteger.signum() diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala index d1a4e7c35c91..827cccc77e1b 100644 --- a/src/library/scala/math/Ordering.scala +++ b/src/library/scala/math/Ordering.scala @@ -26,7 +26,7 @@ import scala.language.{implicitConversions, higherKinds} * val pairs = Array(("a", 5, 2), ("c", 3, 1), ("b", 1, 3)) * * // sort by 2nd element - * Sorting.quickSort(pairs)(Ordering.by[(String, Int, Int), Int](_._2) + * Sorting.quickSort(pairs)(Ordering.by[(String, Int, Int), Int](_._2)) * * // sort by the 3rd element, then 1st * Sorting.quickSort(pairs)(Ordering[(Int, String)].on(x => (x._3, x._1))) @@ -284,6 +284,9 @@ object Ordering extends LowPriorityOrderingImplicits { override def gteq(x: Float, y: Float): Boolean = outer.gteq(y, x) override def lt(x: Float, y: Float): Boolean = outer.lt(y, x) override def gt(x: Float, y: Float): Boolean = outer.gt(y, x) + override def min(x: Float, y: Float): Float = outer.max(x, y) + override def max(x: Float, y: Float): Float = outer.min(x, y) + } } implicit object Float extends FloatOrdering @@ -309,6 +312,8 @@ object Ordering extends LowPriorityOrderingImplicits { override def gteq(x: Double, y: Double): Boolean = outer.gteq(y, x) override def lt(x: Double, y: Double): Boolean = outer.lt(y, x) override def gt(x: Double, y: Double): Boolean = outer.gt(y, x) + override def min(x: Double, y: Double): Double = outer.max(x, y) + override def max(x: Double, y: Double): Double = outer.min(x, y) } } implicit object Double extends DoubleOrdering diff --git a/src/library/scala/math/PartialOrdering.scala b/src/library/scala/math/PartialOrdering.scala index 9e3538152848..8d7fc3253550 100644 --- a/src/library/scala/math/PartialOrdering.scala +++ b/src/library/scala/math/PartialOrdering.scala @@ -15,17 +15,24 @@ package math * latter. * * A [[http://en.wikipedia.org/wiki/Partial_order partial ordering]] is a - * binary relation on a type `T` that is also an equivalence relation on - * values of type `T`. This relation is exposed as the `lteq` method of - * the `PartialOrdering` trait. This relation must be: + * binary relation on a type `T`, exposed as the `lteq` method of this trait. 
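The added min and max overrides make reversed Float and Double orderings consistent with their reversed comparison, which the inherited implementations previously were not. A small sketch of the intended behaviour after this change:

    val desc = Ordering.Double.reverse

    desc.min(1.0, 2.0)  // 2.0: under a descending order the "smallest" element is the larger value
    desc.max(1.0, 2.0)  // 1.0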
+ * This relation must be: * * - reflexive: `lteq(x, x) == '''true'''`, for any `x` of type `T`. - * - anti-symmetric: `lteq(x, y) == '''true'''` and `lteq(y, x) == true` - * then `equiv(x, y)`, for any `x` and `y` of type `T`. + * - anti-symmetric: if `lteq(x, y) == '''true'''` and + * `lteq(y, x) == '''true'''` + * then `equiv(x, y) == '''true'''`, for any `x` and `y` of type `T`. * - transitive: if `lteq(x, y) == '''true'''` and * `lteq(y, z) == '''true'''` then `lteq(x, z) == '''true'''`, * for any `x`, `y`, and `z` of type `T`. * + * Additionally, a partial ordering induces an + * [[http://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]] + * on a type `T`: `x` and `y` of type `T` are equivalent if and only if + * `lteq(x, y) && lteq(y, x) == '''true'''`. This equivalence relation is + * exposed as the `equiv` method, inherited from the + * [[scala.math.Equiv Equiv]] trait. + * * @author Geoffrey Washburn * @version 1.0, 2008-04-0-3 * @since 2.7 diff --git a/src/library/scala/reflect/ClassTag.scala b/src/library/scala/reflect/ClassTag.scala index 33c5cee783f1..2f4aa9cb8414 100644 --- a/src/library/scala/reflect/ClassTag.scala +++ b/src/library/scala/reflect/ClassTag.scala @@ -2,8 +2,7 @@ package scala package reflect import java.lang.{ Class => jClass } -import scala.language.{implicitConversions, existentials} -import scala.runtime.ScalaRunTime.{ arrayClass, arrayElementClass } +import scala.runtime.ScalaRunTime.arrayElementClass /** * @@ -70,26 +69,36 @@ trait ClassTag[T] extends ClassManifestDeprecatedApis[T] with Equals with Serial * `SomeExtractor(...)` is turned into `ct(SomeExtractor(...))` if `T` in `SomeExtractor.unapply(x: T)` * is uncheckable, but we have an instance of `ClassTag[T]`. */ - def unapply(x: Any): Option[T] = unapply_impl(x) - def unapply(x: Byte): Option[T] = unapply_impl(x) - def unapply(x: Short): Option[T] = unapply_impl(x) - def unapply(x: Char): Option[T] = unapply_impl(x) - def unapply(x: Int): Option[T] = unapply_impl(x) - def unapply(x: Long): Option[T] = unapply_impl(x) - def unapply(x: Float): Option[T] = unapply_impl(x) - def unapply(x: Double): Option[T] = unapply_impl(x) - def unapply(x: Boolean): Option[T] = unapply_impl(x) - def unapply(x: Unit): Option[T] = unapply_impl(x) + def unapply(x: Any): Option[T] = x match { + case null => None + case b: Byte => unapply(b) + case s: Short => unapply(s) + case c: Char => unapply(c) + case i: Int => unapply(i) + case l: Long => unapply(l) + case f: Float => unapply(f) + case d: Double => unapply(d) + case b: Boolean => unapply(b) + case u: Unit => unapply(u) + case a: Any => unapplyImpl(a) + } - private def unapply_impl[U: ClassTag](x: U): Option[T] = - if (x == null) None - else { - val staticClass = classTag[U].runtimeClass - val dynamicClass = x.getClass - val effectiveClass = if (staticClass.isPrimitive) staticClass else dynamicClass - val conforms = runtimeClass.isAssignableFrom(effectiveClass) - if (conforms) Some(x.asInstanceOf[T]) else None - } + // TODO: Inline the bodies of these into the Any-accepting unapply overload above and delete them. 
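For illustration only (not part of this change): these overloads back the common idiom of using a `ClassTag` value as an extractor; the helper `firstOfType` and the sample lists below are invented.
{{{
import scala.reflect.ClassTag

// Hypothetical helper: the ClassTag `ct` is used as an extractor, so its unapply
// decides whether each element's runtime class conforms to T (boxed primitives
// are matched via the corresponding primitive class).
def firstOfType[T](xs: List[Any])(implicit ct: ClassTag[T]): Option[T] =
  xs.collectFirst { case ct(t) => t }

firstOfType[Int](List("a", 2.0, 3, "b"))   // Some(3)
firstOfType[String](List(1, 2, 3))         // None
}}}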
+ // This cannot be done until at least 2.12.0 for reasons of binary compatibility + def unapply(x: Byte) : Option[T] = unapplyImpl(x, classOf[Byte]) + def unapply(x: Short) : Option[T] = unapplyImpl(x, classOf[Short]) + def unapply(x: Char) : Option[T] = unapplyImpl(x, classOf[Char]) + def unapply(x: Int) : Option[T] = unapplyImpl(x, classOf[Int]) + def unapply(x: Long) : Option[T] = unapplyImpl(x, classOf[Long]) + def unapply(x: Float) : Option[T] = unapplyImpl(x, classOf[Float]) + def unapply(x: Double) : Option[T] = unapplyImpl(x, classOf[Double]) + def unapply(x: Boolean) : Option[T] = unapplyImpl(x, classOf[Boolean]) + def unapply(x: Unit) : Option[T] = unapplyImpl(x, classOf[Unit]) + + private[this] def unapplyImpl(x: Any, alternative: jClass[_] = null): Option[T] = { + val conforms = runtimeClass.isAssignableFrom(x.getClass) || (alternative != null && runtimeClass.isAssignableFrom(alternative)) + if (conforms) Some(x.asInstanceOf[T]) else None + } // case class accessories override def canEqual(x: Any) = x.isInstanceOf[ClassTag[_]] diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala index 803c980058e6..2f7643bccf6b 100644 --- a/src/library/scala/reflect/Manifest.scala +++ b/src/library/scala/reflect/Manifest.scala @@ -64,6 +64,7 @@ trait Manifest[T] extends ClassManifest[T] with Equals { // TODO undeprecated until Scala reflection becomes non-experimental // @deprecated("Use type tags and manually check the corresponding class or type instead", "2.10.0") +@SerialVersionUID(1L) abstract class AnyValManifest[T <: AnyVal](override val toString: String) extends Manifest[T] with Equals { override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Manifest.Any) || (that eq Manifest.AnyVal) @@ -72,6 +73,7 @@ abstract class AnyValManifest[T <: AnyVal](override val toString: String) extend case _ => false } override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef] + @transient override val hashCode = System.identityHashCode(this) } @@ -228,6 +230,7 @@ object ManifestFactory { private abstract class PhantomManifest[T](_runtimeClass: Predef.Class[_], override val toString: String) extends ClassTypeManifest[T](None, _runtimeClass, Nil) { override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef] + @transient override val hashCode = System.identityHashCode(this) } diff --git a/src/library/scala/runtime/BoxesRunTime.java b/src/library/scala/runtime/BoxesRunTime.java index 82a3b00ac426..a6df20165d0c 100644 --- a/src/library/scala/runtime/BoxesRunTime.java +++ b/src/library/scala/runtime/BoxesRunTime.java @@ -183,7 +183,7 @@ public static boolean equalsCharObject(java.lang.Character xc, Object y) { return xc.equals(y); } - private static boolean equalsNumChar(java.lang.Number xn, java.lang.Character yc) { + public static boolean equalsNumChar(java.lang.Number xn, java.lang.Character yc) { if (yc == null) return xn == null; diff --git a/src/library/scala/runtime/MethodCache.scala b/src/library/scala/runtime/MethodCache.scala index 2d5f832e1f09..a8fdfc10595d 100644 --- a/src/library/scala/runtime/MethodCache.scala +++ b/src/library/scala/runtime/MethodCache.scala @@ -16,7 +16,7 @@ import java.lang.{ Class => JClass } import scala.annotation.tailrec /** An element of a polymorphic object cache. - * This class is refered to by the `CleanUp` phase. Each `PolyMethodCache` chain + * This class is referred to by the `CleanUp` phase. 
Each `PolyMethodCache` chain * must only relate to one method as `PolyMethodCache` does not identify * the method name and argument types. In practice, one variable will be * generated per call point, and will uniquely relate to the method called diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala index 5fb24f2a366a..a0d89fc0e1c7 100644 --- a/src/library/scala/runtime/ScalaRunTime.scala +++ b/src/library/scala/runtime/ScalaRunTime.scala @@ -13,6 +13,7 @@ import scala.collection.{ Seq, IndexedSeq, TraversableView, AbstractIterator } import scala.collection.mutable.WrappedArray import scala.collection.immutable.{ StringLike, NumericRange, List, Stream, Nil, :: } import scala.collection.generic.{ Sorted, IsTraversableLike } +import scala.collection.parallel.ParIterable import scala.reflect.{ ClassTag, classTag } import scala.util.control.ControlThrowable import java.lang.{ Class => jClass } @@ -62,7 +63,7 @@ object ScalaRunTime { } /** Return the class object representing an unboxed value type, - * e.g. classOf[int], not classOf[java.lang.Integer]. The compiler + * e.g., classOf[int], not classOf[java.lang.Integer]. The compiler * rewrites expressions like 5.getClass to come here. */ def anyValClass[T <: AnyVal : ClassTag](value: T): jClass[T] = @@ -251,7 +252,7 @@ object ScalaRunTime { * * The primary motivation for this method is to provide a means for * correctly obtaining a String representation of a value, while - * avoiding the pitfalls of naïvely calling toString on said value. + * avoiding the pitfalls of naively calling toString on said value. * In particular, it addresses the fact that (a) toString cannot be * called on null and (b) depending on the apparent type of an * array, toString may or may not print it in a human-readable form. 
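A brief sketch of the pitfall described above; `ScalaRunTime.stringOf` is the method being documented, while the exact identity hash shown in the comment is of course arbitrary.
{{{
import scala.runtime.ScalaRunTime.stringOf

val xs = Array(1, 2, 3)
xs.toString     // something like "[I@1b6d3586": element type and identity hash, not the contents
stringOf(xs)    // "Array(1, 2, 3)": a human-readable rendering
stringOf(null)  // "null", where calling toString directly would throw
}}}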
@@ -326,6 +327,7 @@ object ScalaRunTime { case x: AnyRef if isArray(x) => arrayToString(x) case x: scala.collection.Map[_, _] => x.iterator take maxElements map mapInner mkString (x.stringPrefix + "(", ", ", ")") case x: Iterable[_] => x.iterator take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")") + case x: ParIterable[_] => x.iterator take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")") case x: Traversable[_] => x take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")") case x: Product1[_] if isTuple(x) => "(" + inner(x._1) + ",)" // that special trailing comma case x: Product if isTuple(x) => x.productIterator map inner mkString ("(", ",", ")") diff --git a/src/library/scala/runtime/SeqCharSequence.scala b/src/library/scala/runtime/SeqCharSequence.scala index ce7d7afc9e3d..74e67bb9e7a6 100644 --- a/src/library/scala/runtime/SeqCharSequence.scala +++ b/src/library/scala/runtime/SeqCharSequence.scala @@ -44,5 +44,10 @@ final class ArrayCharSequence(val xs: Array[Char], start: Int, end: Int) extends new ArrayCharSequence(xs, start1, start1 + newlen) } } - override def toString = xs drop start take length mkString "" + override def toString = { + val start = math.max(this.start, 0) + val end = math.min(xs.length, start + length) + + if (start >= end) "" else new String(xs, start, end - start) + } } diff --git a/src/library/scala/runtime/Tuple2Zipped.scala b/src/library/scala/runtime/Tuple2Zipped.scala index b28f6d426947..512c4fbc27bb 100644 --- a/src/library/scala/runtime/Tuple2Zipped.scala +++ b/src/library/scala/runtime/Tuple2Zipped.scala @@ -17,6 +17,10 @@ import scala.language.{ higherKinds, implicitConversions } /** This interface is intended as a minimal interface, not complicated * by the requirement to resolve type constructors, for implicit search (which only * needs to find an implicit conversion to Traversable for our purposes.) + * @define Coll `ZippedTraversable2` + * @define coll collection + * @define collectExample + * @define willNotTerminateInf */ trait ZippedTraversable2[+El1, +El2] extends Any { def foreach[U](f: (El1, El2) => U): Unit diff --git a/src/library/scala/runtime/Tuple3Zipped.scala b/src/library/scala/runtime/Tuple3Zipped.scala index 7c501380a38e..ffd44acf815e 100644 --- a/src/library/scala/runtime/Tuple3Zipped.scala +++ b/src/library/scala/runtime/Tuple3Zipped.scala @@ -14,7 +14,12 @@ import scala.collection.{ TraversableLike, IterableLike } import scala.collection.generic.{ CanBuildFrom => CBF } import scala.language.{ higherKinds, implicitConversions } -/** See comment on ZippedTraversable2. */ +/** See comment on ZippedTraversable2 + * @define Coll `ZippedTraversable3` + * @define coll collection + * @define collectExample + * @define willNotTerminateInf + */ trait ZippedTraversable3[+El1, +El2, +El3] extends Any { def foreach[U](f: (El1, El2, El3) => U): Unit } diff --git a/src/library/scala/sys/Prop.scala b/src/library/scala/sys/Prop.scala index 04c7b5108c16..17ae8cb69ca1 100644 --- a/src/library/scala/sys/Prop.scala +++ b/src/library/scala/sys/Prop.scala @@ -20,7 +20,7 @@ package sys * @since 2.9 */ trait Prop[+T] { - /** The full name of the property, e.g. "java.awt.headless". + /** The full name of the property, e.g., "java.awt.headless". 
*/ def key: String diff --git a/src/library/scala/sys/SystemProperties.scala b/src/library/scala/sys/SystemProperties.scala index 39f66f5030c6..d2ebf8c0443b 100644 --- a/src/library/scala/sys/SystemProperties.scala +++ b/src/library/scala/sys/SystemProperties.scala @@ -21,6 +21,8 @@ import scala.language.implicitConversions * System properties. If a security manager is in place which prevents * the properties from being read or written, the AccessControlException * will be caught and discarded. + * @define Coll `collection.mutable.Map` + * @define coll mutable map * * @author Paul Phillips * @version 2.9 diff --git a/src/library/scala/sys/package.scala b/src/library/scala/sys/package.scala index 386bd841130d..e493603bc24e 100644 --- a/src/library/scala/sys/package.scala +++ b/src/library/scala/sys/package.scala @@ -61,16 +61,15 @@ package object sys { def env: immutable.Map[String, String] = immutable.Map(System.getenv().asScala.toSeq: _*) /** Register a shutdown hook to be run when the VM exits. - * The newly created thread is marked as a daemon so it will not - * interfere with VM shutdown. The hook is automatically registered: - * the returned value can be ignored, but is available in case the - * Thread requires further modification. It can also be unregistered - * by calling ShutdownHookThread#remove(). + * The hook is automatically registered: the returned value can be ignored, + * but is available in case the Thread requires further modification. + * It can also be unregistered by calling ShutdownHookThread#remove(). * * Note that shutdown hooks are NOT guaranteed to be run. * * @param body the body of code to run at shutdown * @return the Thread which will run the shutdown hook. + * @see [[scala.sys.ShutdownHookThread]] */ def addShutdownHook(body: => Unit): ShutdownHookThread = ShutdownHookThread(body) diff --git a/src/library/scala/sys/process/BasicIO.scala b/src/library/scala/sys/process/BasicIO.scala index b31bbf05407b..866dac4458c8 100644 --- a/src/library/scala/sys/process/BasicIO.scala +++ b/src/library/scala/sys/process/BasicIO.scala @@ -221,7 +221,7 @@ object BasicIO { */ def transferFully(in: InputStream, out: OutputStream): Unit = try transferFullyImpl(in, out) - catch onInterrupt(()) + catch onIOInterrupt(()) private[this] def appendLine(buffer: Appendable): String => Unit = line => { buffer append line diff --git a/src/library/scala/sys/process/ProcessImpl.scala b/src/library/scala/sys/process/ProcessImpl.scala index 2b7fcdeb73b6..d15f1a2b3d78 100644 --- a/src/library/scala/sys/process/ProcessImpl.scala +++ b/src/library/scala/sys/process/ProcessImpl.scala @@ -109,45 +109,46 @@ private[process] trait ProcessImpl { } private[process] class PipedProcesses(a: ProcessBuilder, b: ProcessBuilder, defaultIO: ProcessIO, toError: Boolean) extends CompoundProcess { - protected[this] override def runAndExitValue() = { - val currentSource = new SyncVar[Option[InputStream]] - val pipeOut = new PipedOutputStream - val source = new PipeSource(currentSource, pipeOut, a.toString) + protected[this] override def runAndExitValue() = runAndExitValue(new PipeSource(a.toString), new PipeSink(b.toString)) + protected[this] def runAndExitValue(source: PipeSource, sink: PipeSink): Option[Int] = { + source connectOut sink source.start() - - val pipeIn = new PipedInputStream(pipeOut) - val currentSink = new SyncVar[Option[OutputStream]] - val sink = new PipeSink(pipeIn, currentSink, b.toString) sink.start() - def handleOutOrError(fromOutput: InputStream) = currentSource put Some(fromOutput) + /** 
Release PipeSource, PipeSink and Process in the correct order. + * If once connect Process with Source or Sink, then the order of releasing them + * must be Source -> Sink -> Process, otherwise IOException will be thrown. */ + def releaseResources(so: PipeSource, sk: PipeSink, p: Process *) = { + so.release() + sk.release() + p foreach( _.destroy() ) + } val firstIO = - if (toError) - defaultIO.withError(handleOutOrError) - else - defaultIO.withOutput(handleOutOrError) - val secondIO = defaultIO.withInput(toInput => currentSink put Some(toInput)) - - val second = b.run(secondIO) - val first = a.run(firstIO) - try { - runInterruptible { - val exit1 = first.exitValue() - currentSource put None - currentSink put None - val exit2 = second.exitValue() - // Since file redirection (e.g. #>) is implemented as a piped process, - // we ignore its exit value so cmd #> file doesn't always return 0. - if (b.hasExitValue) exit2 else exit1 - } { - first.destroy() - second.destroy() + if (toError) defaultIO.withError(source.connectIn) + else defaultIO.withOutput(source.connectIn) + val secondIO = defaultIO.withInput(sink.connectOut) + + val second = + try b.run(secondIO) + catch onError { err => + releaseResources(source, sink) + throw err } - } - finally { - BasicIO close pipeIn - BasicIO close pipeOut + val first = + try a.run(firstIO) + catch onError { err => + releaseResources(source, sink, second) + throw err + } + runInterruptible { + val exit1 = first.exitValue() + val exit2 = second.exitValue() + // Since file redirection (e.g. #>) is implemented as a piped process, + // we ignore its exit value so cmd #> file doesn't always return 0. + if (b.hasExitValue) exit2 else exit1 + } { + releaseResources(source, sink, first, second) } } } @@ -168,37 +169,46 @@ private[process] trait ProcessImpl { } } - private[process] class PipeSource( - currentSource: SyncVar[Option[InputStream]], - pipe: PipedOutputStream, - label: => String - ) extends PipeThread(false, () => label) { - - final override def run(): Unit = currentSource.get match { - case Some(source) => - try runloop(source, pipe) - finally currentSource.unset() - - run() - case None => - currentSource.unset() - BasicIO close pipe + private[process] class PipeSource(label: => String) extends PipeThread(false, () => label) { + protected[this] val pipe = new PipedOutputStream + protected[this] val source = new LinkedBlockingQueue[Option[InputStream]] + override def run(): Unit = { + try { + source.take match { + case Some(in) => runloop(in, pipe) + case None => + } + } + catch onInterrupt(()) + finally BasicIO close pipe + } + def connectIn(in: InputStream): Unit = source add Some(in) + def connectOut(sink: PipeSink): Unit = sink connectIn pipe + def release(): Unit = { + interrupt() + source add None + join() } } - private[process] class PipeSink( - pipe: PipedInputStream, - currentSink: SyncVar[Option[OutputStream]], - label: => String - ) extends PipeThread(true, () => label) { - - final override def run(): Unit = currentSink.get match { - case Some(sink) => - try runloop(pipe, sink) - finally currentSink.unset() - - run() - case None => - currentSink.unset() + private[process] class PipeSink(label: => String) extends PipeThread(true, () => label) { + protected[this] val pipe = new PipedInputStream + protected[this] val sink = new LinkedBlockingQueue[Option[OutputStream]] + override def run(): Unit = { + try { + sink.take match { + case Some(out) => runloop(pipe, out) + case None => + } + } + catch onInterrupt(()) + finally BasicIO close pipe + } + 
def connectOut(out: OutputStream): Unit = sink add Some(out) + def connectIn(pipeOut: PipedOutputStream): Unit = pipe connect pipeOut + def release(): Unit = { + interrupt() + sink add None + join() } } diff --git a/src/library/scala/sys/process/package.scala b/src/library/scala/sys/process/package.scala index 1340a6c415cb..5ec2e73cb979 100644 --- a/src/library/scala/sys/process/package.scala +++ b/src/library/scala/sys/process/package.scala @@ -119,7 +119,7 @@ package scala.sys { * ==Handling Input and Output== * * In the underlying Java model, once a `Process` has been started, one can - * get `java.io.InputStream` and `java.io.OutpuStream` representing its + * get `java.io.InputStream` and `java.io.OutputStream` representing its * output and input respectively. That is, what one writes to an * `OutputStream` is turned into input to the process, and the output of a * process can be read from an `InputStream` -- of which there are two, one @@ -224,16 +224,26 @@ package scala.sys { final val processDebug = props contains "scala.process.debug" dbg("Initializing process package.") - type =?>[-A, +B] = PartialFunction[A, B] - type Closeable = java.io.Closeable - type File = java.io.File - type IOException = java.io.IOException - type InputStream = java.io.InputStream - type JProcess = java.lang.Process - type JProcessBuilder = java.lang.ProcessBuilder - type OutputStream = java.io.OutputStream - type SyncVar[T] = scala.concurrent.SyncVar[T] - type URL = java.net.URL + type =?>[-A, +B] = PartialFunction[A, B] + type Closeable = java.io.Closeable + type File = java.io.File + type IOException = java.io.IOException + type InterruptedIOException = java.io.InterruptedIOException + type InputStream = java.io.InputStream + type JProcess = java.lang.Process + type JProcessBuilder = java.lang.ProcessBuilder + type LinkedBlockingQueue[T] = java.util.concurrent.LinkedBlockingQueue[T] + type OutputStream = java.io.OutputStream + type SyncVar[T] = scala.concurrent.SyncVar[T] + type URL = java.net.URL + + def onError[T](handler: Throwable => T): Throwable =?> T = { + case e @ _ => handler(e) + } + + def onIOInterrupt[T](handler: => T): Throwable =?> T = { + case _: InterruptedIOException => handler + } def onInterrupt[T](handler: => T): Throwable =?> T = { case _: InterruptedException => handler diff --git a/src/library/scala/util/Either.scala b/src/library/scala/util/Either.scala index b1a932be7e83..e196d403c287 100644 --- a/src/library/scala/util/Either.scala +++ b/src/library/scala/util/Either.scala @@ -274,7 +274,7 @@ object Either { */ final case class LeftProjection[+A, +B](e: Either[A, B]) { /** - * Returns the value from this `Left` or throws `Predef.NoSuchElementException` + * Returns the value from this `Left` or throws `java.util.NoSuchElementException` * if this is a `Right`. * * {{{ @@ -282,7 +282,7 @@ object Either { * Right(12).left.get // NoSuchElementException * }}} * - * @throws Predef.NoSuchElementException if the projection is [[scala.util.Right]] + * @throws java.util.NoSuchElementException if the projection is [[scala.util.Right]] */ def get = e match { case Left(a) => a @@ -440,14 +440,14 @@ object Either { /** * Returns the value from this `Right` or throws - * `Predef.NoSuchElementException` if this is a `Left`. + * `java.util.NoSuchElementException` if this is a `Left`. * * {{{ * Right(12).right.get // 12 * Left(12).right.get // NoSuchElementException * }}} * - * @throws Predef.NoSuchElementException if the projection is `Left`. 
+ * @throws java.util.NoSuchElementException if the projection is `Left`. */ def get = e match { case Left(_) => throw new NoSuchElementException("Either.right.value on Left") diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala index d597feb89887..367488f11694 100644 --- a/src/library/scala/util/Properties.scala +++ b/src/library/scala/util/Properties.scala @@ -62,10 +62,10 @@ private[scala] trait PropertiesTrait { def envOrSome(name: String, alt: Option[String]) = envOrNone(name) orElse alt - // for values based on propFilename - def scalaPropOrElse(name: String, alt: String): String = scalaProps.getProperty(name, alt) + // for values based on propFilename, falling back to System properties + def scalaPropOrElse(name: String, alt: String): String = scalaPropOrNone(name).getOrElse(alt) def scalaPropOrEmpty(name: String): String = scalaPropOrElse(name, "") - def scalaPropOrNone(name: String): Option[String] = Option(scalaProps.getProperty(name)) + def scalaPropOrNone(name: String): Option[String] = Option(scalaProps.getProperty(name)).orElse(propOrNone("scala." + name)) /** The numeric portion of the runtime Scala version, if this is a final * release. If for instance the versionString says "version 2.9.0.final", @@ -107,7 +107,7 @@ private[scala] trait PropertiesTrait { val versionString = "version " + scalaPropOrElse("version.number", "(unknown)") val copyrightString = scalaPropOrElse("copyright.string", "Copyright 2002-2013, LAMP/EPFL") - /** This is the encoding to use reading in source files, overridden with -encoding + /** This is the encoding to use reading in source files, overridden with -encoding. * Note that it uses "prop" i.e. looks in the scala jar, not the system properties. */ def sourceEncoding = scalaPropOrElse("file.encoding", "UTF-8") @@ -155,9 +155,12 @@ private[scala] trait PropertiesTrait { // This is looking for javac, tools.jar, etc. // Tries JDK_HOME first, then the more common but likely jre JAVA_HOME, // and finally the system property based javaHome. - def jdkHome = envOrElse("JDK_HOME", envOrElse("JAVA_HOME", javaHome)) + def jdkHome = envOrElse("JDK_HOME", envOrElse("JAVA_HOME", javaHome)) - def versionMsg = "Scala %s %s -- %s".format(propCategory, versionString, copyrightString) + // private[scala] for 2.12 + private[this] def versionFor(command: String) = f"Scala $command $versionString -- $copyrightString" + + def versionMsg = versionFor(propCategory) def scalaCmd = if (isWin) "scala.bat" else "scala" def scalacCmd = if (isWin) "scalac.bat" else "scalac" diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala index b0cf122f2aec..f08cc18f7d97 100644 --- a/src/library/scala/util/Try.scala +++ b/src/library/scala/util/Try.scala @@ -74,16 +74,11 @@ sealed abstract class Try[+T] { * * ''Note:'': This will throw an exception if it is not a success and default throws an exception. */ - def getOrElse[U >: T](default: => U): U = - if (isSuccess) get else default + def getOrElse[U >: T](default: => U): U /** Returns this `Try` if it's a `Success` or the given `default` argument if this is a `Failure`. */ - def orElse[U >: T](default: => Try[U]): Try[U] = - try if (isSuccess) this else default - catch { - case NonFatal(e) => Failure(e) - } + def orElse[U >: T](default: => Try[U]): Try[U] /** Returns the value from this `Success` or throws the exception if this is a `Failure`. 
*/ @@ -106,6 +101,11 @@ sealed abstract class Try[+T] { */ def map[U](f: T => U): Try[U] + /** + * Applies the given partial function to the value from this `Success` or returns this if this is a `Failure`. + */ + def collect[U](pf: PartialFunction[T, U]): Try[U] + /** * Converts this to a `Failure` if the predicate is not satisfied. */ @@ -133,6 +133,7 @@ sealed abstract class Try[+T] { * collection" contract even though it seems unlikely to matter much in a * collection with max size 1. */ + @deprecatedInheritance("You were never supposed to be able to extend this class.", "2.12") class WithFilter(p: T => Boolean) { def map[U](f: T => U): Try[U] = Try.this filter p map f def flatMap[U](f: T => Try[U]): Try[U] = Try.this filter p flatMap f @@ -144,18 +145,18 @@ sealed abstract class Try[+T] { * Applies the given function `f` if this is a `Failure`, otherwise returns this if this is a `Success`. * This is like `flatMap` for the exception. */ - def recoverWith[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U] + def recoverWith[U >: T](@deprecatedName('f) pf: PartialFunction[Throwable, Try[U]]): Try[U] /** * Applies the given function `f` if this is a `Failure`, otherwise returns this if this is a `Success`. * This is like map for the exception. */ - def recover[U >: T](f: PartialFunction[Throwable, U]): Try[U] + def recover[U >: T](@deprecatedName('f) pf: PartialFunction[Throwable, U]): Try[U] /** * Returns `None` if this is a `Failure` or a `Some` containing the value if this is a `Success`. */ - def toOption: Option[T] = if (isSuccess) Some(get) else None + def toOption: Option[T] /** * Transforms a nested `Try`, ie, a `Try` of type `Try[Try[T]]`, @@ -172,14 +173,7 @@ sealed abstract class Try[+T] { /** Completes this `Try` by applying the function `f` to this if this is of type `Failure`, or conversely, by applying * `s` if this is a `Success`. 
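A short usage sketch of the `Try` operations touched in this hunk (`collect`, `recover`, `transform`); the sample values are made up.
{{{
import scala.util.{Try, Success, Failure}

val ok: Try[Int]  = Try("42".toInt)          // Success(42)
val bad: Try[Int] = Try("forty-two".toInt)   // Failure(java.lang.NumberFormatException)

ok.collect { case n if n > 0 => n * 2 }      // Success(84)
ok.collect { case n if n < 0 => n }          // Failure(NoSuchElementException: predicate does not hold)
bad.recover { case _: NumberFormatException => 0 }   // Success(0)
bad.transform(n => Success(n + 1), e => Failure(new IllegalArgumentException(e)))
                                             // Failure(IllegalArgumentException wrapping the original error)
}}}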
*/ - def transform[U](s: T => Try[U], f: Throwable => Try[U]): Try[U] = - try this match { - case Success(v) => s(v) - case Failure(e) => f(e) - } catch { - case NonFatal(e) => Failure(e) - } - + def transform[U](s: T => Try[U], f: Throwable => Try[U]): Try[U] } object Try { @@ -191,57 +185,55 @@ object Try { try Success(r) catch { case NonFatal(e) => Failure(e) } - } final case class Failure[+T](exception: Throwable) extends Try[T] { - def isFailure: Boolean = true - def isSuccess: Boolean = false - def recoverWith[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U] = - try { - if (f isDefinedAt exception) f(exception) else this - } catch { - case NonFatal(e) => Failure(e) - } - def get: T = throw exception - def flatMap[U](f: T => Try[U]): Try[U] = this.asInstanceOf[Try[U]] - def flatten[U](implicit ev: T <:< Try[U]): Try[U] = this.asInstanceOf[Try[U]] - def foreach[U](f: T => U): Unit = () - def map[U](f: T => U): Try[U] = this.asInstanceOf[Try[U]] - def filter(p: T => Boolean): Try[T] = this - def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] = - try { - if (rescueException isDefinedAt exception) { - Try(rescueException(exception)) - } else this - } catch { - case NonFatal(e) => Failure(e) - } - def failed: Try[Throwable] = Success(exception) + override def isFailure: Boolean = true + override def isSuccess: Boolean = false + override def get: T = throw exception + override def getOrElse[U >: T](default: => U): U = default + override def orElse[U >: T](default: => Try[U]): Try[U] = + try default catch { case NonFatal(e) => Failure(e) } + override def flatMap[U](f: T => Try[U]): Try[U] = this.asInstanceOf[Try[U]] + override def flatten[U](implicit ev: T <:< Try[U]): Try[U] = this.asInstanceOf[Try[U]] + override def foreach[U](f: T => U): Unit = () + override def transform[U](s: T => Try[U], f: Throwable => Try[U]): Try[U] = + try f(exception) catch { case NonFatal(e) => Failure(e) } + override def map[U](f: T => U): Try[U] = this.asInstanceOf[Try[U]] + override def collect[U](pf: PartialFunction[T, U]): Try[U] = this.asInstanceOf[Try[U]] + override def filter(p: T => Boolean): Try[T] = this + override def recover[U >: T](@deprecatedName('rescueException) pf: PartialFunction[Throwable, U]): Try[U] = + try { if (pf isDefinedAt exception) Success(pf(exception)) else this } catch { case NonFatal(e) => Failure(e) } + override def recoverWith[U >: T](@deprecatedName('f) pf: PartialFunction[Throwable, Try[U]]): Try[U] = + try { if (pf isDefinedAt exception) pf(exception) else this } catch { case NonFatal(e) => Failure(e) } + override def failed: Try[Throwable] = Success(exception) + override def toOption: Option[T] = None } final case class Success[+T](value: T) extends Try[T] { - def isFailure: Boolean = false - def isSuccess: Boolean = true - def recoverWith[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U] = this - def get = value - def flatMap[U](f: T => Try[U]): Try[U] = - try f(value) - catch { - case NonFatal(e) => Failure(e) - } - def flatten[U](implicit ev: T <:< Try[U]): Try[U] = value - def foreach[U](f: T => U): Unit = f(value) - def map[U](f: T => U): Try[U] = Try[U](f(value)) - def filter(p: T => Boolean): Try[T] = { + override def isFailure: Boolean = false + override def isSuccess: Boolean = true + override def get = value + override def getOrElse[U >: T](default: => U): U = get + override def orElse[U >: T](default: => Try[U]): Try[U] = this + override def flatMap[U](f: T => Try[U]): Try[U] = + try f(value) catch { case NonFatal(e) => 
Failure(e) } + override def flatten[U](implicit ev: T <:< Try[U]): Try[U] = value + override def foreach[U](f: T => U): Unit = f(value) + override def transform[U](s: T => Try[U], f: Throwable => Try[U]): Try[U] = this flatMap s + override def map[U](f: T => U): Try[U] = Try[U](f(value)) + override def collect[U](pf: PartialFunction[T, U]): Try[U] = try { - if (p(value)) this + if (pf isDefinedAt value) Success(pf(value)) else Failure(new NoSuchElementException("Predicate does not hold for " + value)) - } catch { - case NonFatal(e) => Failure(e) - } - } - def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] = this - def failed: Try[Throwable] = Failure(new UnsupportedOperationException("Success.failed")) + } catch { case NonFatal(e) => Failure(e) } + override def filter(p: T => Boolean): Try[T] = + try { + if (p(value)) this else Failure(new NoSuchElementException("Predicate does not hold for " + value)) + } catch { case NonFatal(e) => Failure(e) } + override def recover[U >: T](@deprecatedName('rescueException) pf: PartialFunction[Throwable, U]): Try[U] = this + override def recoverWith[U >: T](@deprecatedName('f) pf: PartialFunction[Throwable, Try[U]]): Try[U] = this + override def failed: Try[Throwable] = Failure(new UnsupportedOperationException("Success.failed")) + override def toOption: Option[T] = Some(value) } diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala index 6743b9e42a33..5c4e706dc1c5 100644 --- a/src/library/scala/util/matching/Regex.scala +++ b/src/library/scala/util/matching/Regex.scala @@ -6,7 +6,6 @@ ** |/ ** \* */ - /** * This package is concerned with regular expression (regex) matching against strings, * with the main goal of pulling out information from those matches, or replacing @@ -28,117 +27,127 @@ * into a [[java.lang.String]]. * */ -package scala -package util.matching +package scala.util.matching import scala.collection.AbstractIterator import java.util.regex.{ Pattern, Matcher } -/** This class provides methods for creating and using regular expressions. - * It is based on the regular expressions of the JDK since 1.4. +/** A regular expression is used to determine whether a string matches a pattern + * and, if it does, to extract or transform the parts that match. * - * Its main goal is to extract strings that match a pattern, or the subgroups - * that make it up. For that reason, it is usually used with for comprehensions - * and matching (see methods for examples). + * This class delegates to the [[java.util.regex]] package of the Java Platform. + * See the documentation for [[java.util.regex.Pattern]] for details about + * the regular expression syntax for pattern strings. * - * A Regex is created from a [[java.lang.String]] representation of the - * regular expression pattern^1^. That pattern is compiled - * during construction, so frequently used patterns should be declared outside - * loops if performance is of concern. Possibly, they might be declared on a - * companion object, so that they need only to be initialized once. + * An instance of `Regex` represents a compiled regular expression pattern. + * Since compilation is expensive, frequently used `Regex`es should be constructed + * once, outside of loops and perhaps in a companion object. * - * The canonical way of creating regex patterns is by using the method `r`, provided - * on [[java.lang.String]] through an implicit conversion into - * [[scala.collection.immutable.WrappedString]]. 
Using triple quotes to write these - * strings avoids having to quote the backslash character (`\`). + * The canonical way to create a `Regex` is by using the method `r`, provided + * implicitly for strings: * - * Using the constructor directly, on the other hand, makes - * it possible to declare names for subgroups in the pattern. + * {{{ + * val date = """(\d\d\d\d)-(\d\d)-(\d\d)""".r + * }}} * - * For example, both declarations below generate the same regex, but the second - * one associate names with the subgroups. + * Since escapes are not processed in multi-line string literals, using triple quotes + * avoids having to escape the backslash character, so that `"\\d"` can be written `"""\d"""`. + * + * To extract the capturing groups when a `Regex` is matched, use it as + * an extractor in a pattern match: * * {{{ - * val dateP1 = """(\d\d\d\d)-(\d\d)-(\d\d)""".r - * val dateP2 = new scala.util.matching.Regex("""(\d\d\d\d)-(\d\d)-(\d\d)""", "year", "month", "day") + * "2004-01-20" match { + * case date(year, month, day) => s"$year was a good year for PLs." + * } * }}} * - * There are two ways of using a `Regex` to find a pattern: calling methods on - * Regex, such as `findFirstIn` or `findAllIn`, or using it as an extractor in a - * pattern match. + * To check only whether the `Regex` matches, ignoring any groups, + * use a sequence wildcard: + * + * {{{ + * "2004-01-20" match { + * case date(_*) => "It's a date!" + * } + * }}} * - * Note that, when calling `findAllIn`, the resulting [[scala.util.matching.Regex.MatchIterator]] - * needs to be initialized (by calling `hasNext` or `next()`, or causing these to be - * called) before information about a match can be retrieved: + * That works because a `Regex` extractor produces a sequence of strings. + * Extracting only the year from a date could also be expressed with + * a sequence wildcard: * * {{{ - * val msg = "I love Scala" + * "2004-01-20" match { + * case date(year, _*) => s"$year was a good year for PLs." + * } + * }}} * - * // val start = " ".r.findAllIn(msg).start // throws an IllegalStateException + * In a pattern match, `Regex` normally matches the entire input. + * However, an unanchored `Regex` finds the pattern anywhere + * in the input. * - * val matches = " ".r.findAllIn(msg) - * matches.hasNext // initializes the matcher - * val start = matches.start + * {{{ + * val embeddedDate = date.unanchored + * "Date: 2004-01-20 17:25:18 GMT (10 years, 28 weeks, 5 days, 17 hours and 51 minutes ago)" match { + * case embeddedDate("2004", "01", "20") => "A Scala is born." + * } * }}} * - * When Regex is used as an extractor in a pattern match, note that it - * only succeeds if the whole text can be matched. For this reason, one usually - * calls a method to find the matching substrings, and then use it as an extractor - * to break match into subgroups. + * To find or replace matches of the pattern, use the various find and replace methods. + * There is a flavor of each method that produces matched strings and + * another that produces `Match` objects. 
* - * As an example, the above patterns can be used like this: + * For example, pattern matching with an unanchored `Regex`, as in the previous example, + * is the same as using `findFirstMatchIn`, except that the findFirst methods return an `Option`, + * or `None` for no match: * * {{{ - * val dateP1(year, month, day) = "2011-07-15" + * val dates = "Important dates in history: 2004-01-20, 1958-09-05, 2010-10-06, 2011-07-15" + * val firstDate = date findFirstIn dates getOrElse "No date found." + * val firstYear = for (m <- date findFirstMatchIn dates) yield m group 1 + * }}} * - * // val dateP1(year, month, day) = "Date 2011-07-15" // throws an exception at runtime + * To find all matches: * - * val copyright: String = dateP1 findFirstIn "Date of this document: 2011-07-15" match { - * case Some(dateP1(year, month, day)) => "Copyright "+year - * case None => "No copyright" - * } + * {{{ + * val allYears = for (m <- date findAllMatchIn dates) yield m group 1 + * }}} * - * val copyright: Option[String] = for { - * dateP1(year, month, day) <- dateP1 findFirstIn "Last modified 2011-07-15" - * } yield year - - * def getYears(text: String): Iterator[String] = for (dateP1(year, _, _) <- dateP1 findAllIn text) yield year - * def getFirstDay(text: String): Option[String] = for (m <- dateP2 findFirstMatchIn text) yield m group "day" + * But `findAllIn` returns a special iterator of strings that can be queried for the `MatchData` + * of the last match: + * + * {{{ + * val mi = date findAllIn dates + * val oldies = mi filter (_ => (mi group 1).toInt < 1960) map (s => s"$s: An oldie but goodie.") * }}} * - * Regex does not provide a method that returns a [[scala.Boolean]]. One can - * use [[java.lang.String]] `matches` method, or, if `Regex` is preferred, - * either ignore the return value or test the `Option` for emptyness. For example: + * Note that `findAllIn` finds matches that don't overlap. (See [[findAllIn]] for more examples.) * * {{{ - * def hasDate(text: String): Boolean = (dateP1 findFirstIn text).nonEmpty - * def printLinesWithDates(lines: Traversable[String]) { - * lines foreach { line => - * dateP1 findFirstIn line foreach { _ => println(line) } - * } - * } + * val num = """(\d+)""".r + * val all = (num findAllIn "123").toList // List("123"), not List("123", "23", "3") * }}} * - * There are also methods that can be used to replace the patterns - * on a text. The substitutions can be simple replacements, or more - * complex functions. For example: + * Text replacement can be performed unconditionally or as a function of the current match: * * {{{ - * val months = Map( 1 -> "Jan", 2 -> "Feb", 3 -> "Mar", - * 4 -> "Apr", 5 -> "May", 6 -> "Jun", - * 7 -> "Jul", 8 -> "Aug", 9 -> "Sep", - * 10 -> "Oct", 11 -> "Nov", 12 -> "Dec") - * - * import scala.util.matching.Regex.Match - * def reformatDate(text: String) = dateP2 replaceAllIn ( text, (m: Match) => - * "%s %s, %s" format (months(m group "month" toInt), m group "day", m group "year") - * ) + * val redacted = date replaceAllIn (dates, "XXXX-XX-XX") + * val yearsOnly = date replaceAllIn (dates, m => m group 1) + * val months = (0 to 11) map { i => val c = Calendar.getInstance; c.set(2014, i, 1); f"$c%tb" } + * val reformatted = date replaceAllIn (dates, _ match { case date(y,m,d) => f"${months(m.toInt - 1)} $d, $y" }) * }}} * - * You can use special pattern syntax constructs like `(?idmsux-idmsux)`¹ to switch - * various regex compilation options like `CASE_INSENSITIVE` or `UNICODE_CASE`. 
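For the inline-flag syntax `(?idmsux-idmsux)` mentioned just above, a brief example; the pattern and inputs are invented.
{{{
// (?i) switches on CASE_INSENSITIVE for the remainder of the pattern.
val greeting = "(?i)hello".r
greeting.findFirstIn("Say HELLO back")   // Some("HELLO")
greeting.findFirstIn("goodbye")          // None
}}}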
+ * Pattern matching the `Match` against the `Regex` that created it does not reapply the `Regex`. + * In the expression for `reformatted`, each `date` match is computed once. But it is possible to apply a + * `Regex` to a `Match` resulting from a different pattern: + * + * {{{ + * val docSpree = """2011(?:-\d{2}){2}""".r + * val docView = date replaceAllIn (dates, _ match { + * case docSpree() => "Historic doc spree!" + * case _ => "Something else happened" + * }) + * }}} * - * @note ¹ A detailed description is available in [[java.util.regex.Pattern]]. * @see [[java.util.regex.Pattern]] * * @author Thibaud Hottelier @@ -154,9 +163,8 @@ import java.util.regex.{ Pattern, Matcher } * interpreted as a reference to a group in the matched pattern, with numbers * 1 through 9 corresponding to the first nine groups, and 0 standing for the * whole match. Any other character is an error. The backslash (`\`) character - * will be interpreted as an escape character, and can be used to escape the - * dollar sign. One can use [[scala.util.matching.Regex]]'s `quoteReplacement` - * to automatically escape these characters. + * will be interpreted as an escape character and can be used to escape the + * dollar sign. Use `Regex.quoteReplacement` to escape these characters. */ @SerialVersionUID(-2094783597747625537L) class Regex private[matching](val pattern: Pattern, groupNames: String*) extends Serializable { @@ -164,51 +172,84 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends import Regex._ - /** - * @param regex A string representing a regular expression - * @param groupNames A mapping from names to indices in capture groups - */ + /** Compile a regular expression, supplied as a string, into a pattern that + * can be matched against inputs. + * + * If group names are supplied, they can be used this way: + * + * {{{ + * val namedDate = new Regex("""(\d\d\d\d)-(\d\d)-(\d\d)""", "year", "month", "day") + * val namedYears = for (m <- namedDate findAllMatchIn dates) yield m group "year" + * }}} + * + * This constructor does not support options as flags, which must be + * supplied as inline flags in the pattern string: `(?idmsux-idmsux)`. + * + * @param regex The regular expression to compile. + * @param groupNames Names of capturing groups. + */ def this(regex: String, groupNames: String*) = this(Pattern.compile(regex), groupNames: _*) /** Tries to match a [[java.lang.CharSequence]]. + * * If the match succeeds, the result is a list of the matching * groups (or a `null` element if a group did not match any input). * If the pattern specifies no groups, then the result will be an empty list * on a successful match. * * This method attempts to match the entire input by default; to find the next - * matching subsequence, use an unanchored Regex. - + * matching subsequence, use an unanchored `Regex`. 
+ * * For example: * * {{{ * val p1 = "ab*c".r * val p1Matches = "abbbc" match { - * case p1() => true + * case p1() => true // no groups * case _ => false * } * val p2 = "a(b*)c".r + * val p2Matches = "abbbc" match { + * case p2(_*) => true // any groups + * case _ => false + * } * val numberOfB = "abbbc" match { - * case p2(b) => Some(b.length) + * case p2(b) => Some(b.length) // one group * case _ => None * } * val p3 = "b*".r.unanchored * val p3Matches = "abbbc" match { - * case p3() => true + * case p3() => true // find the b's * case _ => false * } + * val p4 = "a(b*)(c+)".r + * val p4Matches = "abbbcc" match { + * case p4(_*) => true // multiple groups + * case _ => false + * } + * val allGroups = "abbbcc" match { + * case p4(all @ _*) => all mkString "/" // "bbb/cc" + * case _ => "" + * } + * val cGroup = "abbbcc" match { + * case p4(_, c) => c + * case _ => "" + * } * }}} * * @param s The string to match * @return The matches */ - def unapplySeq(s: CharSequence): Option[List[String]] = { - val m = pattern matcher s - if (runMatcher(m)) Some((1 to m.groupCount).toList map m.group) - else None + def unapplySeq(s: CharSequence): Option[List[String]] = s match { + case null => None + case _ => + val m = pattern matcher s + if (runMatcher(m)) Some((1 to m.groupCount).toList map m.group) + else None } /** Tries to match the String representation of a [[scala.Char]]. + * * If the match succeeds, the result is the first matching * group if any groups are defined, or an empty Sequence otherwise. * @@ -247,13 +288,16 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends } /** Tries to match on a [[scala.util.matching.Regex.Match]]. + * * A previously failed match results in None. + * * If a successful match was made against the current pattern, then that result is used. + * * Otherwise, this Regex is applied to the previously matched input, * and the result of that match is used. */ def unapplySeq(m: Match): Option[List[String]] = - if (m.matched == null) None + if (m == null || m.matched == null) None else if (m.matcher.pattern == this.pattern) Some((1 to m.groupCount).toList map m.group) else unapplySeq(m.matched) @@ -274,25 +318,48 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends // @see UnanchoredRegex protected def runMatcher(m: Matcher) = m.matches() - /** Return all matches of this regexp in given character sequence as a [[scala.util.matching.Regex.MatchIterator]], + /** Return all non-overlapping matches of this `Regex` in the given character + * sequence as a [[scala.util.matching.Regex.MatchIterator]], * which is a special [[scala.collection.Iterator]] that returns the - * matched strings, but can also be converted into a normal iterator - * that returns objects of type [[scala.util.matching.Regex.Match]] - * that can be queried for data such as the text that precedes the - * match, subgroups, etc. + * matched strings but can also be queried for more data about the last match, + * such as capturing groups and start position. + * + * A `MatchIterator` can also be converted into an iterator + * that returns objects of type [[scala.util.matching.Regex.Match]], + * such as is normally returned by `findAllMatchIn`. + * + * Where potential matches overlap, the first possible match is returned, + * followed by the next match that follows the input consumed by the + * first match: * - * Attempting to retrieve information about a match before initializing - * the iterator can result in [[java.lang.IllegalStateException]]s. 
See - * [[scala.util.matching.Regex.MatchIterator]] for details. + * {{{ + * val hat = "hat[^a]+".r + * val hathaway = "hathatthattthatttt" + * val hats = (hat findAllIn hathaway).toList // List(hath, hattth) + * val pos = (hat findAllMatchIn hathaway map (_.start)).toList // List(0, 7) + * }}} + * + * To return overlapping matches, it is possible to formulate a regular expression + * with lookahead (`?=`) that does not consume the overlapping region. + * + * {{{ + * val madhatter = "(h)(?=(at[^a]+))".r + * val madhats = (madhatter findAllMatchIn hathaway map { + * case madhatter(x,y) => s"$x$y" + * }).toList // List(hath, hatth, hattth, hatttt) + * }}} + * + * Attempting to retrieve match information before performing the first match + * or after exhausting the iterator results in [[java.lang.IllegalStateException]]. + * See [[scala.util.matching.Regex.MatchIterator]] for details. * * @param source The text to match against. - * @return A [[scala.util.matching.Regex.MatchIterator]] of all matches. + * @return A [[scala.util.matching.Regex.MatchIterator]] of matched substrings. * @example {{{for (words <- """\w+""".r findAllIn "A simple example.") yield words}}} */ def findAllIn(source: CharSequence) = new Regex.MatchIterator(source, this, groupNames) - - /** Return all matches of this regexp in given character sequence as a + /** Return all non-overlapping matches of this regexp in given character sequence as a * [[scala.collection.Iterator]] of [[scala.util.matching.Regex.Match]]. * * @param source The text to match against. @@ -310,8 +377,8 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends } } - /** Return optionally first matching string of this regexp in given character sequence, - * or None if it does not exist. + /** Return an optional first matching string of this `Regex` in the given character sequence, + * or None if there is no match. * * @param source The text to match against. * @return An [[scala.Option]] of the first matching string in the text. @@ -322,13 +389,11 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends if (m.find) Some(m.group) else None } - /** Return optionally first match of this regexp in given character sequence, + /** Return an optional first match of this `Regex` in the given character sequence, * or None if it does not exist. * - * The main difference between this method and `findFirstIn` is that the (optional) return - * type for this is [[scala.util.matching.Regex.Match]], through which more - * data can be obtained about the match, such as the strings that precede and follow it, - * or subgroups. + * If the match is successful, the [[scala.util.matching.Regex.Match]] can be queried for + * more data. * * @param source The text to match against. * @return A [[scala.Option]] of [[scala.util.matching.Regex.Match]] of the first matching string in the text. @@ -339,30 +404,28 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends if (m.find) Some(new Match(source, m, groupNames)) else None } - /** Return optionally match of this regexp at the beginning of the - * given character sequence, or None if regexp matches no prefix + /** Return an optional match of this `Regex` at the beginning of the + * given character sequence, or None if it matches no prefix * of the character sequence. * - * The main difference from this method to `findFirstIn` is that this - * method will not return any matches that do not begin at the start - * of the text being matched against. 
+ * Unlike `findFirstIn`, this method will only return a match at + * the beginning of the input. * * @param source The text to match against. * @return A [[scala.Option]] of the matched prefix. - * @example {{{"""[a-z]""".r findPrefixOf "A simple example." // returns None, since the text does not begin with a lowercase letter}}} + * @example {{{"""\p{Lower}""".r findPrefixOf "A simple example." // returns None, since the text does not begin with a lowercase letter}}} */ def findPrefixOf(source: CharSequence): Option[String] = { val m = pattern.matcher(source) if (m.lookingAt) Some(m.group) else None } - /** Return optionally match of this regexp at the beginning of the - * given character sequence, or None if regexp matches no prefix + /** Return an optional match of this `Regex` at the beginning of the + * given character sequence, or None if it matches no prefix * of the character sequence. * - * The main difference from this method to `findFirstMatchIn` is that - * this method will not return any matches that do not begin at the - * start of the text being matched against. + * Unlike `findFirstMatchIn`, this method will only return a match at + * the beginning of the input. * * @param source The text to match against. * @return A [[scala.Option]] of the [[scala.util.matching.Regex.Match]] of the matched string. @@ -396,7 +459,7 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends * import scala.util.matching.Regex * val datePattern = new Regex("""(\d\d\d\d)-(\d\d)-(\d\d)""", "year", "month", "day") * val text = "From 2011-07-15 to 2011-07-17" - * val repl = datePattern replaceAllIn (text, m => m.group("month")+"/"+m.group("day")) + * val repl = datePattern replaceAllIn (text, m => s"${m group "month"}/${m group "day"}") * }}} * * $replacementString @@ -419,10 +482,10 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends * {{{ * import scala.util.matching.Regex._ * - * val map = Map("x" -> "a var", "y" -> """some $ and \ signs""") + * val vars = Map("x" -> "a var", "y" -> """some $ and \ signs""") * val text = "A text with variables %x, %y and %z." * val varPattern = """%(\w+)""".r - * val mapper = (m: Match) => map get (m group 1) map (quoteReplacement(_)) + * val mapper = (m: Match) => vars get (m group 1) map (quoteReplacement(_)) * val repl = varPattern replaceSomeIn (text, mapper) * }}} * @@ -463,17 +526,25 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends pattern.split(toSplit) /** Create a new Regex with the same pattern, but no requirement that - * the entire String matches in extractor patterns. For instance, the strings - * shown below lead to successful matches, where they would not otherwise. + * the entire String matches in extractor patterns. + * + * Normally, matching on `date` behaves as though the pattern were + * enclosed in anchors, `"^pattern$"`. + * + * The unanchored `Regex` behaves as though those anchors were removed. + * + * Note that this method does not actually strip any matchers from the pattern. + * + * Calling `anchored` returns the original `Regex`. 
* * {{{ - * val dateP1 = """(\d\d\d\d)-(\d\d)-(\d\d)""".r.unanchored + * val date = """(\d\d\d\d)-(\d\d)-(\d\d)""".r.unanchored * - * val dateP1(year, month, day) = "Date 2011-07-15" + * val date(year, month, day) = "Date 2011-07-15" // OK * * val copyright: String = "Date of this document: 2011-07-15" match { - * case dateP1(year, month, day) => "Copyright "+year - * case _ => "No copyright" + * case date(year, month, day) => s"Copyright $year" // OK + * case _ => "No copyright" * } * }}} * @@ -488,6 +559,10 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends override def toString = regex } +/** A [[Regex]] that finds the first match when used in a pattern match. + * + * @see [[Regex#unanchored]] + */ trait UnanchoredRegex extends Regex { override protected def runMatcher(m: Matcher) = m.find() override def unanchored = this @@ -503,70 +578,79 @@ object Regex { */ trait MatchData { - /** The source from where the match originated */ + /** The source from which the match originated */ val source: CharSequence - /** The names of the groups, or some empty sequence if one defined */ + /** The names of the groups, or an empty sequence if none defined */ val groupNames: Seq[String] - /** The number of subgroups in the pattern (not all of these need to match!) */ + /** The number of capturing groups in the pattern. + * (For a given successful match, some of those groups may not have matched any input.) + */ def groupCount: Int /** The index of the first matched character, or -1 if nothing was matched */ def start: Int /** The index of the first matched character in group `i`, - * or -1 if nothing was matched for that group */ + * or -1 if nothing was matched for that group. + */ def start(i: Int): Int - /** The index of the last matched character, or -1 if nothing was matched */ + /** The index following the last matched character, or -1 if nothing was matched. */ def end: Int /** The index following the last matched character in group `i`, - * or -1 if nothing was matched for that group */ + * or -1 if nothing was matched for that group. + */ def end(i: Int): Int - /** The matched string, or `null` if nothing was matched */ + /** The matched string, or `null` if nothing was matched. */ def matched: String = if (start >= 0) source.subSequence(start, end).toString else null /** The matched string in group `i`, - * or `null` if nothing was matched */ + * or `null` if nothing was matched. + */ def group(i: Int): String = if (start(i) >= 0) source.subSequence(start(i), end(i)).toString else null - /** All matched subgroups, i.e. not including group(0) */ + /** All capturing groups, i.e., not including group(0). */ def subgroups: List[String] = (1 to groupCount).toList map group /** The char sequence before first character of match, - * or `null` if nothing was matched */ + * or `null` if nothing was matched. + */ def before: CharSequence = if (start >= 0) source.subSequence(0, start) else null /** The char sequence before first character of match in group `i`, - * or `null` if nothing was matched for that group */ + * or `null` if nothing was matched for that group. + */ def before(i: Int): CharSequence = if (start(i) >= 0) source.subSequence(0, start(i)) else null /** Returns char sequence after last character of match, - * or `null` if nothing was matched */ + * or `null` if nothing was matched. 
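A small example of the `MatchData` accessors documented here; the input string is invented.
{{{
val date = """(\d\d\d\d)-(\d\d)-(\d\d)""".r
val m = date.findFirstMatchIn("due 2011-07-15 sharp").get

m.matched    // "2011-07-15"
m.before     // "due "
m.after      // " sharp"
m.group(1)   // "2011"
}}}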
+ */ def after: CharSequence = if (end >= 0) source.subSequence(end, source.length) else null /** The char sequence after last character of match in group `i`, - * or `null` if nothing was matched for that group */ + * or `null` if nothing was matched for that group. + */ def after(i: Int): CharSequence = if (end(i) >= 0) source.subSequence(end(i), source.length) else null private lazy val nameToIndex: Map[String, Int] = Map[String, Int]() ++ ("" :: groupNames.toList).zipWithIndex - /** Returns the group with given name + /** Returns the group with given name. * * @param id The group name * @return The requested group @@ -577,24 +661,22 @@ object Regex { case Some(index) => group(index) } - /** The matched string; equivalent to `matched.toString` */ + /** The matched string; equivalent to `matched.toString`. */ override def toString = matched - } - /** Provides information about a succesful match. - */ + /** Provides information about a successful match. */ class Match(val source: CharSequence, private[matching] val matcher: Matcher, val groupNames: Seq[String]) extends MatchData { - /** The index of the first matched character */ + /** The index of the first matched character. */ val start = matcher.start - /** The index following the last matched character */ + /** The index following the last matched character. */ val end = matcher.end - /** The number of subgroups */ + /** The number of subgroups. */ def groupCount = matcher.groupCount private lazy val starts: Array[Int] = @@ -602,19 +684,19 @@ object Regex { private lazy val ends: Array[Int] = ((0 to groupCount) map matcher.end).toArray - /** The index of the first matched character in group `i` */ + /** The index of the first matched character in group `i`. */ def start(i: Int) = starts(i) - /** The index following the last matched character in group `i` */ + /** The index following the last matched character in group `i`. */ def end(i: Int) = ends(i) /** The match itself with matcher-dependent lazy vals forced, - * so that match is valid even once matcher is advanced + * so that match is valid even once matcher is advanced. */ def force: this.type = { starts; ends; this } } - /** An extractor object for Matches, yielding the matched string + /** An extractor object for Matches, yielding the matched string. * * This can be used to help writing replacer functions when you * are not interested in match data. For example: @@ -629,15 +711,15 @@ object Regex { def unapply(m: Match): Some[String] = Some(m.matched) } - /** An extractor object that yields the groups in the match. Using an extractor - * rather than the original regex avoids recomputing the match. + /** An extractor object that yields the groups in the match. Using this extractor + * rather than the original `Regex` ensures that the match is not recomputed. * * {{{ * import scala.util.matching.Regex.Groups * - * val datePattern = """(\d\d\d\d)-(\d\d)-(\d\d)""".r + * val date = """(\d\d\d\d)-(\d\d)-(\d\d)""".r * val text = "The doc spree happened on 2011-07-15." - * val day = datePattern replaceAllIn(text, _ match { case Groups(year, month, day) => month+"/"+day }) + * val day = date replaceAllIn(text, _ match { case Groups(_, month, day) => s"$month/$day" }) * }}} */ object Groups { @@ -666,7 +748,7 @@ object Regex { nextSeen } - /** The next matched substring of `source` */ + /** The next matched substring of `source`. 
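 *
 *  A small illustration (the input string here is made up for illustration): after `next()` returns a
 *  match, `start` and `end` report the offsets of that match in `source`.
 *  {{{
 *  val it = """\d+""".r.findAllIn("ab 12 cd 345")
 *  while (it.hasNext) {
 *    val s = it.next()
 *    println(s"$s at ${it.start}")   // prints "12 at 3", then "345 at 9"
 *  }
 *  }}}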
*/ def next(): String = { if (!hasNext) throw new NoSuchElementException nextSeen = false @@ -675,28 +757,28 @@ object Regex { override def toString = super[AbstractIterator].toString - /** The index of the first matched character */ + /** The index of the first matched character. */ def start: Int = matcher.start - /** The index of the first matched character in group `i` */ + /** The index of the first matched character in group `i`. */ def start(i: Int): Int = matcher.start(i) - /** The index of the last matched character */ + /** The index of the last matched character. */ def end: Int = matcher.end - /** The index following the last matched character in group `i` */ + /** The index following the last matched character in group `i`. */ def end(i: Int): Int = matcher.end(i) - /** The number of subgroups */ + /** The number of subgroups. */ def groupCount = matcher.groupCount - /** Convert to an iterator that yields MatchData elements instead of Strings */ + /** Convert to an iterator that yields MatchData elements instead of Strings. */ def matchData: Iterator[Match] = new AbstractIterator[Match] { def hasNext = self.hasNext def next = { self.next(); new Match(source, matcher, groupNames).force } } - /** Convert to an iterator that yields MatchData elements instead of Strings and has replacement support */ + /** Convert to an iterator that yields MatchData elements instead of Strings and has replacement support. */ private[matching] def replacementData = new AbstractIterator[Match] with Replacement { def matcher = self.matcher def hasNext = self.hasNext diff --git a/src/manual/scala/man1/Command.scala b/src/manual/scala/man1/Command.scala index 1cf55cb28d61..8f811f950e0d 100644 --- a/src/manual/scala/man1/Command.scala +++ b/src/manual/scala/man1/Command.scala @@ -47,7 +47,7 @@ trait Command { def copyright = Section("COPYRIGHT", "This is open-source software, available to you under a BSD-like license. " & - "See accomponying \"copyright\" or \"LICENSE\" file for copying conditions. " & + "See accompanying \"copyright\" or \"LICENSE\" file for copying conditions. " & "There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A " & "PARTICULAR PURPOSE.") diff --git a/src/manual/scala/man1/scalac.scala b/src/manual/scala/man1/scalac.scala index 31d25d480181..3954ed588e01 100644 --- a/src/manual/scala/man1/scalac.scala +++ b/src/manual/scala/man1/scalac.scala @@ -360,7 +360,7 @@ object scalac extends Command { "ANF pre-transform for " & MItalic("@cps") & " (CPS plugin)"), Definition( MItalic("selectivecps"), - MItalic("@cps") & "-driven transform of selectiveanf assignements (CPS plugin)"), + MItalic("@cps") & "-driven transform of selectiveanf assignments (CPS plugin)"), Definition( MItalic("uncurry"), "uncurry, translate function values to anonymous classes"), diff --git a/src/partest-extras/scala/tools/partest/ASMConverters.scala b/src/partest-extras/scala/tools/partest/ASMConverters.scala index d618e086f428..67a4e8ae01b6 100644 --- a/src/partest-extras/scala/tools/partest/ASMConverters.scala +++ b/src/partest-extras/scala/tools/partest/ASMConverters.scala @@ -2,70 +2,216 @@ package scala.tools.partest import scala.collection.JavaConverters._ import scala.tools.asm -import asm.tree.{ClassNode, MethodNode, InsnList} +import asm.{tree => t} /** Makes using ASM from ByteCodeTests more convenient. * * Wraps ASM instructions in case classes so that equals and toString work * for the purpose of bytecode diffing and pretty printing. 
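 *
 *  A rough usage sketch (`methodNodeA` and `methodNodeB` stand in for pre-existing
 *  `asm.tree.MethodNode`s and are not defined here):
 *  {{{
 *  import scala.tools.partest.ASMConverters._
 *  val insA = instructionsFromMethod(methodNodeA)
 *  val insB = instructionsFromMethod(methodNodeB)
 *  insA === insB      // equivalent modulo local variable and label numbering
 *  insA.dropNonOp     // strip line numbers, frames and unreferenced labels before diffing
 *  }}}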
*/ -trait ASMConverters { - // wrap ASM's instructions so we get case class-style `equals` and `toString` - object instructions { - def fromMethod(meth: MethodNode): List[Instruction] = { - val insns = meth.instructions - val asmToScala = new AsmToScala{ def labelIndex(l: asm.tree.AbstractInsnNode) = insns.indexOf(l) } - - asmToScala.mapOver(insns.iterator.asScala.toList).asInstanceOf[List[Instruction]] +object ASMConverters { + + /** + * Transform the instructions of an ASM Method into a list of [[Instruction]]s. + */ + def instructionsFromMethod(meth: t.MethodNode): List[Instruction] = new AsmToScala(meth).instructions + + def convertMethod(meth: t.MethodNode): Method = new AsmToScala(meth).method + + implicit class RichInstructionLists(val self: List[Instruction]) extends AnyVal { + def === (other: List[Instruction]) = equivalentBytecode(self, other) + + def dropLinesFrames = self.filterNot(i => i.isInstanceOf[LineNumber] || i.isInstanceOf[FrameEntry]) + + private def referencedLabels(instruction: Instruction): Set[Instruction] = instruction match { + case Jump(op, label) => Set(label) + case LookupSwitch(op, dflt, keys, labels) => (dflt :: labels).toSet + case TableSwitch(op, min, max, dflt, labels) => (dflt :: labels).toSet + case LineNumber(line, start) => Set(start) + case _ => Set.empty } - sealed abstract class Instruction { def opcode: String } - case class Field (opcode: String, desc: String, name: String, owner: String) extends Instruction - case class Incr (opcode: String, incr: Int, `var`: Int) extends Instruction - case class Op (opcode: String) extends Instruction - case class IntOp (opcode: String, operand: Int) extends Instruction - case class Jump (opcode: String, label: Label) extends Instruction - case class Ldc (opcode: String, cst: Any) extends Instruction - case class LookupSwitch (opcode: String, dflt: Label, keys: List[Integer], labels: List[Label]) extends Instruction - case class TableSwitch (opcode: String, dflt: Label, max: Int, min: Int, labels: List[Label]) extends Instruction - case class Method (opcode: String, desc: String, name: String, owner: String) extends Instruction - case class NewArray (opcode: String, desc: String, dims: Int) extends Instruction - case class TypeOp (opcode: String, desc: String) extends Instruction - case class VarOp (opcode: String, `var`: Int) extends Instruction - case class Label (offset: Int) extends Instruction { def opcode: String = "" } - case class FrameEntry (local: List[Any], stack: List[Any]) extends Instruction { def opcode: String = "" } - case class LineNumber (line: Int, start: Label) extends Instruction { def opcode: String = "" } + def dropStaleLabels = { + val definedLabels: Set[Instruction] = self.filter(_.isInstanceOf[Label]).toSet + val usedLabels: Set[Instruction] = self.flatMap(referencedLabels)(collection.breakOut) + self.filterNot(definedLabels diff usedLabels) + } + + def dropNonOp = dropLinesFrames.dropStaleLabels + } + + sealed abstract class Instruction extends Product { + def opcode: Int + + // toString such that the first field, "opcode: Int", is printed textually. + final override def toString() = { + import scala.tools.asm.util.Printer.OPCODES + def opString(op: Int) = if (OPCODES.isDefinedAt(op)) OPCODES(op) else "?" 
+ val printOpcode = opcode != -1 + + productPrefix + ( + if (printOpcode) Iterator(opString(opcode)) ++ productIterator.drop(1) + else productIterator + ).mkString("(", ", ", ")") + } } - abstract class AsmToScala { - import instructions._ + case class Method(instructions: List[Instruction], handlers: List[ExceptionHandler], localVars: List[LocalVariable]) + + case class Field (opcode: Int, owner: String, name: String, desc: String) extends Instruction + case class Incr (opcode: Int, `var`: Int, incr: Int) extends Instruction + case class Op (opcode: Int) extends Instruction + case class IntOp (opcode: Int, operand: Int) extends Instruction + case class Jump (opcode: Int, label: Label) extends Instruction + case class Ldc (opcode: Int, cst: Any) extends Instruction + case class LookupSwitch(opcode: Int, dflt: Label, keys: List[Int], labels: List[Label]) extends Instruction + case class TableSwitch (opcode: Int, min: Int, max: Int, dflt: Label, labels: List[Label]) extends Instruction + case class Invoke (opcode: Int, owner: String, name: String, desc: String, itf: Boolean) extends Instruction + case class NewArray (opcode: Int, desc: String, dims: Int) extends Instruction + case class TypeOp (opcode: Int, desc: String) extends Instruction + case class VarOp (opcode: Int, `var`: Int) extends Instruction + case class Label (offset: Int) extends Instruction { def opcode: Int = -1 } + case class FrameEntry (`type`: Int, local: List[Any], stack: List[Any]) extends Instruction { def opcode: Int = -1 } + case class LineNumber (line: Int, start: Label) extends Instruction { def opcode: Int = -1 } + + case class ExceptionHandler(start: Label, end: Label, handler: Label, desc: Option[String]) + case class LocalVariable(name: String, desc: String, signature: Option[String], start: Label, end: Label, index: Int) + + class AsmToScala(asmMethod: t.MethodNode) { + + def instructions: List[Instruction] = asmMethod.instructions.iterator.asScala.toList map apply + + def method: Method = Method(instructions, convertHandlers(asmMethod), convertLocalVars(asmMethod)) - def labelIndex(l: asm.tree.AbstractInsnNode): Int + private def labelIndex(l: t.LabelNode): Int = asmMethod.instructions.indexOf(l) + + private def op(i: t.AbstractInsnNode): Int = i.getOpcode - def mapOver(is: List[Any]): List[Any] = is map { - case i: asm.tree.AbstractInsnNode => apply(i) + private def lst[T](xs: java.util.List[T]): List[T] = if (xs == null) Nil else xs.asScala.toList + + // Heterogeneous List[Any] is used in FrameNode: type information about locals / stack values + // are stored in a List[Any] (Integer, String or LabelNode), see Javadoc of MethodNode#visitFrame. + // Opcodes (eg Opcodes.INTEGER) and Reference types (eg "java/lang/Object") are returned unchanged, + // LabelNodes are mapped to their LabelEntry. + private def mapOverFrameTypes(is: List[Any]): List[Any] = is map { + case i: t.LabelNode => applyLabel(i) + case x => x } - def op(i: asm.tree.AbstractInsnNode) = if (asm.util.Printer.OPCODES.isDefinedAt(i.getOpcode)) asm.util.Printer.OPCODES(i.getOpcode) else "?" 
- def lst[T](xs: java.util.List[T]): List[T] = if (xs == null) Nil else xs.asScala.toList - def apply(l: asm.tree.LabelNode): Label = this(l: asm.tree.AbstractInsnNode).asInstanceOf[Label] - def apply(x: asm.tree.AbstractInsnNode): Instruction = x match { - case i: asm.tree.FieldInsnNode => Field (op(i), i.desc: String, i.name: String, i.owner: String) - case i: asm.tree.IincInsnNode => Incr (op(i), i.incr: Int, i.`var`: Int) - case i: asm.tree.InsnNode => Op (op(i)) - case i: asm.tree.IntInsnNode => IntOp (op(i), i.operand: Int) - case i: asm.tree.JumpInsnNode => Jump (op(i), this(i.label)) - case i: asm.tree.LdcInsnNode => Ldc (op(i), i.cst: Any) - case i: asm.tree.LookupSwitchInsnNode => LookupSwitch (op(i), this(i.dflt), lst(i.keys), mapOver(lst(i.labels)).asInstanceOf[List[Label]]) - case i: asm.tree.TableSwitchInsnNode => TableSwitch (op(i), this(i.dflt), i.max: Int, i.min: Int, mapOver(lst(i.labels)).asInstanceOf[List[Label]]) - case i: asm.tree.MethodInsnNode => Method (op(i), i.desc: String, i.name: String, i.owner: String) - case i: asm.tree.MultiANewArrayInsnNode => NewArray (op(i), i.desc: String, i.dims: Int) - case i: asm.tree.TypeInsnNode => TypeOp (op(i), i.desc: String) - case i: asm.tree.VarInsnNode => VarOp (op(i), i.`var`: Int) - case i: asm.tree.LabelNode => Label (labelIndex(x)) - case i: asm.tree.FrameNode => FrameEntry (mapOver(lst(i.local)), mapOver(lst(i.stack))) - case i: asm.tree.LineNumberNode => LineNumber (i.line: Int, this(i.start): Label) + // avoids some casts + private def applyLabel(l: t.LabelNode) = this(l: t.AbstractInsnNode).asInstanceOf[Label] + + private def apply(x: t.AbstractInsnNode): Instruction = x match { + case i: t.FieldInsnNode => Field (op(i), i.owner, i.name, i.desc) + case i: t.IincInsnNode => Incr (op(i), i.`var`, i.incr) + case i: t.InsnNode => Op (op(i)) + case i: t.IntInsnNode => IntOp (op(i), i.operand) + case i: t.JumpInsnNode => Jump (op(i), applyLabel(i.label)) + case i: t.LdcInsnNode => Ldc (op(i), i.cst: Any) + case i: t.LookupSwitchInsnNode => LookupSwitch (op(i), applyLabel(i.dflt), lst(i.keys) map (x => x: Int), lst(i.labels) map applyLabel) + case i: t.TableSwitchInsnNode => TableSwitch (op(i), i.min, i.max, applyLabel(i.dflt), lst(i.labels) map applyLabel) + case i: t.MethodInsnNode => Invoke (op(i), i.owner, i.name, i.desc, i.itf) + case i: t.MultiANewArrayInsnNode => NewArray (op(i), i.desc, i.dims) + case i: t.TypeInsnNode => TypeOp (op(i), i.desc) + case i: t.VarInsnNode => VarOp (op(i), i.`var`) + case i: t.LabelNode => Label (labelIndex(i)) + case i: t.FrameNode => FrameEntry (i.`type`, mapOverFrameTypes(lst(i.local)), mapOverFrameTypes(lst(i.stack))) + case i: t.LineNumberNode => LineNumber (i.line, applyLabel(i.start)) + } + + private def convertHandlers(method: t.MethodNode): List[ExceptionHandler] = { + method.tryCatchBlocks.asScala.map(h => ExceptionHandler(applyLabel(h.start), applyLabel(h.end), applyLabel(h.handler), Option(h.`type`)))(collection.breakOut) + } + + private def convertLocalVars(method: t.MethodNode): List[LocalVariable] = { + method.localVariables.asScala.map(v => LocalVariable(v.name, v.desc, Option(v.signature), applyLabel(v.start), applyLabel(v.end), v.index))(collection.breakOut) + } + } + + import collection.mutable.{Map => MMap} + + /** + * Bytecode is equal modulo local variable numbering and label numbering. 
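   * For instance (a contrived pair of instruction lists, assuming `scala.tools.asm.Opcodes._` is
   * imported), the call below yields `true`: local slot 1 on the left is consistently mapped to
   * slot 3 on the right.
   * {{{
   * equivalentBytecode(
   *   List(VarOp(ILOAD, 1), Op(IRETURN)),
   *   List(VarOp(ILOAD, 3), Op(IRETURN)))
   * }}}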
+ */ + def equivalentBytecode(as: List[Instruction], bs: List[Instruction], varMap: MMap[Int, Int] = MMap(), labelMap: MMap[Int, Int] = MMap()): Boolean = { + def same(v1: Int, v2: Int, m: MMap[Int, Int]) = { + if (m contains v1) m(v1) == v2 + else if (m.valuesIterator contains v2) false // v2 is already associated with some different value v1 + else { m(v1) = v2; true } + } + def sameVar(v1: Int, v2: Int) = same(v1, v2, varMap) + def sameLabel(l1: Label, l2: Label) = same(l1.offset, l2.offset, labelMap) + def sameLabels(ls1: List[Label], ls2: List[Label]) = (ls1 corresponds ls2)(sameLabel) + + def sameFrameTypes(ts1: List[Any], ts2: List[Any]) = (ts1 corresponds ts2) { + case (t1: Label, t2: Label) => sameLabel(t1, t2) + case (x, y) => x == y + } + + if (as.isEmpty) bs.isEmpty + else if (bs.isEmpty) false + else ((as.head, bs.head) match { + case (VarOp(op1, v1), VarOp(op2, v2)) => op1 == op2 && sameVar(v1, v2) + case (Incr(op1, v1, inc1), Incr(op2, v2, inc2)) => op1 == op2 && sameVar(v1, v2) && inc1 == inc2 + + case (l1 @ Label(_), l2 @ Label(_)) => sameLabel(l1, l2) + case (Jump(op1, l1), Jump(op2, l2)) => op1 == op2 && sameLabel(l1, l2) + case (LookupSwitch(op1, l1, keys1, ls1), LookupSwitch(op2, l2, keys2, ls2)) => op1 == op2 && sameLabel(l1, l2) && keys1 == keys2 && sameLabels(ls1, ls2) + case (TableSwitch(op1, min1, max1, l1, ls1), TableSwitch(op2, min2, max2, l2, ls2)) => op1 == op2 && min1 == min2 && max1 == max2 && sameLabel(l1, l2) && sameLabels(ls1, ls2) + case (LineNumber(line1, l1), LineNumber(line2, l2)) => line1 == line2 && sameLabel(l1, l2) + case (FrameEntry(tp1, loc1, stk1), FrameEntry(tp2, loc2, stk2)) => tp1 == tp2 && sameFrameTypes(loc1, loc2) && sameFrameTypes(stk1, stk2) + + // this needs to go after the above. For example, Label(1) may not equal Label(1), if before + // the left 1 was associated with another right index. + case (a, b) if a == b => true + + case _ => false + }) && equivalentBytecode(as.tail, bs.tail, varMap, labelMap) + } + + def applyToMethod(method: t.MethodNode, instructions: List[Instruction]): Unit = { + val asmLabel = createLabelNodes(instructions) + instructions.foreach(visitMethod(method, _, asmLabel)) + } + + /** + * Convert back a [[Method]] to ASM land. The code is emitted into the parameter `asmMethod`. 
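   * A sketch of the intended round trip (`original` stands in for an existing `MethodNode`):
   * {{{
   * val converted: Method = convertMethod(original)
   * val fresh = new t.MethodNode(asm.Opcodes.ACC_PUBLIC, "f", "()I", null, null)
   * applyToMethod(fresh, converted)   // re-emits instructions, exception handlers and local variables
   * }}}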
+ */ + def applyToMethod(asmMethod: t.MethodNode, method: Method): Unit = { + val asmLabel = createLabelNodes(method.instructions) + method.instructions.foreach(visitMethod(asmMethod, _, asmLabel)) + method.handlers.foreach(h => asmMethod.visitTryCatchBlock(asmLabel(h.start), asmLabel(h.end), asmLabel(h.handler), h.desc.orNull)) + method.localVars.foreach(v => asmMethod.visitLocalVariable(v.name, v.desc, v.signature.orNull, asmLabel(v.start), asmLabel(v.end), v.index)) + } + + private def createLabelNodes(instructions: List[Instruction]): Map[Label, asm.Label] = { + val labels = instructions collect { + case l: Label => l } + assert(labels.distinct == labels, s"Duplicate labels in: $labels") + labels.map(l => (l, new asm.Label())).toMap + } + + private def frameTypesToAsm(l: List[Any], asmLabel: Map[Label, asm.Label]): List[Object] = l map { + case l: Label => asmLabel(l) + case x => x.asInstanceOf[Object] + } + + private def visitMethod(method: t.MethodNode, instruction: Instruction, asmLabel: Map[Label, asm.Label]): Unit = instruction match { + case Field(op, owner, name, desc) => method.visitFieldInsn(op, owner, name, desc) + case Incr(op, vr, incr) => method.visitIincInsn(vr, incr) + case Op(op) => method.visitInsn(op) + case IntOp(op, operand) => method.visitIntInsn(op, operand) + case Jump(op, label) => method.visitJumpInsn(op, asmLabel(label)) + case Ldc(op, cst) => method.visitLdcInsn(cst) + case LookupSwitch(op, dflt, keys, labels) => method.visitLookupSwitchInsn(asmLabel(dflt), keys.toArray, (labels map asmLabel).toArray) + case TableSwitch(op, min, max, dflt, labels) => method.visitTableSwitchInsn(min, max, asmLabel(dflt), (labels map asmLabel).toArray: _*) + case Invoke(op, owner, name, desc, itf) => method.visitMethodInsn(op, owner, name, desc, itf) + case NewArray(op, desc, dims) => method.visitMultiANewArrayInsn(desc, dims) + case TypeOp(op, desc) => method.visitTypeInsn(op, desc) + case VarOp(op, vr) => method.visitVarInsn(op, vr) + case l: Label => method.visitLabel(asmLabel(l)) + case FrameEntry(tp, local, stack) => method.visitFrame(tp, local.length, frameTypesToAsm(local, asmLabel).toArray, stack.length, frameTypesToAsm(stack, asmLabel).toArray) + case LineNumber(line, start) => method.visitLineNumber(line, asmLabel(start)) } -} \ No newline at end of file +} diff --git a/src/partest-extras/scala/tools/partest/BytecodeTest.scala b/src/partest-extras/scala/tools/partest/BytecodeTest.scala index 7650a892fd7f..8459419fa5d6 100644 --- a/src/partest-extras/scala/tools/partest/BytecodeTest.scala +++ b/src/partest-extras/scala/tools/partest/BytecodeTest.scala @@ -3,7 +3,7 @@ package scala.tools.partest import scala.tools.nsc.util.JavaClassPath import scala.collection.JavaConverters._ import scala.tools.asm.{ClassWriter, ClassReader} -import scala.tools.asm.tree.{ClassNode, MethodNode, InsnList} +import scala.tools.asm.tree._ import java.io.{FileOutputStream, FileInputStream, File => JFile, InputStream} import AsmNode._ @@ -28,18 +28,18 @@ import AsmNode._ * See test/files/jvm/bytecode-test-example for an example of bytecode test. 
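 *
 *  A minimal sketch of such a test (`Foo_1` and its method `foo` are assumed to be compiled by the
 *  test, and `getMethod` is assumed to be the usual lookup helper defined below):
 *  {{{
 *  object Test extends BytecodeTest {
 *    import ASMConverters._
 *    def show(): Unit = {
 *      val classNode  = loadClassNode("Foo_1")
 *      val methodNode = getMethod(classNode, "foo")
 *      println(instructionsFromMethod(methodNode).dropNonOp mkString "\n")
 *    }
 *  }
 *  }}}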
* */ -abstract class BytecodeTest extends ASMConverters { - import instructions._ +abstract class BytecodeTest { + import ASMConverters._ /** produce the output to be compared against a checkfile */ protected def show(): Unit - def main(args: Array[String]): Unit = show + def main(args: Array[String]): Unit = show() // asserts def sameBytecode(methA: MethodNode, methB: MethodNode) = { - val isa = instructions.fromMethod(methA) - val isb = instructions.fromMethod(methB) + val isa = instructionsFromMethod(methA) + val isb = instructionsFromMethod(methB) if (isa == isb) println("bytecode identical") else diffInstructions(isa, isb) } @@ -81,18 +81,16 @@ abstract class BytecodeTest extends ASMConverters { } } - // bytecode is equal modulo local variable numbering - def equalsModuloVar(a: Instruction, b: Instruction) = (a, b) match { - case _ if a == b => true - case (VarOp(op1, _), VarOp(op2, _)) if op1 == op2 => true - case _ => false - } - - def similarBytecode(methA: MethodNode, methB: MethodNode, similar: (Instruction, Instruction) => Boolean) = { - val isa = fromMethod(methA) - val isb = fromMethod(methB) + /** + * Compare the bytecodes of two methods. + * + * For the `similar` function, you probably want to pass [[ASMConverters.equivalentBytecode]]. + */ + def similarBytecode(methA: MethodNode, methB: MethodNode, similar: (List[Instruction], List[Instruction]) => Boolean) = { + val isa = instructionsFromMethod(methA) + val isb = instructionsFromMethod(methB) if (isa == isb) println("bytecode identical") - else if ((isa, isb).zipped.forall { case (a, b) => similar(a, b) }) println("bytecode similar") + else if (similar(isa, isb)) println("bytecode similar") else diffInstructions(isa, isb) } @@ -118,10 +116,8 @@ abstract class BytecodeTest extends ASMConverters { sys.error(s"Didn't find method '$name' in class '${classNode.name}'") protected def loadClassNode(name: String, skipDebugInfo: Boolean = true): ClassNode = { - val classBytes: InputStream = (for { - classRep <- classpath.findClass(name) - binary <- classRep.binary - } yield binary.input) getOrElse sys.error(s"failed to load class '$name'; classpath = $classpath") + val classBytes: InputStream = classpath.findClassFile(name).map(_.input) + .getOrElse(sys.error(s"failed to load class '$name'; classpath = $classpath")) val cr = new ClassReader(classBytes) val cn = new ClassNode() @@ -140,7 +136,7 @@ abstract class BytecodeTest extends ASMConverters { object BytecodeTest { /** Parse `file` as a class file, transforms the ASM representation with `f`, - * and overwrites the orginal file. + * and overwrites the original file. */ def modifyClassFile(file: JFile)(f: ClassNode => ClassNode) { val rfile = new reflect.io.File(file) diff --git a/src/partest-extras/scala/tools/partest/instrumented/Profiler.java b/src/partest-extras/scala/tools/partest/instrumented/Profiler.java index d6b62e1d9ea0..848103f5ccf8 100644 --- a/src/partest-extras/scala/tools/partest/instrumented/Profiler.java +++ b/src/partest-extras/scala/tools/partest/instrumented/Profiler.java @@ -12,7 +12,7 @@ * A simple profiler class that counts method invocations. It is being used in byte-code instrumentation by inserting * call to {@link Profiler#methodCalled(String, String, String)} at the beginning of every instrumented class. * - * WARANING: This class is INTERNAL implementation detail and should never be used directly. It's made public only + * WARNING: This class is INTERNAL implementation detail and should never be used directly. 
It's made public only * because it must be universally accessible for instrumentation needs. If you want to profile your test use * {@link Instrumentation} instead. */ diff --git a/src/partest-javaagent/scala/tools/partest/javaagent/ProfilerVisitor.java b/src/partest-javaagent/scala/tools/partest/javaagent/ProfilerVisitor.java index b1b100fbb06e..d97756c17142 100644 --- a/src/partest-javaagent/scala/tools/partest/javaagent/ProfilerVisitor.java +++ b/src/partest-javaagent/scala/tools/partest/javaagent/ProfilerVisitor.java @@ -50,7 +50,7 @@ public MethodVisitor visitMethod(int access, String name, String desc, String si mv.visitLdcInsn(name); mv.visitLdcInsn(desc); mv.visitMethodInsn(INVOKESTATIC, profilerClass, "methodCalled", - "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V"); + "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V", false); } } return mv; diff --git a/src/reflect/scala/reflect/api/Constants.scala b/src/reflect/scala/reflect/api/Constants.scala index e73c5ffa91ec..fbcf7f3e4fb9 100644 --- a/src/reflect/scala/reflect/api/Constants.scala +++ b/src/reflect/scala/reflect/api/Constants.scala @@ -60,7 +60,7 @@ package api * * object Test extends App { * val jann = typeOf[JavaAnnottee].typeSymbol.annotations(0).javaArgs - * def jarg(name: String) = jann(newTermName(name)).asInstanceOf[LiteralArgument].value + * def jarg(name: String) = jann(TermName(name)).asInstanceOf[LiteralArgument].value * * val classRef = jarg("classRef").typeValue * println(showRaw(classRef)) // TypeRef(ThisType(), JavaAnnottee, List()) @@ -150,7 +150,7 @@ trait Constants { * * object Test extends App { * val jann = typeOf[JavaAnnottee].typeSymbol.annotations(0).javaArgs - * def jarg(name: String) = jann(newTermName(name)) match { + * def jarg(name: String) = jann(TermName(name)) match { * // Constant is always wrapped into a Literal or LiteralArgument tree node * case LiteralArgument(ct: Constant) => value * case _ => sys.error("Not a constant") diff --git a/src/reflect/scala/reflect/api/Exprs.scala b/src/reflect/scala/reflect/api/Exprs.scala index 5b6ff2325c9e..ad03718898d7 100644 --- a/src/reflect/scala/reflect/api/Exprs.scala +++ b/src/reflect/scala/reflect/api/Exprs.scala @@ -9,6 +9,7 @@ package api import scala.reflect.runtime.{universe => ru} import scala.annotation.compileTimeOnly +import java.io.ObjectStreamException /** * EXPERIMENTAL @@ -83,7 +84,7 @@ trait Exprs { self: Universe => * * It is equivalent to * {{{ - * Select( expr.tree, newTermName("foo") ) + * Select( expr.tree, TermName("foo") ) * }}} * * The following example code however does not compile @@ -157,23 +158,23 @@ trait Exprs { self: Universe => |if you want to get a value of the underlying expression, add scala-compiler.jar to the classpath, |import `scala.tools.reflect.Eval` and call `.eval` instead.""".trim.stripMargin) + @throws(classOf[ObjectStreamException]) private def writeReplace(): AnyRef = new SerializedExpr(treec, implicitly[WeakTypeTag[T]].in(ru.rootMirror)) } } +@SerialVersionUID(1L) private[scala] class SerializedExpr(var treec: TreeCreator, var tag: ru.WeakTypeTag[_]) extends Serializable { - private def writeObject(out: java.io.ObjectOutputStream): Unit = { - out.writeObject(treec) - out.writeObject(tag) - } - - private def readObject(in: java.io.ObjectInputStream): Unit = { - treec = in.readObject().asInstanceOf[TreeCreator] - tag = in.readObject().asInstanceOf[ru.WeakTypeTag[_]] - } + import scala.reflect.runtime.universe.{Expr, runtimeMirror} + @throws(classOf[ObjectStreamException]) private def 
readResolve(): AnyRef = { - import ru._ - Expr(rootMirror, treec)(tag) + val loader: ClassLoader = try { + Thread.currentThread().getContextClassLoader() + } catch { + case se: SecurityException => null + } + val m = runtimeMirror(loader) + Expr(m, treec)(tag.in(m)) } } diff --git a/src/reflect/scala/reflect/api/FlagSets.scala b/src/reflect/scala/reflect/api/FlagSets.scala index bf4d6353dffa..bcad84a3f0d8 100644 --- a/src/reflect/scala/reflect/api/FlagSets.scala +++ b/src/reflect/scala/reflect/api/FlagSets.scala @@ -20,20 +20,20 @@ import scala.language.implicitConversions * * For example, to create a class named `C` one would write something like: * {{{ - * ClassDef(Modifiers(NoFlags), newTypeName("C"), Nil, ...) + * ClassDef(Modifiers(NoFlags), TypeName("C"), Nil, ...) * }}} * * Here, the flag set is empty. * * To make `C` private, one would write something like: * {{{ - * ClassDef(Modifiers(PRIVATE), newTypeName("C"), Nil, ...) + * ClassDef(Modifiers(PRIVATE), TypeName("C"), Nil, ...) * }}} * * Flags can also be combined with the vertical bar operator (`|`). * For example, a private final class is written something like: * {{{ - * ClassDef(Modifiers(PRIVATE | FINAL), newTypeName("C"), Nil, ...) + * ClassDef(Modifiers(PRIVATE | FINAL), TypeName("C"), Nil, ...) * }}} * * The list of all available flags is defined in [[scala.reflect.api.FlagSets#FlagValues]], available via diff --git a/src/reflect/scala/reflect/api/Internals.scala b/src/reflect/scala/reflect/api/Internals.scala index 01f928ed61da..577cd0929532 100644 --- a/src/reflect/scala/reflect/api/Internals.scala +++ b/src/reflect/scala/reflect/api/Internals.scala @@ -600,10 +600,11 @@ trait Internals { self: Universe => } val SyntacticTypeApplied: SyntacticTypeAppliedExtractor + val SyntacticAppliedType: SyntacticTypeAppliedExtractor trait SyntacticTypeAppliedExtractor { def apply(tree: Tree, targs: List[Tree]): Tree - def unapply(tree: Tree): Some[(Tree, List[Tree])] + def unapply(tree: Tree): Option[(Tree, List[Tree])] } val SyntacticApplied: SyntacticAppliedExtractor @@ -761,9 +762,15 @@ trait Internals { self: Universe => def unapply(lst: List[List[Tree]]): Option[List[List[T]]] } + val SyntacticPartialFunction: SyntacticPartialFunctionExtractor + trait SyntacticPartialFunctionExtractor { + def apply(cases: List[Tree]): Match + def unapply(tree: Tree): Option[List[CaseDef]] + } + val SyntacticMatch: SyntacticMatchExtractor trait SyntacticMatchExtractor { - def apply(selector: Tree, cases: List[Tree]): Match + def apply(scrutinee: Tree, cases: List[Tree]): Match def unapply(tree: Match): Option[(Tree, List[CaseDef])] } @@ -773,10 +780,16 @@ trait Internals { self: Universe => def unapply(tree: Try): Option[(Tree, List[CaseDef], Tree)] } - val SyntacticIdent: SyntacticIdentExtractor - trait SyntacticIdentExtractor { - def apply(name: Name, isBackquoted: Boolean = false): Ident - def unapply(tree: Ident): Option[(Name, Boolean)] + val SyntacticTermIdent: SyntacticTermIdentExtractor + trait SyntacticTermIdentExtractor { + def apply(name: TermName, isBackquoted: Boolean = false): Ident + def unapply(id: Ident): Option[(TermName, Boolean)] + } + + val SyntacticTypeIdent: SyntacticTypeIdentExtractor + trait SyntacticTypeIdentExtractor { + def apply(name: TypeName): Ident + def unapply(tree: Tree): Option[TypeName] } val SyntacticImport: SyntacticImportExtractor @@ -784,6 +797,48 @@ trait Internals { self: Universe => def apply(expr: Tree, selectors: List[Tree]): Import def unapply(imp: Import): Some[(Tree, List[Tree])] } + + val 
SyntacticSelectType: SyntacticSelectTypeExtractor + trait SyntacticSelectTypeExtractor { + def apply(qual: Tree, name: TypeName): Select + def unapply(tree: Tree): Option[(Tree, TypeName)] + } + + val SyntacticSelectTerm: SyntacticSelectTermExtractor + trait SyntacticSelectTermExtractor { + def apply(qual: Tree, name: TermName): Select + def unapply(tree: Tree): Option[(Tree, TermName)] + } + + val SyntacticCompoundType: SyntacticCompoundTypeExtractor + trait SyntacticCompoundTypeExtractor { + def apply(parents: List[Tree], defns: List[Tree]): CompoundTypeTree + def unapply(tree: Tree): Option[(List[Tree], List[Tree])] + } + + val SyntacticSingletonType: SyntacitcSingletonTypeExtractor + trait SyntacitcSingletonTypeExtractor { + def apply(tree: Tree): SingletonTypeTree + def unapply(tree: Tree): Option[Tree] + } + + val SyntacticTypeProjection: SyntacticTypeProjectionExtractor + trait SyntacticTypeProjectionExtractor { + def apply(qual: Tree, name: TypeName): SelectFromTypeTree + def unapply(tree: Tree): Option[(Tree, TypeName)] + } + + val SyntacticAnnotatedType: SyntacticAnnotatedTypeExtractor + trait SyntacticAnnotatedTypeExtractor { + def apply(tpt: Tree, annot: Tree): Annotated + def unapply(tree: Tree): Option[(Tree, Tree)] + } + + val SyntacticExistentialType: SyntacticExistentialTypeExtractor + trait SyntacticExistentialTypeExtractor { + def apply(tpt: Tree, where: List[Tree]): ExistentialTypeTree + def unapply(tree: Tree): Option[(Tree, List[MemberDef])] + } } @deprecated("Use `internal.reificationSupport` instead", "2.11.0") diff --git a/src/reflect/scala/reflect/api/Liftables.scala b/src/reflect/scala/reflect/api/Liftables.scala index ec9d85b69eb1..c6352905d1c6 100644 --- a/src/reflect/scala/reflect/api/Liftables.scala +++ b/src/reflect/scala/reflect/api/Liftables.scala @@ -6,7 +6,7 @@ trait Liftables { self: Universe => /** A type class that defines a representation of `T` as a `Tree`. * - * @see [[http://docs.scala-lang.org/overviews/macros/quasiquotes.html#lifting]] + * @see [[http://docs.scala-lang.org/overviews/quasiquotes/lifting.html]] */ trait Liftable[T] { def apply(value: T): Tree @@ -32,7 +32,7 @@ trait Liftables { self: Universe => * lifted: universe.Tree = O * }}} * - * @see [[http://docs.scala-lang.org/overviews/macros/quasiquotes.html#lifting]] + * @see [[http://docs.scala-lang.org/overviews/quasiquotes/lifting.html]] */ def apply[T](f: T => Tree): Liftable[T] = new Liftable[T] { def apply(value: T): Tree = f(value) } @@ -40,7 +40,7 @@ trait Liftables { self: Universe => /** A type class that defines a way to extract instance of `T` from a `Tree`. * - * @see [[http://docs.scala-lang.org/overviews/macros/quasiquotes.html#unlifting]] + * @see [[http://docs.scala-lang.org/overviews/quasiquotes/unlifting.html]] */ trait Unliftable[T] { def unapply(tree: Tree): Option[T] @@ -52,7 +52,7 @@ trait Liftables { self: Universe => object Unliftable extends StandardUnliftableInstances { /** A helper method that simplifies creation of `Unliftable` instances. * Takes a partial function which is defined on correct representations of `T` - * and returns corresponing instances. + * and returns corresponding instances. 
* * For example to extract a reference to an object as object itself: * @@ -66,7 +66,7 @@ trait Liftables { self: Universe => * scala> val q"${_: O.type}" = q"$Oref" * }}} * - * @see [[http://docs.scala-lang.org/overviews/macros/quasiquotes.html#unlifting]] + * @see [[http://docs.scala-lang.org/overviews/quasiquotes/unlifting.html]] */ def apply[T](pf: PartialFunction[Tree, T]): Unliftable[T] = new Unliftable[T] { def unapply(value: Tree): Option[T] = pf.lift(value) diff --git a/src/reflect/scala/reflect/api/Mirror.scala b/src/reflect/scala/reflect/api/Mirror.scala index da3afd89ff48..96aab48e75eb 100644 --- a/src/reflect/scala/reflect/api/Mirror.scala +++ b/src/reflect/scala/reflect/api/Mirror.scala @@ -58,7 +58,7 @@ abstract class Mirror[U <: Universe with Singleton] { * scala> cm.staticPackage("scala") * res2: scala.reflect.runtime.universe.ModuleSymbol = package scala * - * scala> res2.moduleClass.info member newTypeName("List") + * scala> res2.moduleClass.info member TypeName("List") * res3: scala.reflect.runtime.universe.Symbol = type List * * scala> res3.fullName @@ -118,4 +118,22 @@ abstract class Mirror[U <: Universe with Singleton] { * @group Mirror */ def staticPackage(fullName: String): U#ModuleSymbol + + /** + * Shortcut for `implicitly[WeakTypeTag[T]].tpe` + * @group TypeTags + */ + def weakTypeOf[T: universe.WeakTypeTag]: U#Type = universe.weakTypeTag[T].in(this).tpe + + /** + * Shortcut for `implicitly[TypeTag[T]].tpe` + * @group TypeTags + */ + def typeOf[T: universe.TypeTag]: U#Type = universe.typeTag[T].in(this).tpe + + /** + * Type symbol of `x` as derived from a type tag. + * @group TypeTags + */ + def symbolOf[T: universe.WeakTypeTag]: U#TypeSymbol } diff --git a/src/reflect/scala/reflect/api/Mirrors.scala b/src/reflect/scala/reflect/api/Mirrors.scala index ec420d184cf0..adaf829b32f0 100644 --- a/src/reflect/scala/reflect/api/Mirrors.scala +++ b/src/reflect/scala/reflect/api/Mirrors.scala @@ -292,7 +292,7 @@ trait Mirrors { self: Universe => * that can be used to create instances of the class, inspect its companion object or perform further reflections. * * To get a class symbol by the name of the class you would like to reflect, - * use `.symbol.info.member(newTypeName()).asClass`. + * use `.symbol.info.member(TypeName()).asClass`. * For further information about member lookup refer to `Symbol.info`. * * The input symbol can be either private or non-private (Scala reflection transparently deals with visibility). @@ -338,7 +338,7 @@ trait Mirrors { self: Universe => * with getting a field or invoking a getter method of the field. * * If `symbol` represents a field of a base class with respect to the class of the receiver, - * and this base field is overriden in the class of the receiver, then this method will retrieve + * and this base field is overridden in the class of the receiver, then this method will retrieve * the value of the base field. To achieve overriding behavior, use reflectMethod on an accessor. */ def get: Any @@ -352,7 +352,7 @@ trait Mirrors { self: Universe => * with setting a field or invoking a setter method of the field. * * If `symbol` represents a field of a base class with respect to the class of the receiver, - * and this base field is overriden in the class of the receiver, then this method will set + * and this base field is overridden in the class of the receiver, then this method will set * the value of the base field. To achieve overriding behavior, use reflectMethod on an accessor. 
*/ def set(value: Any): Unit diff --git a/src/reflect/scala/reflect/api/Names.scala b/src/reflect/scala/reflect/api/Names.scala index fe5f47c25db2..472da60338df 100644 --- a/src/reflect/scala/reflect/api/Names.scala +++ b/src/reflect/scala/reflect/api/Names.scala @@ -17,11 +17,11 @@ import scala.language.implicitConversions * To search for the `map` method (which is a term) declared in the `List` class, one can do: * * {{{ - * scala> typeOf[List[_]].member(newTermName("map")) + * scala> typeOf[List[_]].member(TermName("map")) * res0: reflect.runtime.universe.Symbol = method map * }}} * - * To search for a type member, one can follow the same procedure, using `newTypeName` instead. + * To search for a type member, one can follow the same procedure, using `TypeName` instead. * * For more information about creating and using `Name`s, see the [[http://docs.scala-lang.org/overviews/reflection/annotations-names-scopes.html Reflection Guide: Annotations, Names, Scopes, and More]] * @@ -30,14 +30,14 @@ import scala.language.implicitConversions */ trait Names { /** An implicit conversion from String to TermName. - * Enables an alternative notation `"map": TermName` as opposed to `newTermName("map")`. + * Enables an alternative notation `"map": TermName` as opposed to `TermName("map")`. * @group Names */ @deprecated("Use explicit `TermName(s)` instead", "2.11.0") implicit def stringToTermName(s: String): TermName = TermName(s) /** An implicit conversion from String to TypeName. - * Enables an alternative notation `"List": TypeName` as opposed to `newTypeName("List")`. + * Enables an alternative notation `"List": TypeName` as opposed to `TypeName("List")`. * @group Names */ @deprecated("Use explicit `TypeName(s)` instead", "2.11.0") @@ -72,10 +72,10 @@ trait Names { * @group API */ abstract class NameApi { - /** Checks wether the name is a term name */ + /** Checks whether the name is a term name */ def isTermName: Boolean - /** Checks wether the name is a type name */ + /** Checks whether the name is a type name */ def isTypeName: Boolean /** Returns a term name that wraps the same string as `this` */ diff --git a/src/reflect/scala/reflect/api/Printers.scala b/src/reflect/scala/reflect/api/Printers.scala index 92ae6d8b4436..01b9759c7002 100644 --- a/src/reflect/scala/reflect/api/Printers.scala +++ b/src/reflect/scala/reflect/api/Printers.scala @@ -46,15 +46,15 @@ import java.io.{ PrintWriter, StringWriter } * {{{ * scala> showRaw(tree) * res1: String = Block(List( - * ClassDef(Modifiers(FINAL), newTypeName("C"), List(), Template( - * List(Ident(newTypeName("AnyRef"))), + * ClassDef(Modifiers(FINAL), TypeName("C"), List(), Template( + * List(Ident(TypeName("AnyRef"))), * noSelfType, * List( * DefDef(Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree(), * Block(List( * Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())), * Literal(Constant(())))), - * DefDef(Modifiers(), newTermName("x"), List(), List(), TypeTree(), + * DefDef(Modifiers(), TermName("x"), List(), List(), TypeTree(), * Literal(Constant(2))))))), * Literal(Constant(()))) * }}} @@ -70,23 +70,23 @@ import java.io.{ PrintWriter, StringWriter } * * scala> showRaw(cm.mkToolBox().typecheck(tree), printTypes = true) * res2: String = Block[1](List( - * ClassDef[2](Modifiers(FINAL), newTypeName("C"), List(), Template[3]( - * List(Ident[4](newTypeName("AnyRef"))), + * ClassDef[2](Modifiers(FINAL), TypeName("C"), List(), Template[3]( + * List(Ident[4](TypeName("AnyRef"))), * noSelfType, * List( * 
DefDef[2](Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree[3](), * Block[1](List( - * Apply[4](Select[5](Super[6](This[3](newTypeName("C")), tpnme.EMPTY), ...))), + * Apply[4](Select[5](Super[6](This[3](TypeName("C")), tpnme.EMPTY), ...))), * Literal[1](Constant(())))), - * DefDef[2](Modifiers(), newTermName("x"), List(), List(), TypeTree[7](), + * DefDef[2](Modifiers(), TermName("x"), List(), List(), TypeTree[7](), * Literal[8](Constant(2))))))), * Literal[1](Constant(()))) * [1] TypeRef(ThisType(scala), scala.Unit, List()) * [2] NoType - * [3] TypeRef(NoPrefix, newTypeName("C"), List()) + * [3] TypeRef(NoPrefix, TypeName("C"), List()) * [4] TypeRef(ThisType(java.lang), java.lang.Object, List()) * [5] MethodType(List(), TypeRef(ThisType(java.lang), java.lang.Object, List())) - * [6] SuperType(ThisType(newTypeName("C")), TypeRef(... java.lang.Object ...)) + * [6] SuperType(ThisType(TypeName("C")), TypeRef(... java.lang.Object ...)) * [7] TypeRef(ThisType(scala), scala.Int, List()) * [8] ConstantType(Constant(2)) * }}} @@ -112,10 +112,10 @@ import java.io.{ PrintWriter, StringWriter } * // showRaw has already been discussed above * scala> showRaw(tpe) * res1: String = RefinedType( - * List(TypeRef(ThisType(scala), newTypeName("AnyRef"), List())), + * List(TypeRef(ThisType(scala), TypeName("AnyRef"), List())), * Scope( - * newTermName("x"), - * newTermName("y"))) + * TermName("x"), + * TermName("y"))) * }}} * * `printIds` and/or `printKinds` can additionally be supplied as arguments in a call to @@ -124,10 +124,10 @@ import java.io.{ PrintWriter, StringWriter } * {{{ * scala> showRaw(tpe, printIds = true, printKinds = true) * res2: String = RefinedType( - * List(TypeRef(ThisType(scala#2043#PK), newTypeName("AnyRef")#691#TPE, List())), + * List(TypeRef(ThisType(scala#2043#PK), TypeName("AnyRef")#691#TPE, List())), * Scope( - * newTermName("x")#2540#METH, - * newTermName("y")#2541#GET)) + * TermName("x")#2540#METH, + * TermName("y")#2541#GET)) * }}} * * For more details about `Printer`s and other aspects of Scala reflection, see the diff --git a/src/reflect/scala/reflect/api/Quasiquotes.scala b/src/reflect/scala/reflect/api/Quasiquotes.scala index 0065926e3b39..eaae05bed541 100644 --- a/src/reflect/scala/reflect/api/Quasiquotes.scala +++ b/src/reflect/scala/reflect/api/Quasiquotes.scala @@ -7,13 +7,13 @@ trait Quasiquotes { self: Universe => * that are also known as quasiquotes. With their help you can easily manipulate * Scala reflection ASTs. * - * @see [[http://docs.scala-lang.org/overviews/macros/quasiquotes.html]] + * @see [[http://docs.scala-lang.org/overviews/quasiquotes/intro.html]] */ implicit class Quasiquote(ctx: StringContext) { protected trait api { // implementation is hardwired to `dispatch` method of `scala.tools.reflect.quasiquotes.Quasiquotes` // using the mechanism implemented in `scala.tools.reflect.FastTrack` - def apply[T](args: T*): Tree = macro ??? + def apply[A >: Any](args: A*): Tree = macro ??? def unapply(scrutinee: Any): Any = macro ??? 
} object q extends api diff --git a/src/reflect/scala/reflect/api/StandardDefinitions.scala b/src/reflect/scala/reflect/api/StandardDefinitions.scala index 524b7ea14b6a..bf9cf5e3341d 100644 --- a/src/reflect/scala/reflect/api/StandardDefinitions.scala +++ b/src/reflect/scala/reflect/api/StandardDefinitions.scala @@ -128,7 +128,7 @@ trait StandardDefinitions { * scala> import scala.reflect.runtime.universe._ * import scala.reflect.runtime.universe._ * - * scala> val m = typeOf[C].member(newTermName("m")).asMethod + * scala> val m = typeOf[C].member(TermName("m")).asMethod * m: reflect.runtime.universe.MethodSymbol = method m * * scala> m.params(0)(0).info @@ -156,7 +156,7 @@ trait StandardDefinitions { * scala> import scala.reflect.runtime.universe._ * import scala.reflect.runtime.universe._ * - * scala> val m = typeOf[C].member(newTermName("m")).asMethod + * scala> val m = typeOf[C].member(TermName("m")).asMethod * m: reflect.runtime.universe.MethodSymbol = method m * * scala> m.params(0)(0).info @@ -181,7 +181,7 @@ trait StandardDefinitions { * scala> import scala.reflect.runtime.universe._ * import scala.reflect.runtime.universe._ * - * scala> val m = typeOf[C].member(newTermName("m")).asMethod + * scala> val m = typeOf[C].member(TermName("m")).asMethod * m: reflect.runtime.universe.MethodSymbol = method m * * scala> m.params(0)(0).info diff --git a/src/reflect/scala/reflect/api/StandardLiftables.scala b/src/reflect/scala/reflect/api/StandardLiftables.scala index af11de46cefe..66ac62cc9e6e 100644 --- a/src/reflect/scala/reflect/api/StandardLiftables.scala +++ b/src/reflect/scala/reflect/api/StandardLiftables.scala @@ -27,6 +27,7 @@ trait StandardLiftables { self: Universe => callScala(stdnme.Symbol)(Literal(Constant(v.name)) :: Nil) } + implicit def liftTree[T <: Tree]: Liftable[T] = Liftable { identity } implicit def liftName[T <: Name]: Liftable[T] = Liftable { name => Ident(name) } implicit def liftExpr[T <: Expr[_]]: Liftable[T] = Liftable { expr => expr.tree } implicit def liftType[T <: Type]: Liftable[T] = Liftable { tpe => TypeTree(tpe) } diff --git a/src/reflect/scala/reflect/api/Symbols.scala b/src/reflect/scala/reflect/api/Symbols.scala index a5a50f10888b..c01029d06745 100644 --- a/src/reflect/scala/reflect/api/Symbols.scala +++ b/src/reflect/scala/reflect/api/Symbols.scala @@ -27,7 +27,7 @@ package api * scala> class C[T] { def test[U](x: T)(y: U): Int = ??? } * defined class C * - * scala> val test = typeOf[C[Int]].member(newTermName("test")).asMethod + * scala> val test = typeOf[C[Int]].member(TermName("test")).asMethod * test: reflect.runtime.universe.MethodSymbol = method test * * scala> test.info @@ -260,6 +260,9 @@ trait Symbols { self: Universe => * with an object definition (module class in scala compiler parlance). * If yes, `isType` is also guaranteed to be true. * + * Note to compiler developers: During the "mixin" phase, trait implementation class symbols + * receive the `lateMODULE` flag, hence `isImplClass && isModuleClass` becomes true. + * * @group Tests */ def isModuleClass: Boolean = false @@ -336,7 +339,7 @@ trait Symbols { self: Universe => @deprecated("Use `overrides` instead", "2.11.0") def allOverriddenSymbols: List[Symbol] - /** Returns all symbols overriden by this symbol. + /** Returns all symbols overridden by this symbol. * * @group Basics */ @@ -919,6 +922,14 @@ trait Symbols { self: Universe => * For a Scala package class, NoSymbol. * For a Java class, NoSymbol. 
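 *
 *  For illustration (runtime universe; any class symbol will do):
 *  {{{
 *  import scala.reflect.runtime.universe._
 *  typeOf[Some[Int]].typeSymbol.asClass.primaryConstructor   // the symbol of Some's constructor
 *  }}}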
* + * Known issues: Due to SI-8367, primaryConstructor may return unexpected results + * when called for Java classes (for some vague definition of a "Java class", which apparently + * not only includes javac-produced classfiles, but also consists of classes defined in + * Scala programs under the java.lang package). What's even worse, for some Java classes + * we can't even guarantee stability of the return value - depending on your classloader configuration + * and/or JDK version you might get different primaryConstructor for the same ClassSymbol. + * We have logged these issues at SI-8193. + * * @group Class */ // TODO: SI-8193 I think we should only return a non-empty symbol if called for Scala classes diff --git a/src/reflect/scala/reflect/api/TreeCreator.scala b/src/reflect/scala/reflect/api/TreeCreator.scala index 027c84095572..000eaa1aa614 100644 --- a/src/reflect/scala/reflect/api/TreeCreator.scala +++ b/src/reflect/scala/reflect/api/TreeCreator.scala @@ -2,12 +2,12 @@ package scala package reflect package api -/** This is an internal implementation class. +/** A mirror-aware factory for trees. * * This class is used internally by Scala Reflection, and is not recommended for use in client code. * - * @group ReflectionAPI + * @group ReflectionAPI */ -abstract class TreeCreator { +abstract class TreeCreator extends Serializable { def apply[U <: Universe with Singleton](m: scala.reflect.api.Mirror[U]): U # Tree } diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala index ff8926651b97..9ecd87c17ecf 100644 --- a/src/reflect/scala/reflect/api/Trees.scala +++ b/src/reflect/scala/reflect/api/Trees.scala @@ -33,7 +33,7 @@ package api * * The following creates an AST representing `print("Hello World")`: * {{{ - * Apply(Select(Select(This(newTypeName("scala")), newTermName("Predef")), newTermName("print")), List(Literal(Constant("Hello World")))) + * Apply(Select(Select(This(TypeName("scala")), TermName("Predef")), TermName("print")), List(Literal(Constant("Hello World")))) * }}} * * The following creates an AST from a literal 5, and then uses `showRaw` to print it in a readable format. @@ -158,7 +158,7 @@ trait Trees { self: Universe => /** Do all parts of this tree satisfy predicate `p`? */ def forAll(p: Tree => Boolean): Boolean - /** Tests whether two trees are structurall equal. + /** Tests whether two trees are structurally equal. * Note that `==` on trees is reference equality. */ def equalsStructure(that : Tree): Boolean @@ -1098,11 +1098,11 @@ trait Trees { self: Universe => * // a dummy node that carries the type of unapplication to patmat * // the <unapply-selector> here doesn't have an underlying symbol * // it only has a type assigned, therefore after `untypecheck` this tree is no longer typeable - * Apply(Select(Ident(Foo), newTermName("unapply")), List(Ident(newTermName("<unapply-selector>")))), * // arguments of the unapply => nothing synthetic here - * List(Bind(newTermName("x"), Ident(nme.WILDCARD)))), * EmptyTree, - * Ident(newTermName("x"))))) + * Apply(Select(Ident(Foo), TermName("unapply")), List(Ident(TermName("<unapply-selector>")))), * // arguments of the unapply => nothing synthetic here + * List(Bind(TermName("x"), Ident(nme.WILDCARD)))), * EmptyTree, + * Ident(TermName("x"))))) * }}} * * Introduced by typer. Eliminated by compiler phases patmat (in the new pattern matcher of 2.10) or explicitouter (in the old pre-2.10 pattern matcher). 
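The serialization-related changes in this patch (the `readResolve`-based `SerializedExpr` above, plus `TreeCreator`, `TypeCreator` and `SerializedTypeTag` below) are what let exprs and type tags survive Java serialization; on deserialization the payload is re-bound to a mirror for the current context classloader. A minimal sketch of the intended round trip (a plain JVM setting is assumed, and the value names are illustrative):
{{{
import scala.reflect.runtime.universe._
import java.io._

val tag = typeTag[List[Int]]
val bos = new ByteArrayOutputStream()
val oos = new ObjectOutputStream(bos)
oos.writeObject(tag)
oos.close()

val in   = new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray))
val back = in.readObject().asInstanceOf[TypeTag[List[Int]]]
back.tpe   // List[Int], rebuilt against runtimeMirror(Thread.currentThread.getContextClassLoader)
}}}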
diff --git a/src/reflect/scala/reflect/api/TypeCreator.scala b/src/reflect/scala/reflect/api/TypeCreator.scala index 37fff90b4390..cbd55b942865 100644 --- a/src/reflect/scala/reflect/api/TypeCreator.scala +++ b/src/reflect/scala/reflect/api/TypeCreator.scala @@ -8,6 +8,6 @@ package api * * @group ReflectionAPI */ -abstract class TypeCreator { +abstract class TypeCreator extends Serializable { def apply[U <: Universe with Singleton](m: scala.reflect.api.Mirror[U]): U # Type } diff --git a/src/reflect/scala/reflect/api/TypeTags.scala b/src/reflect/scala/reflect/api/TypeTags.scala index 1dfc84be6917..7db375ca61f9 100644 --- a/src/reflect/scala/reflect/api/TypeTags.scala +++ b/src/reflect/scala/reflect/api/TypeTags.scala @@ -9,6 +9,7 @@ package api import java.lang.{ Class => jClass } import scala.language.implicitConversions +import java.io.ObjectStreamException /** * A `TypeTag[T]` encapsulates the runtime type representation of some type `T`. @@ -233,6 +234,7 @@ trait TypeTags { self: Universe => val otherMirror1 = otherMirror.asInstanceOf[scala.reflect.api.Mirror[otherMirror.universe.type]] otherMirror.universe.WeakTypeTag[T](otherMirror1, tpec) } + @throws(classOf[ObjectStreamException]) private def writeReplace(): AnyRef = new SerializedTypeTag(tpec, concrete = false) } @@ -293,10 +295,13 @@ trait TypeTags { self: Universe => val otherMirror1 = otherMirror.asInstanceOf[scala.reflect.api.Mirror[otherMirror.universe.type]] otherMirror.universe.TypeTag[T](otherMirror1, tpec) } + @throws(classOf[ObjectStreamException]) private def writeReplace(): AnyRef = new SerializedTypeTag(tpec, concrete = true) } /* @group TypeTags */ + // This class only exists to silence MIMA complaining about a binary incompatibility. + // Only the top-level class (api.PredefTypeCreator) should be used. private class PredefTypeCreator[T](copyIn: Universe => Universe#TypeTag[T]) extends TypeCreator { def apply[U <: Universe with Singleton](m: scala.reflect.api.Mirror[U]): U # Type = { copyIn(m.universe).asInstanceOf[U # TypeTag[T]].tpe @@ -304,8 +309,9 @@ trait TypeTags { self: Universe => } /* @group TypeTags */ - private class PredefTypeTag[T](_tpe: Type, copyIn: Universe => Universe#TypeTag[T]) extends TypeTagImpl[T](rootMirror, new PredefTypeCreator(copyIn)) { + private class PredefTypeTag[T](_tpe: Type, copyIn: Universe => Universe#TypeTag[T]) extends TypeTagImpl[T](rootMirror, new api.PredefTypeCreator(copyIn)) { override lazy val tpe: Type = _tpe + @throws(classOf[ObjectStreamException]) private def writeReplace(): AnyRef = new SerializedTypeTag(tpec, concrete = true) } @@ -341,20 +347,27 @@ trait TypeTags { self: Universe => def symbolOf[T: WeakTypeTag]: TypeSymbol } +// This class should be final, but we can't do that in Scala 2.11.x without breaking +// binary incompatibility. 
+@SerialVersionUID(1L) private[scala] class SerializedTypeTag(var tpec: TypeCreator, var concrete: Boolean) extends Serializable { - private def writeObject(out: java.io.ObjectOutputStream): Unit = { - out.writeObject(tpec) - out.writeBoolean(concrete) - } - - private def readObject(in: java.io.ObjectInputStream): Unit = { - tpec = in.readObject().asInstanceOf[TypeCreator] - concrete = in.readBoolean() + import scala.reflect.runtime.universe.{TypeTag, WeakTypeTag, runtimeMirror} + @throws(classOf[ObjectStreamException]) + private def readResolve(): AnyRef = { + val loader: ClassLoader = try { + Thread.currentThread().getContextClassLoader() + } catch { + case se: SecurityException => null + } + val m = runtimeMirror(loader) + if (concrete) TypeTag(m, tpec) + else WeakTypeTag(m, tpec) } +} - private def readResolve(): AnyRef = { - import scala.reflect.runtime.universe._ - if (concrete) TypeTag(rootMirror, tpec) - else WeakTypeTag(rootMirror, tpec) +/* @group TypeTags */ +private class PredefTypeCreator[T](copyIn: Universe => Universe#TypeTag[T]) extends TypeCreator { + def apply[U <: Universe with Singleton](m: scala.reflect.api.Mirror[U]): U # Type = { + copyIn(m.universe).asInstanceOf[U # TypeTag[T]].tpe } } diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala index 19e9eef851f0..6863cdfd82c4 100644 --- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala +++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala @@ -15,7 +15,6 @@ import scala.language.postfixOps /** AnnotationInfo and its helpers */ trait AnnotationInfos extends api.Annotations { self: SymbolTable => import definitions._ - import treeInfo._ // Common annotation code between Symbol and Type. // For methods altering the annotation list, on Symbol it mutates @@ -302,7 +301,7 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => */ def defaultTargets = symbol.annotations map (_.symbol) filter isMetaAnnotation // Test whether the typeSymbol of atp conforms to the given class. - def matches(clazz: Symbol) = symbol isNonBottomSubClass clazz + def matches(clazz: Symbol) = !symbol.isInstanceOf[StubSymbol] && (symbol isNonBottomSubClass clazz) // All subtrees of all args are considered. 
def hasArgWhich(p: Tree => Boolean) = args exists (_ exists p) @@ -388,11 +387,11 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => case Literal(const) => LiteralAnnotArg(const) case Apply(ArrayModule, args) => ArrayAnnotArg(args map encodeJavaArg toArray) case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) => NestedAnnotArg(treeToAnnotation(arg)) - case _ => throw new Exception("unexpected java argument shape $arg: literals, arrays and nested annotations are supported") + case _ => throw new Exception(s"unexpected java argument shape $arg: literals, arrays and nested annotations are supported") } def encodeJavaArgs(args: List[Tree]): List[(Name, ClassfileAnnotArg)] = args match { case AssignOrNamedArg(Ident(name), arg) :: rest => (name, encodeJavaArg(arg)) :: encodeJavaArgs(rest) - case arg :: rest => throw new Exception("unexpected java argument shape $arg: only AssignOrNamedArg trees are supported") + case arg :: rest => throw new Exception(s"unexpected java argument shape $arg: only AssignOrNamedArg trees are supported") case Nil => Nil } val atp = tpt.tpe diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala index 0ca861171952..54f64153c179 100644 --- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala +++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala @@ -144,7 +144,7 @@ trait BaseTypeSeqs { "\n --- because ---\n"+msg) } - /** A merker object for a base type sequence that's no yet computed. + /** A marker object for a base type sequence that's no yet computed. * used to catch inheritance cycles */ val undetBaseTypeSeq: BaseTypeSeq = newBaseTypeSeq(List(), Array()) @@ -152,7 +152,7 @@ trait BaseTypeSeqs { /** Create a base type sequence consisting of a single type */ def baseTypeSingletonSeq(tp: Type): BaseTypeSeq = newBaseTypeSeq(List(), Array(tp)) - /** Create the base type sequence of a compound type wuth given tp.parents */ + /** Create the base type sequence of a compound type with given tp.parents */ def compoundBaseTypeSeq(tp: Type): BaseTypeSeq = { val tsym = tp.typeSymbol val parents = tp.parents diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 558e1aa61137..9f4ec3e6d1bf 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -410,7 +410,8 @@ trait Definitions extends api.StandardDefinitions { else if (isScalaRepeatedParamType(tp)) elementExtract(RepeatedParamClass, tp) orElse tp else tp ) - def repeatedToSingle(tp: Type): Type = elementExtract(RepeatedParamClass, tp) orElse tp + def repeatedToSingle(tp: Type): Type = elementExtract(RepeatedParamClass, tp) orElse elementExtract(JavaRepeatedParamClass, tp) orElse tp + // We don't need to deal with JavaRepeatedParamClass here, as `repeatedToSeq` is only called in the patmat translation for Scala sources. 
def repeatedToSeq(tp: Type): Type = elementTransform(RepeatedParamClass, tp)(seqType) orElse tp def seqToRepeated(tp: Type): Type = elementTransform(SeqClass, tp)(scalaRepeatedType) orElse tp def isReferenceArray(tp: Type) = elementTest(ArrayClass, tp)(_ <:< AnyRefTpe) @@ -489,8 +490,10 @@ trait Definitions extends api.StandardDefinitions { lazy val TypeCreatorClass = getClassIfDefined("scala.reflect.api.TypeCreator") // defined in scala-reflect.jar, so we need to be careful lazy val TreeCreatorClass = getClassIfDefined("scala.reflect.api.TreeCreator") // defined in scala-reflect.jar, so we need to be careful - lazy val BlackboxContextClass = getClassIfDefined("scala.reflect.macros.blackbox.Context") // defined in scala-reflect.jar, so we need to be careful - lazy val WhiteboxContextClass = getClassIfDefined("scala.reflect.macros.whitebox.Context") // defined in scala-reflect.jar, so we need to be careful + private def Context_210 = if (settings.isScala211) NoSymbol else getClassIfDefined("scala.reflect.macros.Context") // needed under -Xsource:2.10 + lazy val BlackboxContextClass = getClassIfDefined("scala.reflect.macros.blackbox.Context").orElse(Context_210) // defined in scala-reflect.jar, so we need to be careful + + lazy val WhiteboxContextClass = getClassIfDefined("scala.reflect.macros.whitebox.Context").orElse(Context_210) // defined in scala-reflect.jar, so we need to be careful def MacroContextPrefix = BlackboxContextClass.map(sym => getMemberMethod(sym, nme.prefix)) def MacroContextPrefixType = BlackboxContextClass.map(sym => getTypeMember(sym, tpnme.PrefixType)) def MacroContextUniverse = BlackboxContextClass.map(sym => getMemberMethod(sym, nme.universe)) @@ -501,7 +504,9 @@ trait Definitions extends api.StandardDefinitions { lazy val StringContextClass = requiredClass[scala.StringContext] - lazy val QuasiquoteClass = if (ApiUniverseClass != NoSymbol) getMember(ApiUniverseClass, tpnme.Quasiquote) else NoSymbol + // SI-8392 a reflection universe on classpath may not have + // quasiquotes, if e.g. crosstyping with -Xsource on + lazy val QuasiquoteClass = if (ApiUniverseClass != NoSymbol) getMemberIfDefined(ApiUniverseClass, tpnme.Quasiquote) else NoSymbol lazy val QuasiquoteClass_api = if (QuasiquoteClass != NoSymbol) getMember(QuasiquoteClass, tpnme.api) else NoSymbol lazy val QuasiquoteClass_api_apply = if (QuasiquoteClass_api != NoSymbol) getMember(QuasiquoteClass_api, nme.apply) else NoSymbol lazy val QuasiquoteClass_api_unapply = if (QuasiquoteClass_api != NoSymbol) getMember(QuasiquoteClass_api, nme.unapply) else NoSymbol @@ -509,6 +514,8 @@ trait Definitions extends api.StandardDefinitions { lazy val ScalaSignatureAnnotation = requiredClass[scala.reflect.ScalaSignature] lazy val ScalaLongSignatureAnnotation = requiredClass[scala.reflect.ScalaLongSignature] + lazy val MethodHandle = getClassIfDefined("java.lang.invoke.MethodHandle") + // Option classes lazy val OptionClass: ClassSymbol = requiredClass[Option[_]] lazy val OptionModule: ModuleSymbol = requiredModule[scala.Option.type] @@ -648,6 +655,7 @@ trait Definitions extends api.StandardDefinitions { // tends to change the course of events by forcing types. 
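A note on the BlackboxContextClass/WhiteboxContextClass fallback above: active under -Xsource:2.10, it lets the compiler keep recognizing macro implementations written against the 2.10-era scala.reflect.macros.Context. Roughly the shape of code this concerns (a hedged sketch; OldStyleMacro and hello are made-up names, and on 2.11 scala.reflect.macros.Context is a deprecated alias for blackbox.Context):

```scala
import scala.language.experimental.macros
import scala.reflect.macros.Context // 2.10-era context type

object OldStyleMacro {
  // a 2.10-style macro def / macro impl pair
  def hello: String = macro helloImpl

  def helloImpl(c: Context): c.Expr[String] = {
    import c.universe._
    c.Expr[String](Literal(Constant("hello from a 2.10-style macro")))
  }
}
```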
def isFunctionType(tp: Type) = isFunctionTypeDirect(tp.dealiasWiden) def isTupleType(tp: Type) = isTupleTypeDirect(tp.dealiasWiden) + def tupleComponents(tp: Type) = tp.dealiasWiden.typeArgs lazy val ProductRootClass: ClassSymbol = requiredClass[scala.Product] def Product_productArity = getMemberMethod(ProductRootClass, nme.productArity) @@ -784,7 +792,7 @@ trait Definitions extends api.StandardDefinitions { * The class defining the method is a supertype of `tp` that * has a public no-arg primary constructor. */ - def samOf(tp: Type): Symbol = { + def samOf(tp: Type): Symbol = if (!settings.Xexperimental) NoSymbol else { // if tp has a constructor, it must be public and must not take any arguments // (not even an implicit argument list -- to keep it simple for now) val tpSym = tp.typeSymbol @@ -831,12 +839,18 @@ trait Definitions extends api.StandardDefinitions { def typeOfMemberNamedHead(tp: Type) = typeArgOfBaseTypeOr(tp, SeqClass)(resultOfMatchingMethod(tp, nme.head)()) def typeOfMemberNamedApply(tp: Type) = typeArgOfBaseTypeOr(tp, SeqClass)(resultOfMatchingMethod(tp, nme.apply)(IntTpe)) def typeOfMemberNamedDrop(tp: Type) = typeArgOfBaseTypeOr(tp, SeqClass)(resultOfMatchingMethod(tp, nme.drop)(IntTpe)) - def typesOfSelectors(tp: Type) = getterMemberTypes(tp, productSelectors(tp)) + def typesOfSelectors(tp: Type) = + if (isTupleType(tp)) tupleComponents(tp) + else getterMemberTypes(tp, productSelectors(tp)) + // SI-8128 Still using the type argument of the base type at Seq/Option if this is an old-style (2.10 compatible) // extractor to limit exposure to regressions like the reported problem with existentials. // TODO fix the existential problem in the general case, see test/pending/pos/t8128.scala private def typeArgOfBaseTypeOr(tp: Type, baseClass: Symbol)(or: => Type): Type = (tp baseType baseClass).typeArgs match { - case x :: Nil => x + case x :: Nil => + val x1 = x + val x2 = repackExistential(x1) + x2 case _ => or } @@ -893,12 +907,14 @@ trait Definitions extends api.StandardDefinitions { ) } - def EnumType(sym: Symbol) = + def EnumType(sym: Symbol) = { // given (in java): "class A { enum E { VAL1 } }" // - sym: the symbol of the actual enumeration value (VAL1) // - .owner: the ModuleClassSymbol of the enumeration (object E) // - .linkedClassOfClass: the ClassSymbol of the enumeration (class E) - sym.owner.linkedClassOfClass.tpe + // SI-6613 Subsequent runs of the resident compiler demand the phase discipline here. + enteringPhaseNotLaterThan(picklerPhase)(sym.owner.linkedClassOfClass).tpe + } /** Given a class symbol C with type parameters T1, T2, ... Tn * which have upper/lower bounds LB1/UB1, LB1/UB2, ..., LBn/UBn, @@ -917,7 +933,7 @@ trait Definitions extends api.StandardDefinitions { // members of class scala.Any - // TODO these aren't final! They are now overriden in AnyRef/Object. Prior to the fix + // TODO these aren't final! They are now overridden in AnyRef/Object. Prior to the fix // for SI-8129, they were actually *overloaded* by the members in AnyRef/Object. // We should unfinalize these, override in AnyValClass, and make the overrides final. // Refchecks never actually looks at these, so its just for consistency. 
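The samOf change above gates single-abstract-method (SAM) adaptation behind -Xexperimental: without the flag, samOf answers NoSymbol and no adaptation is attempted. A small sketch of what the flag enables on 2.11 (Adder and SamDemo are illustrative names):

```scala
// compile with: scalac -Xexperimental SamDemo.scala
trait Adder { def add(x: Int): Int }

object SamDemo {
  def main(args: Array[String]): Unit = {
    // with -Xexperimental the function literal is adapted to the SAM type Adder;
    // under default settings this line does not compile on 2.11
    val plusOne: Adder = (x: Int) => x + 1
    println(plusOne.add(41)) // 42
  }
}
```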
@@ -1076,6 +1092,10 @@ trait Definitions extends api.StandardDefinitions { lazy val ClassfileAnnotationClass = requiredClass[scala.annotation.ClassfileAnnotation] lazy val StaticAnnotationClass = requiredClass[scala.annotation.StaticAnnotation] + // Java retention annotations + lazy val AnnotationRetentionAttr = requiredClass[java.lang.annotation.Retention] + lazy val AnnotationRetentionPolicyAttr = requiredClass[java.lang.annotation.RetentionPolicy] + // Annotations lazy val BridgeClass = requiredClass[scala.annotation.bridge] lazy val ElidableMethodClass = requiredClass[scala.annotation.elidable] @@ -1100,7 +1120,7 @@ trait Definitions extends api.StandardDefinitions { lazy val ScalaInlineClass = requiredClass[scala.inline] lazy val ScalaNoInlineClass = requiredClass[scala.noinline] lazy val SerialVersionUIDAttr = requiredClass[scala.SerialVersionUID] - lazy val SerialVersionUIDAnnotation = AnnotationInfo(SerialVersionUIDAttr.tpe, List(Literal(Constant(0))), List()) + lazy val SerialVersionUIDAnnotation = AnnotationInfo(SerialVersionUIDAttr.tpe, List(), List(nme.value -> LiteralAnnotArg(Constant(0)))) lazy val SpecializedClass = requiredClass[scala.specialized] lazy val ThrowsClass = requiredClass[scala.throws[_]] lazy val TransientAttr = requiredClass[scala.transient] @@ -1421,6 +1441,10 @@ trait Definitions extends api.StandardDefinitions { lazy val isUnbox = unboxMethod.values.toSet[Symbol] lazy val isBox = boxMethod.values.toSet[Symbol] + lazy val Boolean_and = definitions.Boolean_and + lazy val Boolean_or = definitions.Boolean_or + lazy val Boolean_not = definitions.Boolean_not + lazy val Option_apply = getMemberMethod(OptionModule, nme.apply) lazy val List_apply = DefinitionsClass.this.List_apply @@ -1486,6 +1510,9 @@ trait Definitions extends api.StandardDefinitions { lazy val PartialManifestClass = getTypeMember(ReflectPackage, tpnme.ClassManifest) lazy val ManifestSymbols = Set[Symbol](PartialManifestClass, FullManifestClass, OptManifestClass) + + def isPolymorphicSignature(sym: Symbol) = PolySigMethods(sym) + private lazy val PolySigMethods: Set[Symbol] = Set[Symbol](MethodHandle.info.decl(sn.Invoke), MethodHandle.info.decl(sn.InvokeExact)).filter(_.exists) } } } diff --git a/src/reflect/scala/reflect/internal/Depth.scala b/src/reflect/scala/reflect/internal/Depth.scala index 357abf765f0b..a330e0accb83 100644 --- a/src/reflect/scala/reflect/internal/Depth.scala +++ b/src/reflect/scala/reflect/internal/Depth.scala @@ -21,8 +21,20 @@ final class Depth private (val depth: Int) extends AnyVal with Ordered[Depth] { object Depth { // A don't care value for the depth parameter in lubs/glbs and related operations. - final val AnyDepth = new Depth(Int.MinValue) + // When passed this value, the recursion budget will be inferred from the shape of + // the `typeDepth` of the list of types. + final val AnyDepthValue = -3 + final val AnyDepth = new Depth(AnyDepthValue) + final val Zero = new Depth(0) - @inline final def apply(depth: Int): Depth = if (depth < 0) AnyDepth else new Depth(depth) + // SI-9018: A negative depth is used to signal that we have breached the recursion limit. + // The LUB/GLB implementation will then truncate to Any/Nothing. + // + // We only really need one of these, but we allow representation of Depth(-1) and Depth(-2) + // to mimic the historical choice of 2.10.4. 
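Side note on the SerialVersionUIDAnnotation change above: the AnnotationInfo the compiler synthesizes for a default UID of 0 is now encoded as a named classfile-annotation argument rather than a positional literal, matching how an explicit @SerialVersionUID reaches the bytecode. For reference, how a user-specified UID surfaces at runtime (Payload is an illustrative class name):

```scala
import java.io.ObjectStreamClass

@SerialVersionUID(42L)
class Payload(val x: Int) extends Serializable

object SerialVersionUIDDemo {
  def main(args: Array[String]): Unit = {
    // the annotation is written to the classfile and picked up by Java serialization
    println(ObjectStreamClass.lookup(classOf[Payload]).getSerialVersionUID) // 42
  }
}
```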
+ @inline final def apply(depth: Int): Depth = { + if (depth < AnyDepthValue) AnyDepth + else new Depth(depth) + } } diff --git a/src/reflect/scala/reflect/internal/Internals.scala b/src/reflect/scala/reflect/internal/Internals.scala index e9916cf7d11a..ad4cec5b4db5 100644 --- a/src/reflect/scala/reflect/internal/Internals.scala +++ b/src/reflect/scala/reflect/internal/Internals.scala @@ -9,7 +9,6 @@ import scala.ref.WeakReference import scala.reflect.api.Universe import scala.reflect.macros.Attachments import scala.reflect.internal.util.FreshNameCreator -import scala.reflect.internal.Flags._ import scala.reflect.internal.util.ListOfNil trait Internals extends api.Internals { @@ -129,7 +128,7 @@ trait Internals extends api.Internals { def typeBounds(lo: Type, hi: Type): TypeBounds = self.TypeBounds(lo, hi) def boundedWildcardType(bounds: TypeBounds): BoundedWildcardType = self.BoundedWildcardType(bounds) - def subpatterns(tree: Tree): Option[List[Tree]] = tree.attachments.get[SubpatternsAttachment].map(_.patterns.map(_.duplicate)) + def subpatterns(tree: Tree): Option[List[Tree]] = tree.attachments.get[SubpatternsAttachment].map(_.patterns.map(duplicateAndKeepPositions)) type Decorators = MacroDecoratorApi lazy val decorators: Decorators = new MacroDecoratorApi { diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala index 7065a8cd6d3a..0f0f16574ee6 100644 --- a/src/reflect/scala/reflect/internal/Mirrors.scala +++ b/src/reflect/scala/reflect/internal/Mirrors.scala @@ -30,6 +30,8 @@ trait Mirrors extends api.Mirrors { val EmptyPackageClass: ClassSymbol val EmptyPackage: ModuleSymbol + def symbolOf[T: universe.WeakTypeTag]: universe.TypeSymbol = universe.weakTypeTag[T].in(this).tpe.typeSymbolDirect.asType + def findMemberFromRoot(fullName: Name): Symbol = { val segs = nme.segments(fullName.toString, fullName.isTermName) if (segs.isEmpty) NoSymbol @@ -178,7 +180,7 @@ trait Mirrors extends api.Mirrors { def getPackageObject(fullname: String): ModuleSymbol = getPackageObject(newTermName(fullname)) def getPackageObject(fullname: TermName): ModuleSymbol = - (getPackage(fullname).info member nme.PACKAGE) match { + (getPackage(fullname).packageObject) match { case x: ModuleSymbol => x case _ => MissingRequirementError.notFound("package object " + fullname) } @@ -189,15 +191,6 @@ trait Mirrors extends api.Mirrors { def getPackageObjectIfDefined(fullname: TermName): Symbol = wrapMissing(getPackageObject(fullname)) - final def getPackageObjectWithMember(pre: Type, sym: Symbol): Symbol = { - // The owner of a symbol which requires package qualification may be the - // package object iself, but it also could be any superclass of the package - // object. In the latter case, we must go through the qualifier's info - // to obtain the right symbol. - if (sym.owner.isModuleClass) sym.owner.sourceModule // fast path, if the member is owned by a module class, that must be linked to the package object - else pre member nme.PACKAGE // otherwise we have to findMember - } - override def staticPackage(fullname: String): ModuleSymbol = try ensurePackageSymbol(fullname.toString, getModuleOrClass(newTermNameCached(fullname)), allowModules = false) catch { case mre: MissingRequirementError => throw new ScalaReflectionException(mre.msg) } @@ -275,7 +268,7 @@ trait Mirrors extends api.Mirrors { // TODO - having these as objects means they elude the attempt to // add synchronization in SynchronizedSymbols. 
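The symbolOf addition above (mirroring the one declared on TypeTags earlier in this patch) gives a direct way to ask for a type's symbol. A quick usage sketch:

```scala
import scala.reflect.runtime.universe._

object SymbolOfDemo {
  def main(args: Array[String]): Unit = {
    println(symbolOf[List[Int]])            // type List
    println(symbolOf[Option[Int]].fullName) // scala.Option
    // usually equivalent to, but more direct than:
    println(typeOf[List[Int]].typeSymbol)   // type List
  }
}
```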
But we should either - // flip on object overrides or find some other accomodation, because + // flip on object overrides or find some other accommodation, because // lazy vals are unnecessarily expensive relative to objects and it // is very beneficial for a handful of bootstrap symbols to have // first class identities diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala index ae9f2da4e58a..32d12d305ee4 100644 --- a/src/reflect/scala/reflect/internal/Names.scala +++ b/src/reflect/scala/reflect/internal/Names.scala @@ -40,7 +40,10 @@ trait Names extends api.Names { /** Hashtable for finding type names quickly. */ private val typeHashtable = new Array[TypeName](HASH_SIZE) - /** The hashcode of a name. */ + /** + * The hashcode of a name depends on the first, the last and the middle character, + * and the length of the name. + */ private def hashValue(cs: Array[Char], offset: Int, len: Int): Int = if (len > 0) (len * (41 * 41 * 41) + @@ -104,10 +107,21 @@ trait Names extends api.Names { // The logic order here is future-proofing against the possibility // that name.toString will become an eager val, in which case the call // to enterChars cannot follow the construction of the TermName. - val ncStart = nc - enterChars(cs, offset, len) - if (cachedString ne null) new TermName_S(ncStart, len, h, cachedString) - else new TermName_R(ncStart, len, h) + var startIndex = 0 + if (cs == chrs) { + // Optimize for subName, the new name is already stored in chrs + startIndex = offset + } else { + startIndex = nc + enterChars(cs, offset, len) + } + val next = termHashtable(h) + val termName = + if (cachedString ne null) new TermName_S(startIndex, len, next, cachedString) + else new TermName_R(startIndex, len, next) + // Add the new termName to the hashtable only after it's been fully constructed + termHashtable(h) = termName + termName } } if (synchronizeNames) nameLock.synchronized(body) else body @@ -117,11 +131,11 @@ trait Names extends api.Names { newTermName(cs, offset, len, cachedString).toTypeName /** Create a term name from string. */ - @deprecatedOverriding("To synchronize, use `override def synchronizeNames = true`", "2.11.0") // overriden in https://github.com/scala-ide/scala-ide/blob/master/org.scala-ide.sdt.core/src/scala/tools/eclipse/ScalaPresentationCompiler.scala + @deprecatedOverriding("To synchronize, use `override def synchronizeNames = true`", "2.11.0") // overridden in https://github.com/scala-ide/scala-ide/blob/master/org.scala-ide.sdt.core/src/scala/tools/eclipse/ScalaPresentationCompiler.scala def newTermName(s: String): TermName = newTermName(s.toCharArray(), 0, s.length(), null) /** Create a type name from string. */ - @deprecatedOverriding("To synchronize, use `override def synchronizeNames = true`", "2.11.0") // overriden in https://github.com/scala-ide/scala-ide/blob/master/org.scala-ide.sdt.core/src/scala/tools/eclipse/ScalaPresentationCompiler.scala + @deprecatedOverriding("To synchronize, use `override def synchronizeNames = true`", "2.11.0") // overridden in https://github.com/scala-ide/scala-ide/blob/master/org.scala-ide.sdt.core/src/scala/tools/eclipse/ScalaPresentationCompiler.scala def newTypeName(s: String): TypeName = newTermName(s).toTypeName /** Create a term name from the UTF8 encoded bytes in bs[offset..offset+len-1]. 
*/ @@ -145,40 +159,20 @@ trait Names extends api.Names { newTermName(bs, offset, len).toTypeName /** - * Used only by the GenBCode backend, to represent bytecode-level types in a way that makes equals() and hashCode() efficient. - * For bytecode-level types of OBJECT sort, its internal name (not its descriptor) is stored. - * For those of ARRAY sort, its descriptor is stored ie has a leading '[' - * For those of METHOD sort, its descriptor is stored ie has a leading '(' + * Used by the GenBCode backend to lookup type names that are known to already exist. This method + * might be invoked in a multi-threaded setting. Invoking newTypeName instead might be unsafe. * - * can-multi-thread - * TODO SI-6240 !!! JZ Really? the constructors TermName and TypeName publish unconstructed `this` references - * into the hash tables; we could observe them here before the subclass constructor completes. + * can-multi-thread: names are added to the hash tables only after they are fully constructed. */ - final def lookupTypeName(cs: Array[Char]): TypeName = { lookupTypeNameIfExisting(cs, true) } - - final def lookupTypeNameIfExisting(cs: Array[Char], failOnNotFound: Boolean): TypeName = { - - val hterm = hashValue(cs, 0, cs.size) & HASH_MASK - var nterm = termHashtable(hterm) - while ((nterm ne null) && (nterm.length != cs.size || !equals(nterm.start, cs, 0, cs.size))) { - nterm = nterm.next - } - if (nterm eq null) { - if (failOnNotFound) { assert(false, "TermName not yet created: " + new String(cs)) } - return null - } + final def lookupTypeName(cs: Array[Char]): TypeName = { + val hash = hashValue(cs, 0, cs.length) & HASH_MASK + var typeName = typeHashtable(hash) - val htype = hashValue(chrs, nterm.start, nterm.length) & HASH_MASK - var ntype = typeHashtable(htype) - while ((ntype ne null) && ntype.start != nterm.start) { - ntype = ntype.next + while ((typeName ne null) && (typeName.length != cs.length || !equals(typeName.start, cs, 0, cs.length))) { + typeName = typeName.next } - if (ntype eq null) { - if (failOnNotFound) { assert(false, "TypeName not yet created: " + new String(cs)) } - return null - } - - ntype + assert(typeName != null, s"TypeName ${new String(cs)} not yet created.") + typeName } // Classes ---------------------------------------------------------------------- @@ -515,43 +509,47 @@ trait Names extends api.Names { /** TermName_S and TypeName_S have fields containing the string version of the name. * TermName_R and TypeName_R recreate it each time toString is called. 
*/ - private final class TermName_S(index0: Int, len0: Int, hash: Int, override val toString: String) extends TermName(index0, len0, hash) { - protected def createCompanionName(h: Int): TypeName = new TypeName_S(index, len, h, toString) + private final class TermName_S(index0: Int, len0: Int, next0: TermName, override val toString: String) extends TermName(index0, len0, next0) { + protected def createCompanionName(next: TypeName): TypeName = new TypeName_S(index, len, next, toString) override def newName(str: String): TermName = newTermNameCached(str) } - private final class TypeName_S(index0: Int, len0: Int, hash: Int, override val toString: String) extends TypeName(index0, len0, hash) { - protected def createCompanionName(h: Int): TermName = new TermName_S(index, len, h, toString) + private final class TypeName_S(index0: Int, len0: Int, next0: TypeName, override val toString: String) extends TypeName(index0, len0, next0) { override def newName(str: String): TypeName = newTypeNameCached(str) } - private final class TermName_R(index0: Int, len0: Int, hash: Int) extends TermName(index0, len0, hash) { - protected def createCompanionName(h: Int): TypeName = new TypeName_R(index, len, h) + private final class TermName_R(index0: Int, len0: Int, next0: TermName) extends TermName(index0, len0, next0) { + protected def createCompanionName(next: TypeName): TypeName = new TypeName_R(index, len, next) override def toString = new String(chrs, index, len) } - private final class TypeName_R(index0: Int, len0: Int, hash: Int) extends TypeName(index0, len0, hash) { - protected def createCompanionName(h: Int): TermName = new TermName_R(index, len, h) + private final class TypeName_R(index0: Int, len0: Int, next0: TypeName) extends TypeName(index0, len0, next0) { override def toString = new String(chrs, index, len) } // SYNCNOTE: caller to constructor must synchronize if `synchronizeNames` is enabled - sealed abstract class TermName(index0: Int, len0: Int, hash: Int) extends Name(index0, len0) with TermNameApi { + sealed abstract class TermName(index0: Int, len0: Int, val next: TermName) extends Name(index0, len0) with TermNameApi { type ThisNameType = TermName protected[this] def thisName: TermName = this - val next: TermName = termHashtable(hash) - termHashtable(hash) = this + def isTermName: Boolean = true def isTypeName: Boolean = false def toTermName: TermName = this def toTypeName: TypeName = { def body = { + // Re-computing the hash saves a field for storing it in the TermName val h = hashValue(chrs, index, len) & HASH_MASK var n = typeHashtable(h) while ((n ne null) && n.start != index) n = n.next if (n ne null) n - else createCompanionName(h) + else { + val next = typeHashtable(h) + val typeName = createCompanionName(next) + // Add the new typeName to the hashtable only after it's been fully constructed + typeHashtable(h) = typeName + typeName + } } if (synchronizeNames) nameLock.synchronized(body) else body } @@ -562,7 +560,7 @@ trait Names extends api.Names { def nameKind = "term" /** SYNCNOTE: caller must synchronize if `synchronizeNames` is enabled */ - protected def createCompanionName(h: Int): TypeName + protected def createCompanionName(next: TypeName): TypeName } implicit val TermNameTag = ClassTag[TermName](classOf[TermName]) @@ -572,24 +570,22 @@ trait Names extends api.Names { def unapply(name: TermName): Option[String] = Some(name.toString) } - sealed abstract class TypeName(index0: Int, len0: Int, hash: Int) extends Name(index0, len0) with TypeNameApi { + sealed abstract class 
TypeName(index0: Int, len0: Int, val next: TypeName) extends Name(index0, len0) with TypeNameApi { type ThisNameType = TypeName protected[this] def thisName: TypeName = this - val next: TypeName = typeHashtable(hash) - typeHashtable(hash) = this - def isTermName: Boolean = false def isTypeName: Boolean = true def toTermName: TermName = { def body = { + // Re-computing the hash saves a field for storing it in the TypeName val h = hashValue(chrs, index, len) & HASH_MASK var n = termHashtable(h) while ((n ne null) && n.start != index) n = n.next - if (n ne null) n - else createCompanionName(h) + assert (n ne null, s"TypeName $this is missing its correspondent") + n } if (synchronizeNames) nameLock.synchronized(body) else body } @@ -601,8 +597,6 @@ trait Names extends api.Names { def nameKind = "type" override def decode = if (nameDebug) super.decode + "!" else super.decode - /** SYNCNOTE: caller must synchronize if `synchronizeNames` is enabled */ - protected def createCompanionName(h: Int): TermName } implicit val TypeNameTag = ClassTag[TypeName](classOf[TypeName]) diff --git a/src/reflect/scala/reflect/internal/Positions.scala b/src/reflect/scala/reflect/internal/Positions.scala index 01fba1efc16c..4d0e31b03788 100644 --- a/src/reflect/scala/reflect/internal/Positions.scala +++ b/src/reflect/scala/reflect/internal/Positions.scala @@ -23,13 +23,10 @@ import scala.collection.mutable.ListBuffer * Otherwise, the singleton consisting of the node itself. */ trait Positions extends api.Positions { self: SymbolTable => - type Position = scala.reflect.internal.util.Position val NoPosition = scala.reflect.internal.util.NoPosition implicit val PositionTag = ClassTag[Position](classOf[Position]) - def inform(msg: String): Unit - def useOffsetPositions: Boolean = true /** A position that wraps a set of trees. @@ -100,7 +97,7 @@ trait Positions extends api.Positions { self: SymbolTable => inform("\nWhile validating #" + tree.id) inform(treeStatus(tree)) inform("\nChildren:") - tree.children map (t => " " + treeStatus(t, tree)) foreach inform + tree.children foreach (t => inform(" " + treeStatus(t, tree))) inform("=======") throw new ValidateException(msg) } @@ -109,7 +106,7 @@ trait Positions extends api.Positions { self: SymbolTable => if (!tree.isEmpty && tree.canHaveAttrs) { if (settings.Yposdebug && (settings.verbose || settings.Yrangepos)) - println("[%10s] %s".format("validate", treeStatus(tree, encltree))) + inform("[%10s] %s".format("validate", treeStatus(tree, encltree))) if (!tree.pos.isDefined) positionError("Unpositioned tree #"+tree.id) { @@ -176,7 +173,7 @@ trait Positions extends api.Positions { self: SymbolTable => case r :: rs1 => assert(!t.pos.isTransparent) if (r.isFree && (r.pos includes t.pos)) { -// println("subdividing "+r+"/"+t.pos) +// inform("subdividing "+r+"/"+t.pos) maybeFree(t.pos.end, r.pos.end) ::: List(Range(t.pos, t)) ::: maybeFree(r.pos.start, t.pos.start) ::: rs1 } else { if (!r.isFree && (r.pos overlaps t.pos)) conflicting += r.tree @@ -207,7 +204,7 @@ trait Positions extends api.Positions { self: SymbolTable => /** Set position of all children of a node * @param pos A target position. * Uses the point of the position as the point of all positions it assigns. - * Uses the start of this position as an Offset position for unpositioed trees + * Uses the start of this position as an Offset position for unpositioned trees * without children. * @param trees The children to position. All children must be positionable. 
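Stepping back from the Names changes above: the point of threading `next` through the TermName/TypeName constructors and writing to the hashtables only afterwards is that a name can never be observed in a bucket before it is fully constructed. A generic sketch of that publish-after-construction pattern (Node and Buckets are illustrative types, not the compiler's):

```scala
// Each bucket is a linked list; a new node captures the current head as `next`
// while it is built and is stored into the table only once it is complete.
final class Node[A](val value: A, val next: Node[A])

final class Buckets[A](tableSize: Int) { // tableSize assumed to be a power of two
  private[this] val table = new Array[Node[A]](tableSize)

  def add(value: A, hash: Int): Node[A] = synchronized {
    val i    = hash & (tableSize - 1)
    val node = new Node(value, table(i)) // fully constructed first...
    table(i) = node                      // ...then published
    node
  }

  def find(hash: Int)(p: A => Boolean): Option[A] = {
    var n = table(hash & (tableSize - 1))
    while ((n ne null) && !p(n.value)) n = n.next
    Option(n).map(_.value)
  }
}
```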
*/ @@ -225,7 +222,7 @@ trait Positions extends api.Positions { self: SymbolTable => } } catch { case ex: Exception => - println("error while set children pos "+pos+" of "+trees) + inform("error while set children pos "+pos+" of "+trees) throw ex } diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala index 680c19e426d0..98b2c4837927 100644 --- a/src/reflect/scala/reflect/internal/Printers.scala +++ b/src/reflect/scala/reflect/internal/Printers.scala @@ -546,10 +546,11 @@ trait Printers extends api.Printers { self: SymbolTable => import Chars._ val decName = name.decoded val bslash = '\\' + val isDot = (x: Char) => x == '.' val brackets = List('[',']','(',')','{','}') def addBackquotes(s: String) = - if (decoded && (decName.exists(ch => brackets.contains(ch) || isWhitespace(ch)) || + if (decoded && (decName.exists(ch => brackets.contains(ch) || isWhitespace(ch) || isDot(ch)) || (name.isOperatorName && decName.exists(isOperatorPart) && decName.exists(isScalaLetter) && !decName.contains(bslash)))) s"`$s`" else s @@ -567,8 +568,8 @@ trait Printers extends api.Printers { self: SymbolTable => override protected val commentsRequired = true - protected def needsParentheses(parent: Tree)(insideIf: Boolean = true, insideMatch: Boolean = true, - insideTry: Boolean = true, insideAnnotated: Boolean = true, insideBlock: Boolean = true, insideLabelDef: Boolean = true) = { + protected def needsParentheses(parent: Tree)(insideIf: Boolean = true, insideMatch: Boolean = true, insideTry: Boolean = true, + insideAnnotated: Boolean = true, insideBlock: Boolean = true, insideLabelDef: Boolean = true, insideAssign: Boolean = true) = { parent match { case _: If => insideIf case _: Match => insideMatch @@ -576,6 +577,7 @@ trait Printers extends api.Printers { self: SymbolTable => case _: Annotated => insideAnnotated case _: Block => insideBlock case _: LabelDef => insideLabelDef + case _: Assign => insideAssign case _ => false } } @@ -595,18 +597,26 @@ trait Printers extends api.Printers { self: SymbolTable => } } - protected def emptyTree(tree: Tree) = tree match { - case EmptyTree | build.SyntacticEmptyTypeTree() => true - case _ => false + object EmptyTypeTree { + def unapply(tt: TypeTree): Boolean = tt match { + case build.SyntacticEmptyTypeTree() if tt.wasEmpty || tt.isEmpty => true + case _ => false + } } + protected def isEmptyTree(tree: Tree) = + tree match { + case EmptyTree | EmptyTypeTree() => true + case _ => false + } + protected def originalTypeTrees(trees: List[Tree]) = - trees.filter(!emptyTree(_)) map { - case tt: TypeTree => tt.original - case tree => tree + trees.filter(!isEmptyTree(_)) map { + case tt: TypeTree if tt.original != null => tt.original + case tree => tree } - val defaultClasses = List(tpnme.AnyRef) + val defaultClasses = List(tpnme.AnyRef, tpnme.Object) val defaultTraitsForCase = List(tpnme.Product, tpnme.Serializable) protected def removeDefaultTypesFromList(trees: List[Tree])(classesToRemove: List[Name] = defaultClasses)(traitsToRemove: List[Name]) = { def removeDefaultTraitsFromList(trees: List[Tree], traitsToRemove: List[Name]): List[Tree] = @@ -622,9 +632,10 @@ trait Printers extends api.Printers { self: SymbolTable => removeDefaultTraitsFromList(removeDefaultClassesFromList(trees, classesToRemove), traitsToRemove) } - protected def removeDefaultClassesFromList(trees: List[Tree], classesToRemove: List[Name] = defaultClasses) = + protected def removeDefaultClassesFromList(trees: List[Tree], classesToRemove: List[Name] 
= defaultClasses) = originalTypeTrees(trees) filter { case Select(Ident(sc), name) => !(classesToRemove.contains(name) && sc == nme.scala_) + case tt: TypeTree if tt.tpe != null => !(classesToRemove contains(newTypeName(tt.tpe.toString()))) case _ => true } @@ -636,7 +647,7 @@ trait Printers extends api.Printers { self: SymbolTable => } override def printOpt(prefix: String, tree: Tree) = - if (!emptyTree(tree)) super.printOpt(prefix, tree) + if (!isEmptyTree(tree)) super.printOpt(prefix, tree) override def printColumn(ts: List[Tree], start: String, sep: String, end: String) = { super.printColumn(ts.filter(!syntheticToRemove(_)), start, sep, end) @@ -751,7 +762,7 @@ trait Printers extends api.Printers { self: SymbolTable => val build.SyntacticClassDef(_, _, _, ctorMods, vparamss, earlyDefs, parents, selfType, body) = cl // constructor's modifier - if (ctorMods.hasFlag(AccessFlags)) { + if (ctorMods.hasFlag(AccessFlags) || ctorMods.hasAccessBoundary) { print(" ") printModifiers(ctorMods, primaryCtorParam = false) } @@ -951,7 +962,7 @@ trait Printers extends api.Printers { self: SymbolTable => def printTp = print("(", tp, ")") tp match { - case EmptyTree | build.SyntacticEmptyTypeTree() => printTp + case EmptyTree | EmptyTypeTree() => printTp // case for untypechecked trees case Annotated(annot, arg) if (expr ne null) && (arg ne null) && expr.equalsStructure(arg) => printTp // remove double arg - 5: 5: @unchecked case tt: TypeTree if tt.original.isInstanceOf[Annotated] => printTp @@ -962,7 +973,7 @@ trait Printers extends api.Printers { self: SymbolTable => // print only fun when targs are TypeTrees with empty original case TypeApply(fun, targs) => - if (targs.exists(emptyTree(_))) { + if (targs.exists(isEmptyTree(_))) { print(fun) } else super.printTree(tree) @@ -983,8 +994,8 @@ trait Printers extends api.Printers { self: SymbolTable => case treeInfo.Unapplied(body) => body match { case Select(qual, name) if name == nme.unapply => print(qual) - case TypeApply(Select(qual, name), args) if name == nme.unapply || name == nme.unapplySeq => - print(TypeApply(qual, args)) + case TypeApply(Select(qual, name), _) if name == nme.unapply || name == nme.unapplySeq => + print(qual) case _ => print(body) } case _ => print(fun) @@ -1060,7 +1071,11 @@ trait Printers extends api.Printers { self: SymbolTable => print("(", qualifier, ")#", blankForOperatorName(selector), printedName(selector)) case tt: TypeTree => - if (!emptyTree(tt)) print(tt.original) + if (!isEmptyTree(tt)) { + val original = tt.original + if (original != null) print(original) + else super.printTree(tree) + } case AppliedTypeTree(tp, args) => // it's possible to have (=> String) => String type but Function1[=> String, String] is not correct diff --git a/src/reflect/scala/reflect/internal/ReificationSupport.scala b/src/reflect/scala/reflect/internal/ReificationSupport.scala index 66ac4bc751b6..c41832123403 100644 --- a/src/reflect/scala/reflect/internal/ReificationSupport.scala +++ b/src/reflect/scala/reflect/internal/ReificationSupport.scala @@ -6,8 +6,7 @@ import Flags._ import util._ trait ReificationSupport { self: SymbolTable => - import definitions.{TupleClass, FunctionClass, ScalaPackage, UnitClass} - import internal._ + import definitions._ class ReificationSupportImpl extends ReificationSupportApi { def selectType(owner: Symbol, name: String): TypeSymbol = @@ -97,11 +96,13 @@ trait ReificationSupport { self: SymbolTable => def toStats(tree: Tree): List[Tree] = tree match { case EmptyTree => Nil case SyntacticBlock(stats) => stats 
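The printer tweaks above are visible through showCode: names that would not parse unquoted, such as names containing a dot, now come out backquoted. A small sketch (the output in the comment is the expected shape, not verified verbatim):

```scala
import scala.reflect.runtime.universe._

object ShowCodeDemo {
  def main(args: Array[String]): Unit = {
    // a term name that no plain identifier could carry
    val odd  = TermName("a.b")
    val tree = q"val $odd = 1"
    println(showCode(tree)) // expected: val `a.b` = 1
  }
}
```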
+ case defn if defn.isDef => defn :: Nil + case imp: Import => imp :: Nil case _ => throw new IllegalArgumentException(s"can't flatten $tree") } def mkAnnotation(tree: Tree): Tree = tree match { - case SyntacticNew(Nil, SyntacticApplied(SyntacticTypeApplied(_, _), _) :: Nil, noSelfType, Nil) => + case SyntacticNew(Nil, SyntacticApplied(SyntacticAppliedType(_, _), _) :: Nil, noSelfType, Nil) => tree case _ => throw new IllegalArgumentException(s"Tree ${showRaw(tree)} isn't a correct representation of annotation." + @@ -121,7 +122,7 @@ trait ReificationSupport { self: SymbolTable => if (vd.rhs.nonEmpty) newmods |= DEFAULTPARAM copyValDef(vd)(mods = newmods | extraFlags) case _ => - throw new IllegalArgumentException(s"$tree is not valid represenation of a parameter, " + + throw new IllegalArgumentException(s"$tree is not valid representation of a parameter, " + """consider reformatting it into q"val $name: $T = $default" shape""") } @@ -201,17 +202,35 @@ trait ReificationSupport { self: SymbolTable => def unapply(flags: Long): Some[Long] = Some(flags) } + /** Construct/deconstruct type application term trees. + * Treats other term trees as zero-argument type applications. + */ object SyntacticTypeApplied extends SyntacticTypeAppliedExtractor { def apply(tree: Tree, targs: List[Tree]): Tree = if (targs.isEmpty) tree else if (tree.isTerm) TypeApply(tree, targs) - else if (tree.isType) AppliedTypeTree(tree, targs) - else throw new IllegalArgumentException(s"can't apply types to $tree") + else throw new IllegalArgumentException(s"can't apply type arguments to $tree") - def unapply(tree: Tree): Some[(Tree, List[Tree])] = tree match { + def unapply(tree: Tree): Option[(Tree, List[Tree])] = tree match { case TypeApply(fun, targs) => Some((fun, targs)) - case AppliedTypeTree(tpe, targs) => Some((tpe, targs)) - case _ => Some((tree, Nil)) + case _ if tree.isTerm => Some((tree, Nil)) + case _ => None + } + } + + /** Construct/deconstruct applied type trees. + * Treats other types as zero-arity applied types. 
+ */ + object SyntacticAppliedType extends SyntacticTypeAppliedExtractor { + def apply(tree: Tree, targs: List[Tree]): Tree = + if (targs.isEmpty) tree + else if (tree.isType) AppliedTypeTree(tree, targs) + else throw new IllegalArgumentException(s"can't create applied type from non-type $tree") + + def unapply(tree: Tree): Option[(Tree, List[Tree])] = tree match { + case MaybeTypeTreeOriginal(AppliedTypeTree(tpe, targs)) => Some((tpe, targs)) + case _ if tree.isType => Some((tree, Nil)) + case _ => None } } @@ -223,7 +242,15 @@ trait ReificationSupport { self: SymbolTable => case UnApply(treeInfo.Unapplied(Select(fun, nme.unapply)), pats) => Some((fun, pats :: Nil)) case treeInfo.Applied(fun, targs, argss) => - Some((SyntacticTypeApplied(fun, targs), argss)) + fun match { + case Select(_: New, nme.CONSTRUCTOR) => + Some((tree, Nil)) + case _ => + val callee = + if (fun.isTerm) SyntacticTypeApplied(fun, targs) + else SyntacticAppliedType(fun, targs) + Some((callee, argss)) + } } } @@ -489,8 +516,10 @@ trait ReificationSupport { self: SymbolTable => gen.mkNew(parents, mkSelfType(selfType), earlyDefs ::: body, NoPosition, NoPosition) def unapply(tree: Tree): Option[(List[Tree], List[Tree], ValDef, List[Tree])] = tree match { - case SyntacticApplied(Select(New(SyntacticTypeApplied(ident, targs)), nme.CONSTRUCTOR), argss) => - Some((Nil, SyntacticApplied(SyntacticTypeApplied(ident, targs), argss) :: Nil, noSelfType, Nil)) + case treeInfo.Applied(Select(New(SyntacticAppliedType(ident, targs)), nme.CONSTRUCTOR), Nil, List(Nil)) => + Some((Nil, SyntacticAppliedType(ident, targs) :: Nil, noSelfType, Nil)) + case treeInfo.Applied(Select(New(SyntacticAppliedType(ident, targs)), nme.CONSTRUCTOR), Nil, argss) => + Some((Nil, SyntacticApplied(SyntacticAppliedType(ident, targs), argss) :: Nil, noSelfType, Nil)) case SyntacticBlock(SyntacticClassDef(_, tpnme.ANON_CLASS_NAME, Nil, _, ListOfNil, earlyDefs, parents, selfType, body) :: Apply(Select(New(Ident(tpnme.ANON_CLASS_NAME)), nme.CONSTRUCTOR), Nil) :: Nil) => Some((earlyDefs, parents, selfType, body)) @@ -502,11 +531,21 @@ trait ReificationSupport { self: SymbolTable => object SyntacticDefDef extends SyntacticDefDefExtractor { def apply(mods: Modifiers, name: TermName, tparams: List[Tree], vparamss: List[List[Tree]], tpt: Tree, rhs: Tree): DefDef = { + val tparams0 = mkTparams(tparams) val vparamss0 = mkParam(vparamss, PARAM) - DefDef(mods, name, mkTparams(tparams), vparamss0, tpt, rhs) + val rhs0 = { + if (name != nme.CONSTRUCTOR) rhs + else rhs match { + case Block(_, _) => rhs + case _ => Block(List(rhs), gen.mkSyntheticUnit) + } + } + DefDef(mods, name, tparams0, vparamss0, tpt, rhs0) } def unapply(tree: Tree): Option[(Modifiers, TermName, List[TypeDef], List[List[ValDef]], Tree, Tree)] = tree match { + case DefDef(mods, nme.CONSTRUCTOR, tparams, vparamss, tpt, Block(List(expr), Literal(Constant(())))) => + Some((mods, nme.CONSTRUCTOR, tparams, vparamss, tpt, expr)) case DefDef(mods, name, tparams, vparamss, tpt, rhs) => Some((mods, name, tparams, vparamss, tpt, rhs)) case _ => None @@ -808,10 +847,10 @@ trait ReificationSupport { self: SymbolTable => // drop potential @scala.unchecked annotation protected object MaybeUnchecked { def unapply(tree: Tree): Some[Tree] = tree match { - case Annotated(SyntacticNew(Nil, Apply(ScalaDot(tpnme.unchecked), Nil) :: Nil, noSelfType, Nil), annottee) => + case Annotated(SyntacticNew(Nil, ScalaDot(tpnme.unchecked) :: Nil, noSelfType, Nil), annottee) => Some(annottee) case Typed(annottee, MaybeTypeTreeOriginal( - 
Annotated(SyntacticNew(Nil, Apply(ScalaDot(tpnme.unchecked), Nil) :: Nil, noSelfType, Nil), _))) => + Annotated(SyntacticNew(Nil, ScalaDot(tpnme.unchecked) :: Nil, noSelfType, Nil), _))) => Some(annottee) case annottee => Some(annottee) } @@ -826,12 +865,43 @@ trait ReificationSupport { self: SymbolTable => protected def mkCases(cases: List[Tree]): List[CaseDef] = cases.map { case c: CaseDef => c - case tree => throw new IllegalArgumentException("$tree is not valid representation of pattern match case") + case tree => throw new IllegalArgumentException(s"$tree is not valid representation of pattern match case") + } + + object SyntacticPartialFunction extends SyntacticPartialFunctionExtractor { + def apply(cases: List[Tree]): Match = Match(EmptyTree, mkCases(cases)) + def unapply(tree: Tree): Option[List[CaseDef]] = tree match { + case Match(EmptyTree, cases) => Some(cases) + case Typed( + Block( + List(ClassDef(clsMods, tpnme.ANON_FUN_NAME, Nil, Template( + List(abspf: TypeTree, ser: TypeTree), noSelfType, List( + DefDef(_, nme.CONSTRUCTOR, _, _, _, _), + DefDef(_, nme.applyOrElse, _, _, _, + Match(_, cases :+ + CaseDef(Bind(nme.DEFAULT_CASE, Ident(nme.WILDCARD)), _, _))), + DefDef(_, nme.isDefinedAt, _, _, _, _))))), + Apply(Select(New(Ident(tpnme.ANON_FUN_NAME)), termNames.CONSTRUCTOR), List())), + pf: TypeTree) + if pf.tpe != null && pf.tpe.typeSymbol.eq(PartialFunctionClass) && + abspf.tpe != null && abspf.tpe.typeSymbol.eq(AbstractPartialFunctionClass) && + ser.tpe != null && ser.tpe.typeSymbol.eq(SerializableClass) && + clsMods.hasFlag(FINAL) && clsMods.hasFlag(SYNTHETIC) => + Some(cases) + case _ => None + } } object SyntacticMatch extends SyntacticMatchExtractor { - def apply(selector: Tree, cases: List[Tree]) = Match(selector, mkCases(cases)) - def unapply(tree: Match): Option[(Tree, List[CaseDef])] = Match.unapply(tree) + def apply(scrutinee: Tree, cases: List[Tree]) = { + require(scrutinee.nonEmpty, "match's scrutinee may not be empty") + Match(scrutinee, mkCases(cases)) + } + + def unapply(tree: Match): Option[(Tree, List[CaseDef])] = tree match { + case Match(scrutinee, cases) if scrutinee.nonEmpty => Some((scrutinee, cases)) + case _ => None + } } object SyntacticTry extends SyntacticTryExtractor { @@ -839,13 +909,24 @@ trait ReificationSupport { self: SymbolTable => def unapply(tree: Try): Option[(Tree, List[CaseDef], Tree)] = Try.unapply(tree) } - object SyntacticIdent extends SyntacticIdentExtractor { - def apply(name: Name, isBackquoted: Boolean) = { + object SyntacticTermIdent extends SyntacticTermIdentExtractor { + def apply(name: TermName, isBackquoted: Boolean): Ident = { val id = self.Ident(name) if (isBackquoted) id updateAttachment BackquotedIdentifierAttachment id } - def unapply(tree: Ident): Some[(Name, Boolean)] = Some((tree.name, tree.hasAttachment[BackquotedIdentifierAttachment.type])) + def unapply(id: Ident): Option[(TermName, Boolean)] = id.name match { + case name: TermName => Some((name, id.hasAttachment[BackquotedIdentifierAttachment.type])) + case _ => None + } + } + + object SyntacticTypeIdent extends SyntacticTypeIdentExtractor { + def apply(name: TypeName): Ident = self.Ident(name) + def unapply(tree: Tree): Option[TypeName] = tree match { + case MaybeTypeTreeOriginal(Ident(name: TypeName)) => Some(name) + case _ => None + } } /** Facade over Imports and ImportSelectors that lets to structurally @@ -987,6 +1068,79 @@ trait ReificationSupport { self: SymbolTable => Some((imp.expr, selectors)) } } + + object SyntacticSelectType extends 
SyntacticSelectTypeExtractor { + def apply(qual: Tree, name: TypeName): Select = Select(qual, name) + def unapply(tree: Tree): Option[(Tree, TypeName)] = tree match { + case MaybeTypeTreeOriginal(Select(qual, name: TypeName)) => Some((qual, name)) + case _ => None + } + } + + object SyntacticSelectTerm extends SyntacticSelectTermExtractor { + def apply(qual: Tree, name: TermName): Select = Select(qual, name) + def unapply(tree: Tree): Option[(Tree, TermName)] = tree match { + case Select(qual, name: TermName) => Some((qual, name)) + case _ => None + } + } + + object SyntacticCompoundType extends SyntacticCompoundTypeExtractor { + def apply(parents: List[Tree], defns: List[Tree]) = + CompoundTypeTree(Template(gen.mkParents(NoMods, parents), noSelfType, defns)) + def unapply(tree: Tree): Option[(List[Tree], List[Tree])] = tree match { + case MaybeTypeTreeOriginal(CompoundTypeTree(Template(parents, _, defns))) => + Some((parents, defns)) + case _ => + None + } + } + + object SyntacticSingletonType extends SyntacitcSingletonTypeExtractor { + def apply(ref: Tree): SingletonTypeTree = SingletonTypeTree(ref) + def unapply(tree: Tree): Option[Tree] = tree match { + case MaybeTypeTreeOriginal(SingletonTypeTree(ref)) => + Some(ref) + case _ => + None + } + } + + object SyntacticTypeProjection extends SyntacticTypeProjectionExtractor { + def apply(qual: Tree, name: TypeName): SelectFromTypeTree = + SelectFromTypeTree(qual, name) + def unapply(tree: Tree): Option[(Tree, TypeName)] = tree match { + case MaybeTypeTreeOriginal(SelectFromTypeTree(qual, name)) => + Some((qual, name)) + case _ => + None + } + } + + object SyntacticAnnotatedType extends SyntacticAnnotatedTypeExtractor { + def apply(tpt: Tree, annot: Tree): Annotated = + Annotated(annot, tpt) + def unapply(tree: Tree): Option[(Tree, Tree)] = tree match { + case MaybeTypeTreeOriginal(Annotated(annot, tpt)) => + Some((tpt, annot)) + case _ => + None + } + } + + object SyntacticExistentialType extends SyntacticExistentialTypeExtractor { + def apply(tpt: Tree, where: List[Tree]): ExistentialTypeTree = + ExistentialTypeTree(tpt, where.map { + case md: MemberDef => md + case tree => throw new IllegalArgumentException("$tree is not legal forSome definition") + }) + def unapply(tree: Tree): Option[(Tree, List[MemberDef])] = tree match { + case MaybeTypeTreeOriginal(ExistentialTypeTree(tpt, where)) => + Some((tpt, where)) + case _ => + None + } + } } val build = new ReificationSupportImpl diff --git a/src/reflect/scala/reflect/internal/Reporting.scala b/src/reflect/scala/reflect/internal/Reporting.scala new file mode 100644 index 000000000000..f2de83bc5de2 --- /dev/null +++ b/src/reflect/scala/reflect/internal/Reporting.scala @@ -0,0 +1,116 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2014 LAMP/EPFL, Typesafe Inc. + * @author Adriaan Moors + */ + +package scala +package reflect +package internal + +/** Provides delegates to the reporter doing the actual work. + * All forwarding methods should be marked final, + * but some subclasses out of our reach stil override them. + * + * Eventually, this interface should be reduced to one method: `reporter`, + * and clients should indirect themselves (reduce duplication of forwarders). 
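Much of the ReificationSupport work above surfaces through quasiquotes: type applications and applied types get separate combinators, partial functions can be built and matched directly, and match construction now insists on a non-empty scrutinee. A usage sketch on 2.11:

```scala
import scala.reflect.runtime.universe._

object QuasiquoteDemo {
  def main(args: Array[String]): Unit = {
    // applied types go through the type-level combinator (SyntacticAppliedType)
    val tq"$constructor[..$targs]" = tq"Map[Int, String]"
    println(constructor) // Map
    println(targs)       // List(Int, String)

    // partial functions are first-class (SyntacticPartialFunction)
    val pf = q"""{
      case 0 => "zero"
      case _ => "other"
    }"""
    val q"{ case ..$cases }" = pf
    println(cases.length) // 2
  }
}
```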
+ */ +trait Reporting { self : Positions => + def reporter: Reporter + def currentRun: RunReporting + + trait RunReporting { + val reporting: PerRunReporting = PerRunReporting + } + + type PerRunReporting <: PerRunReportingBase + protected def PerRunReporting: PerRunReporting + abstract class PerRunReportingBase { + def deprecationWarning(pos: Position, msg: String): Unit + + /** Have we already supplemented the error message of a compiler crash? */ + private[this] var supplementedError = false + def supplementErrorMessage(errorMessage: String): String = + if (supplementedError) errorMessage + else { + supplementedError = true + supplementTyperState(errorMessage) + } + + } + + // overridden in Global + def supplementTyperState(errorMessage: String): String = errorMessage + + def supplementErrorMessage(errorMessage: String) = currentRun.reporting.supplementErrorMessage(errorMessage) + + @deprecatedOverriding("This forwards to the corresponding method in reporter -- override reporter instead", "2.11.2") + def inform(msg: String): Unit = inform(NoPosition, msg) + @deprecatedOverriding("This forwards to the corresponding method in reporter -- override reporter instead", "2.11.2") + def warning(msg: String): Unit = warning(NoPosition, msg) + // globalError(msg: String) used to abort -- not sure that was a good idea, so I made it more regular + // (couldn't find any uses that relied on old behavior) + @deprecatedOverriding("This forwards to the corresponding method in reporter -- override reporter instead", "2.11.2") + def globalError(msg: String): Unit = globalError(NoPosition, msg) + + def abort(msg: String): Nothing = { + val augmented = supplementErrorMessage(msg) + // Needs to call error to make sure the compile fails. + globalError(augmented) + throw new FatalError(augmented) + } + + @deprecatedOverriding("This forwards to the corresponding method in reporter -- override reporter instead", "2.11.2") + def inform(pos: Position, msg: String) = reporter.echo(pos, msg) + @deprecatedOverriding("This forwards to the corresponding method in reporter -- override reporter instead", "2.11.2") + def warning(pos: Position, msg: String) = reporter.warning(pos, msg) + @deprecatedOverriding("This forwards to the corresponding method in reporter -- override reporter instead", "2.11.2") + def globalError(pos: Position, msg: String) = reporter.error(pos, msg) +} + +import util.Position + +/** Report information, warnings and errors. + * + * This describes the (future) external interface for issuing information, warnings and errors. + * Currently, scala.tools.nsc.Reporter is used by sbt/ide/partest. 
+ */ +abstract class Reporter { + protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit + + def echo(pos: Position, msg: String): Unit = info0(pos, msg, INFO, force = true) + def warning(pos: Position, msg: String): Unit = info0(pos, msg, WARNING, force = false) + def error(pos: Position, msg: String): Unit = info0(pos, msg, ERROR, force = false) + + type Severity + val INFO: Severity + val WARNING: Severity + val ERROR: Severity + + def count(severity: Severity): Int + def resetCount(severity: Severity): Unit + + def errorCount: Int = count(ERROR) + def warningCount: Int = count(WARNING) + + def hasErrors: Boolean = count(ERROR) > 0 + def hasWarnings: Boolean = count(WARNING) > 0 + + def reset(): Unit = { + resetCount(INFO) + resetCount(WARNING) + resetCount(ERROR) + } + + def flush(): Unit = { } +} + +// TODO: move into superclass once partest cuts tie on Severity +abstract class ReporterImpl extends Reporter { + class Severity(val id: Int)(name: String) { var count: Int = 0 ; override def toString = name} + object INFO extends Severity(0)("INFO") + object WARNING extends Severity(1)("WARNING") + object ERROR extends Severity(2)("ERROR") + + def count(severity: Severity): Int = severity.count + def resetCount(severity: Severity): Unit = severity.count = 0 +} diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala index cf3f356daabd..103f885ad4a9 100644 --- a/src/reflect/scala/reflect/internal/Scopes.scala +++ b/src/reflect/scala/reflect/internal/Scopes.scala @@ -48,22 +48,17 @@ trait Scopes extends api.Scopes { self: SymbolTable => * This is necessary because when run from reflection every scope needs to have a * SynchronizedScope as mixin. */ - class Scope protected[Scopes] (initElems: ScopeEntry = null, initFingerPrints: Long = 0L) extends ScopeApi with MemberScopeApi { + class Scope protected[Scopes]() extends ScopeApi with MemberScopeApi { - protected[Scopes] def this(base: Scope) = { - this(base.elems) - nestinglevel = base.nestinglevel + 1 - } - - private[scala] var elems: ScopeEntry = initElems + private[scala] var elems: ScopeEntry = _ /** The number of times this scope is nested in another */ - private var nestinglevel = 0 + private[Scopes] var nestinglevel = 0 /** the hash table */ - private var hashtable: Array[ScopeEntry] = null + private[Scopes] var hashtable: Array[ScopeEntry] = null /** a cache for all elements, to be used by symbol iterator. */ @@ -84,8 +79,6 @@ trait Scopes extends api.Scopes { self: SymbolTable => */ private val MIN_HASH = 8 - if (size >= MIN_HASH) createHash() - /** Returns a new scope with the same content as this one. */ def cloneScope: Scope = newScopeWith(this.toList: _*) @@ -435,7 +428,14 @@ trait Scopes extends api.Scopes { self: SymbolTable => } /** Create a new scope nested in another one with which it shares its elements */ - def newNestedScope(outer: Scope): Scope = new Scope(outer) + final def newNestedScope(outer: Scope): Scope = { + val nested = newScope // not `new Scope`, we must allow the runtime reflection universe to mixin SynchronizedScopes! 
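The scala.reflect.internal.Reporter introduced above narrows the reporting surface to one hook, info0. A minimal sketch of a custom reporter built on the ReporterImpl helper from the same file (PrintlnReporter is an illustrative name; this assumes the API exactly as shown in the patch):

```scala
import scala.reflect.internal.ReporterImpl
import scala.reflect.internal.util.Position

class PrintlnReporter extends ReporterImpl {
  protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = {
    severity.count += 1                            // keep errorCount / warningCount in sync
    Console.err.println(s"[$severity] $pos: $msg") // a Severity prints as INFO/WARNING/ERROR
  }
}
```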
+ nested.elems = outer.elems + nested.nestinglevel = outer.nestinglevel + 1 + if (outer.hashtable ne null) + nested.hashtable = java.util.Arrays.copyOf(outer.hashtable, outer.hashtable.length) + nested + } /** Create a new scope with given initial elements */ def newScopeWith(elems: Symbol*): Scope = { diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala index 614e71b597fe..cca33253be11 100644 --- a/src/reflect/scala/reflect/internal/StdAttachments.scala +++ b/src/reflect/scala/reflect/internal/StdAttachments.scala @@ -27,7 +27,7 @@ trait StdAttachments { def importAttachment(importer: Importer): this.type } - /** Attachment that doesn't contain any reflection artificats and can be imported as-is. */ + /** Attachment that doesn't contain any reflection artifacts and can be imported as-is. */ trait PlainAttachment extends ImportableAttachment { def importAttachment(importer: Importer): this.type = this } @@ -42,7 +42,7 @@ trait StdAttachments { */ case object BackquotedIdentifierAttachment extends PlainAttachment - /** Identifies trees are either result or intermidiate value of for loop desugaring. + /** Identifies trees are either result or intermediate value of for loop desugaring. */ case object ForAttachment extends PlainAttachment diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index 339923a061d1..ea07fb2a7432 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -99,17 +99,19 @@ trait StdNames { val SINGLETON_SUFFIX: String = ".type" - val ANON_CLASS_NAME: NameType = "$anon" - val ANON_FUN_NAME: NameType = "$anonfun" - val EMPTY: NameType = "" - val EMPTY_PACKAGE_NAME: NameType = "" - val IMPL_CLASS_SUFFIX = "$class" - val IMPORT: NameType = "" - val MODULE_SUFFIX_NAME: NameType = MODULE_SUFFIX_STRING - val MODULE_VAR_SUFFIX: NameType = "$module" - val PACKAGE: NameType = "package" - val ROOT: NameType = "" - val SPECIALIZED_SUFFIX: NameType = "$sp" + val ANON_CLASS_NAME: NameType = "$anon" + val DELAMBDAFY_LAMBDA_CLASS_NAME: NameType = "$lambda" + val ANON_FUN_NAME: NameType = "$anonfun" + val EMPTY: NameType = "" + val EMPTY_PACKAGE_NAME: NameType = "" + val IMPL_CLASS_SUFFIX = "$class" + val IMPORT: NameType = "" + val MODULE_SUFFIX_NAME: NameType = MODULE_SUFFIX_STRING + val MODULE_VAR_SUFFIX: NameType = "$module" + val PACKAGE: NameType = "package" + val ROOT: NameType = "" + val SPECIALIZED_SUFFIX: NameType = "$sp" + val CASE_ACCESSOR: NameType = "$access" // value types (and AnyRef) are all used as terms as well // as (at least) arguments to the @specialize annotation. 
@@ -127,6 +129,7 @@ trait StdNames { final val AnyRef: NameType = "AnyRef" final val Array: NameType = "Array" final val List: NameType = "List" + final val Option: NameType = "Option" final val Seq: NameType = "Seq" final val Symbol: NameType = "Symbol" final val WeakTypeTag: NameType = "WeakTypeTag" @@ -246,6 +249,7 @@ trait StdNames { final val Unliftable: NameType = "Unliftable" final val Name: NameType = "Name" final val Tree: NameType = "Tree" + final val Text: NameType = "Text" final val TermName: NameType = "TermName" final val Type : NameType = "Type" final val TypeName: NameType = "TypeName" @@ -473,7 +477,7 @@ trait StdNames { ) def localDummyName(clazz: Symbol): TermName = newTermName(LOCALDUMMY_PREFIX + clazz.name + ">") - def superName(name: Name): TermName = newTermName(SUPER_PREFIX_STRING + name) + def superName(name: Name, mix: Name = EMPTY): TermName = newTermName(SUPER_PREFIX_STRING + name + (if (mix.isEmpty) "" else "$" + mix)) /** The name of an accessor for protected symbols. */ def protName(name: Name): TermName = newTermName(PROTECTED_PREFIX + name) @@ -612,33 +616,6 @@ trait StdNames { val SelectFromTypeTree: NameType = "SelectFromTypeTree" val SingleType: NameType = "SingleType" val SuperType: NameType = "SuperType" - val SyntacticApplied: NameType = "SyntacticApplied" - val SyntacticAssign: NameType = "SyntacticAssign" - val SyntacticBlock: NameType = "SyntacticBlock" - val SyntacticClassDef: NameType = "SyntacticClassDef" - val SyntacticDefDef: NameType = "SyntacticDefDef" - val SyntacticEmptyTypeTree: NameType = "SyntacticEmptyTypeTree" - val SyntacticFilter: NameType = "SyntacticFilter" - val SyntacticFor: NameType = "SyntacticFor" - val SyntacticForYield: NameType = "SyntacticForYield" - val SyntacticFunction: NameType = "SyntacticFunction" - val SyntacticFunctionType: NameType = "SyntacticFunctionType" - val SyntacticIdent: NameType = "SyntacticIdent" - val SyntacticImport: NameType = "SyntacticImport" - val SyntacticMatch: NameType = "SyntacticMatch" - val SyntacticNew: NameType = "SyntacticNew" - val SyntacticObjectDef: NameType = "SyntacticObjectDef" - val SyntacticPackageObjectDef: NameType = "SyntacticPackageObjectDef" - val SyntacticPatDef: NameType = "SyntacticPatDef" - val SyntacticTraitDef: NameType = "SyntacticTraitDef" - val SyntacticTry: NameType = "SyntacticTry" - val SyntacticTuple: NameType = "SyntacticTuple" - val SyntacticTupleType: NameType = "SyntacticTupleType" - val SyntacticTypeApplied: NameType = "SyntacticTypeApplied" - val SyntacticValDef: NameType = "SyntacticValDef" - val SyntacticValEq: NameType = "SyntacticValEq" - val SyntacticValFrom: NameType = "SyntacticValFrom" - val SyntacticVarDef: NameType = "SyntacticVarDef" val This: NameType = "This" val ThisType: NameType = "ThisType" val Tuple2: NameType = "Tuple2" @@ -803,14 +780,54 @@ trait StdNames { val values : NameType = "values" val wait_ : NameType = "wait" val withFilter: NameType = "withFilter" + val xml: NameType = "xml" val zero: NameType = "zero" // quasiquote interpolators: - val q: NameType = "q" - val tq: NameType = "tq" - val cq: NameType = "cq" - val pq: NameType = "pq" - val fq: NameType = "fq" + val q: NameType = "q" + val tq: NameType = "tq" + val cq: NameType = "cq" + val pq: NameType = "pq" + val fq: NameType = "fq" + + // quasiquote's syntactic combinators + val SyntacticAnnotatedType: NameType = "SyntacticAnnotatedType" + val SyntacticApplied: NameType = "SyntacticApplied" + val SyntacticAppliedType: NameType = "SyntacticAppliedType" + val SyntacticAssign: 
NameType = "SyntacticAssign" + val SyntacticBlock: NameType = "SyntacticBlock" + val SyntacticClassDef: NameType = "SyntacticClassDef" + val SyntacticCompoundType: NameType = "SyntacticCompoundType" + val SyntacticDefDef: NameType = "SyntacticDefDef" + val SyntacticEmptyTypeTree: NameType = "SyntacticEmptyTypeTree" + val SyntacticExistentialType: NameType = "SyntacticExistentialType" + val SyntacticFilter: NameType = "SyntacticFilter" + val SyntacticFor: NameType = "SyntacticFor" + val SyntacticForYield: NameType = "SyntacticForYield" + val SyntacticFunction: NameType = "SyntacticFunction" + val SyntacticFunctionType: NameType = "SyntacticFunctionType" + val SyntacticImport: NameType = "SyntacticImport" + val SyntacticMatch: NameType = "SyntacticMatch" + val SyntacticNew: NameType = "SyntacticNew" + val SyntacticObjectDef: NameType = "SyntacticObjectDef" + val SyntacticPackageObjectDef: NameType = "SyntacticPackageObjectDef" + val SyntacticPartialFunction: NameType = "SyntacticPartialFunction" + val SyntacticPatDef: NameType = "SyntacticPatDef" + val SyntacticSelectTerm: NameType = "SyntacticSelectTerm" + val SyntacticSelectType: NameType = "SyntacticSelectType" + val SyntacticSingletonType: NameType = "SyntacticSingletonType" + val SyntacticTermIdent: NameType = "SyntacticTermIdent" + val SyntacticTraitDef: NameType = "SyntacticTraitDef" + val SyntacticTry: NameType = "SyntacticTry" + val SyntacticTuple: NameType = "SyntacticTuple" + val SyntacticTupleType: NameType = "SyntacticTupleType" + val SyntacticTypeApplied: NameType = "SyntacticTypeApplied" + val SyntacticTypeIdent: NameType = "SyntacticTypeIdent" + val SyntacticTypeProjection: NameType = "SyntacticTypeProjection" + val SyntacticValDef: NameType = "SyntacticValDef" + val SyntacticValEq: NameType = "SyntacticValEq" + val SyntacticValFrom: NameType = "SyntacticValFrom" + val SyntacticVarDef: NameType = "SyntacticVarDef" // unencoded operators object raw { @@ -1131,6 +1148,7 @@ trait StdNames { final val GetClassLoader: TermName = newTermName("getClassLoader") final val GetMethod: TermName = newTermName("getMethod") final val Invoke: TermName = newTermName("invoke") + final val InvokeExact: TermName = newTermName("invokeExact") val Boxed = immutable.Map[TypeName, TypeName]( tpnme.Boolean -> BoxedBoolean, diff --git a/src/reflect/scala/reflect/internal/SymbolPairs.scala b/src/reflect/scala/reflect/internal/SymbolPairs.scala index c088e8f57c10..4763e77a348a 100644 --- a/src/reflect/scala/reflect/internal/SymbolPairs.scala +++ b/src/reflect/scala/reflect/internal/SymbolPairs.scala @@ -8,7 +8,6 @@ package reflect package internal import scala.collection.mutable -import Flags._ import util.HashSet import scala.annotation.tailrec diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index e50c65c9ca88..bea697943186 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -46,15 +46,12 @@ abstract class SymbolTable extends macros.Universe with pickling.Translations with FreshNames with Internals + with Reporting { val gen = new InternalTreeGen { val global: SymbolTable.this.type = SymbolTable.this } def log(msg: => AnyRef): Unit - def warning(msg: String): Unit = Console.err.println(msg) - def inform(msg: String): Unit = Console.err.println(msg) - def globalError(msg: String): Unit = abort(msg) - def abort(msg: String): Nothing = throw new FatalError(supplementErrorMessage(msg)) protected def 
elapsedMessage(msg: String, start: Long) = msg + " in " + (TimeUnit.NANOSECONDS.toMillis(System.nanoTime()) - start) + "ms" @@ -81,9 +78,6 @@ abstract class SymbolTable extends macros.Universe /** Prints a stack trace if -Ydebug or equivalent was given, otherwise does nothing. */ def debugStack(t: Throwable): Unit = devWarning(throwableAsString(t)) - /** Overridden when we know more about what was happening during a failure. */ - def supplementErrorMessage(msg: String): String = msg - private[scala] def printCaller[T](msg: String)(result: T) = { Console.err.println("%s: %s\nCalled from: %s".format(msg, result, (new Throwable).getStackTrace.drop(2).take(50).mkString("\n"))) @@ -338,13 +332,12 @@ abstract class SymbolTable extends macros.Universe /** if there's a `package` member object in `pkgClass`, enter its members into it. */ def openPackageModule(pkgClass: Symbol) { - val pkgModule = pkgClass.info.decl(nme.PACKAGEkw) + val pkgModule = pkgClass.packageObject def fromSource = pkgModule.rawInfo match { case ltp: SymLoader => ltp.fromSource case _ => false } if (pkgModule.isModule && !fromSource) { - // println("open "+pkgModule)//DEBUG openPackageModule(pkgModule, pkgClass) } } diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 03d8f9783150..f2aa14b8660a 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -55,25 +55,22 @@ trait Symbols extends api.Symbols { self: SymbolTable => def newFreeTypeSymbol(name: TypeName, flags: Long = 0L, origin: String): FreeTypeSymbol = new FreeTypeSymbol(name, origin) initFlags flags - /** The original owner of a class. Used by the backend to generate - * EnclosingMethod attributes. + /** + * This map stores the original owner the the first time the owner of a symbol is re-assigned. + * The original owner of a symbol is needed in some places in the backend. Ideally, owners should + * be versioned like the type history. */ - val originalOwner = perRunCaches.newMap[Symbol, Symbol]() + private val originalOwnerMap = perRunCaches.newMap[Symbol, Symbol]() // TODO - don't allow the owner to be changed without checking invariants, at least // when under some flag. Define per-phase invariants for owner/owned relationships, // e.g. after flatten all classes are owned by package classes, there are lots and // lots of these to be declared (or more realistically, discovered.) 
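The `originalOwnerMap` comment above describes record-once semantics: only the first re-assignment of a symbol's owner is remembered, so the pre-lambdalift owner can still be recovered later. A simplified standalone sketch of that behaviour follows; `Sym` is a hypothetical type, and unlike the real code (which falls back to `rawowner`) the fallback here is simply the current owner.

object OriginalOwnerSketch {
  final class Sym(var owner: Sym)
  private val originalOwners = scala.collection.mutable.Map.empty[Sym, Sym]

  def changeOwner(sym: Sym, newOwner: Sym): Unit = {
    // remember the owner only the first time it is re-assigned
    // (symbols that do not have an owner yet are skipped, like the NoSymbol case above)
    if (sym.owner != null && !originalOwners.contains(sym)) originalOwners(sym) = sym.owner
    sym.owner = newOwner
  }

  def originalOwner(sym: Sym): Sym = originalOwners.getOrElse(sym, sym.owner)
}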
- protected def saveOriginalOwner(sym: Symbol) { - if (originalOwner contains sym) () - else originalOwner(sym) = sym.rawowner - } - protected def originalEnclosingMethod(sym: Symbol): Symbol = { - if (sym.isMethod || sym == NoSymbol) sym - else { - val owner = originalOwner.getOrElse(sym, sym.rawowner) - if (sym.isLocalDummy) owner.enclClass.primaryConstructor - else originalEnclosingMethod(owner) + protected def saveOriginalOwner(sym: Symbol): Unit = { + // some synthetic symbols have NoSymbol as owner initially + if (sym.owner != NoSymbol) { + if (originalOwnerMap contains sym) () + else originalOwnerMap(sym) = sym.rawowner } } @@ -176,7 +173,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => with HasFlags with Annotatable[Symbol] with Attachable { - // makes sure that all symbols that runtime reflection deals with are synchronized private def isSynchronized = this.isInstanceOf[scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol] private def isAprioriThreadsafe = isThreadsafe(AllOps) @@ -185,7 +181,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => type AccessBoundaryType = Symbol type AnnotationType = AnnotationInfo - // TODO - don't allow names to be renamed in this unstructured a fashion. + // TODO - don't allow names to be renamed in this unstructured fashion. // Rename as little as possible. Enforce invariants on all renames. type TypeOfClonedSymbol >: Null <: Symbol { type NameType = Symbol.this.NameType } @@ -686,7 +682,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * to fix the core of the compiler risk stability a few weeks before the final release. * upd. Haha, "a few weeks before the final release". This surely sounds familiar :) * - * However we do need to fix this for runtime reflection, since this idionsynchrazy is not something + * However we do need to fix this for runtime reflection, since this idiosyncrasy is not something * we'd like to expose to reflection users. Therefore a proposed solution is to check whether we're in a * runtime reflection universe, and if yes and if we've not yet loaded the requested info, then to commence initialization. */ @@ -738,27 +734,41 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def hasGetter = isTerm && nme.isLocalName(name) - /** A little explanation for this confusing situation. - * Nested modules which have no static owner when ModuleDefs - * are eliminated (refchecks) are given the lateMETHOD flag, - * which makes them appear as methods after refchecks. - * Here's an example where one can see all four of FF FT TF TT - * for (isStatic, isMethod) at various phases. + /** + * Nested modules which have no static owner when ModuleDefs are eliminated (refchecks) are + * given the lateMETHOD flag, which makes them appear as methods after refchecks. * - * trait A1 { case class Quux() } - * object A2 extends A1 { object Flax } - * // -- namer object Quux in trait A1 - * // -M flatten object Quux in trait A1 - * // S- flatten object Flax in object A2 - * // -M posterasure object Quux in trait A1 - * // -M jvm object Quux in trait A1 - * // SM jvm object Quux in object A2 + * Note: the lateMETHOD flag is added lazily in the info transformer of the RefChecks phase. + * This means that forcing the `sym.info` may change the value of `sym.isMethod`. Forcing the + * info is in the responsibility of the caller. Doing it eagerly here was tried (0ccdb151f) but + * has proven to lead to bugs (SI-8907). 
* - * So "isModuleNotMethod" exists not for its achievement in - * brevity, but to encapsulate the relevant condition. + * Here's an example where one can see all four of FF FT TF TT for (isStatic, isMethod) at + * various phases. + * + * trait A1 { case class Quux() } + * object A2 extends A1 { object Flax } + * // -- namer object Quux in trait A1 + * // -M flatten object Quux in trait A1 + * // S- flatten object Flax in object A2 + * // -M posterasure object Quux in trait A1 + * // -M jvm object Quux in trait A1 + * // SM jvm object Quux in object A2 + * + * So "isModuleNotMethod" exists not for its achievement in brevity, but to encapsulate the + * relevant condition. */ def isModuleNotMethod = isModule && !isMethod - def isStaticModule = isModuleNotMethod && isStatic + + // After RefChecks, the `isStatic` check is mostly redundant: all non-static modules should + // be methods (and vice versa). There's a corner case on the vice-versa with mixed-in module + // symbols: + // trait T { object A } + // object O extends T + // The module symbol A is cloned into T$impl (addInterfaces), and then cloned into O (mixin). + // Since the original A is not static, it's turned into a method. The clone in O however is + // static (owned by a module), but it's also a method. + def isStaticModule = isModuleNotMethod && isStatic final def isInitializedToDefault = !isType && hasAllFlags(DEFAULTINIT | ACCESSOR) final def isThisSym = isTerm && owner.thisSym == this @@ -775,12 +785,17 @@ trait Symbols extends api.Symbols { self: SymbolTable => info.firstParent.typeSymbol == AnyValClass && !isPrimitiveValueClass final def isMethodWithExtension = - isMethod && owner.isDerivedValueClass && !isParamAccessor && !isConstructor && !hasFlag(SUPERACCESSOR) && !isMacro + isMethod && owner.isDerivedValueClass && !isParamAccessor && !isConstructor && !hasFlag(SUPERACCESSOR) && !isMacro && !isSpecialized final def isAnonymousFunction = isSynthetic && (name containsName tpnme.ANON_FUN_NAME) + final def isDelambdafyFunction = isSynthetic && (name containsName tpnme.DELAMBDAFY_LAMBDA_CLASS_NAME) final def isDefinedInPackage = effectiveOwner.isPackageClass final def needsFlatClasses = phase.flatClasses && rawowner != NoSymbol && !rawowner.isPackageClass + // TODO introduce a flag for these? + final def isPatternTypeVariable: Boolean = + isAbstractType && !isExistential && !isTypeParameterOrSkolem && isLocalToBlock + /** change name by appending $$ * Do the same for any accessed symbols or setters/getters. * Implementation in TermSymbol. @@ -798,6 +813,12 @@ trait Symbols extends api.Symbols { self: SymbolTable => */ def skipPackageObject: Symbol = this + /** The package object symbol corresponding to this package or package class symbol, or NoSymbol otherwise */ + def packageObject: Symbol = + if (isPackageClass) tpe.packageObject + else if (isPackage) moduleClass.packageObject + else NoSymbol + /** If this is a constructor, its owner: otherwise this. */ final def skipConstructor: Symbol = if (isConstructor) owner else this @@ -909,10 +930,31 @@ trait Symbols extends api.Symbols { self: SymbolTable => ) final def isModuleVar = hasFlag(MODULEVAR) - /** Is this symbol static (i.e. with no outer instance)? - * Q: When exactly is a sym marked as STATIC? - * A: If it's a member of a toplevel object, or of an object contained in a toplevel object, or any number of levels deep. - * http://groups.google.com/group/scala-internals/browse_thread/thread/d385bcd60b08faf6 + /** + * Is this symbol static (i.e. with no outer instance)? 
+ * Q: When exactly is a sym marked as STATIC? + * A: If it's a member of a toplevel object, or of an object contained in a toplevel object, or + * any number of levels deep. + * http://groups.google.com/group/scala-internals/browse_thread/thread/d385bcd60b08faf6 + * + * TODO: should this only be invoked on class / module symbols? because there's also `isStaticMember`. + * + * Note: the result of `isStatic` changes over time. + * - Lambdalift local definitions to the class level, the `owner` field is modified. + * object T { def foo { object O } } + * After lambdalift, the OModule.isStatic is true. + * + * - After flatten, nested classes are moved to the package level. Invoking `owner` on a + * class returns a package class, for which `isStaticOwner` is true. For example, + * class C { object O } + * OModuleClass.isStatic is true after flatten. Using phase travel to get before flatten, + * method `owner` returns the class C. + * + * Why not make a stable version of `isStatic`? Maybe some parts of the compiler depend on the + * current implementation. For example + * trait T { def foo = 1 } + * The method `foo` in the implementation class T$impl will be `isStatic`, because trait + * impl classes get the `lateMODULE` flag (T$impl.isStaticOwner is true). */ def isStatic = (this hasFlag STATIC) || owner.isStaticOwner @@ -1099,13 +1141,28 @@ trait Symbols extends api.Symbols { self: SymbolTable => // ------ owner attribute -------------------------------------------------------------- - /** In general when seeking the owner of a symbol, one should call `owner`. - * The other possibilities include: - * - call `safeOwner` if it is expected that the target may be NoSymbol - * - call `assertOwner` if it is an unrecoverable error if the target is NoSymbol + /** + * The owner of a symbol. Changes over time to adapt to the structure of the trees: + * - Up to lambdalift, the owner is the lexically enclosing definition. For definitions + * in a local block, the owner is also the next enclosing definition. + * - After lambdalift, all local method and class definitions (those not owned by a class + * or package class) change their owner to the enclosing class. This is done through + * a destructive "sym.owner = sym.owner.enclClass". The old owner is saved by + * saveOriginalOwner. + * - After flatten, all classes are owned by a PackageClass. This is done through a + * phase check (if after flatten) in the (overridden) method "def owner" in + * ModuleSymbol / ClassSymbol. The `rawowner` field is not modified. + * - Owners are also changed in other situations, for example when moving trees into a new + * lexical context, e.g. in the named/default arguments tranformation, or when translating + * extension method definitions. + * + * In general when seeking the owner of a symbol, one should call `owner`. + * The other possibilities include: + * - call `safeOwner` if it is expected that the target may be NoSymbol + * - call `assertOwner` if it is an unrecoverable error if the target is NoSymbol * - * `owner` behaves like `safeOwner`, but logs NoSymbol.owner calls under -Xdev. - * `assertOwner` aborts compilation immediately if called on NoSymbol. + * `owner` behaves like `safeOwner`, but logs NoSymbol.owner calls under -Xdev. + * `assertOwner` aborts compilation immediately if called on NoSymbol. 
*/ def owner: Symbol = { if (Statistics.hotEnabled) Statistics.incCounter(ownerCount) @@ -1114,6 +1171,11 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def safeOwner: Symbol = if (this eq NoSymbol) NoSymbol else owner final def assertOwner: Symbol = if (this eq NoSymbol) abort("no-symbol does not have an owner") else owner + /** + * The initial owner of this symbol. + */ + def originalOwner: Symbol = originalOwnerMap.getOrElse(this, rawowner) + // TODO - don't allow the owner to be changed without checking invariants, at least // when under some flag. Define per-phase invariants for owner/owned relationships, // e.g. after flatten all classes are owned by package classes, there are lots and @@ -1127,7 +1189,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => } def ownerChain: List[Symbol] = this :: owner.ownerChain - def originalOwnerChain: List[Symbol] = this :: originalOwner.getOrElse(this, rawowner).originalOwnerChain // Non-classes skip self and return rest of owner chain; overridden in ClassSymbol. def enclClassChain: List[Symbol] = owner.enclClassChain @@ -1406,11 +1467,9 @@ trait Symbols extends api.Symbols { self: SymbolTable => def info: Type = try { var cnt = 0 while (validTo == NoPeriod) { - //if (settings.debug.value) System.out.println("completing " + this);//DEBUG assert(infos ne null, this.name) assert(infos.prev eq null, this.name) val tp = infos.info - //if (settings.debug.value) System.out.println("completing " + this.rawname + tp.getClass());//debug if ((_rawflags & LOCKED) != 0L) { // rolled out once for performance lock { @@ -1419,6 +1478,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => } } else { _rawflags |= LOCKED + // TODO another commented out lines - this should be solved in one way or another // activeLocks += 1 // lockedSyms += this } @@ -1540,13 +1600,11 @@ trait Symbols extends api.Symbols { self: SymbolTable => assert(isCompilerUniverse) if (infos == null || runId(infos.validFrom) == currentRunId) { infos - } else if (isPackageClass) { - // SI-7801 early phase package scopes are mutated in new runs (Namers#enterPackage), so we have to - // discard transformed infos, rather than just marking them as from this run. - val oldest = infos.oldest - oldest.validFrom = validTo - this.infos = oldest - oldest + } else if (infos ne infos.oldest) { + // SI-8871 Discard all but the first element of type history. Specialization only works in the resident + // compiler / REPL if re-run its info transformer in this run to correctly populate its + // per-run caches, e.g. typeEnv + adaptInfos(infos.oldest) } else { val prev1 = adaptInfos(infos.prev) if (prev1 ne infos.prev) prev1 @@ -1904,6 +1962,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** The next enclosing method. */ def enclMethod: Symbol = if (isSourceMethod) this else owner.enclMethod + /** The primary constructor of a class. */ def primaryConstructor: Symbol = NoSymbol /** The self symbol (a TermSymbol) of a class with explicit self type, or else the @@ -1970,12 +2029,19 @@ trait Symbols extends api.Symbols { self: SymbolTable => info.decls.filter(sym => !sym.isMethod && sym.isParamAccessor).toList /** The symbol accessed by this accessor (getter or setter) function. */ - final def accessed: Symbol = accessed(owner.info) - - /** The symbol accessed by this accessor function, but with given owner type. 
*/ - final def accessed(ownerTp: Type): Symbol = { + final def accessed: Symbol = { assert(hasAccessorFlag, this) - ownerTp decl localName + val localField = owner.info decl localName + + if (localField == NoSymbol && this.hasFlag(MIXEDIN)) { + // SI-8087: private[this] fields don't have a `localName`. When searching the accessed field + // for a mixin accessor of such a field, we need to look for `name` instead. + // The phase travel ensures that the field is found (`owner` is the trait class symbol, the + // field gets removed from there in later phases). + enteringPhase(picklerPhase)(owner.info).decl(name).suchThat(!_.isAccessor) + } else { + localField + } } /** The module corresponding to this module class (note that this @@ -2061,16 +2127,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => * is not one. */ def enclosingPackage: Symbol = enclosingPackageClass.companionModule - /** Return the original enclosing method of this symbol. It should return - * the same thing as enclMethod when called before lambda lift, - * but it preserves the original nesting when called afterwards. - * - * @note This method is NOT available in the presentation compiler run. The - * originalOwner map is not populated for memory considerations (the symbol - * may hang on to lazy types and in turn to whole (outdated) compilation units. - */ - def originalEnclosingMethod: Symbol = Symbols.this.originalEnclosingMethod(this) - /** The method or class which logically encloses the current symbol. * If the symbol is defined in the initialization part of a template * this is the template's primary constructor, otherwise it is @@ -2107,6 +2163,12 @@ trait Symbols extends api.Symbols { self: SymbolTable => if (isClass) this else moduleClass } else owner.enclosingTopLevelClass + /** The top-level class or local dummy symbol containing this symbol. */ + def enclosingTopLevelClassOrDummy: Symbol = + if (isTopLevel) { + if (isClass) this else moduleClass.orElse(this) + } else owner.enclosingTopLevelClassOrDummy + /** Is this symbol defined in the same scope and compilation unit as `that` symbol? */ def isCoDefinedWith(that: Symbol) = ( !rawInfoIsNoType @@ -2342,7 +2404,11 @@ trait Symbols extends api.Symbols { self: SymbolTable => def localName: TermName = name.localName /** The setter of this value or getter definition, or NoSymbol if none exists */ + @deprecated("Use `setterIn` instead", "2.11.0") final def setter(base: Symbol, hasExpandedName: Boolean = needsExpandedSetterName): Symbol = + setterIn(base, hasExpandedName) + + final def setterIn(base: Symbol, hasExpandedName: Boolean = needsExpandedSetterName): Symbol = base.info decl setterNameInBase(base, hasExpandedName) filter (_.hasAccessorFlag) def needsExpandedSetterName = ( @@ -2752,7 +2818,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def outerSource: Symbol = // SI-6888 Approximate the name to workaround the deficiencies in `nme.originalName` - // in the face of clases named '$'. SI-2806 remains open to address the deeper problem. + // in the face of classes named '$'. SI-2806 remains open to address the deeper problem. 
if (originalName endsWith (nme.OUTER)) initialize.referenced else NoSymbol @@ -2806,6 +2872,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def owner = { if (Statistics.hotEnabled) Statistics.incCounter(ownerCount) + // a module symbol may have the lateMETHOD flag after refchecks, see isModuleNotMethod if (!isMethod && needsFlatClasses) rawowner.owner else rawowner } @@ -3188,8 +3255,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def primaryConstructor = { val c = info decl primaryConstructorName - if (isJavaDefined) NoSymbol // need to force info before checking the flag - else if (c.isOverloaded) c.alternatives.head else c + if (c.isOverloaded) c.alternatives.head else c } override def associatedFile = ( @@ -3310,13 +3376,9 @@ trait Symbols extends api.Symbols { self: SymbolTable => def implicitMembers: Scope = { val tp = info if ((implicitMembersCacheKey1 ne tp) || (implicitMembersCacheKey2 ne tp.decls.elems)) { - // Skip a package object class, because the members are also in - // the package and we wish to avoid spurious ambiguities as in pos/t3999. - if (!isPackageObjectClass) { - implicitMembersCacheValue = tp.implicitMembers - implicitMembersCacheKey1 = tp - implicitMembersCacheKey2 = tp.decls.elems - } + implicitMembersCacheValue = tp.membersBasedOnFlags(BridgeFlags, IMPLICIT) + implicitMembersCacheKey1 = tp + implicitMembersCacheKey2 = tp.decls.elems } implicitMembersCacheValue } @@ -3371,10 +3433,11 @@ trait Symbols extends api.Symbols { self: SymbolTable => trait StubSymbol extends Symbol { devWarning("creating stub symbol to defer error: " + missingMessage) - protected def missingMessage: String + def missingMessage: String /** Fail the stub by throwing a [[scala.reflect.internal.MissingRequirementError]]. */ - override final def failIfStub() = {MissingRequirementError.signal(missingMessage)} // + override final def failIfStub() = + MissingRequirementError.signal(missingMessage) /** Fail the stub by reporting an error to the reporter, setting the IS_ERROR flag * on this symbol, and returning the dummy value `alt`. 
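The reworked `implicitMembers` above reuses its cached scope only while both the type and the head of its `decls.elems` list are the exact same objects, which is a cheap way to notice that the info or its declarations changed. A generic sketch of that identity-keyed caching pattern; `InfoCache` is an invented name, not compiler API.

// Identity-keyed memoization: recompute only when either key is a different object (`ne`).
final class InfoCache[K1 <: AnyRef, K2 <: AnyRef, V](compute: (K1, K2) => V) {
  private var key1: K1 = _
  private var key2: K2 = _
  private var cached: V = _

  def apply(k1: K1, k2: K2): V = {
    if ((key1 ne k1) || (key2 ne k2)) {
      cached = compute(k1, k2)
      key1 = k1
      key2 = k2
    }
    cached
  }
}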
@@ -3399,8 +3462,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def rawInfo = fail(NoType) override def companionSymbol = fail(NoSymbol) } - class StubClassSymbol(owner0: Symbol, name0: TypeName, protected val missingMessage: String) extends ClassSymbol(owner0, owner0.pos, name0) with StubSymbol - class StubTermSymbol(owner0: Symbol, name0: TermName, protected val missingMessage: String) extends TermSymbol(owner0, owner0.pos, name0) with StubSymbol + class StubClassSymbol(owner0: Symbol, name0: TypeName, val missingMessage: String) extends ClassSymbol(owner0, owner0.pos, name0) with StubSymbol + class StubTermSymbol(owner0: Symbol, name0: TermName, val missingMessage: String) extends TermSymbol(owner0, owner0.pos, name0) with StubSymbol trait FreeSymbol extends Symbol { def origin: String @@ -3451,6 +3514,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def enclClassChain = Nil override def enclClass: Symbol = this override def enclosingTopLevelClass: Symbol = this + override def enclosingTopLevelClassOrDummy: Symbol = this override def enclosingPackageClass: Symbol = this override def enclMethod: Symbol = this override def associatedFile = NoAbstractFile @@ -3467,7 +3531,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def rawInfo: Type = NoType override def accessBoundary(base: Symbol): Symbol = enclosingRootClass def cloneSymbolImpl(owner: Symbol, newFlags: Long) = abort("NoSymbol.clone()") - override def originalEnclosingMethod = this } protected def makeNoSymbol: NoSymbol = new NoSymbol @@ -3509,7 +3572,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * @param syms the prototypical symbols * @param symFn the function to create new symbols * @param tpe the prototypical type - * @return the new symbol-subsituted type + * @return the new symbol-substituted type */ def deriveType(syms: List[Symbol], symFn: Symbol => Symbol)(tpe: Type): Type = { val syms1 = deriveSymbols(syms, symFn) @@ -3524,7 +3587,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * @param as arguments to be passed to symFn together with symbols from syms (must be same length) * @param symFn the function to create new symbols based on `as` * @param tpe the prototypical type - * @return the new symbol-subsituted type + * @return the new symbol-substituted type */ def deriveType2[A](syms: List[Symbol], as: List[A], symFn: (Symbol, A) => Symbol)(tpe: Type): Type = { val syms1 = deriveSymbols2(syms, as, symFn) diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala index 9066c73393d0..4cedfe266568 100644 --- a/src/reflect/scala/reflect/internal/TreeGen.scala +++ b/src/reflect/scala/reflect/internal/TreeGen.scala @@ -191,8 +191,8 @@ abstract class TreeGen { ) val pkgQualifier = if (needsPackageQualifier) { - val packageObject = rootMirror.getPackageObjectWithMember(qual.tpe, sym) - Select(qual, nme.PACKAGE) setSymbol packageObject setType singleType(qual.tpe, packageObject) + val packageObject = qualsym.packageObject + Select(qual, nme.PACKAGE) setSymbol packageObject setType packageObject.typeOfThis } else qual @@ -451,10 +451,10 @@ abstract class TreeGen { def mkSyntheticUnit() = Literal(Constant(())).updateAttachment(SyntheticUnitAttachment) /** Create block of statements `stats` */ - def mkBlock(stats: List[Tree]): Tree = + def mkBlock(stats: List[Tree], doFlatten: Boolean = true): Tree = if (stats.isEmpty) mkSyntheticUnit() else if (!stats.last.isTerm) Block(stats, 
mkSyntheticUnit()) - else if (stats.length == 1) stats.head + else if (stats.length == 1 && doFlatten) stats.head else Block(stats.init, stats.last) /** Create a block that wraps multiple statements but don't @@ -713,7 +713,7 @@ abstract class TreeGen { val rhsUnchecked = mkUnchecked(rhs) - // TODO: clean this up -- there is too much information packked into mkPatDef's `pat` argument + // TODO: clean this up -- there is too much information packed into mkPatDef's `pat` argument // when it's a simple identifier (case Some((name, tpt)) -- above), // pat should have the type ascription that was specified by the user // however, in `case None` (here), we must be careful not to generate illegal pattern trees (such as `(a, b): Tuple2[Int, String]`) diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala index 7cf749c04831..4657fa0000ee 100644 --- a/src/reflect/scala/reflect/internal/TreeInfo.scala +++ b/src/reflect/scala/reflect/internal/TreeInfo.scala @@ -51,6 +51,11 @@ abstract class TreeInfo { case _ => false } + def isConstructorWithDefault(t: Tree) = t match { + case DefDef(_, nme.CONSTRUCTOR, _, vparamss, _, _) => mexists(vparamss)(_.mods.hasDefault) + case _ => false + } + /** Is tree a pure (i.e. non-side-effecting) definition? */ def isPureDef(tree: Tree): Boolean = tree match { @@ -504,13 +509,6 @@ abstract class TreeInfo { case _ => false } - /** The parameter ValDefs of a method definition that have vararg types of the form T* - */ - def repeatedParams(tree: Tree): List[ValDef] = tree match { - case DefDef(_, _, _, vparamss, _, _) => vparamss.flatten filter (vd => isRepeatedParamType(vd.tpt)) - case _ => Nil - } - /** Is tpt a by-name parameter type of the form => T? */ def isByNameParamType(tpt: Tree) = tpt match { case TypeTree() => definitions.isByNameParamType(tpt.tpe) @@ -590,7 +588,7 @@ abstract class TreeInfo { private def hasNoSymbol(t: Tree) = t.symbol == null || t.symbol == NoSymbol - /** Is this pattern node a synthetic catch-all case, added during PartialFuction synthesis before we know + /** Is this pattern node a synthetic catch-all case, added during PartialFunction synthesis before we know * whether the user provided cases are exhaustive. */ def isSyntheticDefaultCase(cdef: CaseDef) = cdef match { case CaseDef(Bind(nme.DEFAULT_CASE, _), EmptyTree, _) => true @@ -817,7 +815,7 @@ abstract class TreeInfo { object Unapplied { // Duplicated with `spliceApply` def unapply(tree: Tree): Option[Tree] = tree match { - // SI-7868 Admit Select() to account for numeric widening, e.g. .toInt + // SI-7868 Admit Select() to account for numeric widening, e.g. .toInt case Apply(fun, (Ident(nme.SELECTOR_DUMMY)| Select(Ident(nme.SELECTOR_DUMMY), _)) :: Nil) => Some(fun) case Apply(fun, _) => unapply(fun) diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index 9dc4baee32f9..35de3adff6ff 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -8,8 +8,8 @@ package reflect package internal import Flags._ -import pickling.PickleFormat._ import scala.collection.{ mutable, immutable } +import scala.reflect.macros.Attachments import util.Statistics trait Trees extends api.Trees { @@ -1075,6 +1075,13 @@ trait Trees extends api.Trees { override def setType(t: Type) = { requireLegal(t, NoType, "tpe"); this } override def tpe_=(t: Type) = setType(t) + // We silently ignore attempts to add attachments to `EmptyTree`. 
See SI-8947 for an + // example of a bug in macro expansion that this solves. + override def setAttachments(attachments: Attachments {type Pos = Position}): this.type = attachmentWarning() + override def updateAttachment[T: ClassTag](attachment: T): this.type = attachmentWarning() + override def removeAttachment[T: ClassTag]: this.type = attachmentWarning() + private def attachmentWarning(): this.type = {devWarning(s"Attempt to mutate attachments on $self ignored"); this} + private def requireLegal(value: Any, allowed: Any, what: String) = ( if (value != allowed) { log(s"can't set $what for $self to value other than $allowed") diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index e9e5a89aa7c8..4d230cc1cc93 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -589,7 +589,12 @@ trait Types def nonPrivateMembersAdmitting(admit: Long): Scope = membersBasedOnFlags(BridgeAndPrivateFlags & ~admit, 0) /** A list of all implicit symbols of this type (defined or inherited) */ - def implicitMembers: Scope = membersBasedOnFlags(BridgeFlags, IMPLICIT) + def implicitMembers: Scope = { + typeSymbolDirect match { + case sym: ModuleClassSymbol => sym.implicitMembers + case _ => membersBasedOnFlags(BridgeFlags, IMPLICIT) + } + } /** A list of all deferred symbols of this type (defined or inherited) */ def deferredMembers: Scope = membersBasedOnFlags(BridgeFlags, DEFERRED) @@ -606,6 +611,8 @@ trait Types def nonPrivateMember(name: Name): Symbol = memberBasedOnName(name, BridgeAndPrivateFlags) + def packageObject: Symbol = member(nme.PACKAGE) + /** The non-private member with given name, admitting members with given flags `admit`. * "Admitting" refers to the fact that members with a PRIVATE, BRIDGE, or VBRIDGE * flag are usually excluded from findMember results, but supplying any of those flags @@ -731,7 +738,7 @@ trait Types * `substThis(from, to).substSym(symsFrom, symsTo)`. * * `SubstThisAndSymMap` performs a breadth-first map over this type, which meant that - * symbol substitution occured before `ThisType` substitution. Consequently, in substitution + * symbol substitution occurred before `ThisType` substitution. Consequently, in substitution * of a `SingleType(ThisType(`from`), sym), symbols were rebound to `from` rather than `to`. 
*/ def substThisAndSym(from: Symbol, to: Type, symsFrom: List[Symbol], symsTo: List[Symbol]): Type = @@ -1600,7 +1607,14 @@ trait Types private var normalized: Type = _ private def normalizeImpl = { // TODO see comments around def intersectionType and def merge - def flatten(tps: List[Type]): List[Type] = tps flatMap { case RefinedType(parents, ds) if ds.isEmpty => flatten(parents) case tp => List(tp) } + // SI-8575 The dealias is needed here to keep subtyping transitive, example in run/t8575b.scala + def flatten(tps: List[Type]): List[Type] = { + def dealiasRefinement(tp: Type) = if (tp.dealias.isInstanceOf[RefinedType]) tp.dealias else tp + tps map dealiasRefinement flatMap { + case RefinedType(parents, ds) if ds.isEmpty => flatten(parents) + case tp => List(tp) + } + } val flattened = flatten(parents).distinct if (decls.isEmpty && hasLength(flattened, 1)) { flattened.head @@ -1687,7 +1701,7 @@ trait Types */ private var refs: Array[RefMap] = _ - /** The initialization state of the class: UnInialized --> Initializing --> Initialized + /** The initialization state of the class: UnInitialized --> Initializing --> Initialized * Syncnote: This var need not be protected with synchronized, because * it is accessed only from expansiveRefs, which is called only from * Typer. @@ -1972,7 +1986,7 @@ trait Types require(sym.isNonClassType, sym) /* Syncnote: These are pure caches for performance; no problem to evaluate these - * several times. Hence, no need to protected with synchronzied in a mutli-threaded + * several times. Hence, no need to protected with synchronized in a multi-threaded * usage scenario. */ private var relativeInfoCache: Type = _ @@ -2271,7 +2285,7 @@ trait Types case _ => args.mkString("(", ", ", ")") } private def customToString = sym match { - case RepeatedParamClass => args.head + "*" + case RepeatedParamClass | JavaRepeatedParamClass => args.head + "*" case ByNameParamClass => "=> " + args.head case _ => if (isFunctionTypeDirect(this)) { @@ -2605,7 +2619,7 @@ trait Types // derived from the existentially quantified type into the typing environment // (aka \Gamma, which tracks types for variables and constraints/kinds for types) // as a nice bonus, delaying this until we need it avoids cyclic errors - def tpars = underlying.typeSymbol.initialize.typeParams + def tpars = underlying.typeSymbolDirect.initialize.typeParams def newSkolem(quant: Symbol) = owner.newExistentialSkolem(quant, origin) def newSharpenedSkolem(quant: Symbol, tparam: Symbol): Symbol = { @@ -2643,7 +2657,7 @@ trait Types * nowhere inside a type argument * - no quantified type argument contains a quantified variable in its bound * - the typeref's symbol is not itself quantified - * - the prefix is not quanitified + * - the prefix is not quantified */ def isRepresentableWithWildcards = { val qset = quantified.toSet @@ -3101,7 +3115,7 @@ trait Types // addressed here: all lower bounds are retained and their intersection calculated when the // bounds are solved. // - // In a side-effect free universe, checking tp and tp.parents beofre checking tp.baseTypeSeq + // In a side-effect free universe, checking tp and tp.parents before checking tp.baseTypeSeq // would be pointless. In this case, each check we perform causes us to lose specificity: in // the end the best we'll do is the least specific type we tested against, since the typevar // does not see these checks as "probes" but as requirements to fulfill. 
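The SI-8575 change above dealiases refinement parents before flattening, so a refinement written through a type alias normalizes to the same flat parent list as its spelled-out expansion. A small user-level illustration, freely adapted and not the actual run/t8575b test:

object DealiasFlattenSketch {
  trait A; trait B; trait C
  type Alias = B with C

  // Both parameter types denote the same flattened refinement A with B with C,
  // so values move freely between the two spellings.
  def throughAlias(x: A with Alias): Unit = ()
  def spelledOut(y: A with B with C): Unit = throughAlias(y)
}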
@@ -3332,7 +3346,7 @@ trait Types * * SI-6385 Erasure's creation of bridges considers method signatures `exitingErasure`, * which contain `ErasedValueType`-s. In order to correctly consider the overriding - * and overriden signatures as equivalent in `run/t6385.scala`, it is critical that + * and overridden signatures as equivalent in `run/t6385.scala`, it is critical that * this type contains the erasure of the wrapped type, rather than the unerased type * of the value class itself, as was originally done. * @@ -4113,8 +4127,8 @@ trait Types def isSubArgs(tps1: List[Type], tps2: List[Type], tparams: List[Symbol], depth: Depth): Boolean = { def isSubArg(t1: Type, t2: Type, variance: Variance) = ( - (variance.isContravariant || isSubType(t1, t2, depth)) - && (variance.isCovariant || isSubType(t2, t1, depth)) + (variance.isCovariant || isSubType(t2, t1, depth)) // The order of these two checks can be material for performance (SI-8478) + && (variance.isContravariant || isSubType(t1, t2, depth)) ) corresponds3(tps1, tps2, mapList(tparams)(_.variance))(isSubArg) diff --git a/src/reflect/scala/reflect/internal/Variances.scala b/src/reflect/scala/reflect/internal/Variances.scala index 3bcfed7d341b..ef22df3f2e7f 100644 --- a/src/reflect/scala/reflect/internal/Variances.scala +++ b/src/reflect/scala/reflect/internal/Variances.scala @@ -32,7 +32,7 @@ trait Variances { /** Is every symbol in the owner chain between `site` and the owner of `sym` * either a term symbol or private[this]? If not, add `sym` to the set of - * esacped locals. + * escaped locals. * @pre sym.isLocalToThis */ @tailrec final def checkForEscape(sym: Symbol, site: Symbol) { @@ -75,7 +75,14 @@ trait Variances { def nextVariance(sym: Symbol, v: Variance): Variance = ( if (shouldFlip(sym, tvar)) v.flip else if (isLocalOnly(sym)) Bivariant - else if (sym.isAliasType) Invariant + else if (sym.isAliasType) ( + // Unsound pre-2.11 behavior preserved under -Xsource:2.10 + if (settings.isScala211 || sym.isOverridingSymbol) Invariant + else { + currentRun.reporting.deprecationWarning(sym.pos, s"Construct depends on unsound variance analysis and will not compile in scala 2.11 and beyond") + Bivariant + } + ) else v ) def loop(sym: Symbol, v: Variance): Variance = ( diff --git a/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala b/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala index a44bb547345c..662d841c917e 100644 --- a/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala +++ b/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala @@ -5,7 +5,7 @@ package annotations /** * An annotation that designates the annotated type should not be checked for violations of * type parameter bounds in the `refchecks` phase of the compiler. This can be used by synthesized - * code the uses an inferred type of an expression as the type of an artifict val/def (for example, + * code the uses an inferred type of an expression as the type of an artifact val/def (for example, * a temporary value introduced by an ANF transform). See [[https://issues.scala-lang.org/browse/SI-7694]]. 
* * @since 2.10.3 diff --git a/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala b/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala index 8615e34fad91..241638e88e31 100644 --- a/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala +++ b/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala @@ -196,10 +196,10 @@ object ByteCodecs { * * Sometimes returns (length+1) of the decoded array. Example: * - * scala> val enc = scala.reflect.generic.ByteCodecs.encode(Array(1,2,3)) + * scala> val enc = scala.reflect.internal.pickling.ByteCodecs.encode(Array(1,2,3)) * enc: Array[Byte] = Array(2, 5, 13, 1) * - * scala> scala.reflect.generic.ByteCodecs.decode(enc) + * scala> scala.reflect.internal.pickling.ByteCodecs.decode(enc) * res43: Int = 4 * * scala> enc diff --git a/src/reflect/scala/reflect/internal/pickling/Translations.scala b/src/reflect/scala/reflect/internal/pickling/Translations.scala index e56cf796cb10..d924cb3a0c7c 100644 --- a/src/reflect/scala/reflect/internal/pickling/Translations.scala +++ b/src/reflect/scala/reflect/internal/pickling/Translations.scala @@ -62,21 +62,22 @@ trait Translations { } def picklerTag(tpe: Type): Int = tpe match { - case NoType => NOtpe - case NoPrefix => NOPREFIXtpe - case _: ThisType => THIStpe - case _: SingleType => SINGLEtpe - case _: SuperType => SUPERtpe - case _: ConstantType => CONSTANTtpe - case _: TypeBounds => TYPEBOUNDStpe - case _: TypeRef => TYPEREFtpe - case _: RefinedType => REFINEDtpe - case _: ClassInfoType => CLASSINFOtpe - case _: MethodType => METHODtpe - case _: PolyType => POLYtpe - case _: NullaryMethodType => POLYtpe // bad juju, distinct ints are not at a premium! - case _: ExistentialType => EXISTENTIALtpe - case _: AnnotatedType => ANNOTATEDtpe + case NoType => NOtpe + case NoPrefix => NOPREFIXtpe + case _: ThisType => THIStpe + case _: SingleType => SINGLEtpe + case _: SuperType => SUPERtpe + case _: ConstantType => CONSTANTtpe + case _: TypeBounds => TYPEBOUNDStpe + case _: TypeRef => TYPEREFtpe + case _: RefinedType => REFINEDtpe + case _: ClassInfoType => CLASSINFOtpe + case _: MethodType => METHODtpe + case _: PolyType => POLYtpe + case _: NullaryMethodType => POLYtpe // bad juju, distinct ints are not at a premium! + case _: ExistentialType => EXISTENTIALtpe + case StaticallyAnnotatedType(_, _) => ANNOTATEDtpe + case _: AnnotatedType => picklerTag(tpe.underlying) } def picklerSubTag(tree: Tree): Int = tree match { diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala index 42f794736a52..1fc7aebab057 100644 --- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala +++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala @@ -211,7 +211,12 @@ abstract class UnPickler { def fromName(name: Name) = name.toTermName match { case nme.ROOT => loadingMirror.RootClass case nme.ROOTPKG => loadingMirror.RootPackage - case _ => adjust(owner.info.decl(name)) + case _ => + val decl = owner match { + case stub: StubSymbol => NoSymbol // SI-8502 Don't call .info and fail the stub + case _ => owner.info.decl(name) + } + adjust(decl) } def nestedObjectSymbol: Symbol = { // If the owner is overloaded (i.e. 
a method), it's not possible to select the @@ -229,8 +234,28 @@ abstract class UnPickler { NoSymbol } + def moduleAdvice(missing: String): String = { + val module = + if (missing.startsWith("scala.xml")) Some(("org.scala-lang.modules", "scala-xml")) + else if (missing.startsWith("scala.util.parsing")) Some(("org.scala-lang.modules", "scala-parser-combinators")) + else if (missing.startsWith("scala.swing")) Some(("org.scala-lang.modules", "scala-swing")) + else if (missing.startsWith("scala.util.continuations")) Some(("org.scala-lang.plugins", "scala-continuations-library")) + else None + + (module map { case (group, art) => + s"""\n(NOTE: It looks like the $art module is missing; try adding a dependency on "$group" : "$art". + | See http://docs.scala-lang.org/overviews/core/scala-2.11.html for more information.)""".stripMargin + } getOrElse "") + } + + def localDummy = { + if (nme.isLocalDummyName(name)) + owner.newLocalDummy(NoPosition) + else NoSymbol + } + // (1) Try name. - fromName(name) orElse { + localDummy orElse fromName(name) orElse { // (2) Try with expanded name. Can happen if references to private // symbols are read from outside: for instance when checking the children // of a class. See #1722. @@ -240,11 +265,13 @@ abstract class UnPickler { // (4) Call the mirror's "missing" hook. adjust(mirrorThatLoaded(owner).missingHook(owner, name)) orElse { // (5) Create a stub symbol to defer hard failure a little longer. + val advice = moduleAdvice(s"${owner.fullName}.$name") val missingMessage = - s"""|bad symbolic reference. A signature in $filename refers to ${name.longString} - |in ${owner.kindString} ${owner.fullName} which is not available. - |It may be completely missing from the current classpath, or the version on - |the classpath might be incompatible with the version used when compiling $filename.""".stripMargin + s"""|missing or invalid dependency detected while loading class file '$filename'. + |Could not access ${name.longString} in ${owner.kindString} ${owner.fullName}, + |because it (or its dependencies) are missing. Check your build definition for + |missing or conflicting dependencies. (Re-run with `-Ylog-classpath` to see the problematic classpath.) + |A full rebuild may help if '$filename' was compiled against an incompatible version of ${owner.fullName}.$advice""".stripMargin owner.newStubSymbol(name, missingMessage) } } @@ -275,6 +302,27 @@ abstract class UnPickler { def pflags = flags & PickledFlags def finishSym(sym: Symbol): Symbol = { + /** + * member symbols (symbols owned by a class) are added to the class's scope, with a number + * of exceptions: + * + * (.) ... + * (1) `local child` represents local child classes, see comment in Pickler.putSymbol. + * Since it is not a member, it should not be entered in the owner's scope. 
+ * (2) Similarly, we ignore local dummy symbols, as seen in SI-8868 + */ + def shouldEnterInOwnerScope = { + sym.owner.isClass && + sym != classRoot && + sym != moduleRoot && + !sym.isModuleClass && + !sym.isRefinementClass && + !sym.isTypeParameter && + !sym.isExistentiallyBound && + sym.rawname != tpnme.LOCAL_CHILD && // (1) + !nme.isLocalDummyName(sym.rawname) // (2) + } + markFlagsCompleted(sym)(mask = AllFlags) sym.privateWithin = privateWithin sym.info = ( @@ -287,8 +335,7 @@ abstract class UnPickler { newLazyTypeRefAndAlias(inforef, readNat()) } ) - if (sym.owner.isClass && sym != classRoot && sym != moduleRoot && - !sym.isModuleClass && !sym.isRefinementClass && !sym.isTypeParameter && !sym.isExistentiallyBound) + if (shouldEnterInOwnerScope) symScope(sym.owner) enter sym sym @@ -347,14 +394,24 @@ abstract class UnPickler { case CLASSINFOtpe => ClassInfoType(parents, symScope(clazz), clazz) } + def readThisType(): Type = { + val sym = readSymbolRef() match { + case stub: StubSymbol if !stub.isClass => + // SI-8502 This allows us to create a stub for a unpickled reference to `missingPackage.Foo`. + stub.owner.newStubSymbol(stub.name.toTypeName, stub.missingMessage) + case sym => sym + } + ThisType(sym) + } + // We're stuck with the order types are pickled in, but with judicious use // of named parameters we can recapture a declarative flavor in a few cases. // But it's still a rat's nest of adhockery. (tag: @switch) match { case NOtpe => NoType case NOPREFIXtpe => NoPrefix - case THIStpe => ThisType(readSymbolRef()) - case SINGLEtpe => SingleType(readTypeRef(), readSymbolRef()) + case THIStpe => readThisType() + case SINGLEtpe => SingleType(readTypeRef(), readSymbolRef().filter(_.isStable)) // SI-7596 account for overloading case SUPERtpe => SuperType(readTypeRef(), readTypeRef()) case CONSTANTtpe => ConstantType(readConstantRef()) case TYPEREFtpe => TypeRef(readTypeRef(), readSymbolRef(), readTypes()) @@ -666,10 +723,24 @@ abstract class UnPickler { private val p = phase protected def completeInternal(sym: Symbol) : Unit = try { val tp = at(i, () => readType(sym.isTerm)) // after NMT_TRANSITION, revert `() => readType(sym.isTerm)` to `readType` - if (p ne null) - slowButSafeEnteringPhase(p) (sym setInfo tp) + + // This is a temporary fix allowing to read classes generated by an older, buggy pickler. + // See the generation of the LOCAL_CHILD class in Pickler.scala. In an earlier version, the + // pickler did not add the ObjectTpe superclass, it used a trait as the first parent. This + // tripped an assertion in AddInterfaces which checks that the first parent is not a trait. + // This workaround can probably be removed in 2.12, because the 2.12 compiler is supposed + // to only read classfiles generated by 2.12. 
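The workaround described in the comment above amounts to prepending Object when the first recorded parent is a trait. A simplified model of that repair on a plain parent list; `Parent` and `fixLocalChildParents` are hypothetical names used only for illustration, not the `ClassInfoType` machinery the patch actually touches.

final case class Parent(name: String, isTrait: Boolean)

// If the old pickler recorded a trait as the first parent, prepend Object; otherwise leave as-is.
def fixLocalChildParents(parents: List[Parent]): List[Parent] = parents match {
  case first :: _ if first.isTrait => Parent("java.lang.Object", isTrait = false) :: parents
  case _                           => parents
}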
+ val fixLocalChildTp = if (sym.rawname == tpnme.LOCAL_CHILD) tp match { + case ClassInfoType(superClass :: traits, decls, typeSymbol) if superClass.typeSymbol.isTrait => + ClassInfoType(definitions.ObjectTpe :: superClass :: traits, decls, typeSymbol) + case _ => tp + } else tp + + if (p ne null) { + slowButSafeEnteringPhase(p)(sym setInfo fixLocalChildTp) + } if (currentRunId != definedAtRunId) - sym.setInfo(adaptToNewRunMap(tp)) + sym.setInfo(adaptToNewRunMap(fixLocalChildTp)) } catch { case e: MissingRequirementError => throw toTypeError(e) diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala index a494c7f0d07c..38893d8db3b8 100644 --- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala +++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala @@ -31,6 +31,9 @@ abstract class MutableSettings extends AbsSettings { v = arg postSetHook() } + + /** Returns Some(value) in the case of a value set by user and None otherwise. */ + def valueSetByUser: Option[T] = if (isSetByUser) Some(value) else None } def Xexperimental: BooleanSetting diff --git a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala index de54f3768e61..42b13944f6e3 100644 --- a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala +++ b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala @@ -12,7 +12,7 @@ import TypesStats._ trait FindMembers { this: SymbolTable => - /** Implementatation of `Type#{findMember, findMembers}` */ + /** Implementation of `Type#{findMember, findMembers}` */ private[internal] abstract class FindMemberBase[T](tpe: Type, name: Name, excludedFlags: Long, requiredFlags: Long) { protected val initBaseClasses: List[Symbol] = tpe.baseClasses diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala index 876685e24aac..123b44aa05cc 100644 --- a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala +++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala @@ -347,7 +347,9 @@ private[internal] trait GlbLubs { def lubsym(proto: Symbol): Symbol = { val prototp = lubThisType.memberInfo(proto) val syms = narrowts map (t => - t.nonPrivateMember(proto.name).suchThat(sym => + // SI-7602 With erroneous code, we could end up with overloaded symbols after filtering + // so `suchThat` unsuitable. 
+ t.nonPrivateMember(proto.name).filter(sym => sym.tpe matches prototp.substThis(lubThisType.typeSymbol, t))) if (syms contains NoSymbol) NoSymbol diff --git a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala index 564cbb1ce3ab..f79099213a4c 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala @@ -16,8 +16,9 @@ private[internal] trait TypeConstraints { private lazy val _undoLog = new UndoLog def undoLog = _undoLog + import TypeConstraints.UndoPair class UndoLog extends Clearable { - private type UndoPairs = List[(TypeVar, TypeConstraint)] + type UndoPairs = List[UndoPair[TypeVar, TypeConstraint]] //OPT this method is public so we can do `manual inlining` var log: UndoPairs = List() @@ -29,7 +30,7 @@ private[internal] trait TypeConstraints { def undoTo(limit: UndoPairs) { assertCorrectThread() while ((log ne limit) && log.nonEmpty) { - val (tv, constr) = log.head + val UndoPair(tv, constr) = log.head tv.constr = constr log = log.tail } @@ -40,7 +41,7 @@ private[internal] trait TypeConstraints { * which is already synchronized. */ private[reflect] def record(tv: TypeVar) = { - log ::= ((tv, tv.constr.cloneInternal)) + log ::= UndoPair(tv, tv.constr.cloneInternal) } def clear() { @@ -74,7 +75,7 @@ private[internal] trait TypeConstraints { /* Syncnote: Type constraints are assumed to be used from only one * thread. They are not exposed in api.Types and are used only locally * in operations that are exposed from types. Hence, no syncing of any - * variables should be ncessesary. + * variables should be necessary. */ /** Guard these lists against AnyClass and NothingClass appearing, @@ -266,3 +267,9 @@ private[internal] trait TypeConstraints { tvars forall (tv => tv.instWithinBounds || util.andFalse(logBounds(tv))) } } + +private[internal] object TypeConstraints { + // UndoPair is declared in companion object to not hold an outer pointer reference + final case class UndoPair[TypeVar <: SymbolTable#TypeVar, + TypeConstraint <: TypeConstraints#TypeConstraint](tv: TypeVar, tConstraint: TypeConstraint) +} diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala index f06420de96d7..c705ca70694d 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala @@ -422,6 +422,22 @@ private[internal] trait TypeMaps { } } + /** + * Get rid of BoundedWildcardType where variance allows us to do so. + * Invariant: `wildcardExtrapolation(tp) =:= tp` + * + * For example, the MethodType given by `def bla(x: (_ >: String)): (_ <: Int)` + * is both a subtype and a supertype of `def bla(x: String): Int`. + */ + object wildcardExtrapolation extends TypeMap(trackVariance = true) { + def apply(tp: Type): Type = + tp match { + case BoundedWildcardType(TypeBounds(lo, AnyTpe)) if variance.isContravariant => lo + case BoundedWildcardType(TypeBounds(NothingTpe, hi)) if variance.isCovariant => hi + case tp => mapOver(tp) + } + } + /** Might the given symbol be important when calculating the prefix * of a type? 
When tp.asSeenFrom(pre, clazz) is called on `tp`, * the result will be `tp` unchanged if `pre` is trivial and `clazz` diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala index d5b596714590..ac7839bcfd95 100644 --- a/src/reflect/scala/reflect/internal/transform/Erasure.scala +++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala @@ -280,8 +280,17 @@ trait Erasure { } object boxingErasure extends ScalaErasureMap { + private var boxPrimitives = true + + override def applyInArray(tp: Type): Type = { + val saved = boxPrimitives + boxPrimitives = false + try super.applyInArray(tp) + finally boxPrimitives = saved + } + override def eraseNormalClassRef(tref: TypeRef) = - if (isPrimitiveValueClass(tref.sym)) boxedClass(tref.sym).tpe + if (boxPrimitives && isPrimitiveValueClass(tref.sym)) boxedClass(tref.sym).tpe else super.eraseNormalClassRef(tref) override def eraseDerivedValueClassRef(tref: TypeRef) = super.eraseNormalClassRef(tref) diff --git a/src/reflect/scala/reflect/internal/transform/PostErasure.scala b/src/reflect/scala/reflect/internal/transform/PostErasure.scala index f0c7d0f05038..dd4f0448182f 100644 --- a/src/reflect/scala/reflect/internal/transform/PostErasure.scala +++ b/src/reflect/scala/reflect/internal/transform/PostErasure.scala @@ -5,7 +5,6 @@ package transform trait PostErasure { val global: SymbolTable import global._ - import definitions._ object elimErasedValueType extends TypeMap { def apply(tp: Type) = tp match { diff --git a/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala b/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala index 10a8b4c812d3..30dcbc21cad2 100644 --- a/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala +++ b/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala @@ -5,16 +5,16 @@ package scala package reflect.internal.util -import scala.reflect.io.AbstractFile +import scala.collection.{ mutable, immutable } +import scala.reflect.io.{ AbstractFile, Streamable } +import java.net.{ URL, URLConnection, URLStreamHandler } import java.security.cert.Certificate import java.security.{ ProtectionDomain, CodeSource } -import java.net.{ URL, URLConnection, URLStreamHandler } -import scala.collection.{ mutable, immutable } +import java.util.{ Collections => JCollections, Enumeration => JEnumeration } -/** - * A class loader that loads files from a {@link scala.tools.nsc.io.AbstractFile}. +/** A class loader that loads files from a {@link scala.tools.nsc.io.AbstractFile}. 
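// Illustrative sketch (hypothetical names, not from this patch): the boxingErasure
// change above scopes a mutable flag to one sub-traversal by saving it, flipping it,
// and restoring it in a finally block, so the flag is reset even if the nested call
// throws. The same idiom in isolation:
final class ScopedFlag {
  private var boxPrimitives = true

  def withoutBoxing[A](body: => A): A = {
    val saved = boxPrimitives
    boxPrimitives = false
    try body
    finally boxPrimitives = saved        // restored on both normal and exceptional exit
  }

  def boxing: Boolean = boxPrimitives
}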
* - * @author Lex Spoon + * @author Lex Spoon */ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader) extends ClassLoader(parent) @@ -22,7 +22,7 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader) { protected def classNameToPath(name: String): String = if (name endsWith ".class") name - else name.replace('.', '/') + ".class" + else s"${name.replace('.', '/')}.class" protected def findAbstractFile(name: String): AbstractFile = { var file: AbstractFile = root @@ -56,35 +56,25 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader) file } - // parent delegation in JCL uses getResource; so either add parent.getResAsStream - // or implement findResource, which we do here as a study in scarlet (my complexion - // after looking at CLs and URLs) - override def findResource(name: String): URL = findAbstractFile(name) match { + override protected def findClass(name: String): Class[_] = { + val bytes = classBytes(name) + if (bytes.length == 0) + throw new ClassNotFoundException(name) + else + defineClass(name, bytes, 0, bytes.length, protectionDomain) + } + override protected def findResource(name: String): URL = findAbstractFile(name) match { case null => null - case file => new URL(null, "repldir:" + file.path, new URLStreamHandler { + case file => new URL(null, s"memory:${file.path}", new URLStreamHandler { override def openConnection(url: URL): URLConnection = new URLConnection(url) { - override def connect() { } + override def connect() = () override def getInputStream = file.input } }) } - - // this inverts delegation order: super.getResAsStr calls parent.getRes if we fail - override def getResourceAsStream(name: String) = findAbstractFile(name) match { - case null => super.getResourceAsStream(name) - case file => file.input - } - // ScalaClassLoader.classBytes uses getResAsStream, so we'll try again before delegating - override def classBytes(name: String): Array[Byte] = findAbstractFile(classNameToPath(name)) match { - case null => super.classBytes(name) - case file => file.toByteArray - } - override def findClass(name: String): Class[_] = { - val bytes = classBytes(name) - if (bytes.length == 0) - throw new ClassNotFoundException(name) - else - defineClass(name, bytes, 0, bytes.length, protectionDomain) + override protected def findResources(name: String): JEnumeration[URL] = findResource(name) match { + case null => JCollections.enumeration(JCollections.emptyList[URL]) //JCollections.emptyEnumeration[URL] + case url => JCollections.enumeration(JCollections.singleton(url)) } lazy val protectionDomain = { @@ -106,15 +96,13 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader) throw new UnsupportedOperationException() } - override def getPackage(name: String): Package = { - findAbstractDir(name) match { - case null => super.getPackage(name) - case file => packages.getOrElseUpdate(name, { - val ctor = classOf[Package].getDeclaredConstructor(classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[URL], classOf[ClassLoader]) - ctor.setAccessible(true) - ctor.newInstance(name, null, null, null, null, null, null, null, this) - }) - } + override def getPackage(name: String): Package = findAbstractDir(name) match { + case null => super.getPackage(name) + case file => packages.getOrElseUpdate(name, { + val ctor = classOf[Package].getDeclaredConstructor(classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], 
classOf[String], classOf[String], classOf[URL], classOf[ClassLoader]) + ctor.setAccessible(true) + ctor.newInstance(name, null, null, null, null, null, null, null, this) + }) } override def getPackages(): Array[Package] = diff --git a/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala b/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala index 63ea6e2c4993..41011f6c6b59 100644 --- a/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala +++ b/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala @@ -53,8 +53,10 @@ trait ScalaClassLoader extends JClassLoader { } /** An InputStream representing the given class name, or null if not found. */ - def classAsStream(className: String) = - getResourceAsStream(className.replaceAll("""\.""", "/") + ".class") + def classAsStream(className: String) = getResourceAsStream { + if (className endsWith ".class") className + else s"${className.replace('.', '/')}.class" // classNameToPath + } /** Run the main method of a class to be loaded by this classloader */ def run(objectName: String, arguments: Seq[String]) { diff --git a/src/reflect/scala/reflect/internal/util/SourceFile.scala b/src/reflect/scala/reflect/internal/util/SourceFile.scala index 4fccad74ac61..a2642628a431 100644 --- a/src/reflect/scala/reflect/internal/util/SourceFile.scala +++ b/src/reflect/scala/reflect/internal/util/SourceFile.scala @@ -40,7 +40,7 @@ abstract class SourceFile { def lineToString(index: Int): String = { val start = lineToOffset(index) var end = start - while (!isEndOfLine(end) && end <= length) end += 1 + while (end < length && !isEndOfLine(end)) end += 1 new String(content, start, end - start) } diff --git a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala index a9a7c7780d01..3a7a7626fb73 100644 --- a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala +++ b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala @@ -7,13 +7,13 @@ import scala.collection.generic.Clearable import scala.collection.mutable.{Set => MSet} /** - * A HashSet where the elements are stored weakly. Elements in this set are elligible for GC if no other + * A HashSet where the elements are stored weakly. Elements in this set are eligible for GC if no other * hard references are associated with them. Its primary use case is as a canonical reference * identity holder (aka "hash-consing") via findEntryOrUpdate * * This Set implementation cannot hold null. Any attempt to put a null in it will result in a NullPointerException * - * This set implmeentation is not in general thread safe without external concurrency control. However it behaves + * This set implementation is not in general thread safe without external concurrency control. However it behaves * properly when GC concurrently collects elements in this set. 
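// Illustrative sketch (hypothetical names, not from this patch): the WeakHashSet
// documented above is a canonical-reference ("hash-consing") store whose entries the
// GC may reclaim. The same idea in a much simpler form, built on the JDK's WeakHashMap
// so that neither the key nor the cached value keeps an element alive:
import java.lang.ref.WeakReference
import java.util.{ WeakHashMap => JWeakHashMap }

final class Interner[A <: AnyRef] {
  // not thread-safe, mirroring the note above; add external synchronization if needed
  private val cache = new JWeakHashMap[A, WeakReference[A]]()

  def intern(a: A): A = {
    val ref       = cache.get(a)
    val canonical = if (ref == null) null.asInstanceOf[A] else ref.get
    if (canonical != null) canonical                   // reuse the existing instance
    else { cache.put(a, new WeakReference(a)); a }     // first occurrence: remember it
  }
}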
*/ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: Double) extends Set[A] with Function1[A, Boolean] with MSet[A] { @@ -26,7 +26,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D /** * queue of Entries that hold elements scheduled for GC - * the removeStaleEntries() method works through the queue to remeove + * the removeStaleEntries() method works through the queue to remove * stale entries from the table */ private[this] val queue = new ReferenceQueue[A] @@ -62,7 +62,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D private[this] def computeThreshHold: Int = (table.size * loadFactor).ceil.toInt /** - * find the bucket associated with an elements's hash code + * find the bucket associated with an element's hash code */ private[this] def bucketFor(hash: Int): Int = { // spread the bits around to try to avoid accidental collisions using the diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala index ac1159b2ac42..bcefcc471ff7 100644 --- a/src/reflect/scala/reflect/io/AbstractFile.scala +++ b/src/reflect/scala/reflect/io/AbstractFile.scala @@ -48,14 +48,16 @@ object AbstractFile { else null /** - * If the specified URL exists and is a readable zip or jar archive, - * returns an abstract directory backed by it. Otherwise, returns - * `null`. + * If the specified URL exists and is a regular file or a directory, returns an + * abstract regular file or an abstract directory, respectively, backed by it. + * Otherwise, returns `null`. */ - def getURL(url: URL): AbstractFile = { - if (url == null || !Path.isExtensionJarOrZip(url.getPath)) null - else ZipArchive fromURL url - } + def getURL(url: URL): AbstractFile = + if (url.getProtocol == "file") { + val f = new java.io.File(url.getPath) + if (f.isDirectory) getDirectory(f) + else getFile(f) + } else null def getResources(url: URL): AbstractFile = ZipArchive fromManifestURL url } diff --git a/src/reflect/scala/reflect/io/VirtualFile.scala b/src/reflect/scala/reflect/io/VirtualFile.scala index 45f38db74550..1cb4f2fe6f5a 100644 --- a/src/reflect/scala/reflect/io/VirtualFile.scala +++ b/src/reflect/scala/reflect/io/VirtualFile.scala @@ -75,10 +75,10 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF } /** Does this abstract file denote an existing file? */ - def create() { unsupported() } + def create(): Unit = unsupported() /** Delete the underlying file or directory (recursively). 
*/ - def delete() { unsupported() } + def delete(): Unit = unsupported() /** * Returns the abstract file in this abstract directory with the diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala index 826018945909..0c63acb86cb3 100644 --- a/src/reflect/scala/reflect/io/ZipArchive.scala +++ b/src/reflect/scala/reflect/io/ZipArchive.scala @@ -74,12 +74,6 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq def container = unsupported() def absolute = unsupported() - private def walkIterator(its: Iterator[AbstractFile]): Iterator[AbstractFile] = { - its flatMap { f => - if (f.isDirectory) walkIterator(f.iterator) - else Iterator(f) - } - } /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ sealed abstract class Entry(path: String) extends VirtualFile(baseName(path), path) { // have to keep this name for compat with sbt's compiler-interface @@ -87,6 +81,7 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq override def underlyingSource = Some(self) override def toString = self.path + "(" + path + ")" } + /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ class DirEntry(path: String) extends Entry(path) { val entries = mutable.HashMap[String, Entry]() @@ -125,14 +120,15 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq } /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ final class FileZipArchive(file: JFile) extends ZipArchive(file) { - def iterator: Iterator[Entry] = { + lazy val (root, allDirs) = { + val root = new DirEntry("/") + val dirs = mutable.HashMap[String, DirEntry]("/" -> root) val zipFile = try { new ZipFile(file) } catch { case ioe: IOException => throw new IOException("Error accessing " + file.getPath, ioe) } - val root = new DirEntry("/") - val dirs = mutable.HashMap[String, DirEntry]("/" -> root) + val enum = zipFile.entries() while (enum.hasMoreElements) { @@ -150,11 +146,11 @@ final class FileZipArchive(file: JFile) extends ZipArchive(file) { dir.entries(f.name) = f } } - - try root.iterator - finally dirs.clear() + (root, dirs) } + def iterator: Iterator[Entry] = root.iterator + def name = file.getName def path = file.getPath def input = File(file).inputStream() @@ -244,11 +240,9 @@ final class ManifestResources(val url: URL) extends ZipArchive(null) { val manifest = new Manifest(input) val iter = manifest.getEntries().keySet().iterator().filter(_.endsWith(".class")).map(new ZipEntry(_)) - while (iter.hasNext) { - val zipEntry = iter.next() + for (zipEntry <- iter) { val dir = getDir(dirs, zipEntry) - if (zipEntry.isDirectory) dir - else { + if (!zipEntry.isDirectory) { class FileEntry() extends Entry(zipEntry.getName) { override def lastModified = zipEntry.getTime() override def input = resourceInputStream(path) @@ -284,14 +278,14 @@ final class ManifestResources(val url: URL) extends ZipArchive(null) { private def resourceInputStream(path: String): InputStream = { new FilterInputStream(null) { override def read(): Int = { - if(in == null) in = Thread.currentThread().getContextClassLoader().getResourceAsStream(path); + if(in == null) in = Thread.currentThread().getContextClassLoader().getResourceAsStream(path) if(in == null) throw new RuntimeException(path + " not found") - super.read(); + super.read() } override def close(): Unit = 
{ - super.close(); - in = null; + super.close() + in = null } } } diff --git a/src/reflect/scala/reflect/macros/Attachments.scala b/src/reflect/scala/reflect/macros/Attachments.scala index 5ccdc15a03f4..b5c340645a64 100644 --- a/src/reflect/scala/reflect/macros/Attachments.scala +++ b/src/reflect/scala/reflect/macros/Attachments.scala @@ -35,7 +35,7 @@ abstract class Attachments { self => def all: Set[Any] = Set.empty private def matchesTag[T: ClassTag](datum: Any) = - classTag[T].runtimeClass == datum.getClass + classTag[T].runtimeClass.isInstance(datum) /** An underlying payload of the given class type `T`. */ def get[T: ClassTag]: Option[T] = diff --git a/src/reflect/scala/reflect/macros/Enclosures.scala b/src/reflect/scala/reflect/macros/Enclosures.scala index 69ede42cc70d..1eb6832b5be5 100644 --- a/src/reflect/scala/reflect/macros/Enclosures.scala +++ b/src/reflect/scala/reflect/macros/Enclosures.scala @@ -47,7 +47,7 @@ trait Enclosures { /** Tries to guess a position for the enclosing application. * But that is simple, right? Just dereference `pos` of `macroApplication`? Not really. - * If we're in a synthetic macro expansion (no positions), we must do our best to infer the position of something that triggerd this expansion. + * If we're in a synthetic macro expansion (no positions), we must do our best to infer the position of something that triggered this expansion. * Surprisingly, quite often we can do this by navigation the `enclosingMacros` stack. */ def enclosingPosition: Position diff --git a/src/reflect/scala/reflect/macros/Parsers.scala b/src/reflect/scala/reflect/macros/Parsers.scala index 720b75464904..5fc0fd5078da 100644 --- a/src/reflect/scala/reflect/macros/Parsers.scala +++ b/src/reflect/scala/reflect/macros/Parsers.scala @@ -13,7 +13,7 @@ trait Parsers { /** Parses a string with a Scala expression into an abstract syntax tree. * Only works for expressions, i.e. parsing a package declaration will fail. - * @throws [[scala.reflect.macros.ParseException]] + * @throws scala.reflect.macros.ParseException */ def parse(code: String): Tree } diff --git a/src/reflect/scala/reflect/macros/Typers.scala b/src/reflect/scala/reflect/macros/Typers.scala index d0dccb469dfc..bd608601dc08 100644 --- a/src/reflect/scala/reflect/macros/Typers.scala +++ b/src/reflect/scala/reflect/macros/Typers.scala @@ -2,8 +2,6 @@ package scala package reflect package macros -import scala.reflect.internal.{Mode => InternalMode} - /** * EXPERIMENTAL * @@ -72,7 +70,7 @@ trait Typers { * `withImplicitViewsDisabled` recursively prohibits implicit views (though, implicit vals will still be looked up and filled in), default value is false * `withMacrosDisabled` recursively prohibits macro expansions and macro-based implicits, default value is false * - * @throws [[scala.reflect.macros.TypecheckException]] + * @throws scala.reflect.macros.TypecheckException */ def typecheck(tree: Tree, mode: TypecheckMode = TERMmode, pt: Type = universe.WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): Tree @@ -84,7 +82,7 @@ trait Typers { * Such errors don't vanish and can be inspected by turning on -Xlog-implicits. * Unlike in `typecheck`, `silent` is true by default. 
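// Illustrative sketch (hypothetical names, not from this patch): the matchesTag change
// above replaces an exact-class comparison with Class.isInstance, which also accepts
// subclasses of the requested attachment type.
import scala.reflect.{ classTag, ClassTag }

object TagMatch {
  def matches[T: ClassTag](datum: Any): Boolean =
    classTag[T].runtimeClass.isInstance(datum)

  class Base
  class Derived extends Base

  // matches[Base](new Derived)               is true  (isInstance accepts subclasses)
  // classOf[Base] == (new Derived).getClass  is false (exact comparison rejects it)
}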
* - * @throws [[scala.reflect.macros.TypecheckException]] + * @throws scala.reflect.macros.TypecheckException */ def inferImplicitValue(pt: Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: Position = enclosingPosition): Tree @@ -96,7 +94,7 @@ trait Typers { * Such errors don't vanish and can be inspected by turning on -Xlog-implicits. * Unlike in `typecheck`, `silent` is true by default. * - * @throws [[scala.reflect.macros.TypecheckException]] + * @throws scala.reflect.macros.TypecheckException */ def inferImplicitView(tree: Tree, from: Type, to: Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: Position = enclosingPosition): Tree diff --git a/src/reflect/scala/reflect/macros/Universe.scala b/src/reflect/scala/reflect/macros/Universe.scala index 1eb67215bb45..3b571695654e 100644 --- a/src/reflect/scala/reflect/macros/Universe.scala +++ b/src/reflect/scala/reflect/macros/Universe.scala @@ -44,7 +44,7 @@ abstract class Universe extends scala.reflect.api.Universe { * it is imperative that you either call `untypecheck` or do `changeOwner(tree, x, y)`. * * Since at the moment `untypecheck` has fundamental problem that can sometimes lead to tree corruption, - * `changeOwner` becomes an indispensible tool in building 100% robust macros. + * `changeOwner` becomes an indispensable tool in building 100% robust macros. * Future versions of the reflection API might obviate the need in taking care of * these low-level details, but at the moment this is what we've got. */ diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index f5bddb178486..1c751fb93b57 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -38,7 +38,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive override lazy val rootMirror: Mirror = createMirror(NoSymbol, rootClassLoader) - // overriden by ReflectGlobal + // overridden by ReflectGlobal def rootClassLoader: ClassLoader = this.getClass.getClassLoader trait JavaClassCompleter @@ -760,8 +760,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive module.moduleClass setInfo new ClassInfoType(List(), newScope, module.moduleClass) } - def enter(sym: Symbol, mods: JavaAccFlags) = - ( if (mods.isStatic) module.moduleClass else clazz ).info.decls enter sym + def enter(sym: Symbol, mods: JavaAccFlags) = followStatic(clazz, module, mods).info.decls enter sym def enterEmptyCtorIfNecessary(): Unit = { if (jclazz.getConstructors.isEmpty) @@ -801,34 +800,33 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive * If Java modifiers `mods` contain STATIC, return the module class * of the companion module of `clazz`, otherwise the class `clazz` itself. */ - private def followStatic(clazz: Symbol, mods: JavaAccFlags) = - if (mods.isStatic) clazz.companionModule.moduleClass else clazz + private def followStatic(clazz: Symbol, mods: JavaAccFlags): Symbol = followStatic(clazz, clazz.companionModule, mods) - /** Methods which need to be treated with care - * because they either are getSimpleName or call getSimpleName: + private def followStatic(clazz: Symbol, module: Symbol, mods: JavaAccFlags): Symbol = + // SI-8196 `orElse(clazz)` needed for implementation details of the backend, such as the static + // field containing the cache for structural calls. 
+ if (mods.isStatic) module.moduleClass.orElse(clazz) else clazz + + /** + * Certain method of the Java reflection api cannot be used on classfiles created by Scala. + * See the comment in test/files/jvm/javaReflection/Test.scala. The methods are * * public String getSimpleName() * public boolean isAnonymousClass() * public boolean isLocalClass() * public String getCanonicalName() - * - * A typical manifestation: - * - * // java.lang.Error: sOwner(class Test$A$1) has failed - * // Caused by: java.lang.InternalError: Malformed class name - * // at java.lang.Class.getSimpleName(Class.java:1133) - * // at java.lang.Class.isAnonymousClass(Class.java:1188) - * // at java.lang.Class.isLocalClass(Class.java:1199) - * // (see t5256c.scala for more details) + * public boolean isSynthetic() * * TODO - find all such calls and wrap them. * TODO - create mechanism to avoid the recurrence of unwrapped calls. */ implicit class RichClass(jclazz: jClass[_]) { - // `jclazz.isLocalClass` doesn't work because of problems with `getSimpleName` - // hence we have to approximate by removing the `isAnonymousClass` check -// def isLocalClass0: Boolean = jclazz.isLocalClass - def isLocalClass0: Boolean = jclazz.getEnclosingMethod != null || jclazz.getEnclosingConstructor != null + // As explained in the javaReflection test, Class.isLocalClass is true for all non-member + // nested classes in Scala. This is fine per se, however the implementation may throw an + // InternalError. We therefore re-implement it here. + // TODO: this method should be renamed to `isLocalOrAnonymousClass`. + // due to bin compat that's only possible in 2.12, we cannot introduce a new alias in 2.11. + def isLocalClass0: Boolean = jclazz.getEnclosingClass != null && !jclazz.isMemberClass } /** @@ -1193,7 +1191,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive * - top-level classes * - Scala classes that were generated via jclassToScala * - classes that have a class owner that has a corresponding Java class - * @throws A `ClassNotFoundException` for all Scala classes not in one of these categories. + * @throws ClassNotFoundException for all Scala classes not in one of these categories. */ @throws(classOf[ClassNotFoundException]) def classToJava(clazz: ClassSymbol): jClass[_] = classCache.toJava(clazz) { diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala index b5446694ed10..7848753e6911 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala @@ -5,7 +5,7 @@ package runtime import scala.reflect.internal.{TreeInfo, SomePhase} import scala.reflect.internal.{SymbolTable => InternalSymbolTable} import scala.reflect.runtime.{SymbolTable => RuntimeSymbolTable} -import scala.reflect.api.{TreeCreator, TypeCreator, Universe} +import scala.reflect.api.{TypeCreator, Universe} /** An implementation of [[scala.reflect.api.Universe]] for runtime reflection using JVM classloaders. 
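// Illustrative sketch (hypothetical names, not from this patch): the isLocalClass0
// re-implementation above avoids Class.isLocalClass, whose JDK implementation may throw
// an InternalError on Scala-generated class names; "local or anonymous" can instead be
// derived from two queries that are safe on such classes:
object ClassKinds {
  def isLocalOrAnonymous(c: Class[_]): Boolean =
    c.getEnclosingClass != null && !c.isMemberClass   // nested but not a member => local or anonymous
}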
* @@ -14,15 +14,27 @@ import scala.reflect.api.{TreeCreator, TypeCreator, Universe} * @contentDiagram hideNodes "*Api" "*Extractor" */ class JavaUniverse extends InternalSymbolTable with JavaUniverseForce with ReflectSetup with RuntimeSymbolTable { self => - - override def inform(msg: String): Unit = log(msg) def picklerPhase = SomePhase def erasurePhase = SomePhase lazy val settings = new Settings - private val isLogging = sys.props contains "scala.debug.reflect" + private val isLogging = sys.props contains "scala.debug.reflect" def log(msg: => AnyRef): Unit = if (isLogging) Console.err.println("[reflect] " + msg) + // TODO: why put output under isLogging? Calls to inform are already conditional on debug/verbose/... + import scala.reflect.internal.{Reporter, ReporterImpl} + override def reporter: Reporter = new ReporterImpl { + protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = log(msg) + } + + // minimal Run to get Reporting wired + def currentRun = new RunReporting {} + class PerRunReporting extends PerRunReportingBase { + def deprecationWarning(pos: Position, msg: String): Unit = reporter.warning(pos, msg) + } + protected def PerRunReporting = new PerRunReporting + + type TreeCopier = InternalTreeCopierOps implicit val TreeCopierTag: ClassTag[TreeCopier] = ClassTag[TreeCopier](classOf[TreeCopier]) def newStrictTreeCopier: TreeCopier = new StrictTreeCopier diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index dcd262c288cc..1c0aa7cf6da2 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -170,6 +170,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => this.dropSingletonType this.abstractTypesToBounds this.dropIllegalStarTypes + this.wildcardExtrapolation this.IsDependentCollector this.ApproximateDependentMap this.wildcardToTypeVarMap @@ -309,6 +310,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.QuasiquoteClass_api_unapply definitions.ScalaSignatureAnnotation definitions.ScalaLongSignatureAnnotation + definitions.MethodHandle definitions.OptionClass definitions.OptionModule definitions.SomeClass @@ -360,6 +362,8 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.AnnotationClass definitions.ClassfileAnnotationClass definitions.StaticAnnotationClass + definitions.AnnotationRetentionAttr + definitions.AnnotationRetentionPolicyAttr definitions.BridgeClass definitions.ElidableMethodClass definitions.ImplicitNotFoundClass diff --git a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala index c56bc28d9081..50ea8d986877 100644 --- a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala +++ b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala @@ -65,10 +65,15 @@ private[reflect] trait SymbolLoaders { self: SymbolTable => class LazyPackageType extends LazyType with FlagAgnosticCompleter { override def complete(sym: Symbol) { assert(sym.isPackageClass) - sym setInfo new ClassInfoType(List(), new PackageScope(sym), sym) + // Time travel to a phase before refchecks avoids an initialization issue. `openPackageModule` + // creates a module symbol and invokes invokes `companionModule` while the `infos` field is + // still null. This calls `isModuleNotMethod`, which forces the `info` if run after refchecks. 
+ slowButSafeEnteringPhaseNotLaterThan(picklerPhase) { + sym setInfo new ClassInfoType(List(), new PackageScope(sym), sym) // override def safeToString = pkgClass.toString - openPackageModule(sym) - markAllCompleted(sym) + openPackageModule(sym) + markAllCompleted(sym) + } } } @@ -91,7 +96,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable => // // Short of significantly changing SymbolLoaders I see no other way than just // to slap a global lock on materialization in runtime reflection. - class PackageScope(pkgClass: Symbol) extends Scope(initFingerPrints = -1L) // disable fingerprinting as we do not know entries beforehand + class PackageScope(pkgClass: Symbol) extends Scope with SynchronizedScope { assert(pkgClass.isType) diff --git a/src/reflect/scala/reflect/runtime/SymbolTable.scala b/src/reflect/scala/reflect/runtime/SymbolTable.scala index 02155578f8e9..092bbd711f10 100644 --- a/src/reflect/scala/reflect/runtime/SymbolTable.scala +++ b/src/reflect/scala/reflect/runtime/SymbolTable.scala @@ -2,8 +2,6 @@ package scala package reflect package runtime -import scala.reflect.internal.Flags._ - /** * This symbol table trait fills in the definitions so that class information is obtained by refection. * It can be used either from a reflexive universe (class scala.reflect.runtime.JavaUniverse), or else from diff --git a/src/reflect/scala/reflect/runtime/SynchronizedOps.scala b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala index c90901410a35..4a8585d61603 100644 --- a/src/reflect/scala/reflect/runtime/SynchronizedOps.scala +++ b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala @@ -37,8 +37,7 @@ private[reflect] trait SynchronizedOps extends internal.SymbolTable // Scopes - override def newScope = new Scope() with SynchronizedScope - override def newNestedScope(outer: Scope): Scope = new Scope(outer) with SynchronizedScope + override def newScope = new Scope with SynchronizedScope trait SynchronizedScope extends Scope { // we can keep this lock fine-grained, because methods of Scope don't do anything extraordinary, which makes deadlocks impossible diff --git a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala index f5e16c6640cc..4f0c0253e919 100644 --- a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala +++ b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala @@ -2,8 +2,7 @@ package scala package reflect package runtime -import scala.reflect.io.AbstractFile -import scala.collection.{ immutable, mutable } +import scala.collection.immutable import scala.reflect.internal.Flags._ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable => @@ -40,7 +39,7 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb * Reasons for that differ from artifact to artifact. In some cases it's quite bad (e.g. types use a number * of non-concurrent compiler caches, so we need to serialize certain operations on types in order to make * sure that things stay deterministic). However, in case of symbols there's hope, because it's only during - * initializaton that symbols are thread-unsafe. After everything's set up, symbols become immutable + * initialization that symbols are thread-unsafe. After everything's set up, symbols become immutable * (sans a few deterministic caches that can be populated simultaneously by multiple threads) and therefore thread-safe. * * Note that by saying "symbols become immutable" I mean literally that. 
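// Illustrative sketch (hypothetical names, not from this patch): the thread-safety story
// described above, in miniature. Mutation happens only while initializing, behind a
// single global lock; once the value is published it is treated as immutable and is read
// without further synchronization.
final class InitOnce[A <: AnyRef](compute: () => A) {
  @volatile private[this] var value: A = _
  private[this] val lock = new Object

  def get: A = {
    val v = value
    if (v ne null) v                  // fast path: already initialized, no locking
    else lock.synchronized {          // slow path: at most one thread runs the initializer
      if (value eq null) value = compute()
      value
    }
  }
}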
In a very common case of PackageClassSymbol's, @@ -103,10 +102,10 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb * * Just a volatile var is fine, because: * 1) Status can only be changed in a single-threaded fashion (this is enforced by gilSynchronized - * that effecively guards `Symbol.initialize`), which means that there can't be update conflicts. + * that effectively guards `Symbol.initialize`), which means that there can't be update conflicts. * 2) If someone reads a stale value of status, then the worst thing that might happen is that this someone - * is going to spuriously call `initialize`, which is either a gil-protected operation (if the symbol isn't inited yet) - * or a no-op (if the symbol is already inited), and that is fine in both cases. + * is going to spuriously call `initialize`, which is either a gil-protected operation (if the symbol isn't initialized yet) + * or a no-op (if the symbol is already initialized), and that is fine in both cases. * * upd. It looks like we also need to keep track of a mask of initialized flags to make sure * that normal symbol initialization routines don't trigger auto-init in Symbol.flags-related routines (e.g. Symbol.getFlag). diff --git a/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala b/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala index 5edc05146189..586b8a525732 100644 --- a/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala +++ b/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala @@ -11,12 +11,16 @@ private[reflect] trait ThreadLocalStorage { trait ThreadLocalStorage[T] { def get: T; def set(newValue: T): Unit } private class MyThreadLocalStorage[T](initialValue: => T) extends ThreadLocalStorage[T] { // TODO: how do we use org.cliffc.high_scale_lib.NonBlockingHashMap here? - val values = new java.util.concurrent.ConcurrentHashMap[Thread, T]() + // (we would need a version that uses weak keys) + private val values = java.util.Collections.synchronizedMap(new java.util.WeakHashMap[Thread, T]()) def get: T = { if (values containsKey currentThread) values.get(currentThread) else { val value = initialValue - values.putIfAbsent(currentThread, value) + // since the key is currentThread, and `values` is private, it + // would be impossible for a value to have been set after the + // above containsKey check. `putIfAbsent` is not necessary. 
+ values.put(currentThread, value) value } } diff --git a/src/reflect/scala/reflect/runtime/package.scala b/src/reflect/scala/reflect/runtime/package.scala index 3c9bbccba38f..e240bed0a771 100644 --- a/src/reflect/scala/reflect/runtime/package.scala +++ b/src/reflect/scala/reflect/runtime/package.scala @@ -30,7 +30,8 @@ package runtime { import c.universe._ val runtimeClass = c.reifyEnclosingRuntimeClass if (runtimeClass.isEmpty) c.abort(c.enclosingPosition, "call site does not have an enclosing class") - val runtimeUniverse = Select(Select(Select(Ident(newTermName("scala")), newTermName("reflect")), newTermName("runtime")), newTermName("universe")) + val scalaPackage = Select(Ident(newTermName("_root_")), newTermName("scala")) + val runtimeUniverse = Select(Select(Select(scalaPackage, newTermName("reflect")), newTermName("runtime")), newTermName("universe")) val currentMirror = Apply(Select(runtimeUniverse, newTermName("runtimeMirror")), List(Select(runtimeClass, newTermName("getClassLoader")))) c.Expr[Nothing](currentMirror)(c.WeakTypeTag.Nothing) } diff --git a/src/repl/scala/tools/nsc/MainGenericRunner.scala b/src/repl/scala/tools/nsc/MainGenericRunner.scala index 43f0ea1256ea..34057ed34165 100644 --- a/src/repl/scala/tools/nsc/MainGenericRunner.scala +++ b/src/repl/scala/tools/nsc/MainGenericRunner.scala @@ -8,7 +8,6 @@ package tools.nsc import io.{ File } import util.{ ClassPath, ScalaClassLoader } -import Properties.{ versionString, copyrightString } import GenericRunnerCommand._ object JarRunner extends CommonRunner { @@ -28,79 +27,78 @@ object JarRunner extends CommonRunner { } /** An object that runs Scala code. It has three possible - * sources for the code to run: pre-compiled code, a script file, - * or interactive entry. - */ + * sources for the code to run: pre-compiled code, a script file, + * or interactive entry. + */ class MainGenericRunner { - def errorFn(ex: Throwable): Boolean = { - ex.printStackTrace() - false - } - def errorFn(str: String): Boolean = { - Console.err println str - false + def errorFn(str: String, e: Option[Throwable] = None, isFailure: Boolean = true): Boolean = { + if (str.nonEmpty) Console.err println str + e foreach (_.printStackTrace()) + !isFailure } def process(args: Array[String]): Boolean = { val command = new GenericRunnerCommand(args.toList, (x: String) => errorFn(x)) - import command.{ settings, howToRun, thingToRun } - def sampleCompiler = new Global(settings) // def so its not created unless needed - - if (!command.ok) return errorFn("\n" + command.shortUsageMsg) - else if (settings.version) return errorFn("Scala code runner %s -- %s".format(versionString, copyrightString)) - else if (command.shouldStopWithInfo) return errorFn(command getInfoMessage sampleCompiler) - - def isE = !settings.execute.isDefault - def dashe = settings.execute.value - - def isI = !settings.loadfiles.isDefault - def dashi = settings.loadfiles.value - - // Deadlocks on startup under -i unless we disable async. 
- if (isI) - settings.Yreplsync.value = true - - def combinedCode = { - val files = if (isI) dashi map (file => File(file).slurp()) else Nil - val str = if (isE) List(dashe) else Nil - - files ++ str mkString "\n\n" - } - - def runTarget(): Either[Throwable, Boolean] = howToRun match { - case AsObject => - ObjectRunner.runAndCatch(settings.classpathURLs, thingToRun, command.arguments) - case AsScript => - ScriptRunner.runScriptAndCatch(settings, thingToRun, command.arguments) - case AsJar => - JarRunner.runJar(settings, thingToRun, command.arguments) - case Error => - Right(false) - case _ => - // We start the repl when no arguments are given. - Right(new interpreter.ILoop process settings) + import command.{ settings, howToRun, thingToRun, shortUsageMsg, shouldStopWithInfo } + def sampleCompiler = new Global(settings) // def so it's not created unless needed + + def run(): Boolean = { + def isE = !settings.execute.isDefault + def dashe = settings.execute.value + + def isI = !settings.loadfiles.isDefault + def dashi = settings.loadfiles.value + + // Deadlocks on startup under -i unless we disable async. + if (isI) + settings.Yreplsync.value = true + + def combinedCode = { + val files = if (isI) dashi map (file => File(file).slurp()) else Nil + val str = if (isE) List(dashe) else Nil + + files ++ str mkString "\n\n" + } + + def runTarget(): Either[Throwable, Boolean] = howToRun match { + case AsObject => + ObjectRunner.runAndCatch(settings.classpathURLs, thingToRun, command.arguments) + case AsScript => + ScriptRunner.runScriptAndCatch(settings, thingToRun, command.arguments) + case AsJar => + JarRunner.runJar(settings, thingToRun, command.arguments) + case Error => + Right(false) + case _ => + // We start the repl when no arguments are given. + Right(new interpreter.ILoop process settings) + } + + /** If -e and -i were both given, we want to execute the -e code after the + * -i files have been included, so they are read into strings and prepended to + * the code given in -e. The -i option is documented to only make sense + * interactively so this is a pretty reasonable assumption. + * + * This all needs a rewrite though. + */ + if (isE) { + ScriptRunner.runCommand(settings, combinedCode, thingToRun +: command.arguments) + } + else runTarget() match { + case Left(ex) => errorFn("", Some(ex)) // there must be a useful message of hope to offer here + case Right(b) => b + } } - /** If -e and -i were both given, we want to execute the -e code after the - * -i files have been included, so they are read into strings and prepended to - * the code given in -e. The -i option is documented to only make sense - * interactively so this is a pretty reasonable assumption. - * - * This all needs a rewrite though. 
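// Illustrative sketch (hypothetical names, not from this patch): runTarget above returns
// Either[Throwable, Boolean] so the caller can fold a failure into the single
// errorFn(msg, e, isFailure) reporting path. Stripped of the runner details, the shape is:
object RunnerShape {
  def errorFn(str: String, e: Option[Throwable] = None, isFailure: Boolean = true): Boolean = {
    if (str.nonEmpty) Console.err println str
    e foreach (_.printStackTrace())
    !isFailure
  }

  def runTarget(run: () => Boolean): Either[Throwable, Boolean] =
    try Right(run()) catch { case t: Throwable => Left(t) }

  def process(run: () => Boolean): Boolean =
    runTarget(run) match {
      case Left(ex) => errorFn("", Some(ex))   // failure: report it and return false
      case Right(b) => b
    }
}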
- */ - if (isE) { - ScriptRunner.runCommand(settings, combinedCode, thingToRun +: command.arguments) - } - else runTarget() match { - case Left(ex) => errorFn(ex) - case Right(b) => b - } + if (!command.ok) + errorFn(f"%n$shortUsageMsg") + else if (shouldStopWithInfo) + errorFn(command getInfoMessage sampleCompiler, isFailure = false) + else + run() } } object MainGenericRunner extends MainGenericRunner { - def main(args: Array[String]) { - if (!process(args)) - sys.exit(1) - } + def main(args: Array[String]): Unit = if (!process(args)) sys.exit(1) } diff --git a/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala b/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala index e66e4eff29ff..df49e6a2e471 100644 --- a/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala +++ b/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala @@ -30,7 +30,7 @@ class AbstractOrMissingHandler[T](onError: String => Unit, value: T) extends Par |Failed to initialize compiler: %s not found. |** Note that as of 2.8 scala does not assume use of the java classpath. |** For the old behavior pass -usejavacp to scala, or if using a Settings - |** object programatically, settings.usejavacp.value = true.""".stripMargin.format(x.req) + |** object programmatically, settings.usejavacp.value = true.""".stripMargin.format(x.req) ) value } diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala index a96bed46960f..4fd5768b79d6 100644 --- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala @@ -19,6 +19,7 @@ import scala.reflect.internal.util.{ BatchSourceFile, ScalaClassLoader } import ScalaClassLoader._ import scala.reflect.io.{ File, Directory } import scala.tools.util._ +import io.AbstractFile import scala.collection.generic.Clearable import scala.concurrent.{ ExecutionContext, Await, Future, future } import ExecutionContext.Implicits._ @@ -75,6 +76,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) def history = in.history // classpath entries added via :cp + @deprecated("Use reset, replay or require to update class path", since = "2.11") var addedClasspath: String = "" /** A reverse list of commands to replay if the user requests a :replay */ @@ -124,22 +126,18 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) } /** print a friendly help message */ - def helpCommand(line: String): Result = { - if (line == "") helpSummary() - else uniqueCommand(line) match { - case Some(lc) => echo("\n" + lc.help) - case _ => ambiguousError(line) - } + def helpCommand(line: String): Result = line match { + case "" => helpSummary() + case CommandMatch(cmd) => echo(f"%n${cmd.help}") + case _ => ambiguousError(line) } private def helpSummary() = { - val usageWidth = commands map (_.usageMsg.length) max - val formatStr = "%-" + usageWidth + "s %s" + val usageWidth = commands map (_.usageMsg.length) max + val formatStr = s"%-${usageWidth}s %s" - echo("All commands can be abbreviated, e.g. 
:he instead of :help.") + echo("All commands can be abbreviated, e.g., :he instead of :help.") - commands foreach { cmd => - echo(formatStr.format(cmd.usageMsg, cmd.help)) - } + for (cmd <- commands) echo(formatStr.format(cmd.usageMsg, cmd.help)) } private def ambiguousError(cmd: String): Result = { matchingCommands(cmd) match { @@ -148,14 +146,14 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) } Result(keepRunning = true, None) } + // this lets us add commands willy-nilly and only requires enough command to disambiguate private def matchingCommands(cmd: String) = commands filter (_.name startsWith cmd) - private def uniqueCommand(cmd: String): Option[LoopCommand] = { - // this lets us add commands willy-nilly and only requires enough command to disambiguate - matchingCommands(cmd) match { - case List(x) => Some(x) - // exact match OK even if otherwise appears ambiguous - case xs => xs find (_.name == cmd) - } + private object CommandMatch { + def unapply(name: String): Option[LoopCommand] = + matchingCommands(name) match { + case x :: Nil => Some(x) + case xs => xs find (_.name == name) // accept an exact match + } } /** Show the history */ @@ -207,7 +205,6 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) /** Standard commands **/ lazy val standardCommands = List( - cmd("cp", "", "add a jar or directory to the classpath", addClasspath), cmd("edit", "|", "edit history", editCommand), cmd("help", "[command]", "print this summary or command-specific help", helpCommand), historyCommand, @@ -220,11 +217,12 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) cmd("paste", "[-raw] [path]", "enter paste mode or paste a file", pasteCommand), nullary("power", "enable power user mode", powerCmd), nullary("quit", "exit the interpreter", () => Result(keepRunning = false, None)), - nullary("replay", "reset execution and replay all previous commands", replay), - nullary("reset", "reset the repl to its initial state, forgetting all session entries", resetCommand), + cmd("replay", "[options]", "reset the repl and replay all previous commands", replayCommand), + cmd("require", "", "add a jar to the classpath", require), + cmd("reset", "[options]", "reset the repl to its initial state, forgetting all session entries", resetCommand), cmd("save", "", "save replayable session to a file", saveCommand), shCommand, - cmd("settings", "[+|-]", "+enable/-disable flags, set compiler options", changeSettings), + cmd("settings", "", "update compiler options, if possible; see reset", changeSettings), nullary("silent", "disable/enable automatic printing of results", verbosity), cmd("type", "[-v] ", "display the type of an expression without evaluating it", typeCommand), cmd("kind", "[-v] ", "display the kind of expression's type", kindCommand), @@ -304,57 +302,23 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) intp.lastWarnings foreach { case (pos, msg) => intp.reporter.warning(pos, msg) } } - private def changeSettings(args: String): Result = { - def showSettings() = { - for (s <- settings.userSetSettings.toSeq.sorted) echo(s.toString) - } - def updateSettings() = { - // put aside +flag options - val (pluses, rest) = (args split "\\s+").toList partition (_.startsWith("+")) - val tmps = new Settings - val (ok, leftover) = tmps.processArguments(rest, processAll = true) - if (!ok) echo("Bad settings request.") - else if (leftover.nonEmpty) echo("Unprocessed settings.") - else { - // boolean flags set-by-user on tmp 
copy should be off, not on - val offs = tmps.userSetSettings filter (_.isInstanceOf[Settings#BooleanSetting]) - val (minuses, nonbools) = rest partition (arg => offs exists (_ respondsTo arg)) - // update non-flags - settings.processArguments(nonbools, processAll = true) - // also snag multi-value options for clearing, e.g. -Ylog: and -language: - for { - s <- settings.userSetSettings - if s.isInstanceOf[Settings#MultiStringSetting] || s.isInstanceOf[Settings#PhasesSetting] - if nonbools exists (arg => arg.head == '-' && arg.last == ':' && (s respondsTo arg.init)) - } s match { - case c: Clearable => c.clear() - case _ => - } - def update(bs: Seq[String], name: String=>String, setter: Settings#Setting=>Unit) = { - for (b <- bs) - settings.lookupSetting(name(b)) match { - case Some(s) => - if (s.isInstanceOf[Settings#BooleanSetting]) setter(s) - else echo(s"Not a boolean flag: $b") - case _ => - echo(s"Not an option: $b") - } - } - update(minuses, identity, _.tryToSetFromPropertyValue("false")) // turn off - update(pluses, "-" + _.drop(1), _.tryToSet(Nil)) // turn on - } - } - if (args.isEmpty) showSettings() else updateSettings() + private def changeSettings(line: String): Result = { + def showSettings() = for (s <- settings.userSetSettings.toSeq.sorted) echo(s.toString) + if (line.isEmpty) showSettings() else { updateSettings(line) ; () } + } + private def updateSettings(line: String) = { + val (ok, rest) = settings.processArguments(words(line), processAll = false) + ok && rest.isEmpty } private def javapCommand(line: String): Result = { if (javap == null) - ":javap unavailable, no tools.jar at %s. Set JDK_HOME.".format(jdkHome) + s":javap unavailable, no tools.jar at $jdkHome. Set JDK_HOME." else if (line == "") ":javap [-lcsvp] [path1 path2 ...]" else javap(words(line)) foreach { res => - if (res.isError) return "Failed: " + res.value + if (res.isError) return s"Failed: ${res.value}" else res.show() } } @@ -402,7 +366,13 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) private val crashRecovery: PartialFunction[Throwable, Boolean] = { case ex: Throwable => - echo(intp.global.throwableAsString(ex)) + val (err, explain) = ( + if (intp.isInitializeComplete) + (intp.global.throwableAsString(ex), "") + else + (ex.getMessage, "The compiler did not initialize.\n") + ) + echo(err) ex match { case _: NoSuchMethodError | _: NoClassDefFoundError => @@ -410,7 +380,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) throw ex case _ => def fn(): Boolean = - try in.readYesOrNo(replayQuestionMessage, { echo("\nYou must enter y or n.") ; fn() }) + try in.readYesOrNo(explain + replayQuestionMessage, { echo("\nYou must enter y or n.") ; fn() }) catch { case _: RuntimeException => false } if (fn()) replay() @@ -419,39 +389,56 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) true } + // after process line, OK continue, ERR break, or EOF all done + object LineResults extends Enumeration { + type LineResult = Value + val EOF, ERR, OK = Value + } + import LineResults.LineResult + // return false if repl should exit def processLine(line: String): Boolean = { import scala.concurrent.duration._ - Await.ready(globalFuture, 60.seconds) + Await.ready(globalFuture, 10.minutes) // Long timeout here to avoid test failures under heavy load. 
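// Illustrative sketch (hypothetical names, not from this patch): the CommandMatch
// extractor introduced earlier in this file resolves an abbreviated command name to at
// most one command, accepting a unique prefix or an exact match, so callers can simply
// pattern-match on it. The same idea standalone:
object MiniCommands {
  final case class Command(name: String)
  val commands = List(Command("help"), Command("history"), Command("h?"))

  object CommandMatch {
    def unapply(name: String): Option[Command] =
      commands.filter(_.name startsWith name) match {
        case x :: Nil => Some(x)                     // unambiguous prefix
        case xs       => xs find (_.name == name)    // otherwise require an exact match
      }
  }

  def describe(line: String): String = line match {
    case CommandMatch(cmd) => cmd.name               // e.g. "hel" resolves to "help"
    case other             => s"ambiguous or unknown: $other"
  }
}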
- (line ne null) && (command(line) match { + command(line) match { case Result(false, _) => false case Result(_, Some(line)) => addReplay(line) ; true case _ => true - }) + } } private def readOneLine() = { + import scala.io.AnsiColor.{ MAGENTA, RESET } out.flush() - in readLine prompt + in readLine ( + if (replProps.colorOk) + MAGENTA + prompt + RESET + else + prompt + ) } /** The main read-eval-print loop for the repl. It calls * command() for each line of input, and stops when * command() returns false. */ - @tailrec final def loop() { - if ( try processLine(readOneLine()) catch crashRecovery ) - loop() + @tailrec final def loop(): LineResult = { + import LineResults._ + readOneLine() match { + case null => EOF + case line => if (try processLine(line) catch crashRecovery) loop() else ERR + } } /** interpret all lines from a specified file */ - def interpretAllFrom(file: File) { + def interpretAllFrom(file: File, verbose: Boolean = false) { savingReader { savingReplayStack { file applyReader { reader => - in = SimpleReader(reader, out, interactive = false) - echo("Loading " + file + "...") + in = if (verbose) new SimpleReader(reader, out, interactive = true) with EchoReader + else SimpleReader(reader, out, interactive = false) + echo(s"Loading $file...") loop() } } @@ -459,8 +446,16 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) } /** create a new interpreter and replay the given commands */ - def replay() { - reset() + def replayCommand(line: String): Unit = { + def run(destructive: Boolean): Unit = { + if (destructive) createInterpreter() else reset() + replay() + } + if (line.isEmpty) run(destructive = false) + else if (updateSettings(line)) run(destructive = true) + } + /** Announces as it replays. */ + def replay(): Unit = { if (replayCommandStack.isEmpty) echo("Nothing to replay.") else for (cmd <- replayCommands) { @@ -469,21 +464,28 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) echo("") } } - def resetCommand() { - echo("Resetting interpreter state.") - if (replayCommandStack.nonEmpty) { - echo("Forgetting this session history:\n") - replayCommands foreach echo - echo("") - replayCommandStack = Nil + /** `reset` the interpreter in an attempt to start fresh. + * Supplying settings creates a new compiler. + */ + def resetCommand(line: String): Unit = { + def run(destructive: Boolean): Unit = { + echo("Resetting interpreter state.") + if (replayCommandStack.nonEmpty) { + echo("Forgetting this session history:\n") + replayCommands foreach echo + echo("") + replayCommandStack = Nil + } + if (intp.namedDefinedTerms.nonEmpty) + echo("Forgetting all expression results and named terms: " + intp.namedDefinedTerms.mkString(", ")) + if (intp.definedTypes.nonEmpty) + echo("Forgetting defined types: " + intp.definedTypes.mkString(", ")) + if (destructive) createInterpreter() else reset() } - if (intp.namedDefinedTerms.nonEmpty) - echo("Forgetting all expression results and named terms: " + intp.namedDefinedTerms.mkString(", ")) - if (intp.definedTypes.nonEmpty) - echo("Forgetting defined types: " + intp.definedTypes.mkString(", ")) - - reset() + if (line.isEmpty) run(destructive = false) + else if (updateSettings(line)) run(destructive = true) } + /** Resets without announcements. 
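// Illustrative sketch (hypothetical names, not from this patch): the LineResults
// enumeration and the new loop() signature above let the read-eval-print loop report
// why it stopped (end of input vs. error) instead of returning Unit. In miniature:
import scala.annotation.tailrec

object MiniLoop {
  object LineResults extends Enumeration {
    type LineResult = Value
    val EOF, ERR, OK = Value
  }
  import LineResults._

  @tailrec def loop(readLine: () => String)(process: String => Boolean): LineResult =
    readLine() match {
      case null => EOF                                              // end of input
      case line => if (process(line)) loop(readLine)(process) else ERR
    }
}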
*/ def reset() { intp.reset() unleashAndSetPhase() @@ -591,13 +593,17 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) res } - def loadCommand(arg: String) = { - var shouldReplay: Option[String] = None - withFile(arg)(f => { - interpretAllFrom(f) - shouldReplay = Some(":load " + arg) - }) - Result(keepRunning = true, shouldReplay) + def loadCommand(arg: String): Result = { + def run(file: String, verbose: Boolean) = withFile(file) { f => + interpretAllFrom(f, verbose) + Result recording s":load $arg" + } getOrElse Result.default + + words(arg) match { + case "-v" :: file :: Nil => run(file, verbose = true) + case file :: Nil => run(file, verbose = false) + case _ => echo("usage: :load -v file") ; Result.default + } } def saveCommand(filename: String): Result = ( @@ -606,17 +612,62 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) else File(filename).printlnAll(replayCommands: _*) ) + @deprecated("Use reset, replay or require to update class path", since = "2.11") def addClasspath(arg: String): Unit = { val f = File(arg).normalize if (f.exists) { addedClasspath = ClassPath.join(addedClasspath, f.path) - val totalClasspath = ClassPath.join(settings.classpath.value, addedClasspath) - echo("Added '%s'. Your new classpath is:\n\"%s\"".format(f.path, totalClasspath)) - replay() + intp.addUrlsToClassPath(f.toURI.toURL) + echo("Added '%s' to classpath.".format(f.path, intp.global.classPath.asClassPathString)) + repldbg("Added '%s'. Your new classpath is:\n\"%s\"".format(f.path, intp.global.classPath.asClassPathString)) } else echo("The path '" + f + "' doesn't seem to exist.") } + /** Adds jar file to the current classpath. Jar will only be added if it + * does not contain classes that already exist on the current classpath. + * + * Importantly, `require` adds jars to the classpath ''without'' resetting + * the state of the interpreter. This is in contrast to `replay` which can + * be used to add jars to the classpath and which creates a new instance of + * the interpreter and replays all interpreter expressions. + */ + def require(arg: String): Unit = { + class InfoClassLoader extends java.lang.ClassLoader { + def classOf(arr: Array[Byte]): Class[_] = + super.defineClass(null, arr, 0, arr.length) + } + + val f = File(arg).normalize + + if (f.isDirectory) { + echo("Adding directories to the classpath is not supported. Add a jar instead.") + return + } + + val jarFile = AbstractFile.getDirectory(new java.io.File(arg)) + + def flatten(f: AbstractFile): Iterator[AbstractFile] = + if (f.isClassContainer) f.iterator.flatMap(flatten) + else Iterator(f) + + val entries = flatten(jarFile) + val cloader = new InfoClassLoader + + def classNameOf(classFile: AbstractFile): String = cloader.classOf(classFile.toByteArray).getName + def alreadyDefined(clsName: String) = intp.classLoader.tryToLoadClass(clsName).isDefined + val exists = entries.filter(_.hasExtension("class")).map(classNameOf).exists(alreadyDefined) + + if (!f.exists) echo(s"The path '$f' doesn't seem to exist.") + else if (exists) echo(s"The path '$f' cannot be loaded, because existing classpath entries conflict.") // TODO tell me which one + else { + addedClasspath = ClassPath.join(addedClasspath, f.path) + intp.addUrlsToClassPath(f.toURI.toURL) + echo("Added '%s' to classpath.".format(f.path, intp.global.classPath.asClassPathString)) + repldbg("Added '%s'. 
Your new classpath is:\n\"%s\"".format(f.path, intp.global.classPath.asClassPathString)) + } + } + def powerCmd(): Result = { if (isReplPower) "Already in power mode." else enablePowerMode(isDuringInit = false) @@ -646,20 +697,23 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) } /** Run one command submitted by the user. Two values are returned: - * (1) whether to keep running, (2) the line to record for replay, - * if any. */ + * (1) whether to keep running, (2) the line to record for replay, if any. + */ def command(line: String): Result = { - if (line startsWith ":") { - val cmd = line.tail takeWhile (x => !x.isWhitespace) - uniqueCommand(cmd) match { - case Some(lc) => lc(line.tail stripPrefix cmd dropWhile (_.isWhitespace)) - case _ => ambiguousError(cmd) - } - } + if (line startsWith ":") colonCommand(line.tail) else if (intp.global == null) Result(keepRunning = false, None) // Notice failure to create compiler else Result(keepRunning = true, interpretStartingWith(line)) } + private val commandish = """(\S+)(?:\s+)?(.*)""".r + + private def colonCommand(line: String): Result = line.trim match { + case "" => helpSummary() + case commandish(CommandMatch(cmd), rest) => cmd(rest) + case commandish(name, _) => ambiguousError(name) + case _ => echo("?") + } + private def readWhile(cond: String => Boolean) = { Iterator continually in.readLine("") takeWhile (x => x != null && cond(x)) } @@ -683,13 +737,13 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) } val code = file match { case Some(name) => - withFile(name)(f => { + withFile(name) { f => shouldReplay = Some(s":paste $arg") val s = f.slurp.trim if (s.isEmpty) echo(s"File contains no code: $f") else echo(s"Pasting file $f...") s - }) getOrElse "" + } getOrElse "" case None => echo("// Entering paste mode (ctrl-D to finish)\n") val text = (readWhile(_ => true) mkString "\n").trim @@ -818,7 +872,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) ) catch { case ex @ (_: Exception | _: NoClassDefFoundError) => - echo("Failed to created JLineReader: " + ex + "\nFalling back to SimpleReader.") + echo(f"Failed to created JLineReader: ${ex}%nFalling back to SimpleReader.") SimpleReader() } } @@ -845,6 +899,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) case _ => } } + + // start an interpreter with the given settings def process(settings: Settings): Boolean = savingContextLoader { this.settings = settings createInterpreter() @@ -859,7 +915,10 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) loadFiles(settings) printWelcome() - try loop() + try loop() match { + case LineResults.EOF => out print Properties.shellInterruptedString + case _ => + } catch AbstractOrMissingHandler() finally closeInterpreter() diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index 8bb5757bbbc8..0347622cf427 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -15,12 +15,16 @@ import scala.concurrent.{ Future, ExecutionContext } import scala.reflect.runtime.{ universe => ru } import scala.reflect.{ ClassTag, classTag } import scala.reflect.internal.util.{ BatchSourceFile, SourceFile } -import scala.tools.util.PathResolver +import scala.tools.util.PathResolverFactory import scala.tools.nsc.io.AbstractFile import scala.tools.nsc.typechecker.{ TypeStrings, StructuredTypeStrings } -import 
scala.tools.nsc.util.{ ScalaClassLoader, stringFromReader, stringFromWriter, StackTraceOps } +import scala.tools.nsc.util.{ ScalaClassLoader, stringFromReader, stringFromWriter, StackTraceOps, ClassPath, MergedClassPath } +import ScalaClassLoader.URLClassLoader import scala.tools.nsc.util.Exceptional.unwrap +import scala.tools.nsc.backend.JavaPlatform import javax.script.{AbstractScriptEngine, Bindings, ScriptContext, ScriptEngine, ScriptEngineFactory, ScriptException, CompiledScript, Compilable} +import java.net.URL +import java.io.File /** An interpreter for Scala code. * @@ -41,7 +45,7 @@ import javax.script.{AbstractScriptEngine, Bindings, ScriptContext, ScriptEngine * all variables defined by that code. To extract the result of an * interpreted line to show the user, a second "result object" is created * which imports the variables exported by the above object and then - * exports members called "$eval" and "$print". To accomodate user expressions + * exports members called "$eval" and "$print". To accommodate user expressions * that read from variables or methods defined in previous statements, "import" * statements are used. * @@ -82,9 +86,11 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set private var _classLoader: util.AbstractFileClassLoader = null // active classloader private val _compiler: ReplGlobal = newCompiler(settings, reporter) // our private compiler + private var _runtimeClassLoader: URLClassLoader = null // wrapper exposing addURL + def compilerClasspath: Seq[java.net.URL] = ( if (isInitializeComplete) global.classPath.asURLs - else new PathResolver(settings).result.asURLs // the compiler's classpath + else PathResolverFactory.create(settings).resultAsURLs // the compiler's classpath ) def settings = initialSettings // Run the code body with the given boolean settings flipped to true. @@ -110,15 +116,17 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set lazy val reporter: ReplReporter = new ReplReporter(this) import formatting._ - import reporter.{ printMessage, withoutTruncating } + import reporter.{ printMessage, printUntruncatedMessage } // This exists mostly because using the reporter too early leads to deadlock. private def echo(msg: String) { Console println msg } private def _initSources = List(new BatchSourceFile("", "class $repl_$init { }")) private def _initialize() = { try { - // todo. if this crashes, REPL will hang - new _compiler.Run() compileSources _initSources + // if this crashes, REPL will hang its head in shame + val run = new _compiler.Run() + assert(run.typerPhase != NoPhase, "REPL requires a typer phase.") + run compileSources _initSources _initializeComplete = true true } @@ -235,6 +243,18 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set new Global(settings, reporter) with ReplGlobal { override def toString: String = "" } } + /** + * Adds all specified jars to the compile and runtime classpaths. + * + * @note Currently only supports jars, not directories. + * @param urls The list of items to add to the compile and runtime classpaths. + */ + def addUrlsToClassPath(urls: URL*): Unit = { + new Run // force some initialization + urls.foreach(_runtimeClassLoader.addURL) // Add jars to runtime classloader + global.extendCompilerClassPath(urls: _*) // Add jars to compile-time classpath + } + /** Parent classloader. Overridable. 
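// ---------------------------------------------------------------------------
// Illustrative sketch, not from the patch: the plain-JDK pattern behind the
// "_runtimeClassLoader wrapper exposing addURL" used by addUrlsToClassPath
// above. java.net.URLClassLoader.addURL is protected, so a subclass re-exports
// it and the runtime classpath can grow after construction. The class name
// and the "extra.jar" path are invented for the demo.
import java.net.{ URL, URLClassLoader }

class MutableURLClassLoader(urls: Array[URL], parent: ClassLoader)
    extends URLClassLoader(urls, parent) {
  // widen access so callers can append entries after construction
  override def addURL(url: URL): Unit = super.addURL(url)
}

object MutableURLClassLoaderDemo {
  def main(args: Array[String]): Unit = {
    val loader = new MutableURLClassLoader(Array.empty[URL], getClass.getClassLoader)
    loader.addURL(new java.io.File("extra.jar").toURI.toURL)
    // classes from extra.jar are now visible to subsequent loadClass calls
    println(loader.getURLs.mkString(", "))
  }
}
// ---------------------------------------------------------------------------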
*/ protected def parentClassLoader: ClassLoader = settings.explicitParentLoader.getOrElse( this.getClass.getClassLoader() ) @@ -293,27 +313,43 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set def originalPath(name: Name): String = typerOp path name def originalPath(sym: Symbol): String = typerOp path sym def flatPath(sym: Symbol): String = flatOp shift sym.javaClassName + def translatePath(path: String) = { val sym = if (path endsWith "$") symbolOfTerm(path.init) else symbolOfIdent(path) sym.toOption map flatPath } + + /** If path represents a class resource in the default package, + * see if the corresponding symbol has a class file that is a REPL artifact + * residing at a different resource path. Translate X.class to $line3/$read$$iw$$iw$X.class. + */ + def translateSimpleResource(path: String): Option[String] = { + if (!(path contains '/') && (path endsWith ".class")) { + val name = path stripSuffix ".class" + val sym = if (name endsWith "$") symbolOfTerm(name.init) else symbolOfIdent(name) + def pathOf(s: String) = s"${s.replace('.', '/')}.class" + sym.toOption map (s => pathOf(flatPath(s))) + } else { + None + } + } def translateEnclosingClass(n: String) = symbolOfTerm(n).enclClass.toOption map flatPath + /** If unable to find a resource foo.class, try taking foo as a symbol in scope + * and use its java class name as a resource to load. + * + * $intp.classLoader classBytes "Bippy" or $intp.classLoader getResource "Bippy.class" just work. + */ private class TranslatingClassLoader(parent: ClassLoader) extends util.AbstractFileClassLoader(replOutput.dir, parent) { - /** Overridden here to try translating a simple name to the generated - * class name if the original attempt fails. This method is used by - * getResourceAsStream as well as findClass. 
- */ - override protected def findAbstractFile(name: String): AbstractFile = - super.findAbstractFile(name) match { - case null => translatePath(name) map (super.findAbstractFile(_)) orNull - case file => file - } + override protected def findAbstractFile(name: String): AbstractFile = super.findAbstractFile(name) match { + case null if _initializeComplete => translateSimpleResource(name) map super.findAbstractFile orNull + case file => file + } } private def makeClassLoader(): util.AbstractFileClassLoader = - new TranslatingClassLoader(parentClassLoader match { - case null => ScalaClassLoader fromURLs compilerClasspath - case p => new ScalaClassLoader.URLClassLoader(compilerClasspath, p) + new TranslatingClassLoader({ + _runtimeClassLoader = new URLClassLoader(compilerClasspath, parentClassLoader) + _runtimeClassLoader }) // Set the current Java "context" class loader to this interpreter's class loader @@ -384,6 +420,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set def compileSourcesKeepingRun(sources: SourceFile*) = { val run = new Run() + assert(run.typerPhase != NoPhase, "REPL requires a typer phase.") reporter.reset() run compileSources sources.toList (!reporter.hasErrors, run) @@ -606,7 +643,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set } else { // don't truncate stack traces - withoutTruncating(printMessage(result)) + printUntruncatedMessage(result) IR.Error } } @@ -790,7 +827,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set } ((pos, msg)) :: loop(filtered) } - val warnings = loop(run.allConditionalWarnings flatMap (_.warnings)) + val warnings = loop(run.reporting.allConditionalWarnings) if (warnings.nonEmpty) mostRecentWarnings = warnings } @@ -1118,7 +1155,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set def apply(line: String): Result = debugging(s"""parse("$line")""") { var isIncomplete = false - reporter.withIncompleteHandler((_, _) => isIncomplete = true) { + currentRun.parsing.withIncompleteHandler((_, _) => isIncomplete = true) { reporter.reset() val trees = newUnitParser(line).parseStats() if (reporter.hasErrors) Error @@ -1171,6 +1208,8 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set finally isettings.unwrapStrings = saved } + def withoutTruncating[A](body: => A): A = reporter withoutTruncating body + def symbolDefString(sym: Symbol) = { TypeStrings.quieter( exitingTyper(sym.defString), @@ -1243,9 +1282,11 @@ object IMain { def getProgram(statements: String*): String = null - def getScriptEngine: ScriptEngine = new IMain(this, new Settings() { - usemanifestcp.value = true - }) + def getScriptEngine: ScriptEngine = { + val settings = new Settings() + settings.usemanifestcp.value = true + new IMain(this, settings) + } } // The two name forms this is catching are the two sides of this assignment: diff --git a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala index 915fd57bf876..c80b94bf8944 100644 --- a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala +++ b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala @@ -9,7 +9,7 @@ package interpreter import java.lang.{ ClassLoader => JavaClassLoader, Iterable => JIterable } import scala.tools.nsc.util.ScalaClassLoader -import java.io.{ ByteArrayInputStream, CharArrayWriter, FileNotFoundException, PrintWriter, Writer } +import java.io.{ ByteArrayInputStream, CharArrayWriter, 
FileNotFoundException, PrintWriter, StringWriter, Writer } import java.util.{ Locale } import java.util.concurrent.ConcurrentLinkedQueue import javax.tools.{ Diagnostic, DiagnosticCollector, DiagnosticListener, @@ -18,39 +18,47 @@ import javax.tools.{ Diagnostic, DiagnosticCollector, DiagnosticListener, import scala.reflect.io.{ AbstractFile, Directory, File, Path } import scala.io.Source import scala.util.{ Try, Success, Failure } -import scala.util.Properties.lineSeparator +import scala.util.Properties.{ lineSeparator => EOL } import scala.util.matching.Regex -import scala.collection.JavaConverters +import scala.collection.JavaConverters._ import scala.collection.generic.Clearable import java.net.URL import scala.language.reflectiveCalls +import PartialFunction.{ cond => when } import Javap._ +/** Javap command implementation. Supports platform tool for Java 6 or 7+. + * Adds a few options for REPL world, to show bodies of `App` classes and closures. + */ class JavapClass( val loader: ScalaClassLoader, val printWriter: PrintWriter, intp: Option[IMain] = None -) extends scala.tools.util.Javap { +) extends Javap { import JavapTool.ToolArgs import JavapClass._ lazy val tool = JavapTool() - /** Run the tool. Option args start with "-". + /** Run the tool. Option args start with "-", except that "-" itself + * denotes the last REPL result. * The default options are "-protected -verbose". * Byte data for filename args is retrieved with findBytes. + * @return results for invoking JpResult.show() */ def apply(args: Seq[String]): List[JpResult] = { - val (options, claases) = args partition (s => (s startsWith "-") && s.length > 1) - val (flags, upgraded) = upgrade(options) + val (options, classes) = args partition (s => (s startsWith "-") && s.length > 1) + val (flags, upgraded) = upgrade(options) import flags.{ app, fun, help, raw } - val targets = if (fun && !help) FunFinder(loader, intp).funs(claases) else claases - if (help || claases.isEmpty) + + val targets = if (fun && !help) FunFinder(loader, intp).funs(classes) else classes + + if (help || classes.isEmpty) List(JpResult(JavapTool.helper(printWriter))) else if (targets.isEmpty) - List(JpResult("No anonfuns found.")) + List(JpResult("No closures found.")) else - tool(raw, upgraded)(targets map (claas => targeted(claas, app))) + tool(raw, upgraded)(targets map (targeted(_, app))) // JavapTool.apply } /** Cull our tool options. */ @@ -67,19 +75,22 @@ class JavapClass( case f: Failure[_] => (path, Failure(f.exception)) } - /** Find bytes. Handle "-", "-app", "Foo#bar" (by ignoring member), "#bar" (by taking "bar"). */ + /** Find bytes. Handle "-", "-app", "Foo#bar" (by ignoring member), "#bar" (by taking "bar"). 
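// ---------------------------------------------------------------------------
// Illustrative sketch, not from the patch: the option/target split used by
// JavapClass.apply above in isolation. Tokens starting with "-" are tool
// options, except a bare "-" (the last REPL result), which counts as a
// target. The sample arguments are invented.
object ArgSplitDemo {
  def split(args: Seq[String]): (Seq[String], Seq[String]) =
    args partition (s => (s startsWith "-") && s.length > 1)

  def main(args: Array[String]): Unit = {
    val (options, targets) = split(Seq("-private", "-", "Foo#bar"))
    println(options) // List(-private)
    println(targets) // List(-, Foo#bar)
  }
}
// ---------------------------------------------------------------------------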
+ * @return the path to use for filtering, and the byte array + */ private def bytesFor(path: String, app: Boolean) = Try { def last = intp.get.mostRecentVar // fail if no intp - def req = path match { - case "-" => last - case HashSplit(prefix, member) => - if (prefix != null) prefix - else if (member != null) member - else "#" - } - val targetedBytes = if (app) findAppBody(req) else (req, findBytes(req)) - if (targetedBytes._2.isEmpty) throw new FileNotFoundException(s"Could not find class bytes for '$path'") - targetedBytes + val req = path match { + case "-" => last + case HashSplit(prefix, _) if prefix != null => prefix + case HashSplit(_, member) if member != null => member + case s => s + } + val targetedBytes = if (app) findAppBody(req) else (path, findBytes(req)) + targetedBytes match { + case (_, bytes) if bytes.isEmpty => throw new FileNotFoundException(s"Could not find class bytes for '$path'") + case ok => ok + } } private def findAppBody(path: String): (String, Array[Byte]) = { @@ -88,16 +99,12 @@ class JavapClass( // assumes only the first match is of interest (because only one endpoint is generated). def findNewStyle(bytes: Array[Byte]) = { import scala.tools.asm.ClassReader - import scala.tools.asm.tree.ClassNode - import PartialFunction.cond - import JavaConverters._ - val rdr = new ClassReader(bytes) - val nod = new ClassNode - rdr.accept(nod, 0) //foo/Bar.delayedEndpoint$foo$Bar$1 val endpoint = "delayedEndpoint".r.unanchored - def isEndPoint(s: String) = (s contains '$') && cond(s) { case endpoint() => true } - nod.methods.asScala collectFirst { case m if isEndPoint(m.name) => m.name } + def isEndPoint(s: String) = (s contains '$') && when(s) { case endpoint() => true } + new ClassReader(bytes) withMethods { methods => + methods collectFirst { case m if isEndPoint(m.name) => m.name } + } } // try new style, and add foo#delayedEndpoint$bar$1 to filter on the endpoint def asNewStyle(bytes: Array[Byte]) = Some(bytes) filter (_.nonEmpty) flatMap { bs => @@ -121,8 +128,7 @@ class JavapClass( def findBytes(path: String): Array[Byte] = tryFile(path) getOrElse tryClass(path) - /** Assume the string is a path and try to find the classfile - * it represents. + /** Assume the string is a path and try to find the classfile it represents. */ def tryFile(path: String): Option[Array[Byte]] = (Try (File(path.asClassResource)) filter (_.exists) map (_.toByteArray())).toOption @@ -201,46 +207,67 @@ class JavapClass( w } - /** Create a Showable with output massage. - * @param raw show ugly repl names - * @param target attempt to filter output to show region of interest - * @param preamble other messages to output - */ - def showWithPreamble(raw: Boolean, target: String, preamble: String = ""): Showable = new Showable { - // ReplStrippingWriter clips and scrubs on write(String) - // circumvent it by write(mw, 0, mw.length) or wrap it in withoutUnwrapping - def show() = - if (raw && intp.isDefined) intp.get withoutUnwrapping { writeLines() } - else writeLines() - private def writeLines() { - // take Foo# as Foo#apply for purposes of filtering. Useful for -fun Foo#; - // if apply is added here, it's for other than -fun: javap Foo#, perhaps m#? - val filterOn = target.splitHashMember._2 map { s => if (s.isEmpty) "apply" else s } - var filtering = false // true if in region matching filter - // true to output - def checkFilter(line: String) = if (filterOn.isEmpty) true else { - // cheap heuristic, todo maybe parse for the java sig. 
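// ---------------------------------------------------------------------------
// Illustrative sketch, not from the patch: how bytesFor above picks apart a
// "prefix#member" spec. Unmatched regex groups come back as null, so the
// guards select whichever part was supplied. The pattern is the revised
// HashSplit introduced further down in this patch; "res0" stands in for the
// most recent REPL result.
object HashSplitDemo {
  val HashSplit = "([^#]+)?(?:#(.+)?)?".r

  def pick(path: String, last: String): String = path match {
    case "-"                                    => last
    case HashSplit(prefix, _) if prefix != null => prefix
    case HashSplit(_, member) if member != null => member
    case s                                      => s
  }

  def main(args: Array[String]): Unit = {
    println(pick("Foo#bar", "res0")) // Foo
    println(pick("#bar", "res0"))    // bar
    println(pick("-", "res0"))       // res0
  }
}
// ---------------------------------------------------------------------------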
- // method sigs end in paren semi - def isAnyMethod = line.endsWith(");") - def isOurMethod = { - val lparen = line.lastIndexOf('(') - val blank = line.lastIndexOf(' ', lparen) - (blank >= 0 && line.substring(blank+1, lparen) == filterOn.get) + def filterLines(target: String, text: String): String = { + // take Foo# as Foo#apply for purposes of filtering. Useful for -fun Foo#; + // if apply is added here, it's for other than -fun: javap Foo#, perhaps m#? + val filterOn = target.splitHashMember._2 map { s => if (s.isEmpty) "apply" else s } + var filtering = false // true if in region matching filter + // turn filtering on/off given the pattern of interest + def filterStatus(line: String, pattern: String) = { + def isSpecialized(method: String) = (method startsWith pattern+"$") && (method endsWith "$sp") + def isAnonymized(method: String) = (pattern == "$anonfun") && (method startsWith "$anonfun$") + // cheap heuristic, todo maybe parse for the java sig. + // method sigs end in paren semi + def isAnyMethod = line endsWith ");" + // take the method name between the space char and left paren. + // accept exact match or something that looks like what we might be asking for. + def isOurMethod = { + val lparen = line lastIndexOf '(' + val blank = line.lastIndexOf(' ', lparen) + if (blank < 0) false + else { + val method = line.substring(blank+1, lparen) + (method == pattern || isSpecialized(method) || isAnonymized(method)) } - filtering = if (filtering) { + } + filtering = + if (filtering) { // next blank line terminates section - // for -public, next line is next method, more or less - line.trim.nonEmpty && !isAnyMethod + // in non-verbose mode, next line is next method, more or less + line.trim.nonEmpty && (!isAnyMethod || isOurMethod) } else { isAnyMethod && isOurMethod } - filtering - } - for (line <- Source.fromString(preamble + written).getLines(); if checkFilter(line)) - printWriter write line+lineSeparator - printWriter.flush() + filtering } + // do we output this line? + def checkFilter(line: String) = filterOn map (filterStatus(line, _)) getOrElse true + val sw = new StringWriter + val pw = new PrintWriter(sw) + for { + line <- Source.fromString(text).getLines() + if checkFilter(line) + } pw println line + pw.flush() + sw.toString } + + /** Create a Showable with output massage. 
+ * @param raw show ugly repl names + * @param target attempt to filter output to show region of interest + * @param preamble other messages to output + */ + def showWithPreamble(raw: Boolean, target: String, preamble: String = ""): Showable = + new Showable { + private def writeLines() = filterLines(target, preamble + written) + val output = writeLines() + + // ReplStrippingWriter clips and scrubs on write(String) + // circumvent it by write(mw, 0, mw.length) or wrap it in withoutUnwrapping + def show() = + if (raw && intp.isDefined) intp.get withoutUnwrapping { printWriter.write(output, 0, output.length) } + else intp.get withoutTruncating(printWriter write output) + } } class JavapTool6 extends JavapTool { @@ -275,12 +302,13 @@ class JavapClass( override def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] = (inputs map { - case (claas, Success(ba)) => JpResult(showable(raw, claas, newPrinter(new ByteArrayInputStream(ba), newEnv(options)))) + case (klass, Success(ba)) => JpResult(showable(raw, klass, newPrinter(new ByteArrayInputStream(ba), newEnv(options)))) case (_, Failure(e)) => JpResult(e.toString) }).toList orFailed List(noToolError) } class JavapTool7 extends JavapTool { + import JavapTool._ type Task = { def call(): Boolean // true = ok //def run(args: Array[String]): Int // all args @@ -290,10 +318,10 @@ class JavapClass( //object TaskResult extends Enumeration { // val Ok, Error, CmdErr, SysErr, Abnormal = Value //} - val TaskClaas = loader.tryToInitializeClass[Task](JavapTool.Tool).orNull - override protected def failed = TaskClaas eq null + val TaskClass = loader.tryToInitializeClass[Task](JavapTool.Tool).orNull + override protected def failed = TaskClass eq null - val TaskCtor = TaskClaas.getConstructor( + val TaskCtor = TaskClass.getConstructor( classOf[Writer], classOf[JavaFileManager], classOf[DiagnosticListener[_]], @@ -312,19 +340,14 @@ class JavapClass( /** All diagnostic messages. * @param locale Locale for diagnostic messages, null by default. 
*/ - def messages(implicit locale: Locale = null) = { - import JavaConverters._ - diagnostics.asScala.map(_ getMessage locale).toList - } + def messages(implicit locale: Locale = null) = diagnostics.asScala.map(_ getMessage locale).toList + // don't filter this message if raw, since the names are likely to differ + private val container = "Binary file .* contains .*".r def reportable(raw: Boolean): String = { - // don't filter this message if raw, since the names are likely to differ - val container = "Binary file .* contains .*".r - val m = if (raw) messages - else messages filter (_ match { case container() => false case _ => true }) + val m = if (raw) messages else messages filterNot (when(_) { case container() => true }) clear() - if (m.nonEmpty) m mkString ("", lineSeparator, lineSeparator) - else "" + if (m.nonEmpty) m mkString ("", EOL, EOL) else "" } } val reporter = new JavaReporter @@ -344,8 +367,12 @@ class JavapClass( import Kind._ import StandardLocation._ import JavaFileManager.Location - import java.net.URI - def uri(name: String): URI = new URI(name) // new URI("jfo:" + name) + import java.net.{ URI, URISyntaxException } + + // name#fragment is OK, but otherwise fragile + def uri(name: String): URI = + try new URI(name) // new URI("jfo:" + name) + catch { case _: URISyntaxException => new URI("dummy") } def inputNamed(name: String): Try[ByteAry] = (managed find (_._1 == name)).get._2 def managedFile(name: String, kind: Kind) = kind match { @@ -379,19 +406,18 @@ class JavapClass( def showable(raw: Boolean, target: String): Showable = showWithPreamble(raw, target, reporter.reportable(raw)) // eventually, use the tool interface - def task(options: Seq[String], claases: Seq[String], inputs: Seq[Input]): Task = { + def task(options: Seq[String], classes: Seq[String], inputs: Seq[Input]): Task = { //ServiceLoader.load(classOf[javax.tools.DisassemblerTool]). 
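// ---------------------------------------------------------------------------
// Minimal standalone sketch, not from the patch: the JavaReporter idea above,
// reduced to a DiagnosticListener that queues diagnostics thread-safely and
// renders them as localized messages on demand. The class name
// QueueingReporter is invented.
import java.util.Locale
import java.util.concurrent.ConcurrentLinkedQueue
import javax.tools.{ Diagnostic, DiagnosticListener, JavaFileObject }
import scala.collection.JavaConverters._

class QueueingReporter extends DiagnosticListener[JavaFileObject] {
  private val diagnostics = new ConcurrentLinkedQueue[Diagnostic[_ <: JavaFileObject]]
  def report(d: Diagnostic[_ <: JavaFileObject]): Unit = diagnostics add d
  // a null locale means "default locale" for javax.tools diagnostics
  def messages(locale: Locale = null): List[String] =
    diagnostics.asScala.map(_ getMessage locale).toList
  def clear(): Unit = diagnostics.clear()
}
// ---------------------------------------------------------------------------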
- //getTask(writer, fileManager, reporter, options.asJava, claases.asJava) - import JavaConverters.asJavaIterableConverter - TaskCtor.newInstance(writer, fileManager(inputs), reporter, options.asJava, claases.asJava) + //getTask(writer, fileManager, reporter, options.asJava, classes.asJava) + TaskCtor.newInstance(writer, fileManager(inputs), reporter, options.asJava, classes.asJava) .orFailed (throw new IllegalStateException) } // a result per input - private def applyOne(raw: Boolean, options: Seq[String], claas: String, inputs: Seq[Input]): Try[JpResult] = + private def applyOne(raw: Boolean, options: Seq[String], klass: String, inputs: Seq[Input]): Try[JpResult] = Try { - task(options, Seq(claas), inputs).call() + task(options, Seq(klass), inputs).call() } map { - case true => JpResult(showable(raw, claas)) + case true => JpResult(showable(raw, klass)) case _ => JpResult(reporter.reportable(raw)) } recoverWith { case e: java.lang.reflect.InvocationTargetException => e.getCause match { @@ -402,7 +428,7 @@ class JavapClass( reporter.clear() } override def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] = (inputs map { - case (claas, Success(_)) => applyOne(raw, options, claas, inputs).get + case (klass, Success(_)) => applyOne(raw, options, klass, inputs).get case (_, Failure(e)) => JpResult(e.toString) }).toList orFailed List(noToolError) } @@ -462,7 +488,7 @@ class JavapClass( object ToolArgs { def fromArgs(args: Seq[String]): (ToolArgs, Seq[String]) = ((ToolArgs(), Seq[String]()) /: (args flatMap massage)) { case ((t,others), s) => s match { - case "-fun" => (t copy (fun=true), others) + case "-fun" => (t copy (fun=true), others :+ "-private") case "-app" => (t copy (app=true), others) case "-help" => (t copy (help=true), others) case "-raw" => (t copy (raw=true), others) @@ -528,24 +554,28 @@ class JavapClass( val DefaultOptions = List("-protected", "-verbose") - def isAvailable = Seq(Env, Tool) exists (cn => hasClass(loader, cn)) - private def hasClass(cl: ScalaClassLoader, cn: String) = cl.tryToInitializeClass[AnyRef](cn).isDefined - private def isTaskable(cl: ScalaClassLoader) = hasClass(cl, Tool) + def isAvailable = Seq(Env, Tool) exists (hasClass(loader, _)) - def apply() = if (isTaskable(loader)) new JavapTool7 else new JavapTool6 + /** Select the tool implementation for this platform. */ + def apply() = if (hasClass(loader, Tool)) new JavapTool7 else new JavapTool6 } } object JavapClass { + import scala.tools.asm.ClassReader + import scala.tools.asm.tree.{ ClassNode, MethodNode } + def apply( loader: ScalaClassLoader = ScalaClassLoader.appLoader, printWriter: PrintWriter = new PrintWriter(System.out, true), intp: Option[IMain] = None ) = new JavapClass(loader, printWriter, intp) - val HashSplit = "(.*?)(?:#([^#]*))?".r + /** Match foo#bar, both groups are optional (may be null). 
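// ---------------------------------------------------------------------------
// Illustrative sketch, not from the patch: the ToolArgs flag folding above,
// restated with an explicit foldLeft (equivalent to the "/:" operator used
// there). Known flags update a config case class via copy, everything else
// passes through to the underlying tool; note how "-fun" also injects
// "-private" so closure bodies are shown.
object FlagFoldDemo {
  case class ToolArgs(fun: Boolean = false, app: Boolean = false,
                      help: Boolean = false, raw: Boolean = false)

  def fromArgs(args: Seq[String]): (ToolArgs, Seq[String]) =
    args.foldLeft((ToolArgs(), Seq.empty[String])) { case ((t, others), s) =>
      s match {
        case "-fun"  => (t.copy(fun = true), others :+ "-private")
        case "-app"  => (t.copy(app = true), others)
        case "-help" => (t.copy(help = true), others)
        case "-raw"  => (t.copy(raw = true), others)
        case flag    => (t, others :+ flag)
      }
    }

  def main(args: Array[String]): Unit =
    println(fromArgs(Seq("-fun", "-verbose"))) // (ToolArgs(true,false,false,false),List(-private, -verbose))
}
// ---------------------------------------------------------------------------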
*/ + val HashSplit = "([^#]+)?(?:#(.+)?)?".r + // We enjoy flexibility in specifying either a fully-qualified class name com.acme.Widget // or a resource path com/acme/Widget.class; but not widget.out implicit class MaybeClassLike(val s: String) extends AnyVal { @@ -564,9 +594,9 @@ object JavapClass { else (s take i, Some(s drop i+1)) } } - implicit class ClassLoaderOps(val cl: ClassLoader) extends AnyVal { + implicit class ClassLoaderOps(val loader: ScalaClassLoader) extends AnyVal { private def parentsOf(x: ClassLoader): List[ClassLoader] = if (x == null) Nil else x :: parentsOf(x.getParent) - def parents: List[ClassLoader] = parentsOf(cl) + def parents: List[ClassLoader] = parentsOf(loader) /* all file locations */ def locations = { def alldirs = parents flatMap (_ match { @@ -580,11 +610,11 @@ object JavapClass { /* only the file location from which the given class is loaded */ def locate(k: String): Option[Path] = { Try { - val claas = try cl loadClass k catch { + val klass = try loader loadClass k catch { case _: NoClassDefFoundError => null // let it snow } // cf ScalaClassLoader.originOfClass - claas.getProtectionDomain.getCodeSource.getLocation + klass.getProtectionDomain.getCodeSource.getLocation } match { case Success(null) => None case Success(loc) if loc.isFile => Some(Path(new JFile(loc.toURI))) @@ -592,44 +622,66 @@ object JavapClass { } } /* would classBytes succeed with a nonempty array */ - def resourceable(className: String): Boolean = cl.getResource(className.asClassResource) != null + def resourceable(className: String): Boolean = loader.getResource(className.asClassResource) != null + + /* class reader of class bytes */ + def classReader(resource: String): ClassReader = new ClassReader(loader classBytes resource) + } + implicit class `class reader convenience`(val reader: ClassReader) extends AnyVal { + def withMethods[A](f: Seq[MethodNode] => A): A = { + val cls = new ClassNode + reader.accept(cls, 0) + f(cls.methods.asScala) + } } implicit class PathOps(val p: Path) extends AnyVal { import scala.tools.nsc.io.Jar def isJar = Jar isJarOrZip p } + implicit class `fun with files`(val f: AbstractFile) extends AnyVal { + def descend(path: Seq[String]): Option[AbstractFile] = { + def lookup(f: AbstractFile, path: Seq[String]): Option[AbstractFile] = path match { + case p if p.isEmpty => Option(f) + case p => Option(f.lookupName(p.head, directory = true)) flatMap (lookup(_, p.tail)) + } + lookup(f, path) + } + } implicit class URLOps(val url: URL) extends AnyVal { def isFile: Boolean = url.getProtocol == "file" } object FunFinder { def apply(loader: ScalaClassLoader, intp: Option[IMain]) = new FunFinder(loader, intp) } + // FunFinder.funs(ks) finds anonfuns class FunFinder(loader: ScalaClassLoader, intp: Option[IMain]) { + // manglese for closure: typename, $anonfun or lambda, opt method, digits + val closure = """(.*)\$(\$anonfun|lambda)(?:\$+([^$]+))?\$(\d+)""".r + + // manglese for closure + val cleese = "(?:anonfun|lambda)" + // class k, candidate f without prefix - def isFunOfClass(k: String, f: String) = { - val p = (s"${Regex quote k}\\$$+anonfun").r - (p findPrefixOf f).nonEmpty - } + def isFunOfClass(k: String, f: String) = (s"${Regex quote k}\\$$+$cleese".r findPrefixOf f).nonEmpty + // class k, candidate f without prefix, method m - def isFunOfMethod(k: String, m: String, f: String) = { - val p = (s"${Regex quote k}\\$$+anonfun\\$$${Regex quote m}\\$$").r - (p findPrefixOf f).nonEmpty - } - def isFunOfTarget(k: String, m: Option[String], f: String) = - if 
(m.isEmpty) isFunOfClass(k, f) - else isFunOfMethod(k, m.get, f) - def listFunsInAbsFile(k: String, m: Option[String], d: AbstractFile) = { - for (f <- d; if !f.isDirectory && isFunOfTarget(k, m, f.name)) yield f.name - } - // path prefix p, class k, dir d - def listFunsInDir(p: String, k: String, m: Option[String])(d: Directory) = { - val subdir = Path(p) - for (f <- (d / subdir).toDirectory.list; if f.isFile && isFunOfTarget(k, m, f.name)) + def isFunOfMethod(k: String, m: String, f: String) = + (s"${Regex quote k}\\$$+$cleese\\$$+${Regex quote m}\\$$".r findPrefixOf f).nonEmpty + + def isFunOfTarget(target: Target, f: String) = + target.member map (isFunOfMethod(target.name, _, f)) getOrElse isFunOfClass(target.name, f) + + def listFunsInAbsFile(target: Target)(d: AbstractFile) = + for (f <- d; if !f.isDirectory && isFunOfTarget(target, f.name)) yield f.name + + def listFunsInDir(target: Target)(d: Directory) = { + val subdir = Path(target.prefix) + for (f <- (d / subdir).toDirectory.list; if f.isFile && isFunOfTarget(target, f.name)) yield f.name } - // path prefix p, class k, jar file f - def listFunsInJar(p: String, k: String, m: Option[String])(f: File) = { + + def listFunsInJar(target: Target)(f: File) = { import java.util.jar.JarEntry import scala.tools.nsc.io.Jar def maybe(e: JarEntry) = { @@ -638,78 +690,133 @@ object JavapClass { if (parts.length < 2) ("", e.getName) else (parts.init mkString "/", parts.last) } - if (path == p && isFunOfTarget(k, m, name)) Some(name) else None + if (path == target.prefix && isFunOfTarget(target, name)) Some(name) else None } (new Jar(f) map maybe).flatten } def loadable(name: String) = loader resourceable name - // translated class, optional member, opt member to filter on, whether it is repl output - def translate(s: String): (String, Option[String], Option[String], Boolean) = { + case class Target(path: String, member: Option[String], filter: Option[String], isRepl: Boolean, isModule: Boolean) { + val splat = path split "\\." + val name = splat.last + val prefix = if (splat.length > 1) splat.init mkString "/" else "" + val pkg = if (splat.length > 1) splat.init mkString "." else "" + val targetName = s"$name${ if (isModule) "$" else "" }" + } + // translated class, optional member, opt member to filter on, whether it is repl output and a module + def translate(s: String): Target = { val (k0, m0) = s.splitHashMember - val k = k0.asClassName + val isModule = k0 endsWith "$" + val k = (k0 stripSuffix "$").asClassName val member = m0 filter (_.nonEmpty) // take Foo# as no member, not "" val filter = m0 flatMap { case "" => Some("apply") case _ => None } // take Foo# as filter on apply // class is either something replish or available to loader // $line.$read$$etc$Foo#member - ((intp flatMap (_ translatePath k) filter (loadable) map ((_, member, filter, true))) + ((intp flatMap (_ translatePath k) filter (loadable) map (x => Target(x stripSuffix "$", member, filter, true, isModule))) // s = "f" and $line.$read$$etc$#f is what we're after, // ignoring any #member (except take # as filter on #apply) - orElse (intp flatMap (_ translateEnclosingClass k) map ((_, Some(k), filter, true))) - getOrElse ((k, member, filter, false))) + orElse (intp flatMap (_ translateEnclosingClass k) map (x => Target(x stripSuffix "$", Some(k), filter, true, isModule))) + getOrElse (Target(k, member, filter, false, isModule))) } /** Find the classnames of anonfuns associated with k, * where k may be an available class or a symbol in scope. 
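// ---------------------------------------------------------------------------
// Standalone sketch, not from the patch: the prefix test used by isFunOfClass
// above. The class name is regex-quoted, then one or more '$' must be
// followed by the "anonfun"/"lambda" manglese that closure class names carry.
// The sample file names are invented.
import scala.util.matching.Regex

object ClosureNameDemo {
  private val cleese = "(?:anonfun|lambda)"

  def isFunOfClass(k: String, f: String): Boolean =
    (s"${Regex quote k}\\$$+$cleese".r findPrefixOf f).nonEmpty

  def main(args: Array[String]): Unit = {
    println(isFunOfClass("Foo", "Foo$$anonfun$1.class"))    // true
    println(isFunOfClass("Foo", "Foo$lambda$$bar$1.class")) // true
    println(isFunOfClass("Foo", "FooBar$$anonfun$1.class")) // false
  }
}
// ---------------------------------------------------------------------------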
*/ - def funsOf(k0: String): Seq[String] = { + def funsOf(selection: String): Seq[String] = { // class is either something replish or available to loader - val (k, member, filter, isReplish) = translate(k0) - val splat = k split "\\." - val name = splat.last - val prefix = if (splat.length > 1) splat.init mkString "/" else "" - val pkg = if (splat.length > 1) splat.init mkString "." else "" + val target = translate(selection) + // reconstitute an anonfun with a package // if filtered, add the hash back, e.g. pkg.Foo#bar, pkg.Foo$anon$1#apply def packaged(s: String) = { - val p = if (pkg.isEmpty) s else s"$pkg.$s" - val pm = filter map (p + "#" + _) - pm getOrElse p + val p = if (target.pkg.isEmpty) s else s"${target.pkg}.$s" + target.filter map (p + "#" + _) getOrElse p } - // is this translated path in (usually virtual) repl outdir? or loadable from filesystem? - val fs = if (isReplish) { - def outed(d: AbstractFile, p: Seq[String]): Option[AbstractFile] = { - if (p.isEmpty) Option(d) - else Option(d.lookupName(p.head, directory = true)) flatMap (f => outed(f, p.tail)) - } - outed(intp.get.replOutput.dir, splat.init) map { d => - listFunsInAbsFile(name, member, d) map packaged - } - } else { - loader locate k map { w => - if (w.isDirectory) listFunsInDir(prefix, name, member)(w.toDirectory) map packaged - else if (w.isJar) listFunsInJar(prefix, name, member)(w.toFile) map packaged - else Nil + // find closure classes in repl outdir or try asking the classloader where to look + val fs = + if (target.isRepl) + (intp.get.replOutput.dir descend target.splat.init) map { d => + listFunsInAbsFile(target)(d) map (_.asClassName) map packaged + } + else + loader locate target.path map { + case d if d.isDirectory => listFunsInDir(target)(d.toDirectory) map packaged + case j if j.isJar => listFunsInJar(target)(j.toFile) map packaged + case _ => Nil + } + val res = fs map (_.to[Seq]) getOrElse Seq() + // on second thought, we don't care about lambda method classes, just the impl methods + val rev = + res flatMap { + case x @ closure(_, "lambda", _, _) => labdaMethod(x, target) + //target.member flatMap (_ => labdaMethod(x, target)) getOrElse s"${target.name}#$$anonfun" + case x => Some(x) + } + rev + } + // given C$lambda$$g$n for member g and n in 1..N, find the C.accessor$x + // and the C.$anonfun$x it forwards to. + def labdaMethod(lambda: String, target: Target): Option[String] = { + import scala.tools.asm.ClassReader + import scala.tools.asm.Opcodes.INVOKESTATIC + import scala.tools.asm.tree.{ ClassNode, MethodInsnNode } + // the accessor methods invoked statically by the apply of the given closure class + def accesses(s: String): Seq[(String, String)] = { + val accessor = """accessor\$\d+""".r + loader classReader s withMethods { ms => + ms filter (_.name == "apply") flatMap (_.instructions.toArray.collect { + case i: MethodInsnNode if i.getOpcode == INVOKESTATIC && when(i.name) { case accessor(_*) => true } => (i.owner, i.name) + }) } } - fs match { - case Some(xs) => xs.to[Seq] // maybe empty - case None => Seq() // nothing found, e.g., junk input + // get the k.$anonfun for the accessor k.m + def anonOf(k: String, m: String): String = { + val res = + loader classReader k withMethods { ms => + ms filter (_.name == m) flatMap (_.instructions.toArray.collect { + case i: MethodInsnNode if i.getOpcode == INVOKESTATIC && i.name.startsWith("$anonfun") => i.name + }) + } + assert(res.size == 1) + res.head + } + // the lambdas invoke accessors that call the anonfuns of interest. 
Filter k on the k#$anonfuns. + val ack = accesses(lambda) + assert(ack.size == 1) // There can be only one. + ack.head match { + case (k, _) if target.isModule && !(k endsWith "$") => None + case (k, m) => Some(s"${k}#${anonOf(k, m)}") } } - def funs(ks: Seq[String]) = ks flatMap funsOf _ + /** Translate the supplied targets to patterns for anonfuns. + * Pattern is typename $ label [[$]$func] $n where label is $anonfun or lambda, + * and lambda includes the extra dollar, func is a method name, and n is an int. + * The typename for a nested class is dollar notation, Betty$Bippy. + * + * If C has anonfun closure classes, then use C$$anonfun$f$1 (various names, C# filters on apply). + * If C has lambda closure classes, then use C#$anonfun (special-cased by output filter). + */ + def funs(ks: Seq[String]): Seq[String] = ks flatMap funsOf } } +trait Javap { + def loader: ScalaClassLoader + def printWriter: PrintWriter + def apply(args: Seq[String]): List[Javap.JpResult] + def tryFile(path: String): Option[Array[Byte]] + def tryClass(path: String): Array[Byte] +} + object Javap { def isAvailable(cl: ScalaClassLoader = ScalaClassLoader.appLoader) = JavapClass(cl).JavapTool.isAvailable def apply(path: String): Unit = apply(Seq(path)) def apply(args: Seq[String]): Unit = JavapClass() apply args foreach (_.show()) - trait Showable { + private[interpreter] trait Showable { def show(): Unit } - sealed trait JpResult extends scala.tools.util.JpResult { + sealed trait JpResult { type ResultType def isError: Boolean def value: ResultType @@ -735,8 +842,13 @@ object Javap { def isError = false def show() = value.show() // output to tool's PrintWriter } - implicit class Lastly[A](val t: Try[A]) extends AnyVal { - private def effect[X](last: =>Unit)(a: X): Try[A] = { last; t } - def lastly(last: =>Unit): Try[A] = t transform (effect(last) _, effect(last) _) - } +} + +object NoJavap extends Javap { + import Javap._ + def loader: ScalaClassLoader = getClass.getClassLoader + def printWriter: PrintWriter = new PrintWriter(System.err, true) + def apply(args: Seq[String]): List[JpResult] = Nil + def tryFile(path: String): Option[Array[Byte]] = None + def tryClass(path: String): Array[Byte] = Array() } diff --git a/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala b/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala index 12d6ee5112dc..9f555aee14ba 100644 --- a/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala +++ b/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala @@ -76,8 +76,11 @@ trait LoopCommands { // the default result means "keep running, and don't record that line" val default = Result(keepRunning = true, None) + // "keep running, and record this line" + def recording(line: String) = Result(keepRunning = true, Option(line)) + // most commands do not want to micromanage the Result, but they might want - // to print something to the console, so we accomodate Unit and String returns. + // to print something to the console, so we accommodate Unit and String returns. 
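// ---------------------------------------------------------------------------
// Self-contained sketch, not from the patch: the convenience conversions
// noted just above. A command body may return Unit, a String message, or a
// full Result; implicit conversions in the companion normalize the first two
// to Result.default. The names are simplified stand-ins for the LoopCommands
// types.
import scala.language.implicitConversions

object ResultDemo {
  case class Result(keepRunning: Boolean, lineToRecord: Option[String])
  object Result {
    val default = Result(keepRunning = true, None)
    def recording(line: String) = Result(keepRunning = true, Option(line))
    implicit def fromUnit(x: Unit): Result = default
    implicit def fromString(msg: String): Result = { println(msg); default }
  }

  def runCommand(body: => Result): Result = body

  def main(args: Array[String]): Unit = {
    println(runCommand(()))                   // Result(true,None)
    println(runCommand("usage: :demo <arg>")) // echoes the usage, then default
    println(runCommand(Result.recording(":demo x")))
  }
}
// ---------------------------------------------------------------------------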
implicit def resultFromUnit(x: Unit): Result = default implicit def resultFromString(msg: String): Result = { echoCommandMessage(msg) @@ -85,4 +88,3 @@ trait LoopCommands { } } } - diff --git a/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala index f4cbcb50fefb..bcba7b6dfdfe 100644 --- a/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala +++ b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala @@ -102,6 +102,18 @@ trait MemberHandlers { class GenericHandler(member: Tree) extends MemberHandler(member) + import scala.io.AnsiColor.{ BOLD, BLUE, GREEN, RESET } + + def color(c: String, s: String) = + if (replProps.colorOk) string2code(BOLD) + string2code(c) + s + string2code(RESET) + else s + + def colorName(s: String) = + color(BLUE, string2code(s)) + + def colorType(s: String) = + color(GREEN, string2code(s)) + class ValHandler(member: ValDef) extends MemberDefHandler(member) { val maxStringElements = 1000 // no need to mkString billions of elements override def definesValue = true @@ -116,18 +128,23 @@ trait MemberHandlers { else any2stringOf(path, maxStringElements) val vidString = - if (replProps.vids) s"""" + " @ " + "%%8x".format(System.identityHashCode($path)) + " """.trim + if (replProps.vids) s"""" + f"@$${System.identityHashCode($path)}%8x" + """" else "" - """ + "%s%s: %s = " + %s""".format(string2code(prettyName), vidString, string2code(req typeOf name), resultString) + val nameString = colorName(prettyName) + vidString + val typeString = colorType(req typeOf name) + s""" + "$nameString: $typeString = " + $resultString""" } } } class DefHandler(member: DefDef) extends MemberDefHandler(member) { override def definesValue = flattensToEmpty(member.vparamss) // true if 0-arity - override def resultExtractionCode(req: Request) = - if (mods.isPublic) codegenln(name, ": ", req.typeOf(name)) else "" + override def resultExtractionCode(req: Request) = { + val nameString = colorName(name) + val typeString = colorType(req typeOf name) + if (mods.isPublic) s""" + "$nameString: $typeString\\n"""" else "" + } } abstract class MacroHandler(member: DefDef) extends MemberDefHandler(member) { diff --git a/src/repl/scala/tools/nsc/interpreter/Power.scala b/src/repl/scala/tools/nsc/interpreter/Power.scala index f69a5b487de5..8d8140b63880 100644 --- a/src/repl/scala/tools/nsc/interpreter/Power.scala +++ b/src/repl/scala/tools/nsc/interpreter/Power.scala @@ -155,7 +155,7 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re } object InternalInfo extends LowPriorityInternalInfo { } - /** Now dealing with the problem of acidentally calling a method on Type + /** Now dealing with the problem of accidentally calling a method on Type * when you're holding a Symbol and seeing the Symbol converted to the * type of Symbol rather than the type of the thing represented by the * symbol, by only implicitly installing one method, "?", and the rest diff --git a/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala index 51fab3082e8d..07d619bca5f3 100644 --- a/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala +++ b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala @@ -55,6 +55,8 @@ trait ReplGlobal extends Global { // newNamer(rootContext(unit)).enterSym(unit.body) } } + // add to initial or terminal phase to sanity check Run at construction + override val requires = List("typer") // ensure they didn't -Ystop-after:parser } override protected 
def computePhaseDescriptors: List[SubComponent] = { diff --git a/src/repl/scala/tools/nsc/interpreter/ReplProps.scala b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala index 36e6dbbccc12..8c4faf7278c4 100644 --- a/src/repl/scala/tools/nsc/interpreter/ReplProps.scala +++ b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala @@ -13,6 +13,9 @@ class ReplProps { private def bool(name: String) = BooleanProp.keyExists(name) private def int(name: String) = IntProp(name) + // This property is used in TypeDebugging. Let's recycle it. + val colorOk = bool("scala.color") + val info = bool("scala.repl.info") val debug = bool("scala.repl.debug") val trace = bool("scala.repl.trace") diff --git a/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala b/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala index b20166d0703b..e6f5a4089ed8 100644 --- a/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala +++ b/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala @@ -9,11 +9,47 @@ package interpreter import reporters._ import IMain._ +import scala.reflect.internal.util.Position + /** Like ReplGlobal, a layer for ensuring extra functionality. */ class ReplReporter(intp: IMain) extends ConsoleReporter(intp.settings, Console.in, new ReplStrippingWriter(intp)) { def printUntruncatedMessage(msg: String) = withoutTruncating(printMessage(msg)) + /** Whether very long lines can be truncated. This exists so important + * debugging information (like printing the classpath) is not rendered + * invisible due to the max message length. + */ + private var _truncationOK: Boolean = !intp.settings.verbose + def truncationOK = _truncationOK + def withoutTruncating[T](body: => T): T = { + val saved = _truncationOK + _truncationOK = false + try body + finally _truncationOK = saved + } + + override def warning(pos: Position, msg: String): Unit = withoutTruncating(super.warning(pos, msg)) + override def error(pos: Position, msg: String): Unit = withoutTruncating(super.error(pos, msg)) + + import scala.io.AnsiColor.{ RED, YELLOW, RESET } + + def severityColor(severity: Severity): String = severity match { + case ERROR => RED + case WARNING => YELLOW + case INFO => RESET + } + + override def print(pos: Position, msg: String, severity: Severity) { + val prefix = ( + if (replProps.colorOk) + severityColor(severity) + clabel(severity) + RESET + else + clabel(severity) + ) + printMessage(pos, prefix + msg) + } + override def printMessage(msg: String) { // Avoiding deadlock if the compiler starts logging before // the lazy val is complete. 
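// ---------------------------------------------------------------------------
// Minimal sketch, not from the patch: the severity colouring idea from
// ReplReporter.print above, using only scala.io.AnsiColor. Plain strings
// stand in for the reporter's Severity values and clabel.
import scala.io.AnsiColor.{ RED, YELLOW, RESET }

object SeverityColorDemo {
  def clabel(severity: String): String = severity match {
    case "error"   => "error: "
    case "warning" => "warning: "
    case _         => ""
  }

  def severityColor(severity: String): String = severity match {
    case "error"   => RED
    case "warning" => YELLOW
    case _         => RESET
  }

  def prefix(severity: String, colorOk: Boolean): String =
    if (colorOk) severityColor(severity) + clabel(severity) + RESET
    else clabel(severity)

  def main(args: Array[String]): Unit = {
    println(prefix("warning", colorOk = true) + "a demo warning")
    println(prefix("error", colorOk = false) + "a demo error, uncoloured")
  }
}
// ---------------------------------------------------------------------------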
@@ -31,4 +67,5 @@ class ReplReporter(intp: IMain) extends ConsoleReporter(intp.settings, Console.i if (intp.totalSilence) () else super.displayPrompt() } + } diff --git a/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala b/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala index 43da5c6f1204..1664546cabdb 100644 --- a/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala +++ b/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala @@ -28,5 +28,8 @@ trait ReplStrings { def any2stringOf(x: Any, maxlen: Int) = "scala.runtime.ScalaRunTime.replStringOf(%s, %s)".format(x, maxlen) - def words(s: String) = (s.trim split "\\s+" filterNot (_ == "")).toList + // no escaped or nested quotes + private[this] val inquotes = """(['"])(.*?)\1""".r + def unquoted(s: String) = s match { case inquotes(_, w) => w ; case _ => s } + def words(s: String) = (s.trim split "\\s+" filterNot (_ == "") map unquoted).toList } diff --git a/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala b/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala index 6634dc694419..49b8433a8c36 100644 --- a/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala +++ b/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala @@ -22,14 +22,19 @@ extends InteractiveReader def reset() = () def redrawLine() = () - def readOneLine(prompt: String): String = { - if (interactive) { - out.print(prompt) - out.flush() - } - in.readLine() + + // InteractiveReader internals + protected def readOneLine(prompt: String): String = { + echo(prompt) + readOneLine() + } + protected def readOneKey(prompt: String) = sys.error("No char-based input in SimpleReader") + + protected def readOneLine(): String = in.readLine() + protected def echo(s: String): Unit = if (interactive) { + out.print(s) + out.flush() } - def readOneKey(prompt: String) = sys.error("No char-based input in SimpleReader") } object SimpleReader { @@ -39,3 +44,13 @@ object SimpleReader { def apply(in: BufferedReader = defaultIn, out: JPrintWriter = defaultOut, interactive: Boolean = true): SimpleReader = new SimpleReader(in, out, interactive) } + +// pretend we are a console for verbose purposes +trait EchoReader extends SimpleReader { + // if there is more input, then maybe echo the prompt and the input + override def readOneLine(prompt: String) = { + val input = readOneLine() + if (input != null) echo(f"$prompt$input%n") + input + } +} diff --git a/src/repl/scala/tools/nsc/interpreter/package.scala b/src/repl/scala/tools/nsc/interpreter/package.scala index 079097d7a2ae..56f1e6537673 100644 --- a/src/repl/scala/tools/nsc/interpreter/package.scala +++ b/src/repl/scala/tools/nsc/interpreter/package.scala @@ -11,6 +11,7 @@ import scala.reflect.runtime.{ universe => ru } import scala.reflect.{ClassTag, classTag} import scala.reflect.api.{Mirror, TypeCreator, Universe => ApiUniverse} import scala.util.control.Exception.catching +import scala.util.Try /** The main REPL related classes and values are as follows. 
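// ---------------------------------------------------------------------------
// Standalone sketch, not from the patch: the quote-stripping added to words()
// above. The regex captures the opening quote character and requires the same
// one again via the \1 backreference, so 'foo' and "foo" both unwrap (no
// escaped or nested quotes, and the whitespace split still happens first).
// The sample command line is invented.
object UnquoteDemo {
  private[this] val inquotes = """(['"])(.*?)\1""".r

  def unquoted(s: String): String = s match {
    case inquotes(_, w) => w
    case _              => s
  }

  def words(s: String): List[String] =
    (s.trim split "\\s+" filterNot (_ == "") map unquoted).toList

  def main(args: Array[String]): Unit =
    println(words("""load 'foo.txt' "bar.txt" -v""")) // List(load, foo.txt, bar.txt, -v)
}
// ---------------------------------------------------------------------------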
* In addition to standard compiler classes Global and Settings, there are: @@ -196,4 +197,14 @@ package object interpreter extends ReplConfig with ReplStrings { } } } + + /* debug assist + private[nsc] implicit class `smart stringifier`(val sc: StringContext) extends AnyVal { + import StringContext._, runtime.ScalaRunTime.stringOf + def ss(args: Any*): String = sc.standardInterpolator(treatEscapes, args map stringOf) + } debug assist */ + private[nsc] implicit class `try lastly`[A](val t: Try[A]) extends AnyVal { + private def effect[X](last: =>Unit)(a: X): Try[A] = { last; t } + def lastly(last: =>Unit): Try[A] = t transform (effect(last) _, effect(last) _) + } } diff --git a/src/scaladoc/scala/tools/ant/Scaladoc.scala b/src/scaladoc/scala/tools/ant/Scaladoc.scala index 36a1405b1106..034416e8442d 100644 --- a/src/scaladoc/scala/tools/ant/Scaladoc.scala +++ b/src/scaladoc/scala/tools/ant/Scaladoc.scala @@ -543,7 +543,7 @@ class Scaladoc extends ScalaMatchingTask { /** Tests if a file exists and prints a warning in case it doesn't. Always * returns the file, even if it doesn't exist. * - * @param file A file to test for existance. + * @param file A file to test for existence. * @return The same file. */ private def existing(file: File): File = { diff --git a/src/scaladoc/scala/tools/nsc/ScalaDoc.scala b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala index 52a0c20a111e..32a6ba0ce350 100644 --- a/src/scaladoc/scala/tools/nsc/ScalaDoc.scala +++ b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala @@ -18,14 +18,10 @@ class ScalaDoc { val versionMsg = "Scaladoc %s -- %s".format(Properties.versionString, Properties.copyrightString) def process(args: Array[String]): Boolean = { - var reporter: ConsoleReporter = null + var reporter: ScalaDocReporter = null val docSettings = new doc.Settings(msg => reporter.error(FakePos("scaladoc"), msg + "\n scaladoc -help gives more information"), msg => reporter.printMessage(msg)) - reporter = new ConsoleReporter(docSettings) { - // need to do this so that the Global instance doesn't trash all the - // symbols just because there was an error - override def hasErrors = false - } + reporter = new ScalaDocReporter(docSettings) val command = new ScalaDoc.Command(args.toList, docSettings) def hasFiles = command.files.nonEmpty || docSettings.uncompilableFiles.nonEmpty @@ -50,12 +46,18 @@ class ScalaDoc { } finally reporter.printSummary() - // not much point in returning !reporter.hasErrors when it has - // been overridden with constant false. 
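// ---------------------------------------------------------------------------
// Simplified sketch, not from the patch: the two-phase wiring used by
// ScalaDoc.process above. Settings wants error callbacks, the reporter wants
// the Settings, so a var plus closures breaks the cycle; the callbacks only
// dereference the var after it has been assigned. The Settings/Reporter
// classes here are invented stand-ins.
object WiringDemo {
  class Settings(val errorFn: String => Unit)
  class Reporter(val settings: Settings) {
    var errorCount = 0
    def error(msg: String): Unit = { errorCount += 1; Console.err.println(s"error: $msg") }
  }

  def main(args: Array[String]): Unit = {
    var reporter: Reporter = null
    val settings = new Settings(msg => reporter.error(msg)) // reads the var lazily
    reporter = new Reporter(settings)
    settings.errorFn("demo failure") // safe: reporter is assigned by now
    println(reporter.errorCount)     // 1
  }
}
// ---------------------------------------------------------------------------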
- true + !reporter.reallyHasErrors } } +class ScalaDocReporter(settings: Settings) extends ConsoleReporter(settings) { + + // need to do sometimes lie so that the Global instance doesn't + // trash all the symbols just because there was an error + override def hasErrors = false + def reallyHasErrors = super.hasErrors +} + object ScalaDoc extends ScalaDoc { class Command(arguments: List[String], settings: doc.Settings) extends CompilerCommand(arguments, settings) { override def cmdName = "scaladoc" diff --git a/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala b/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala index dce52af56a2e..47ddfb8aa9e4 100644 --- a/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala +++ b/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala @@ -95,11 +95,11 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor val documentError: PartialFunction[Throwable, Unit] = { case NoCompilerRunException => reporter.info(null, "No documentation generated with unsuccessful compiler run", force = false) - case _: ClassNotFoundException => - () + case e @ (_:ClassNotFoundException | _:IllegalAccessException | _:InstantiationException | _:SecurityException | _:ClassCastException) => + reporter.error(null, s"Cannot load the doclet class ${settings.docgenerator.value} (specified with ${settings.docgenerator.name}): $e. Leaving the default settings will generate the html version of scaladoc.") } - /** Generate document(s) for all `files` containing scaladoc documenataion. + /** Generate document(s) for all `files` containing scaladoc documentation. * @param files The list of paths (relative to the compiler's source path, or absolute) of files to document. */ def document(files: List[String]) { def generate() = { diff --git a/src/scaladoc/scala/tools/nsc/doc/DocParser.scala b/src/scaladoc/scala/tools/nsc/doc/DocParser.scala index 6dc3e5a62b31..f03b848af618 100644 --- a/src/scaladoc/scala/tools/nsc/doc/DocParser.scala +++ b/src/scaladoc/scala/tools/nsc/doc/DocParser.scala @@ -15,13 +15,14 @@ import DocParser.Parsed * right after parsing so it can read `DocDefs` from source code which would * otherwise cause the compiler to go haywire. 
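// ---------------------------------------------------------------------------
// Tiny sketch, not from the patch: the handler shape used by documentError
// above, a PartialFunction[Throwable, Unit] that names the doclet-loading
// failures it knows how to report, with anything else routed to a default
// handler via applyOrElse. The messages are simplified.
object DocletErrorDemo {
  val documentError: PartialFunction[Throwable, Unit] = {
    case e @ (_: ClassNotFoundException | _: IllegalAccessException | _: InstantiationException) =>
      Console.err.println(s"Cannot load the doclet class: $e")
  }

  def handle(t: Throwable): Unit =
    documentError.applyOrElse(t, (other: Throwable) => Console.err.println(s"unexpected: $other"))

  def main(args: Array[String]): Unit = {
    handle(new ClassNotFoundException("no.such.Doclet"))
    handle(new RuntimeException("boom"))
  }
}
// ---------------------------------------------------------------------------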
*/ -class DocParser(settings: nsc.Settings, reporter: Reporter) extends Global(settings, reporter) { +class DocParser(settings: nsc.Settings, reporter: Reporter) extends Global(settings, reporter) with ScaladocGlobalTrait { def this(settings: Settings) = this(settings, new ConsoleReporter(settings)) def this() = this(new Settings(Console println _)) // the usual global initialization locally { new Run() } + override def forScaladoc = true override protected def computeInternalPhases() { phasesSet += syntaxAnalyzer } diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala index 212f94c53101..cbf8ff22ba82 100644 --- a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala +++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala @@ -37,14 +37,14 @@ trait ScaladocAnalyzer extends Analyzer { comment.defineVariables(sym) val typer1 = newTyper(context.makeNewScope(docDef, context.owner)) for (useCase <- comment.useCases) { - typer1.silent(_ => typer1 defineUseCases useCase) match { + typer1.silent(_.asInstanceOf[ScaladocTyper].defineUseCases(useCase)) match { case SilentTypeError(err) => - unit.warning(useCase.pos, err.errMsg) + reporter.warning(useCase.pos, err.errMsg) case _ => } for (useCaseSym <- useCase.defined) { if (sym.name != useCaseSym.name) - unit.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode) + reporter.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode) } } } @@ -190,8 +190,8 @@ abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends Syntax typeParams.nonEmpty || version.nonEmpty || since.nonEmpty } def isDirty = unclean(unmooredParser parseComment doc) - if ((doc ne null) && (settings.lint || isDirty)) - unit.warning(doc.pos, "discarding unmoored doc comment") + if ((doc ne null) && (settings.warnDocDetached || isDirty)) + reporter.warning(doc.pos, "discarding unmoored doc comment") } override def flushDoc(): DocComment = (try lastDoc finally lastDoc = null) @@ -208,7 +208,7 @@ abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends Syntax super.skipDocComment() } override def skipBlockComment(): Unit = { - inDocComment = false + inDocComment = false // ??? this means docBuffer won't receive contents of this comment??? 
docBuffer = new StringBuilder("/*") super.skipBlockComment() } @@ -217,9 +217,10 @@ abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends Syntax def foundStarComment(start: Int, end: Int) = try { val str = docBuffer.toString val pos = Position.range(unit.source, start, start, end) - unit.comment(pos, str) - if (inDocComment) + if (inDocComment) { + signalParsedDocComment(str, pos) lastDoc = DocComment(str, pos) + } true } finally { docBuffer = null diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala index 2ea3a0eb7ca1..4b40d25c1780 100644 --- a/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala +++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala @@ -11,6 +11,7 @@ import reporters.Reporter import typechecker.Analyzer import scala.reflect.internal.util.{ BatchSourceFile, RangePosition } + trait ScaladocGlobalTrait extends Global { outer => diff --git a/src/scaladoc/scala/tools/nsc/doc/Settings.scala b/src/scaladoc/scala/tools/nsc/doc/Settings.scala index 67529f417845..44683f17559c 100644 --- a/src/scaladoc/scala/tools/nsc/doc/Settings.scala +++ b/src/scaladoc/scala/tools/nsc/doc/Settings.scala @@ -66,7 +66,7 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_)) val docsourceurl = StringSetting ( "-doc-source-url", "url", - "A URL pattern used to build links to template sources; use variables, for example: ?{TPL_NAME} ('Seq'), ?{TPL_OWNER} ('scala.collection'), ?{FILE_PATH} ('scala/collection/Seq')", + s"A URL pattern used to link to the source file; the following variables are available: €{TPL_NAME}, €{TPL_OWNER} and respectively €{FILE_PATH}. For example, for `scala.collection.Seq`, the variables will be expanded to `Seq`, `scala.collection` and respectively `scala/collection/Seq` (without the backquotes). To obtain a relative path for €{FILE_PATH} instead of an absolute one, use the ${sourcepath.name} setting.", "" ) @@ -249,7 +249,7 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_)) val idx = s.indexOf("#") if (idx > 0) { val (first, last) = s.splitAt(idx) - Some(new File(first).getAbsolutePath -> appendIndex(last.substring(1))) + Some(new File(first).getCanonicalPath -> appendIndex(last.substring(1))) } else { error(s"Illegal -doc-external-doc option; expected a pair with '#' separator, found: '$s'") None diff --git a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala index a933c35c9907..d31b8772626f 100755 --- a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala +++ b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala @@ -131,18 +131,19 @@ trait CommentFactoryBase { this: MemberLookupBase => /** Javadoc tags that should be replaced by something useful, such as wiki * syntax, or that should be dropped. */ private val JavadocTags = - new Regex("""\{\@(code|docRoot|inheritDoc|link|linkplain|literal|value)([^}]*)\}""") + new Regex("""\{\@(code|docRoot|linkplain|link|literal|value)\p{Zs}*([^}]*)\}""") /** Maps a javadoc tag to a useful wiki replacement, or an empty string if it cannot be salvaged. 
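// ---------------------------------------------------------------------------
// Standalone sketch, not from the patch: the mechanism behind the JavadocTags
// rewriting in this hunk, i.e. replaceAllIn with a Match => String function
// dispatching on the tag name. The replacement strings below are illustrative
// (the patch's exact strings differ slightly), and quoteReplacement guards
// against '$' and '\' in the matched text.
import scala.util.matching.Regex

object JavadocTagDemo {
  private val JavadocTags =
    new Regex("""\{\@(code|docRoot|linkplain|link|literal|value)\p{Zs}*([^}]*)\}""")

  private def replacement(m: Regex.Match): String = m.group(1) match {
    case "code"              => "<code>" + m.group(2) + "</code>"
    case "link"              => "`[[" + m.group(2) + "]]`"
    case "linkplain"         => "[[" + m.group(2) + "]]"
    case "literal" | "value" => "`" + m.group(2) + "`"
    case _                   => ""
  }

  def cleanJavadoc(s: String): String =
    JavadocTags.replaceAllIn(s, m => Regex.quoteReplacement(replacement(m)))

  def main(args: Array[String]): Unit =
    println(cleanJavadoc("See {@link scala.Option} and {@code None}."))
  // See `[[scala.Option]]` and <code>None</code>.
}
// ---------------------------------------------------------------------------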
*/ - private def javadocReplacement(mtch: Regex.Match): String = mtch.group(1) match { - case "code" => "`" + mtch.group(2) + "`" - case "docRoot" => "" - case "inheritDoc" => "" - case "link" => "`" + mtch.group(2) + "`" - case "linkplain" => "`" + mtch.group(2) + "`" - case "literal" => mtch.group(2) - case "value" => "`" + mtch.group(2) + "`" - case _ => "" + private def javadocReplacement(mtch: Regex.Match): String = { + mtch.group(1) match { + case "code" => "" + mtch.group(2) + "" + case "docRoot" => "" + case "link" => "`[[" + mtch.group(2) + "]]`" + case "linkplain" => "[[" + mtch.group(2) + "]]" + case "literal" => "`" + mtch.group(2) + "`" + case "value" => "`" + mtch.group(2) + "`" + case _ => "" + } } /** Safe HTML tags that can be kept. */ @@ -344,12 +345,28 @@ trait CommentFactoryBase { this: MemberLookupBase => Map.empty[String, Body] ++ pairs } + def linkedExceptions: Map[String, Body] = { + val m = allSymsOneTag(SimpleTagKey("throws")) + + m.map { case (name,body) => + val link = memberLookup(pos, name, site) + val newBody = body match { + case Body(List(Paragraph(Chain(content)))) => + val descr = Text(" ") +: content + val entityLink = EntityLink(Monospace(Text(name)), link) + Body(List(Paragraph(Chain(entityLink +: descr)))) + case _ => body + } + (name, newBody) + } + } + val com = createComment ( body0 = Some(parseWikiAtSymbol(docBody.toString, pos, site)), authors0 = allTags(SimpleTagKey("author")), see0 = allTags(SimpleTagKey("see")), result0 = oneTag(SimpleTagKey("return")), - throws0 = allSymsOneTag(SimpleTagKey("throws")), + throws0 = linkedExceptions, valueParams0 = allSymsOneTag(SimpleTagKey("param")), typeParams0 = allSymsOneTag(SimpleTagKey("tparam")), version0 = oneTag(SimpleTagKey("version")), @@ -666,7 +683,7 @@ trait CommentFactoryBase { this: MemberLookupBase => } def summary(): Inline = { - val i = inline(check(".")) + val i = inline(checkSentenceEnded()) Summary( if (jump(".")) Chain(List(i, Text("."))) @@ -680,11 +697,10 @@ trait CommentFactoryBase { this: MemberLookupBase => jump("[[") val parens = 2 + repeatJump('[') val stop = "]" * parens - //println("link with " + parens + " matching parens") - val target = readUntil { check(stop) || check(" ") } + val target = readUntil { check(stop) || isWhitespaceOrNewLine(char) } val title = if (!check(stop)) Some({ - jump(" ") + jumpWhitespaceOrNewLine() inline(check(stop)) }) else None @@ -723,49 +739,15 @@ trait CommentFactoryBase { this: MemberLookupBase => */ def normalizeIndentation(_code: String): String = { - val code = _code.trim - var maxSkip = Integer.MAX_VALUE - var crtSkip = 0 - var wsArea = true - var index = 0 - var firstLine = true - var emptyLine = true - - while (index < code.length) { - code(index) match { - case ' ' => - if (wsArea) - crtSkip += 1 - case c => - wsArea = (c == '\n') - maxSkip = if (firstLine || emptyLine) maxSkip else if (maxSkip <= crtSkip) maxSkip else crtSkip - crtSkip = if (c == '\n') 0 else crtSkip - firstLine = if (c == '\n') false else firstLine - emptyLine = if (c == '\n') true else false - } - index += 1 - } + val code = _code.replaceAll("\\s+$", "").dropWhile(_ == '\n') // right-trim + remove all leading '\n' + val lines = code.split("\n") - if (maxSkip == 0) - code - else { - index = 0 - val builder = new StringBuilder - while (index < code.length) { - builder.append(code(index)) - if (code(index) == '\n') { - // we want to skip as many spaces are available, if there are less spaces (like on empty lines, do not - // over-consume them) - index += 1 - val limit = 
index + maxSkip - while ((index < code.length) && (code(index) == ' ') && index < limit) - index += 1 - } - else - index += 1 - } - builder.toString - } + // maxSkip - size of the longest common whitespace prefix of non-empty lines + val nonEmptyLines = lines.filter(_.trim.nonEmpty) + val maxSkip = if (nonEmptyLines.isEmpty) 0 else nonEmptyLines.map(line => line.prefixLength(_ == ' ')).min + + // remove common whitespace prefix + lines.map(line => if (line.trim.nonEmpty) line.substring(maxSkip) else line).mkString("\n") } def checkParaEnded(): Boolean = { @@ -785,6 +767,16 @@ trait CommentFactoryBase { this: MemberLookupBase => }) } + def checkSentenceEnded(): Boolean = { + (char == '.') && { + val poff = offset + nextChar() // read '.' + val ok = char == endOfText || char == endOfLine || isWhitespace(char) + offset = poff + ok + } + } + def reportError(pos: Position, message: String) { reporter.warning(pos, message) } @@ -889,6 +881,8 @@ trait CommentFactoryBase { this: MemberLookupBase => def jumpWhitespace() = jumpUntil(!isWhitespace(char)) + def jumpWhitespaceOrNewLine() = jumpUntil(!isWhitespaceOrNewLine(char)) + /* READERS */ final def readUntil(c: Char): String = { @@ -928,5 +922,7 @@ trait CommentFactoryBase { this: MemberLookupBase => /* CHARS CLASSES */ def isWhitespace(c: Char) = c == ' ' || c == '\t' + + def isWhitespaceOrNewLine(c: Char) = isWhitespace(c) || c == '\n' } } diff --git a/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala index cc217d2f800b..f853df048489 100755 --- a/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala +++ b/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala @@ -62,15 +62,15 @@ trait MemberLookupBase { syms.flatMap { case (sym, owner) => // reconstruct the original link def linkName(sym: Symbol) = { - def nameString(s: Symbol) = s.nameString + (if ((s.isModule || s.isModuleClass) && !s.isPackage) "$" else "") - val packageSuffix = if (sym.isPackage) ".package" else "" + def nameString(s: Symbol) = s.nameString + (if ((s.isModule || s.isModuleClass) && !s.hasPackageFlag) "$" else "") + val packageSuffix = if (sym.hasPackageFlag) ".package" else "" sym.ownerChain.reverse.filterNot(isRoot(_)).map(nameString(_)).mkString(".") + packageSuffix } - if (sym.isClass || sym.isModule || sym.isTrait || sym.isPackage) + if (sym.isClass || sym.isModule || sym.isTrait || sym.hasPackageFlag) findExternalLink(sym, linkName(sym)) - else if (owner.isClass || owner.isModule || owner.isTrait || owner.isPackage) + else if (owner.isClass || owner.isModule || owner.isTrait || owner.hasPackageFlag) findExternalLink(sym, linkName(owner) + "@" + externalSignature(sym)) else None diff --git a/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala index d721a96ad7bd..a0dd154d2ef0 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala @@ -97,7 +97,9 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) { "selected2.png", "selected-right-implicits.png", "selected-implicits.png", - "unselected.png" + "unselected.png", + + "permalink.png" ) /** Generates the Scaladoc site for a model into the site root. 
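
The rewritten normalizeIndentation in CommentFactoryBase above boils down to: right-trim, drop leading blank lines, compute the smallest leading-space count over the non-empty lines, and strip that prefix from every non-empty line. A standalone rendition with a quick check (a sketch of the same idea, not the scaladoc method itself):

object IndentDemo {
  def normalize(raw: String): String = {
    val code     = raw.replaceAll("\\s+$", "").dropWhile(_ == '\n')   // right-trim + drop leading '\n'
    val lines    = code.split("\n")
    val nonEmpty = lines.filter(_.trim.nonEmpty)
    // length of the longest common leading-space prefix of non-empty lines
    val maxSkip  = if (nonEmpty.isEmpty) 0 else nonEmpty.map(_.takeWhile(_ == ' ').length).min
    lines.map(l => if (l.trim.nonEmpty) l.substring(maxSkip) else l).mkString("\n")
  }

  def main(args: Array[String]): Unit = {
    val sample = "\n    def f(x: Int) =\n      x + 1\n\n    f(41)\n"
    println(normalize(sample))
    // prints:
    // def f(x: Int) =
    //   x + 1
    //
    // f(41)
  }
}
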
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala index f6373e9e9753..3738e79ffe19 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala @@ -14,6 +14,7 @@ import base.comment._ import model._ import scala.xml.NodeSeq +import scala.xml.Elem import scala.xml.dtd.{DocType, PublicID} import scala.collection._ import java.io.Writer @@ -219,4 +220,48 @@ abstract class HtmlPage extends Page { thisPage => else if (ety.isObject) "object_big.png" else if (ety.isPackage) "package_big.png" else "class_big.png" // FIXME: an entity *should* fall into one of the above categories, but AnyRef is somehow not + + def permalink(template: Entity, isSelf: Boolean = true): Elem = + + + + + + + def companionAndPackage(tpl: DocTemplateEntity): Elem = + { + tpl.companion match { + case Some(companionTpl) => + val objClassTrait = + if (companionTpl.isObject) s"object ${tpl.name}" + else if (companionTpl.isTrait) s"trait ${companionTpl.name}" + else s"class ${companionTpl.name}" +
    + Related Docs: + {objClassTrait} + | {templateToHtml(tpl.inTemplate, s"package ${tpl.inTemplate.name}")} +
    + case None => +
    Related Doc: + {templateToHtml(tpl.inTemplate, s"package ${tpl.inTemplate.name}")} +
    + } + }
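
The bodies of permalink and companionAndPackage are XML literals in the source. As a rough sketch of the shape such a helper returns, assuming scala-xml on the classpath (the class name, title attribute and image path below are assumptions for illustration, not the committed markup; the .permalink CSS rules and permalink.png resource are added elsewhere in this change):

import scala.xml.Elem

object PermalinkSketch {
  // An icon-sized link wrapping a member URL.
  def permalink(url: String): Elem =
    <span class="permalink">
      <a href={ url } title="Permalink" target="_top">
        <img src="lib/permalink.png"/>
      </a>
    </span>

  def main(args: Array[String]): Unit =
    println(permalink("../index.html#scala.collection.Seq"))
}
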
    + + def memberToUrl(template: Entity, isSelf: Boolean = true): String = { + val (signature: Option[String], containingTemplate: TemplateEntity) = template match { + case dte: DocTemplateEntity if (!isSelf) => (Some(dte.signature), dte.inTemplate) + case dte: DocTemplateEntity => (None, dte) + case me: MemberEntity => (Some(me.signature), me.inTemplate) + case tpl => (None, tpl) + } + + def hashFromPath(templatePath: List[String]): String = + ((templatePath.head.replace(".html", "") :: templatePath.tail).reverse).mkString(".") + + val containingTemplatePath = templateToPath(containingTemplate) + val url = "../" * (containingTemplatePath.size - 1) + "index.html" + val hash = hashFromPath(containingTemplatePath) + s"$url#$hash" + signature.map("@" + _).getOrElse("") + } } diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala index 26ee005d3e29..9994cac3b498 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala @@ -15,7 +15,7 @@ import base.comment._ import model._ import model.diagram._ -import scala.xml.{ NodeSeq, Text, UnprefixedAttribute } +import scala.xml.{Elem, NodeSeq, Text, UnprefixedAttribute} import scala.language.postfixOps import scala.collection.mutable. { Set, HashSet } @@ -110,7 +110,9 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp }} { owner } -

    { displayName }

    +

    { displayName }

    { + if (tpl.isPackage) NodeSeq.Empty else

    {companionAndPackage(tpl)}

    + }{ permalink(tpl) } { signature(tpl, isSelf = true) } @@ -143,7 +145,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp { if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty) NodeSeq.Empty else { if (!tpl.linearizationTemplates.isEmpty) -
    +
    Inherited
      @@ -153,7 +155,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp else NodeSeq.Empty } ++ { if (!tpl.conversions.isEmpty) -
      +
      Implicitly
        { @@ -167,7 +169,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
      else NodeSeq.Empty } ++ -
      +
      1. Hide All
      2. @@ -201,28 +203,28 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp } { if (absValueMembers.isEmpty) NodeSeq.Empty else -
        +

        Abstract Value Members

          { absValueMembers map (memberToHtml(_, tpl)) }
        } { if (concValueMembers.isEmpty) NodeSeq.Empty else -
        +

        { if (absValueMembers.isEmpty) "Value Members" else "Concrete Value Members" }

          { concValueMembers map (memberToHtml(_, tpl)) }
        } { if (shadowedImplicitMembers.isEmpty) NodeSeq.Empty else -
        +

        Shadowed Implicit Value Members

          { shadowedImplicitMembers map (memberToHtml(_, tpl)) }
        } { if (deprValueMembers.isEmpty) NodeSeq.Empty else -
        +

        Deprecated Value Members

          { deprValueMembers map (memberToHtml(_, tpl)) }
        @@ -287,13 +289,19 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp } def memberToHtml(mbr: MemberEntity, inTpl: DocTemplateEntity): NodeSeq = { + // Sometimes it's same, do we need signatureCompat still? + val sig = if (mbr.signature == mbr.signatureCompat) { + + } else { + + } + val memberComment = memberToCommentHtml(mbr, inTpl, isSelf = false)
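
The two branches of sig are element literals in the source. A plausible reading, offered only as an assumption: emit an anchor for the member's current signature, plus a second anchor for the older signatureCompat form when it differs, so previously published deep links keep resolving. Sketched standalone (scala-xml assumed; not the committed markup):

import scala.xml.NodeSeq

object SignatureAnchors {
  // Assumed shape only: one anchor per signature variant.
  def anchors(signature: String, signatureCompat: String): NodeSeq =
    if (signature == signatureCompat) <a id={ signature }/>
    else <a id={ signature }/> ++ <a id={ signatureCompat }/>

  def main(args: Array[String]): Unit =
    println(anchors("size:Int", "size: Int"))
}
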
      3. - - + { sig } { signature(mbr, isSelf = false) } { memberComment }
      4. @@ -306,9 +314,6 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
        { memberToCommentBodyHtml(mbr, inTpl, isSelf = true) }
        - case dte: DocTemplateEntity if mbr.comment.isDefined => - // comment of inner, documented class (only short comment, full comment is on the class' own page) - memberToInlineCommentHtml(mbr, isSelf) case _ => // comment of non-class member or non-documented inner class val commentBody = memberToCommentBodyHtml(mbr, inTpl, isSelf = false) @@ -608,7 +613,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
        { val exceptionsXml: List[NodeSeq] = for((name, body) <- comment.throws.toList.sortBy(_._1) ) yield - {Text(name) ++ bodyToHtml(body)} + {bodyToHtml(body)} exceptionsXml.reduceLeft(_ ++ Text("") ++ _) }
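
Taken together with the linkedExceptions change in CommentFactoryBase above, each @throws entry is now rendered once, sorted by exception name, with the name turned into a link and the description following it. A simplified, string-based sketch of that assembly (the real code builds EntityLink/Body nodes rather than strings):

object ThrowsDemo {
  // name -> description, as they would come out of the @throws tags
  val throwsTags = Map(
    "java.io.IOException"      -> "if the underlying stream fails",
    "IllegalArgumentException" -> "if the argument is negative"
  )

  // Prepend the (linked) name to its description and sort by name.
  def render(tags: Map[String, String]): String =
    tags.toList.sortBy(_._1)
      .map { case (name, descr) => s"[[$name]] $descr" }
      .mkString("\n")

  def main(args: Array[String]): Unit = println(render(throwsTags))
}
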
        } @@ -723,6 +728,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp /** name, tparams, params, result */ def signature(mbr: MemberEntity, isSelf: Boolean, isReduced: Boolean = false): NodeSeq = { + def inside(hasLinks: Boolean, nameLink: String = ""): NodeSeq = @@ -833,11 +839,11 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp mbr match { case dte: DocTemplateEntity if !isSelf => -

        { inside(hasLinks = true, nameLink = relativeLinkTo(dte)) }

        +

        { inside(hasLinks = true, nameLink = relativeLinkTo(dte)) }

        ++ permalink(dte, isSelf) case _ if isSelf =>

        { inside(hasLinks = true) }

        case _ => -

        { inside(hasLinks = true) }

        +

        { inside(hasLinks = true) }

        ++ permalink(mbr) } } diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala index 4ff436bdc677..dc823ab1e593 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala @@ -364,7 +364,7 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator { // add an id and class attribute to the SVG element case Elem(prefix, "svg", attribs, scope, child @ _*) => { val klass = if (isInheritanceDiagram) "class-diagram" else "package-diagram" - Elem(prefix, "svg", attribs, scope, child map(x => transform(x)) : _*) % + Elem(prefix, "svg", attribs, scope, true, child map(x => transform(x)) : _*) % new UnprefixedAttribute("id", "graph" + counter, Null) % new UnprefixedAttribute("class", klass, Null) } @@ -378,7 +378,7 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator { // assign id and class attributes to edges and nodes: // the id attribute generated by dot has the format: "{class}|{id}" case g @ Elem(prefix, "g", attribs, scope, children @ _*) if (List("edge", "node").contains((g \ "@class").toString)) => { - var res = new Elem(prefix, "g", attribs, scope, (children map(x => transform(x))): _*) + var res = new Elem(prefix, "g", attribs, scope, true, (children map(x => transform(x))): _*) val dotId = (g \ "@id").toString if (dotId.count(_ == '|') == 1) { val Array(klass, id) = dotId.toString.split("\\|") @@ -395,11 +395,11 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator { val imageNode = val anchorNode = (g \ "a") match { case Seq(Elem(prefix, "a", attribs, scope, children @ _*)) => - transform(new Elem(prefix, "a", attribs, scope, (children ++ imageNode): _*)) + transform(new Elem(prefix, "a", attribs, scope, true, (children ++ imageNode): _*)) case _ => g \ "a" } - res = new Elem(prefix, "g", attribs, scope, anchorNode: _*) + res = new Elem(prefix, "g", attribs, scope, true, anchorNode: _*) DiagramStats.addFixedImage() } } @@ -413,7 +413,7 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator { scala.xml.Text("") // apply recursively case Elem(prefix, label, attribs, scope, child @ _*) => - Elem(prefix, label, attribs, scope, child map(x => transform(x)) : _*) + Elem(prefix, label, attribs, scope, true, child map(x => transform(x)) : _*) case x => x } diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js index 478f2e38ac40..680ead7a593d 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js @@ -25,7 +25,7 @@ $(document).ready(function() $(".diagram-container").css("display", "block"); $(".diagram").each(function() { - // store inital dimensions + // store initial dimensions $(this).data("width", $("svg", $(this)).width()); $(this).data("height", $("svg", $(this)).height()); // store unscaled clone of SVG element diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css index 55fb370a414c..3e352a95b3a9 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css @@ -310,6 +310,7 @@ h1 { position: fixed; margin-left: 300px; display: block; + 
-webkit-overflow-scrolling: touch; } #content > iframe { diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js index c201b324e745..3f5cfb4b52e3 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js @@ -1,5 +1,5 @@ // © 2009–2010 EPFL/LAMP -// code by Gilles Dubochet with contributions by Johannes Rudolph and "spiros" +// code by Gilles Dubochet with contributions by Johannes Rudolph, "spiros" and Marcin Kubala var topLevelTemplates = undefined; var topLevelPackages = undefined; @@ -11,7 +11,7 @@ var focusFilterState = undefined; var title = $(document).attr('title'); -var lastHash = ""; +var lastFragment = ""; $(document).ready(function() { $('body').layout({ @@ -24,9 +24,13 @@ $(document).ready(function() { ,north__paneSelector: ".ui-west-north" }); $('iframe').bind("load", function(){ - var subtitle = $(this).contents().find('title').text(); - $(document).attr('title', (title ? title + " - " : "") + subtitle); - + try { + var subtitle = $(this).contents().find('title').text(); + $(document).attr('title', (title ? title + " - " : "") + subtitle); + } catch (e) { + // Chrome doesn't allow reading the iframe's contents when + // used on the local file system. + } setUrlFragmentFromFrameSrc(); }); @@ -64,21 +68,43 @@ $(document).ready(function() { // Set the iframe's src according to the fragment of the current url. // fragment = "#scala.Either" => iframe url = "scala/Either.html" // fragment = "#scala.Either@isRight:Boolean" => iframe url = "scala/Either.html#isRight:Boolean" +// fragment = "#scalaz.iteratee.package@>@>[E,A]=scalaz.iteratee.package.Iteratee[E,A]" => iframe url = "scalaz/iteratee/package.html#>@>[E,A]=scalaz.iteratee.package.Iteratee[E,A]" function setFrameSrcFromUrlFragment() { - var fragment = location.hash.slice(1); - if(fragment) { - var loc = fragment.split("@")[0].replace(/\./g, "/"); - if(loc.indexOf(".html") < 0) loc += ".html"; - if(fragment.indexOf('@') > 0) loc += ("#" + fragment.split("@", 2)[1]); - frames["template"].location.replace(loc); - } - else - frames["template"].location.replace("package.html"); + + function extractLoc(fragment) { + var loc = fragment.split('@')[0].replace(/\./g, "/"); + if (loc.indexOf(".html") < 0) { + loc += ".html"; + } + return loc; + } + + function extractMemberSig(fragment) { + var splitIdx = fragment.indexOf('@'); + if (splitIdx < 0) { + return; + } + return fragment.substr(splitIdx + 1); + } + + var fragment = location.hash.slice(1); + if (fragment) { + var locWithMemeberSig = extractLoc(fragment); + var memberSig = extractMemberSig(fragment); + if (memberSig) { + locWithMemeberSig += "#" + memberSig; + } + frames["template"].location.replace(locWithMemeberSig); + } else { + console.log("empty fragment detected"); + frames["template"].location.replace("package.html"); + } } // Set the url fragment according to the src of the iframe "template". 
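
The fragment scheme handled by setFrameSrcFromUrlFragment splits on the first '@': the part before it is the page (dots become slashes, ".html" appended) and the part after it, if present, is the member signature carried over as the iframe's own fragment. The same mapping, sketched in Scala for clarity (the shipped code is the JavaScript above):

object FragmentDemo {
  // "scala.Either@isRight:Boolean" -> "scala/Either.html#isRight:Boolean"
  def frameSrc(fragment: String): String = {
    val (qualName, memberSig) = fragment.indexOf('@') match {
      case -1 => (fragment, None)
      case i  => (fragment.take(i), Some(fragment.drop(i + 1)))
    }
    val page = qualName.replace(".", "/") + ".html"
    page + memberSig.map("#" + _).getOrElse("")
  }

  def main(args: Array[String]): Unit = {
    println(frameSrc("scala.Either"))                 // scala/Either.html
    println(frameSrc("scala.Either@isRight:Boolean")) // scala/Either.html#isRight:Boolean
  }
}
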
// iframe url = "scala/Either.html" => url fragment = "#scala.Either" // iframe url = "scala/Either.html#isRight:Boolean" => url fragment = "#scala.Either@isRight:Boolean" +// iframe url = "scalaz/iteratee/package.html#>@>[E,A]=scalaz.iteratee.package.Iteratee[E,A]" => fragment = "#scalaz.iteratee.package@>@>[E,A]=scalaz.iteratee.package.Iteratee[E,A]" function setUrlFragmentFromFrameSrc() { try { var commonLength = location.pathname.lastIndexOf("/"); diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/permalink.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/permalink.png new file mode 100644 index 000000000000..d54bc93f6a70 Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/permalink.png differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css index b066027f046e..6eee28026709 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css @@ -217,7 +217,7 @@ dl.attributes > dd { height: 18px; } -#values ol li:last-child { +.values ol li:last-child { margin-bottom: 5px; } @@ -397,6 +397,49 @@ div.members > ol > li:last-child { margin-bottom: 5px; } +#definition .morelinks { + text-align: right; + position: absolute; + top: 40px; + right: 10px; + width: 450px; +} + +#definition .morelinks a { + color: #EBEBEB; +} + +#template .members li .permalink { + position: absolute; + top: 5px; + right: 5px; +} + +#definition .permalink { + position: absolute; + top: 10px; + right: 15px; +} + +#definition .permalink a { + color: #EBEBEB; +} + +#template .members li .permalink, +#definition .permalink a { + display: none; +} + +#template .members li:hover .permalink, +#definition:hover .permalink a { + display: block; +} + +#template .members li .permalink a, +#definition .permalink a { + text-decoration: none; + font-weight: bold; +} /* Comments text formating */ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js index 6d1caf6d5009..5ef03848b2c7 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js @@ -1,23 +1,57 @@ // © 2009–2010 EPFL/LAMP -// code by Gilles Dubochet with contributions by Pedro Furlanetto +// code by Gilles Dubochet with contributions by Pedro Furlanetto and Marcin Kubala $(document).ready(function(){ + var controls = { + visibility: { + publicOnly: $("#visbl").find("> ol > li.public"), + all: $("#visbl").find("> ol > li.all") + } + }; + // Escapes special characters and returns a valid jQuery selector function escapeJquery(str){ - return str.replace(/([;&,\.\+\*\~':"\!\^#$%@\[\]\(\)=>\|])/g, '\\$1'); + return str.replace(/([;&,\.\+\*\~':"\!\^#$%@\[\]\(\)=<>\|])/g, '\\$1'); } - // highlight and jump to selected member - if (window.location.hash) { - var temp = window.location.hash.replace('#', ''); - var elem = '#'+escapeJquery(temp); + function toggleVisibilityFilter(ctrlToEnable, ctrToDisable) { + if (ctrlToEnable.hasClass("out")) { + ctrlToEnable.removeClass("out").addClass("in"); + ctrToDisable.removeClass("in").addClass("out"); + filter(); + } + } + + controls.visibility.publicOnly.click(function () { + toggleVisibilityFilter(controls.visibility.publicOnly, controls.visibility.all); + }); - window.scrollTo(0, 0); - $(elem).parent().effect("highlight", {color: "#FFCC85"}, 3000); 
- $('html,body').animate({scrollTop:$(elem).parent().offset().top}, 1000); + controls.visibility.all.click(function () { + toggleVisibilityFilter(controls.visibility.all, controls.visibility.publicOnly); + }); + + function exposeMember(jqElem) { + var jqElemParent = jqElem.parent(), + parentName = jqElemParent.attr("name"), + linearizationName = /^([^#]*)(#.*)?$/gi.exec(parentName)[1]; + + // switch visibility filter if necessary + if (jqElemParent.attr("visbl") == "prt") { + toggleVisibilityFilter(controls.visibility.all, controls.visibility.publicOnly); + } + + // toggle appropriate linearization buttons + if (linearizationName) { + $("#linearization li.out[name='" + linearizationName + "']").removeClass("out").addClass("in"); + } + + filter(); + window.scrollTo(0, 0); + jqElemParent.effect("highlight", {color: "#FFCC85"}, 3000); + $('html,body').animate({scrollTop: jqElemParent.offset().top}, 1000); } - + var isHiddenClass = function (name) { return name == 'scala.Any' || name == 'scala.AnyRef'; @@ -97,7 +131,7 @@ $(document).ready(function(){ else if ($(this).hasClass("out")) { $(this).removeClass("out"); $(this).addClass("in"); - }; + } filter(); }); @@ -109,23 +143,23 @@ $(document).ready(function(){ else if ($(this).hasClass("out")) { $(this).removeClass("out"); $(this).addClass("in"); - }; + } filter(); }); - $("#mbrsel > div[id=ancestors] > ol > li.hideall").click(function() { + $("#mbrsel > div.ancestors > ol > li.hideall").click(function() { $("#linearization li.in").removeClass("in").addClass("out"); $("#linearization li:first").removeClass("out").addClass("in"); $("#implicits li.in").removeClass("in").addClass("out"); - if ($(this).hasClass("out") && $("#mbrsel > div[id=ancestors] > ol > li.showall").hasClass("in")) { + if ($(this).hasClass("out") && $("#mbrsel > div.ancestors > ol > li.showall").hasClass("in")) { $(this).removeClass("out").addClass("in"); - $("#mbrsel > div[id=ancestors] > ol > li.showall").removeClass("in").addClass("out"); + $("#mbrsel > div.ancestors > ol > li.showall").removeClass("in").addClass("out"); } filter(); }) - $("#mbrsel > div[id=ancestors] > ol > li.showall").click(function() { + $("#mbrsel > div.ancestors > ol > li.showall").click(function() { var filteredLinearization = $("#linearization li.out").filter(function() { return ! 
isHiddenClass($(this).attr("name")); @@ -138,41 +172,27 @@ $(document).ready(function(){ }); filteredImplicits.removeClass("out").addClass("in"); - if ($(this).hasClass("out") && $("#mbrsel > div[id=ancestors] > ol > li.hideall").hasClass("in")) { + if ($(this).hasClass("out") && $("#mbrsel > div.ancestors > ol > li.hideall").hasClass("in")) { $(this).removeClass("out").addClass("in"); - $("#mbrsel > div[id=ancestors] > ol > li.hideall").removeClass("in").addClass("out"); + $("#mbrsel > div.ancestors > ol > li.hideall").removeClass("in").addClass("out"); } filter(); }); $("#visbl > ol > li.public").click(function() { - if ($(this).hasClass("out")) { - $(this).removeClass("out").addClass("in"); - $("#visbl > ol > li.all").removeClass("in").addClass("out"); - filter(); - }; - }) - $("#visbl > ol > li.all").click(function() { - if ($(this).hasClass("out")) { - $(this).removeClass("out").addClass("in"); - $("#visbl > ol > li.public").removeClass("in").addClass("out"); - filter(); - }; - }); - $("#order > ol > li.alpha").click(function() { if ($(this).hasClass("out")) { orderAlpha(); - }; + } }) $("#order > ol > li.inherit").click(function() { if ($(this).hasClass("out")) { orderInherit(); - }; + } }); $("#order > ol > li.group").click(function() { if ($(this).hasClass("out")) { orderGroup(); - }; + } }); $("#groupedMembers").hide(); @@ -181,7 +201,7 @@ $(document).ready(function(){ // Create tooltips $(".extype").add(".defval").tooltip({ tip: "#tooltip", - position:"top center", + position: "top center", predelay: 500, onBeforeShow: function(ev) { $(this.getTip()).text(this.getTrigger().attr("name")); @@ -233,6 +253,20 @@ $(document).ready(function(){ windowTitle(); if ($("#order > ol > li.group").length == 1) { orderGroup(); }; + + function findElementByHash(locationHash) { + var temp = locationHash.replace('#', ''); + var memberSelector = '#' + escapeJquery(temp); + return $(memberSelector); + } + + // highlight and jump to selected member + if (window.location.hash) { + var jqElem = findElementByHash(window.location.hash); + if (jqElem.length > 0) { + exposeMember(jqElem); + } + } }); function orderAlpha() { @@ -241,7 +275,7 @@ function orderAlpha() { $("#order > ol > li.group").removeClass("in").addClass("out"); $("#template > div.parent").hide(); $("#template > div.conversion").hide(); - $("#mbrsel > div[id=ancestors]").show(); + $("#mbrsel > div.ancestors").show(); filter(); }; @@ -251,7 +285,7 @@ function orderInherit() { $("#order > ol > li.group").removeClass("in").addClass("out"); $("#template > div.parent").show(); $("#template > div.conversion").show(); - $("#mbrsel > div[id=ancestors]").hide(); + $("#mbrsel > div.ancestors").hide(); filter(); }; @@ -261,7 +295,7 @@ function orderGroup() { $("#order > ol > li.inherit").removeClass("in").addClass("out"); $("#template > div.parent").hide(); $("#template > div.conversion").hide(); - $("#mbrsel > div[id=ancestors]").show(); + $("#mbrsel > div.ancestors").show(); filter(); }; @@ -316,7 +350,7 @@ function initInherit() { } }); - $("#values > ol > li").each(function(){ + $(".values > ol > li").each(function(){ var mbr = $(this); this.mbrText = mbr.find("> .fullcomment .cmt").text(); var qualName = mbr.attr("name"); diff --git a/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala index 6932f01e9a80..7fe8903c7614 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala @@ -117,7 +117,7 @@ trait MemberEntity extends Entity 
{ def toRoot: List[MemberEntity] /** The templates in which this member has been declared. The first element of the list is the template that contains - * the currently active declaration of this member, subsequent elements are declarations that have been overriden. If + * the currently active declaration of this member, subsequent elements are declarations that have been overridden. If * the first element is equal to `inTemplate`, the member is declared locally, if not, it has been inherited. All * elements of this list are in the linearization of `inTemplate`. */ def inDefinitionTemplates: List[TemplateEntity] diff --git a/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala index 339129bdbcfc..20aaab29fca0 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala @@ -40,12 +40,12 @@ trait MemberLookup extends base.MemberLookupBase { override def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal] = { val sym1 = if (sym == AnyClass || sym == AnyRefClass || sym == AnyValClass || sym == NothingClass) ListClass - else if (sym.isPackage) + else if (sym.hasPackageFlag) /* Get package object which has associatedFile ne null */ sym.info.member(newTermName("package")) else sym Option(sym1.associatedFile) flatMap (_.underlyingSource) flatMap { src => - val path = src.path + val path = src.canonicalPath settings.extUrlMapping get path map { url => LinkToExternal(name, url + "#" + name) } diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala index ef84ac42bafd..cc2c0f890d88 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala @@ -89,10 +89,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { trait TemplateImpl extends EntityImpl with TemplateEntity { override def qualifiedName: String = if (inTemplate == null || inTemplate.isRootPackage) name else optimize(inTemplate.qualifiedName + "." 
+ name) - def isPackage = sym.isPackage + def isPackage = sym.hasPackageFlag def isTrait = sym.isTrait def isClass = sym.isClass && !sym.isTrait - def isObject = sym.isModule && !sym.isPackage + def isObject = sym.isModule && !sym.hasPackageFlag def isCaseClass = sym.isCaseClass def isRootPackage = false def selfType = if (sym.thisSym eq sym) None else Some(makeType(sym.thisSym.typeOfThis, this)) @@ -250,7 +250,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { def valueParams: List[List[ValueParam]] = Nil /** TODO, these are now only computed for DocTemplates */ def parentTypes = - if (sym.isPackage || sym == AnyClass) List() else { + if (sym.hasPackageFlag || sym == AnyClass) List() else { val tps = (this match { case a: AliasType => sym.tpe.dealias.parents case a: AbstractType => sym.info.bounds match { @@ -313,7 +313,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { /* Subclass cache */ private lazy val subClassesCache = ( - if (sym == AnyRefClass) null + if (sym == AnyRefClass || sym == AnyClass) null else mutable.ListBuffer[DocTemplateEntity]() ) def registerSubClass(sc: DocTemplateEntity): Unit = { @@ -661,7 +661,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { s != EmptyPackage && s != RootPackage } }) - else if (bSym.isPackage) // (2) + else if (bSym.hasPackageFlag) // (2) if (settings.skipPackage(makeQualifiedName(bSym))) None else @@ -753,8 +753,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { }) } else if (bSym.isConstructor) - if (conversion.isDefined) - None // don't list constructors inherted by implicit conversion + if (conversion.isDefined || (bSym.enclClass.isAbstract && (bSym.enclClass.isSealed || bSym.enclClass.isFinal))) + // don't list constructors inherited by implicit conversion + // and don't list constructors of abstract sealed types (they cannot be accessed anyway) + None else Some(new NonTemplateParamMemberImpl(bSym, conversion, useCaseOf, inTpl) with Constructor { override def isConstructor = true @@ -772,7 +774,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { Some(new MemberTemplateImpl(bSym, inTpl) with AliasImpl with AliasType { override def isAliasType = true }) - else if (!modelFinished && (bSym.isPackage || templateShouldDocument(bSym, inTpl))) + else if (!modelFinished && (bSym.hasPackageFlag || templateShouldDocument(bSym, inTpl))) modelCreation.createTemplate(bSym, inTpl) else None diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala index 2b7e2506d43b..ea72fa609598 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala @@ -94,7 +94,7 @@ trait ModelFactoryTypeSupport { LinkToMember(bMbr, oTpl) case _ => val name = makeQualifiedName(bSym) - if (!bSym.owner.isPackage) + if (!bSym.owner.hasPackageFlag) Tooltip(name) else findExternalLink(bSym, name).getOrElse ( diff --git a/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala index 86a7a67160be..c1228e8735ce 100755 --- a/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala @@ -49,7 +49,7 @@ trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory => case _ => } else if (asym.isTerm && asym.owner.isClass){ - if (asym.isSetter) asym = 
asym.getter(asym.owner) + if (asym.isSetter) asym = asym.getterIn(asym.owner) makeTemplate(asym.owner) match { case docTmpl: DocTemplateImpl => val mbrs: Option[MemberImpl] = findMember(asym, docTmpl) diff --git a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala index 44d8886e4ed7..b300752a348a 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala @@ -177,7 +177,7 @@ trait DiagramDirectiveParser { def warning(message: String) = { // we need the position from the package object (well, ideally its comment, but yeah ...) - val sym = if (template.sym.isPackage) template.sym.info.member(global.nme.PACKAGE) else template.sym + val sym = if (template.sym.hasPackageFlag) template.sym.packageObject else template.sym assert((sym != global.NoSymbol) || (sym == global.rootMirror.RootPackage)) global.reporter.warning(sym.pos, message) } diff --git a/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala b/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala index 70423cc7dc05..fa3e8ff5cbe0 100644 --- a/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala +++ b/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala @@ -182,14 +182,16 @@ abstract class ScaladocModelTest extends DirectTest { } } - def countLinks(c: Comment, p: EntityLink => Boolean) = { - def countLinks(body: Any): Int = body match { + def countLinks(c: Comment, p: EntityLink => Boolean): Int = countLinksInBody(c.body, p) + + def countLinksInBody(body: Body, p: EntityLink => Boolean): Int = { + def countLinks(b: Any): Int = b match { case el: EntityLink if p(el) => 1 case s: Seq[_] => s.toList.map(countLinks(_)).sum case p: Product => p.productIterator.toList.map(countLinks(_)).sum case _ => 0 } - countLinks(c.body) + countLinks(body) } def testDiagram(doc: DocTemplateEntity, diag: Option[Diagram], nodes: Int, edges: Int) = { diff --git a/src/scalap/scala/tools/scalap/Arguments.scala b/src/scalap/scala/tools/scalap/Arguments.scala index c375a5bac41f..de9c30b8af07 100644 --- a/src/scalap/scala/tools/scalap/Arguments.scala +++ b/src/scalap/scala/tools/scalap/Arguments.scala @@ -9,7 +9,7 @@ package scala.tools.scalap import scala.collection.mutable -import mutable.{ Buffer, ListBuffer } +import mutable.ListBuffer object Arguments { case class Parser(optionPrefix: Char) { @@ -47,7 +47,7 @@ object Arguments { } def parseBinding(str: String, separator: Char): (String, String) = (str indexOf separator) match { - case -1 => argumentError("missing '" + separator + "' in binding '" + str + "'") ; ("", "") + case -1 => argumentError(s"missing '$separator' in binding '$str'") ; ("", "") case idx => ((str take idx).trim, (str drop (idx + 1)).trim) } @@ -71,7 +71,7 @@ object Arguments { i += 1 } else if (optionalArgs contains args(i)) { if ((i + 1) == args.length) { - argumentError("missing argument for '" + args(i) + "'") + argumentError(s"missing argument for '${args(i)}'") i += 1 } else { res.addArgument(args(i), args(i + 1)) @@ -79,11 +79,11 @@ object Arguments { } } else if (optionalBindings contains args(i)) { if ((i + 1) == args.length) { - argumentError("missing argument for '" + args(i) + "'") + argumentError(s"missing argument for '${args(i)}'") i += 1 } else { res.addBinding(args(i), - parseBinding(args(i + 1), optionalBindings(args(i)))); + parseBinding(args(i + 1), optionalBindings(args(i)))) i += 2 } } else { 
@@ -92,23 +92,23 @@ object Arguments { while ((i == j) && iter.hasNext) { val prefix = iter.next if (args(i) startsWith prefix) { - res.addPrefixed(prefix, args(i).substring(prefix.length()).trim()); + res.addPrefixed(prefix, args(i).substring(prefix.length()).trim()) i += 1 } } if (i == j) { - val iter = prefixedBindings.keysIterator; + val iter = prefixedBindings.keysIterator while ((i == j) && iter.hasNext) { val prefix = iter.next if (args(i) startsWith prefix) { val arg = args(i).substring(prefix.length()).trim() i = i + 1 res.addBinding(prefix, - parseBinding(arg, prefixedBindings(prefix))); + parseBinding(arg, prefixedBindings(prefix))) } } if (i == j) { - argumentError("unknown option '" + args(i) + "'") + argumentError(s"unknown option '${args(i)}'") i = i + 1 } } @@ -119,7 +119,7 @@ object Arguments { def parse(options: String*)(args: Array[String]): Arguments = { val parser = new Parser('-') - options foreach (parser withOption _) + options foreach parser.withOption parser parse args } } @@ -142,7 +142,7 @@ class Arguments { if (key.length > 0) bindings.getOrElseUpdate(tag, new mutable.HashMap)(key) = value - def addBinding(tag: String, binding: Tuple2[String, String]): Unit = + def addBinding(tag: String, binding: (String, String)): Unit = addBinding(tag, binding._1, binding._2) def addOther(arg: String): Unit = others += arg diff --git a/src/scalap/scala/tools/scalap/Main.scala b/src/scalap/scala/tools/scalap/Main.scala index c72f416a896c..7c554d196cd5 100644 --- a/src/scalap/scala/tools/scalap/Main.scala +++ b/src/scalap/scala/tools/scalap/Main.scala @@ -10,11 +10,16 @@ package tools.scalap import java.io.{ PrintStream, OutputStreamWriter, ByteArrayOutputStream } import scala.reflect.NameTransformer -import scalax.rules.scalasig._ -import scala.tools.nsc.util.{ ClassPath, JavaClassPath } -import scala.tools.util.PathResolver -import ClassPath.DefaultJavaContext +import scala.tools.nsc.Settings +import scala.tools.nsc.classpath.AggregateFlatClassPath +import scala.tools.nsc.classpath.FlatClassPathFactory import scala.tools.nsc.io.AbstractFile +import scala.tools.nsc.settings.ClassPathRepresentationType +import scala.tools.nsc.util.ClassFileLookup +import scala.tools.nsc.util.ClassPath.DefaultJavaContext +import scala.tools.nsc.util.JavaClassPath +import scala.tools.util.PathResolverFactory +import scalax.rules.scalasig._ /**The main object used to execute scalap on the command-line. * @@ -42,12 +47,12 @@ class Main { * * @param clazz the class file to be processed. */ - def processJavaClassFile(clazz: Classfile) { + def processJavaClassFile(clazz: Classfile): Unit = { // construct a new output stream writer val out = new OutputStreamWriter(Console.out) val writer = new JavaWriter(clazz, out) // print the class - writer.printClass + writer.printClass() out.flush() } @@ -60,21 +65,20 @@ class Main { syms.head.parent match { // Partial match - case Some(p) if (p.name != "") => { + case Some(p) if p.name != "" => val path = p.path if (!isPackageObject) { - stream.print("package "); - stream.print(path); + stream.print("package ") + stream.print(path) stream.print("\n") } else { val i = path.lastIndexOf(".") if (i > 0) { - stream.print("package "); + stream.print("package ") stream.print(path.substring(0, i)) stream.print("\n") } } - } case _ => } // Print classes @@ -96,7 +100,7 @@ class Main { /** Executes scalap with the given arguments and classpath for the * class denoted by `classname`. 
*/ - def process(args: Arguments, path: ClassPath[AbstractFile])(classname: String): Unit = { + def process(args: Arguments, path: ClassFileLookup[AbstractFile])(classname: String): Unit = { // find the classfile val encName = classname match { case "scala.AnyRef" => "java.lang.Object" @@ -106,92 +110,115 @@ class Main { // we can afford allocations because this is not a performance critical code classname.split('.').map(NameTransformer.encode).mkString(".") } - val cls = path.findClass(encName) - if (cls.isDefined && cls.get.binary.isDefined) { - val cfile = cls.get.binary.get - if (verbose) { - Console.println(Console.BOLD + "FILENAME" + Console.RESET + " = " + cfile.path) - } - val bytes = cfile.toByteArray - if (isScalaFile(bytes)) { - Console.println(decompileScala(bytes, isPackageObjectFile(encName))) - } else { - // construct a reader for the classfile content - val reader = new ByteArrayReader(cfile.toByteArray) - // parse the classfile - val clazz = new Classfile(reader) - processJavaClassFile(clazz) - } - // if the class corresponds to the artificial class scala.Any. - // (see member list in class scala.tool.nsc.symtab.Definitions) - } - else - Console.println("class/object " + classname + " not found.") - } - object EmptyClasspath extends ClassPath[AbstractFile] { - /** - * The short name of the package (without prefix) - */ - def name = "" - def asURLs = Nil - def asClasspathString = "" - - val context = DefaultJavaContext - val classes = IndexedSeq() - val packages = IndexedSeq() - val sourcepaths = IndexedSeq() + path.findClassFile(encName) match { + case Some(classFile) => + if (verbose) { + Console.println(Console.BOLD + "FILENAME" + Console.RESET + " = " + classFile.path) + } + val bytes = classFile.toByteArray + if (isScalaFile(bytes)) { + Console.println(decompileScala(bytes, isPackageObjectFile(encName))) + } else { + // construct a reader for the classfile content + val reader = new ByteArrayReader(classFile.toByteArray) + // parse the classfile + val clazz = new Classfile(reader) + processJavaClassFile(clazz) + } + // if the class corresponds to the artificial class scala.Any. + // (see member list in class scala.tool.nsc.symtab.Definitions) + case _ => + Console.println(s"class/object $classname not found.") + } } } object Main extends Main { + + private object opts { + val cp = "-cp" + val help = "-help" + val classpath = "-classpath" + val showPrivateDefs = "-private" + val verbose = "-verbose" + val version = "-version" + + val classPathImplType = "-YclasspathImpl" + val disableFlatClassPathCaching = "-YdisableFlatCpCaching" + val logClassPath = "-Ylog-classpath" + } + /** Prints usage information for scalap. */ - def usage() { - Console println """ + def usage(): Unit = { + Console println s""" |Usage: scalap {
        + println("hello, world.")""", + q""" + + + println("hello, world.")""" + ) + trees foreach println +} diff --git a/test/files/run/t9030.scala b/test/files/run/t9030.scala new file mode 100644 index 000000000000..48d24e5b547e --- /dev/null +++ b/test/files/run/t9030.scala @@ -0,0 +1,19 @@ +object Test extends App { + + // For these methods, the compiler emits calls to BoxesRuntime.equalsNumNum/equalsNumChar/equalsNumObject directly + + def numNum(a: java.lang.Number, b: java.lang.Number) = assert(a == b) + def numChar(a: java.lang.Number, b: java.lang.Character) = assert(a == b) + def numObject(a: java.lang.Number, b: java.lang.Object) = assert(a == b) + + // The compiler doesn't use equalsCharObject directly, but still adding an example for completeness + + def charObject(a: java.lang.Character, b: java.lang.Object) = assert(a == b) + + numNum(new Integer(1), new Integer(1)) + numChar(new Integer(97), new Character('a')) + numObject(new Integer(1), new Integer(1)) + numObject(new Integer(97), new Character('a')) + + charObject(new Character('a'), new Integer(97)) +} diff --git a/test/files/run/tailcalls.check b/test/files/run/tailcalls.check index 7607921856ff..92d4f8a3c863 100644 --- a/test/files/run/tailcalls.check +++ b/test/files/run/tailcalls.check @@ -50,6 +50,10 @@ test NonTailCall.f2 test TailCall.b1 was successful test TailCall.b2 was successful test FancyTailCalls.tcTryLocal was successful +test FancyTailCalls.tcInBooleanExprFirstOp was successful +test FancyTailCalls.tcInBooleanExprSecondOp was successful +test FancyTailCalls.tcInIfCond was successful +test FancyTailCalls.tcInPatternGuard was successful test FancyTailCalls.differentInstance was successful test PolyObject.tramp was successful #partest avian @@ -104,5 +108,9 @@ test NonTailCall.f2 test TailCall.b1 was successful test TailCall.b2 was successful test FancyTailCalls.tcTryLocal was successful +test FancyTailCalls.tcInBooleanExprFirstOp was successful +test FancyTailCalls.tcInBooleanExprSecondOp was successful +test FancyTailCalls.tcInIfCond was successful +test FancyTailCalls.tcInPatternGuard was successful test FancyTailCalls.differentInstance was successful test PolyObject.tramp was successful diff --git a/test/files/run/tailcalls.scala b/test/files/run/tailcalls.scala index 1653b14de9c6..8df2dcfcb635 100644 --- a/test/files/run/tailcalls.scala +++ b/test/files/run/tailcalls.scala @@ -213,6 +213,33 @@ class FancyTailCalls { } finally {} } + def tcInBooleanExprFirstOp(x: Int, v: Int): Boolean = { + { + def loop(n: Int): Int = if (n == 0) v else loop(n - 1) + loop(x) + } == v && true + } + def tcInBooleanExprSecondOp(x: Int, v: Int): Boolean = { + true && { + def loop(n: Int): Int = if (n == 0) v else loop(n - 1) + loop(x) + } == v + } + def tcInIfCond(x: Int, v: Int): Boolean = { + if ({ + def loop(n: Int): Int = if (n == 0) v else loop(n - 1) + loop(x) + } == v) true else false + } + def tcInPatternGuard(x: Int, v: Int): Boolean = + v match { + case _ if + { + def loop(n: Int): Int = if (n == 0) v else loop(n - 1) + loop(x) == v + } => true + } + import FancyTailCalls._ final def differentInstance(n: Int, v: Int): Int = { if (n == 0) v @@ -376,8 +403,12 @@ object Test { check_success_b("TailCall.b2", TailCall.b2(max), true) val FancyTailCalls = new FancyTailCalls; - check_success("FancyTailCalls.tcTryLocal", FancyTailCalls.tcTryLocal(max, max), max) - check_success("FancyTailCalls.differentInstance", FancyTailCalls.differentInstance(max, 42), 42) + check_success("FancyTailCalls.tcTryLocal", 
FancyTailCalls.tcTryLocal(max, max), max) + check_success_b("FancyTailCalls.tcInBooleanExprFirstOp", FancyTailCalls.tcInBooleanExprFirstOp(max, max), true) + check_success_b("FancyTailCalls.tcInBooleanExprSecondOp", FancyTailCalls.tcInBooleanExprSecondOp(max, max), true) + check_success_b("FancyTailCalls.tcInIfCond", FancyTailCalls.tcInIfCond(max, max), true) + check_success_b("FancyTailCalls.tcInPatternGuard", FancyTailCalls.tcInPatternGuard(max, max), true) + check_success("FancyTailCalls.differentInstance", FancyTailCalls.differentInstance(max, 42), 42) check_success("PolyObject.tramp", PolyObject.tramp[Int](max), 0) } diff --git a/test/files/run/tpeCache-tyconCache.check b/test/files/run/tpeCache-tyconCache.check index a892f5477aad..ff604819e0c3 100644 --- a/test/files/run/tpeCache-tyconCache.check +++ b/test/files/run/tpeCache-tyconCache.check @@ -16,4 +16,4 @@ res0: Boolean = true scala> AnyRefClass.tpe eq AnyRefClass.typeConstructor res1: Boolean = true -scala> +scala> :quit diff --git a/test/files/run/typetags_serialize.check b/test/files/run/typetags_serialize.check index f79436ea5d6f..22928a2e94d8 100644 --- a/test/files/run/typetags_serialize.check +++ b/test/files/run/typetags_serialize.check @@ -1,2 +1,3 @@ -java.io.NotSerializableException: scala.reflect.api.TypeTags$PredefTypeCreator -java.io.NotSerializableException: Test$$typecreator1$1 +TypeTag[Int] +TypeTag[String] +TypeTag[Test.C[Double]] diff --git a/test/files/run/typetags_serialize.scala b/test/files/run/typetags_serialize.scala index 3c842e6cc9a7..a7a784523205 100644 --- a/test/files/run/typetags_serialize.scala +++ b/test/files/run/typetags_serialize.scala @@ -4,6 +4,10 @@ import scala.reflect.runtime.{universe => ru} import scala.reflect.runtime.{currentMirror => cm} object Test extends App { + class C[A] { + def m(a: A): Int = 5 + } + def test(tag: TypeTag[_]) = try { val fout = new ByteArrayOutputStream() @@ -26,4 +30,5 @@ object Test extends App { test(implicitly[TypeTag[Int]]) test(implicitly[TypeTag[String]]) + test(implicitly[TypeTag[C[Double]]]) } \ No newline at end of file diff --git a/test/files/run/typetags_without_scala_reflect_typetag_lookup.scala b/test/files/run/typetags_without_scala_reflect_typetag_lookup.scala index 1fbdc62a1e18..3d2b9f77be3c 100644 --- a/test/files/run/typetags_without_scala_reflect_typetag_lookup.scala +++ b/test/files/run/typetags_without_scala_reflect_typetag_lookup.scala @@ -36,8 +36,8 @@ object Test extends StoreReporterDirectTest { println(filteredInfos.mkString("\n")) storeReporter.infos.clear() compileApp(); - // we should get bad symbolic reference errors, because we're trying to use an implicit that can't be unpickled + // we should get "missing or invalid dependency detected" errors, because we're trying to use an implicit that can't be unpickled // but we don't know the number of these errors and their order, so I just ignore them all - println(filteredInfos.filterNot(_.msg.contains("bad symbolic reference")).mkString("\n")) + println(filteredInfos.filterNot(_.msg.contains("missing or invalid dependency detected")).mkString("\n")) } } diff --git a/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.scala b/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.scala index 6804baa0c3cc..a865f4d137dd 100644 --- a/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.scala +++ b/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.scala @@ -40,8 +40,8 @@ object Test extends StoreReporterDirectTest { 
println(filteredInfos.mkString("\n")) storeReporter.infos.clear() compileApp(); - // we should get bad symbolic reference errors, because we're trying to use an implicit that can't be unpickled + // we should get "missing or invalid dependency detected" errors, because we're trying to use an implicit that can't be unpickled // but we don't know the number of these errors and their order, so I just ignore them all - println(filteredInfos.filterNot (_.msg.contains("bad symbolic reference")).mkString("\n")) + println(filteredInfos.filterNot (_.msg.contains("missing or invalid dependency detected")).mkString("\n")) } } diff --git a/test/files/run/unittest_collection.check b/test/files/run/unittest_collection.check index 844ca5468251..df1629dd7eb1 100644 --- a/test/files/run/unittest_collection.check +++ b/test/files/run/unittest_collection.check @@ -1 +1 @@ -warning: there were 1 deprecation warning(s); re-run with -deprecation for details +warning: there was one deprecation warning; re-run with -deprecation for details diff --git a/test/files/run/various-flat-classpath-types.check b/test/files/run/various-flat-classpath-types.check new file mode 100644 index 000000000000..401f707d0ee3 --- /dev/null +++ b/test/files/run/various-flat-classpath-types.check @@ -0,0 +1,12 @@ +ZipBin() +JarBin() +DirBin() +ZipSrc() +JarSrc() +DirSrc() +NestedZipBin() +NestedJarBin() +NestedDirBin() +NestedZipSrc() +NestedJarSrc() +NestedDirSrc() \ No newline at end of file diff --git a/test/files/run/various-flat-classpath-types.scala b/test/files/run/various-flat-classpath-types.scala new file mode 100644 index 000000000000..d39019e88562 --- /dev/null +++ b/test/files/run/various-flat-classpath-types.scala @@ -0,0 +1,214 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ + +import java.io.{File => JFile, FileInputStream, FileOutputStream} +import java.util.zip.{ZipEntry, ZipOutputStream} +import scala.reflect.io.{Directory, File} +import scala.tools.nsc.classpath.FlatClassPath.RootPackage +import scala.tools.nsc.classpath.PackageNameUtils +import scala.tools.nsc.io.Jar + +/** + * Generates directories, jars and zip files containing sources and classes + * (the result of a compilation which is executed here) + * and use them as a class- and sourcepath during compilation and running + * created application. At the end everything is cleaned up. + * + * It can test also current, recursive classpath. Just right now we force + * flat classpath to test it also when the recursive one would be set as a default. 
+ */ +object Test { + + private implicit class JFileOps(file: JFile) { + + def createDir(newDirName: String) = { + val newDir = new JFile(file, newDirName) + newDir.mkdir() + newDir + } + + def createSrcFile(newFileName: String) = createFile(newFileName + ".scala") + + def createFile(fullFileName: String) = { + val newFile = new JFile(file, fullFileName) + newFile.createNewFile() + newFile + } + + def writeAll(text: String): Unit = File(file) writeAll text + + def moveContentToZip(zipName: String): Unit = { + val newZip = zipsDir createFile s"$zipName.zip" + val outputStream = new ZipOutputStream(new FileOutputStream(newZip)) + + def addFileToZip(dirPrefix: String = "")(fileToAdd: JFile): Unit = + if (fileToAdd.isDirectory) { + val dirEntryName = fileToAdd.getName + "/" + outputStream.putNextEntry(new ZipEntry(dirEntryName)) + fileToAdd.listFiles() foreach addFileToZip(dirEntryName) + } else { + val inputStream = new FileInputStream(fileToAdd) + outputStream.putNextEntry(new ZipEntry(dirPrefix + fileToAdd.getName)) + + val buffer = new Array[Byte](1024) + var count = inputStream.read(buffer) + while (count > 0) { + outputStream.write(buffer, 0, count) + count = inputStream.read(buffer) + } + + inputStream.close() + } + + file.listFiles() foreach addFileToZip() + outputStream.close() + + cleanDir(file) + } + + def moveContentToJar(jarName: String): Unit = { + val newJar = jarsDir createFile s"$jarName.jar" + Jar.create(file = File(newJar), sourceDir = Directory(file), mainClass = "won't be used") + cleanDir(file) + } + + def path: String = file.getAbsolutePath + } + + private case class DirRep(name: String, nestedDirs: Seq[DirRep] = Nil, sourceFiles: Seq[String] = Nil) + + private val compiler = new scala.tools.nsc.MainClass + private val appRunner = new scala.tools.nsc.MainGenericRunner + private val classPathImplFlag = "-YclasspathImpl:flat" + private val javaClassPath = sys.props("java.class.path") + + // creates a test dir in a temporary dir containing compiled files of this test + // root dir will be automatically deleted after the end of test + private val rootDir = new JFile(sys.props("partest.output")) + private val testDir = rootDir createDir s"cp-tests-${System.currentTimeMillis()}" + + private val jarsDir = testDir createDir "jars" + private val zipsDir = testDir createDir "zips" + private val srcDir = testDir createDir "src" + private val binDir = testDir createDir "bin" + private val outDir = testDir createDir "out" + + def main(args: Array[String]): Unit = { + createClassesZipInZipsDir() + createClassesJarInJarsDir() + createClassesInBinDir() + createSourcesZipInZipsDir() + createSourcesJarInJarsDir() + createSourcesInSrcDir() + compileFinalApp() + runApp() + // at the end all created files will be deleted automatically + } + + private def createClassesZipInZipsDir(): Unit = { + val baseFileName = "ZipBin" + createStandardSrcHierarchy(baseFileName) + compileSrc(baseFileName) + outDir moveContentToZip "Bin" + cleanDir(srcDir) + } + + private def createClassesJarInJarsDir(): Unit = { + val baseFileName = "JarBin" + createStandardSrcHierarchy(baseFileName) + compileSrc(baseFileName) + outDir moveContentToJar "Bin" + cleanDir(srcDir) + } + + private def createClassesInBinDir(): Unit = { + val baseFileName = "DirBin" + createStandardSrcHierarchy(baseFileName) + compileSrc(baseFileName, destination = binDir) + cleanDir(srcDir) + } + + private def createSourcesZipInZipsDir(): Unit = { + createStandardSrcHierarchy(baseFileName = "ZipSrc") + srcDir moveContentToZip "Src" + } + + 
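
Once moveContentToZip and moveContentToJar have packaged the compiled output, a quick way to sanity-check what landed in an archive is to list its entries; the path below is an assumption matching the directories created above:

import java.util.zip.ZipFile
import scala.collection.JavaConverters._

object ListZipEntries {
  def main(args: Array[String]): Unit = {
    val zip = new ZipFile("zips/Bin.zip")
    try zip.entries.asScala.foreach(e => println(e.getName))  // e.g. nested/, nested/NestedZipBin.class, ZipBin.class
    finally zip.close()
  }
}
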
private def createSourcesJarInJarsDir(): Unit = { + createStandardSrcHierarchy(baseFileName = "JarSrc") + srcDir moveContentToJar "Src" + } + + private def createSourcesInSrcDir(): Unit = { + createStandardSrcHierarchy(baseFileName = "DirSrc") + + val appFile = srcDir createSrcFile "Main" + appFile writeAll s"""import nested._ + | object Main extends App { + | println(new ZipBin) + | println(new JarBin) + | println(new DirBin) + | println(new ZipSrc) + | println(new JarSrc) + | println(new DirSrc) + | + | println(new NestedZipBin) + | println(new NestedJarBin) + | println(new NestedDirBin) + | println(new NestedZipSrc) + | println(new NestedJarSrc) + | println(new NestedDirSrc) + | } + """.stripMargin + } + + private def compileFinalApp(): Unit = { + val classPath = mkPath(javaClassPath, binDir.path, zipsDir.path + "/Bin.zip", jarsDir.path + "/Bin.jar") + val sourcePath = mkPath(srcDir.path, zipsDir.path + "/Src.zip", jarsDir.path + "/Src.jar") + + compiler.process(Array(classPathImplFlag, "-cp", classPath, "-sourcepath", sourcePath, + "-d", outDir.path, s"${srcDir.path}/Main.scala")) + } + + private def runApp(): Unit = { + val classPath = mkPath(javaClassPath, outDir.path, binDir.path, zipsDir.path + "/Bin.zip", jarsDir.path + "/Bin.jar") + appRunner.process(Array(classPathImplFlag, "-cp", classPath, "Main")) + } + + private def createStandardSrcHierarchy(baseFileName: String): Unit = + createSources(RootPackage, srcDir, + DirRep("", + nestedDirs = Seq(DirRep("nested", sourceFiles = Seq("Nested" + baseFileName))), + sourceFiles = Seq(baseFileName) + ) + ) + + private def createSources(pkg: String, dirFile: JFile, dirRep: DirRep): Unit = { + dirRep.nestedDirs foreach { rep => + val nestedDir = dirFile createDir rep.name + val nestedPkg = PackageNameUtils.packagePrefix(pkg) + rep.name + createSources(nestedPkg, nestedDir, rep) + } + + val pkgHeader = if (pkg == RootPackage) "" else s"package $pkg\n\n" + dirRep.sourceFiles foreach { srcName => + val text = s"""${pkgHeader}case class $srcName(x: String = "")""" + val srcFile = dirFile createSrcFile srcName + srcFile writeAll text + } + } + + private def compileSrc(baseFileName: String, destination: JFile = outDir): Unit = { + val srcDirPath = srcDir.path + compiler.process(Array(classPathImplFlag, "-cp", javaClassPath, "-d", destination.path, + s"$srcDirPath/$baseFileName.scala", s"$srcDirPath/nested/Nested$baseFileName.scala")) + } + + private def cleanDir(dir: JFile): Unit = + dir.listFiles().foreach { file => + if (file.isDirectory) cleanDir(file) + file.delete() + } + + private def mkPath(pathEntries: String*) = pathEntries.mkString(File.pathSeparator) +} diff --git a/test/files/run/virtpatmat_nested_lists.flags b/test/files/run/virtpatmat_nested_lists.flags new file mode 100644 index 000000000000..ca9a4c06970b --- /dev/null +++ b/test/files/run/virtpatmat_nested_lists.flags @@ -0,0 +1 @@ +-Ypatmat-exhaust-depth off \ No newline at end of file diff --git a/test/files/run/virtpatmat_opt_sharing.flags b/test/files/run/virtpatmat_opt_sharing.flags new file mode 100644 index 000000000000..ca9a4c06970b --- /dev/null +++ b/test/files/run/virtpatmat_opt_sharing.flags @@ -0,0 +1 @@ +-Ypatmat-exhaust-depth off \ No newline at end of file diff --git a/test/files/run/virtpatmat_staging.flags b/test/files/run/virtpatmat_staging.flags index 48fd867160ba..0a22f7c729cc 100644 --- a/test/files/run/virtpatmat_staging.flags +++ b/test/files/run/virtpatmat_staging.flags @@ -1 +1,2 @@ +-Yrangepos:false -Xexperimental diff --git 
a/test/files/run/virtpatmat_typetag.check b/test/files/run/virtpatmat_typetag.check index cac9d9a4d6ed..00df8b5e81b4 100644 --- a/test/files/run/virtpatmat_typetag.check +++ b/test/files/run/virtpatmat_typetag.check @@ -1,9 +1,9 @@ -1 is not a Int; it's a class java.lang.Integer +1 is a Int 1 is a java.lang.Integer 1 is not a java.lang.String; it's a class java.lang.Integer true is a Any woele is a java.lang.String -1 is not a Int; it's a class java.lang.Integer +1 is a Int 1 is a java.lang.Integer 1 is not a java.lang.String; it's a class java.lang.Integer true is a Any diff --git a/test/files/run/xMigration.check b/test/files/run/xMigration.check new file mode 100644 index 000000000000..378f7bb6c3f1 --- /dev/null +++ b/test/files/run/xMigration.check @@ -0,0 +1,49 @@ +Type in expressions to have them evaluated. +Type :help for more information. + +scala> Map(1 -> "eis").values // no warn +res0: Iterable[String] = MapLike(eis) + +scala> :setting -Xmigration:none + +scala> Map(1 -> "eis").values // no warn +res1: Iterable[String] = MapLike(eis) + +scala> :setting -Xmigration:any + +scala> Map(1 -> "eis").values // warn +:8: warning: method values in trait MapLike has changed semantics in version 2.8.0: +`values` returns `Iterable[B]` rather than `Iterator[B]`. + Map(1 -> "eis").values // warn + ^ +res2: Iterable[String] = MapLike(eis) + +scala> :setting -Xmigration:2.8 + +scala> Map(1 -> "eis").values // no warn +res3: Iterable[String] = MapLike(eis) + +scala> :setting -Xmigration:2.7 + +scala> Map(1 -> "eis").values // warn +:8: warning: method values in trait MapLike has changed semantics in version 2.8.0: +`values` returns `Iterable[B]` rather than `Iterator[B]`. + Map(1 -> "eis").values // warn + ^ +res4: Iterable[String] = MapLike(eis) + +scala> :setting -Xmigration:2.11 + +scala> Map(1 -> "eis").values // no warn +res5: Iterable[String] = MapLike(eis) + +scala> :setting -Xmigration // same as :any + +scala> Map(1 -> "eis").values // warn +:8: warning: method values in trait MapLike has changed semantics in version 2.8.0: +`values` returns `Iterable[B]` rather than `Iterator[B]`. 
+ Map(1 -> "eis").values // warn + ^ +res6: Iterable[String] = MapLike(eis) + +scala> :quit diff --git a/test/files/run/xMigration.scala b/test/files/run/xMigration.scala new file mode 100644 index 000000000000..688e8783972c --- /dev/null +++ b/test/files/run/xMigration.scala @@ -0,0 +1,19 @@ +import scala.tools.partest.ReplTest + +object Test extends ReplTest { + def code = """ +Map(1 -> "eis").values // no warn +:setting -Xmigration:none +Map(1 -> "eis").values // no warn +:setting -Xmigration:any +Map(1 -> "eis").values // warn +:setting -Xmigration:2.8 +Map(1 -> "eis").values // no warn +:setting -Xmigration:2.7 +Map(1 -> "eis").values // warn +:setting -Xmigration:2.11 +Map(1 -> "eis").values // no warn +:setting -Xmigration // same as :any +Map(1 -> "eis").values // warn + """ +} diff --git a/test/files/scalacheck/nan-ordering.scala b/test/files/scalacheck/nan-ordering.scala index 2094a46e370d..05e97a13c908 100644 --- a/test/files/scalacheck/nan-ordering.scala +++ b/test/files/scalacheck/nan-ordering.scala @@ -42,16 +42,16 @@ object Test extends Properties("NaN-Ordering") { property("Float equiv") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.equiv(d1, d2) == (d1 == d2) } property("Float reverse.min") = forAll(specFloats, specFloats) { (d1, d2) => { - val mathmin = math.min(d1, d2) + val mathmax = math.max(d1, d2) val numericmin = numFloat.reverse.min(d1, d2) - mathmin == numericmin || mathmin.isNaN && numericmin.isNaN + mathmax == numericmin || mathmax.isNaN && numericmin.isNaN } } property("Float reverse.max") = forAll(specFloats, specFloats) { (d1, d2) => { - val mathmax = math.max(d1, d2) + val mathmin = math.min(d1, d2) val numericmax = numFloat.reverse.max(d1, d2) - mathmax == numericmax || mathmax.isNaN && numericmax.isNaN + mathmin == numericmax || mathmin.isNaN && numericmax.isNaN } } @@ -105,16 +105,16 @@ object Test extends Properties("NaN-Ordering") { property("Double equiv") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.equiv(d1, d2) == (d1 == d2) } property("Double reverse.min") = forAll(specDoubles, specDoubles) { (d1, d2) => { - val mathmin = math.min(d1, d2) + val mathmax = math.max(d1, d2) val numericmin = numDouble.reverse.min(d1, d2) - mathmin == numericmin || mathmin.isNaN && numericmin.isNaN + mathmax == numericmin || mathmax.isNaN && numericmin.isNaN } } property("Double reverse.max") = forAll(specDoubles, specDoubles) { (d1, d2) => { - val mathmax = math.max(d1, d2) + val mathmin = math.min(d1, d2) val numericmax = numDouble.reverse.max(d1, d2) - mathmax == numericmax || mathmax.isNaN && numericmax.isNaN + mathmin == numericmax || mathmin.isNaN && numericmax.isNaN } } diff --git a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala index 774d6f428bc7..468bcb6dd117 100644 --- a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala +++ b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala @@ -36,7 +36,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col // used to check if constructed collection is valid def checkDataStructureInvariants(orig: Traversable[T], cf: AnyRef) = { - // can be overriden in subclasses + // can be overridden in subclasses true } diff --git a/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala b/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala index 69aef126685f..fd810674f536 100644 --- 
a/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala +++ b/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala @@ -90,6 +90,15 @@ trait ClassConstruction { self: QuasiquoteProperties => val args = q"val a: Int; val b: Int" assertEqAst(q"class C(implicit ..$args)", "class C(implicit val a: Int, val b: Int)") } + + property("SI-8451: inline secondary constructors") = test { + assertEqAst(q"class C(x: Int) { def this() = this(0) }", "class C(x: Int) { def this() = this(0) }") + } + + property("SI-8451: unquoted secondary constructors") = test { + val secondaryCtor = q"def this() = this(0)" + assertEqAst(q"class C(x: Int) { $secondaryCtor }", "class C(x: Int) { def this() = this(0) }") + } } trait TraitConstruction { self: QuasiquoteProperties => diff --git a/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala b/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala index af7f2164a0a3..2c0e100b5a7d 100644 --- a/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala +++ b/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala @@ -229,6 +229,12 @@ trait DefDeconstruction { self: QuasiquoteProperties => val q"def foo(...$argss)(implicit ..$impl)" = q"def foo(x: Int)" assert(impl.isEmpty) } + + property("SI-8451") = test { + val q"def this(..$params) = this(..$args)" = q"def this(x: Int) = this(0)" + assert(params ≈ List(q"${Modifiers(PARAM)} val x: Int")) + assert(args ≈ List(q"0")) + } } trait ImportDeconstruction { self: QuasiquoteProperties => diff --git a/test/files/scalacheck/quasiquotes/ErrorProps.scala b/test/files/scalacheck/quasiquotes/ErrorProps.scala index 3d9b27de7771..2cba07abf2df 100644 --- a/test/files/scalacheck/quasiquotes/ErrorProps.scala +++ b/test/files/scalacheck/quasiquotes/ErrorProps.scala @@ -9,9 +9,10 @@ object ErrorProps extends QuasiquoteProperties("errors") { """) property("can't unquote with given rank") = fails( - "Can't unquote List[reflect.runtime.universe.Ident], consider using ..", + "Can't unquote List[StringBuilder], consider using .. or providing an implicit instance of Liftable[List[StringBuilder]]", """ - val xs = List(q"x", q"x") + import java.lang.StringBuilder + val xs: List[StringBuilder] = Nil q"$xs" """) @@ -71,9 +72,10 @@ object ErrorProps extends QuasiquoteProperties("errors") { """) property("use ... rank or provide liftable") = fails( - "Can't unquote List[List[reflect.runtime.universe.Ident]], consider using ...", + "Can't unquote List[List[StringBuilder]], consider using ... 
or providing an implicit instance of Liftable[List[List[StringBuilder]]]", """ - val xs = List(List(q"x", q"x")) + import java.lang.StringBuilder + val xs: List[List[StringBuilder]] = Nil + q"$xs" + """) @@ -172,6 +174,40 @@ object ErrorProps extends QuasiquoteProperties("errors") { tq"_" """) + property("SI-8420: don't crash on splicing of non-unliftable native type (1)") = fails( + "Can't unquote List[reflect.runtime.universe.Symbol] with .., consider omitting the dots or providing an implicit instance of Liftable[reflect.runtime.universe.Symbol]", + """ + val l: List[Symbol] = Nil + q"f(..$l)" + """) + + property("SI-8420: don't crash on splicing of non-unliftable native type (2)") = fails( + "Can't unquote List[reflect.runtime.universe.FlagSet] with .., consider omitting the dots or providing an implicit instance of Liftable[reflect.runtime.universe.FlagSet]", + """ + val l: List[FlagSet] = Nil + q"f(..$l)" + """) + + property("SI-8420: don't crash on splicing of non-unliftable native type (3)") = fails( + "Can't unquote List[reflect.runtime.universe.Modifiers] with .., consider omitting the dots or providing an implicit instance of Liftable[reflect.runtime.universe.Modifiers]", + """ + val l: List[Modifiers] = Nil + q"f(..$l)" + """) + + property("SI-8451 construction: disallow everything except for constructor calls in secondary constructor bodies") = fails( + "'this' expected but unquotee found", + """ + val rhs1 = q"this(0)" + val ctor1 = q"def this(x: Int) = $rhs1" + """) + + property("SI-8451 deconstruction: disallow everything except for constructor calls in secondary constructor bodies") = fails( + "'this' expected but unquotee found", + """ + val q"def this(..$params) = $rhs2" = q"def this(x: Int) = this(0)" + """) + // // Make sure a nice error is reported in this case // { import Flag._; val mods = NoMods; q"lazy $mods val x: Int" } } diff --git a/test/files/scalacheck/quasiquotes/LiftableProps.scala b/test/files/scalacheck/quasiquotes/LiftableProps.scala index 5d0eeb53c6ad..a4c57ac359c9 100644 --- a/test/files/scalacheck/quasiquotes/LiftableProps.scala +++ b/test/files/scalacheck/quasiquotes/LiftableProps.scala @@ -88,9 +88,10 @@ object LiftableProps extends QuasiquoteProperties("liftable") { assert(q"$const" ≈ q"0") } + val immutable = q"$scalapkg.collection.immutable" + property("lift list variants") = test { val lst = List(1, 2) - val immutable = q"$scalapkg.collection.immutable" assert(q"$lst" ≈ q"$immutable.List(1, 2)") assert(q"f(..$lst)" ≈ q"f(1, 2)") val llst = List(List(1), List(2)) @@ -98,6 +99,11 @@ object LiftableProps extends QuasiquoteProperties("liftable") { assert(q"f(...$llst)" ≈ q"f(1)(2)") } + property("lift list of tree") = test { + val lst = List(q"a", q"b") + assert(q"$lst" ≈ q"$immutable.List(a, b)") + } + property("lift tuple") = test { assert(q"${(1, 2)}" ≈ q"(1, 2)") assert(q"${(1, 2, 3)}" ≈ q"(1, 2, 3)") @@ -158,4 +164,11 @@ object LiftableProps extends QuasiquoteProperties("liftable") { val right3: Either[Int, Int] = Right(1) assert(q"$right3" ≈ q"scala.util.Right(1)") } + + property("lift xml comment") = test { + implicit val liftXmlComment = Liftable[xml.Comment] { comment => + q"new _root_.scala.xml.Comment(${comment.commentText})" + } + assert(q"${xml.Comment("foo")}" ≈ q"<!--foo-->") + } } diff --git a/test/files/scalacheck/quasiquotes/TermConstructionProps.scala b/test/files/scalacheck/quasiquotes/TermConstructionProps.scala index fd4d2e9c4b29..409f07037e54 100644 --- a/test/files/scalacheck/quasiquotes/TermConstructionProps.scala +++
b/test/files/scalacheck/quasiquotes/TermConstructionProps.scala @@ -95,12 +95,6 @@ object TermConstructionProps extends QuasiquoteProperties("term construction") { body1 ≈ body && cond1 ≈ cond } - property("unquote trees into alternative") = forAll { (c: Tree, A: Tree, B: Tree) => - q"$c match { case $A | $B => }" ≈ - Match(c, List( - CaseDef(Alternative(List(A, B)), EmptyTree, Literal(Constant(()))))) - } - def blockInvariant(quote: Tree, trees: List[Tree]) = quote ≈ (trees match { case Nil => q"{}" @@ -295,4 +289,37 @@ object TermConstructionProps extends QuasiquoteProperties("term construction") { val q"$a = $b = $c = $d = $e = $f = $g = $h = $k = $l" = q"a = b = c = d = e = f = g = h = k = l" assert(a ≈ q"a" && b ≈ q"b" && c ≈ q"c" && d ≈ q"d" && e ≈ q"e" && g ≈ q"g" && h ≈ q"h" && k ≈ q"k" && l ≈ q"l") } + + property("SI-8385 a") = test { + assertEqAst(q"(foo.x = 1)(2)", "(foo.x = 1)(2)") + } + + property("SI-8385 b") = test { + assertEqAst(q"(() => ())()", "(() => ())()") + } + + property("match scrutinee may not be empty") = test { + assertThrows[IllegalArgumentException] { + val scrutinee = q"" + val cases = List(cq"_ =>") + q"$scrutinee match { case ..$cases }" + } + } + + property("construct partial function") = test { + val cases = List(cq"a => b", cq"c => d") + assertEqAst(q"{ case ..$cases }", "{ case a => b case c => d }") + } + + property("SI-8609 a") = test { + val q1 = q"val x = 1" + val q2 = q"..$q1; val y = 2" + assert(q2 ≈ q"{ val x = 1; val y = 2 }") + } + + property("SI-8609 b") = test { + val q1 = q"import foo.bar" + val q2 = q"..$q1; val y = 2" + assert(q2 ≈ q"{ import foo.bar; val y = 2 }") + } } diff --git a/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala b/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala index e96d1186f758..07e8f3faacd2 100644 --- a/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala +++ b/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala @@ -199,4 +199,58 @@ object TermDeconstructionProps extends QuasiquoteProperties("term deconstruction q"..$tpt; ()" } } + + property("term select doesn't match type select") = test { + assertThrows[MatchError] { + val q"$qual.$name" = tq"foo.bar" + } + } + + property("type application doesn't match applied type") = test { + assertThrows[MatchError] { + val q"$f[..$targs]" = tq"foo[bar]" + } + } + + property("match doesn't match partial function") = test { + assertThrows[MatchError] { + val q"$_ match { case ..$_ }" = q"{ case _ => }" + } + } + + property("deconstruct partial function") = test { + val q"{ case ..$cases }" = q"{ case a => b case c => d }" + val List(cq"a => b", cq"c => d") = cases + } + + property("SI-8350 `new C` and `new C()` are equivalent") = test { + val q"new C" = q"new C()" + val q"new C()" = q"new C" + } + + property("SI-8350 new applications extracted only for non-empty ctor calls") = test{ + val q"new $c1" = q"new C()" + assert(c1 ≈ tq"C") + val q"new $c2" = q"new C(x)" + assert(c2 ≈ q"${tq"C"}(x)") + } + + property("SI-8350 original test case") = test { + val q"new ..$parents" = q"new Foo with Bar" + assert(parents ≈ List(tq"Foo", tq"Bar")) + } + + property("SI-8387 new is not an application") = test { + val `new` = q"new F(x)" + val q"$f(...$argss)" = `new` + assert(f ≈ `new`) + assert(argss.isEmpty) + } + + property("SI-8703 extract block with single expression") = test { + val q"{ $a }" = Block(Nil, q"1") + val Literal(Constant(1)) = a + val q"{ $b }" = q"2" + val Literal(Constant(2)) = b + } } diff --git 
a/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala b/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala index 8ec1779353fe..7572b27b5275 100644 --- a/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala +++ b/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala @@ -63,4 +63,16 @@ object TypeDeconstructionProps extends QuasiquoteProperties("type deconstruction // matches because type tree isn't syntactic without original val tq"" = tq"${typeOf[Int]}" } + + property("type select doesn't match term select") = test { + assertThrows[MatchError] { + val tq"$qual.$name" = q"foo.bar" + } + } + + property("applied type doesn't match type appliction") = test { + assertThrows[MatchError] { + val tq"$tpt[..$tpts]" = q"foo[bar]" + } + } } diff --git a/test/files/scalacheck/quasiquotes/TypecheckedProps.scala b/test/files/scalacheck/quasiquotes/TypecheckedProps.scala index 7c4cb0306eac..f84df269cada 100644 --- a/test/files/scalacheck/quasiquotes/TypecheckedProps.scala +++ b/test/files/scalacheck/quasiquotes/TypecheckedProps.scala @@ -1,33 +1,13 @@ import org.scalacheck._, Prop._, Gen._, Arbitrary._ import scala.reflect.runtime.universe._, Flag._, internal.reificationSupport._ -object TypecheckedProps extends QuasiquoteProperties("typechecked") { - def original(tree: Tree) = tree match { - case tt: TypeTree => Some(tt.original) - case _ => None - } - def originals(trees: List[Tree]) = trees.flatMap(original) - val int = ScalaDot(TypeName("Int")) - val intint = List(int, int) - +object TypecheckedProps extends QuasiquoteProperties("typechecked") + with TypecheckedTypes { property("tuple term") = test { val q"(..$elements)" = typecheck(q"(1, 2)") assert(elements ≈ List(q"1", q"2")) } - property("tuple type") = test { - val tq"(..$els0)" = typecheckTyp(tq"Unit") - assert(els0.isEmpty) - val tq"(..$els1)" = typecheckTyp(tq"(Int, Int)") - assert(originals(els1) ≈ intint) - } - - property("function type") = test { - val tq"(..$argtpes) => $restpe" = typecheckTyp(tq"(Int, Int) => Int") - assert(originals(argtpes) ≈ intint) - assert(original(restpe).get ≈ int) - } - property("for/for-yield") = test { val enums = fq"x <- xs" :: fq"x1 = x + 1" :: fq"if x1 % 2 == 0" :: Nil val body = q"x1" @@ -152,4 +132,84 @@ object TypecheckedProps extends QuasiquoteProperties("typechecked") { assert(name == defName) assert(rhs ≈ defRhs) } + + property("partial function") = test { + val q"{ case ..$cases }: $ascr" = typecheck(q"{ case 1 => () }: PartialFunction[Int, Unit]") + assert(cases ≈ q"{ case 1 => () }".cases) + } +} + +trait TypecheckedTypes { self: QuasiquoteProperties => + property("type ident") = test { + val q"$_; type $_ = $tpt" = typecheck(q"class C; type T = C") + val tq"C" = tpt + } + + property("type select") = test { + val tq"scala.Int" = typecheckTyp(tq"Int") + } + + property("this type select") = test { + val q"class $_ { $_; type $_ = $tpt }" = typecheck(q"class C { type A = Int; type B = this.A }") + val tq"this.$name" = tpt + val TypeName("A") = name + } + + property("super type select") = test { + val q"$_; class $_ extends $_ { type $_ = $tpt }" = + typecheck(q"class C1 { type A = Int }; class C2 extends C1 { type B = super[C1].A }") + val tq"$empty.super[$c1].$a" = tpt + val TypeName("") = empty + val TypeName("C1") = c1 + val TypeName("A") = a + } + + property("applied type") = test { + val tt = typecheckTyp(tq"Map[Int, Int]") + val tq"$tpt[..$tpts]" = tt + val tq"scala.this.Predef.Map" = tpt + val List(tq"scala.Int", tq"scala.Int") = tpts + } + + 
property("tuple type") = test { + val tq"(..$els0)" = typecheckTyp(tq"Unit") + assert(els0.isEmpty) + val tq"(..$els1)" = typecheckTyp(tq"(Int, Int)") + val List(tq"scala.Int", tq"scala.Int") = els1 + } + + property("function type") = test { + val tq"(..$argtpes) => $restpe" = typecheckTyp(tq"(Int, Int) => Int") + val List(tq"scala.Int", tq"scala.Int") = argtpes + val tq"scala.Int" = restpe + } + + property("compound type") = test { + val tq"..$parents { ..$defns }" = typecheckTyp(tq"Int { def x: Int }") + val List(tq"Int") = parents + val List(q"def x: Int") = defns + } + + property("singleton type") = test { + val tq"$ref.type" = typecheckTyp(tq"scala.Predef.type") + val q"scala.Predef" = ref + } + + property("type projection") = test { + val tq"$tpt#$name" = typecheckTyp(tq"({ type T = Int })#T") + val TypeName("T") = name + val tq"{ type T = Int }" = tpt + } + + property("annotated type") = test { + val tq"$tpt @$annot" = typecheckTyp(tq"Int @unchecked") + val tq"scala.Int" = tpt + val tq"unchecked" = annot + } + + property("existential type") = test { + val tq"$tpt forSome { ..$defns }" = typecheckTyp(tq"T forSome { type T }") + val tq"T" = tpt + val q"type T" :: Nil = defns + } } diff --git a/test/files/scalacheck/quasiquotes/UnliftableProps.scala b/test/files/scalacheck/quasiquotes/UnliftableProps.scala index 1d7629aa29c8..659b18edabb1 100644 --- a/test/files/scalacheck/quasiquotes/UnliftableProps.scala +++ b/test/files/scalacheck/quasiquotes/UnliftableProps.scala @@ -155,4 +155,12 @@ object UnliftableProps extends QuasiquoteProperties("unliftable") { assert(t21 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21)) assert(t22 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22)) } + + property("unlift xml comment") = test { + implicit val unliftXmlComment = Unliftable[xml.Comment] { + case q"new _root_.scala.xml.Comment(${value: String})" => xml.Comment(value) + } + val q"${comment: xml.Comment}" = q"<!--foo-->" + assert(comment.commentText == "foo") + } } diff --git a/test/files/t8449/Client.scala b/test/files/t8449/Client.scala new file mode 100644 index 000000000000..5d273f06b251 --- /dev/null +++ b/test/files/t8449/Client.scala @@ -0,0 +1,3 @@ +object Client { + def foo: Any = new Test().foo +} diff --git a/test/files/t8449/Test.java b/test/files/t8449/Test.java new file mode 100644 index 000000000000..ecb1711b24b4 --- /dev/null +++ b/test/files/t8449/Test.java @@ -0,0 +1,10 @@ +public class Test { + // Raw type over a Scala type constructor + public scala.Function1 foo() { return null; } + // scalac reported: + // % scalac-hash v2.11.2 -d /tmp sandbox/{Test.java,Client.scala} + // sandbox/Test.java:2: error: trait Function1 takes type parameters + // public scala.Function1 foo() { return null; } + // ^ + // one error found +} diff --git a/test/instrumented/library/scala/runtime/ScalaRunTime.scala b/test/instrumented/library/scala/runtime/ScalaRunTime.scala index e474ae737c8a..6b45a4e9f3ba 100644 --- a/test/instrumented/library/scala/runtime/ScalaRunTime.scala +++ b/test/instrumented/library/scala/runtime/ScalaRunTime.scala @@ -261,7 +261,7 @@ object ScalaRunTime { * * The primary motivation for this method is to provide a means for * correctly obtaining a String representation of a value, while - * avoiding the pitfalls of naïvely calling toString on said value. + * avoiding the pitfalls of naively calling toString on said value.
* In particular, it addresses the fact that (a) toString cannot be * called on null and (b) depending on the apparent type of an * array, toString may or may not print it in a human-readable form. diff --git a/test/junit/scala/StringContextTest.scala b/test/junit/scala/StringContextTest.scala new file mode 100644 index 000000000000..608b82bd96f3 --- /dev/null +++ b/test/junit/scala/StringContextTest.scala @@ -0,0 +1,78 @@ + +package scala + +import org.junit.Test +import org.junit.Assert._ +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.tools.testing.AssertUtil._ + +@RunWith(classOf[JUnit4]) +class StringContextTest { + + import StringContext._ + + @Test def noEscape() = { + val s = "string" + val res = processEscapes(s) + assertEquals(s, res) + } + @Test def tabbed() = { + val s = """a\tb""" + val res = processEscapes(s) + assertEquals("a\tb", res) + } + @Test def quoted() = { + val s = """hello, \"world\"""" + val res = processEscapes(s) + assertEquals("""hello, "world"""", res) + } + @Test def octal() = { + val s = """\123cala""" + val res = treatEscapes(s) + assertEquals("Scala", res) + } + @Test def doubled() = { + val s = """\123cala\123yntax""" + val res = treatEscapes(s) + assertEquals("ScalaSyntax", res) + } + @Test def badly() = assertThrows[InvalidEscapeException] { + val s = """Scala\""" + val res = treatEscapes(s) + assertEquals("Scala", res) + } + @Test def noOctal() = assertThrows[InvalidEscapeException] { + val s = """\123cala""" + val res = processEscapes(s) + assertEquals("Scala", res) + } + + @Test def t6631_baseline() = assertEquals("\f\r\n\t", s"""\f\r\n\t""") + + @Test def t6631_badEscape() = assertThrows[InvalidEscapeException] { + s"""\x""" + } + + // verifying that the standard interpolators can be supplanted + @Test def antiHijack_?() = { + object AllYourStringsAreBelongToMe { case class StringContext(args: Any*) { def s(args: Any) = "!!!!" 
} } + import AllYourStringsAreBelongToMe._ + //assertEquals("????", s"????") + assertEquals("!!!!", s"????") // OK to hijack core interpolator ids + } + + @Test def fIf() = { + val res = f"${if (true) 2.5 else 2.5}%.2f" + assertEquals("2.50", res) + } + @Test def fIfNot() = { + val res = f"${if (false) 2.5 else 3.5}%.2f" + assertEquals("3.50", res) + } + @Test def fHeteroArgs() = { + val res = f"${3.14}%.2f rounds to ${3}%d" + assertEquals("3.14 rounds to 3", res) + } +} diff --git a/test/junit/scala/collection/IndexedSeqOptimizedTest.scala b/test/junit/scala/collection/IndexedSeqOptimizedTest.scala new file mode 100644 index 000000000000..419e1454cbfc --- /dev/null +++ b/test/junit/scala/collection/IndexedSeqOptimizedTest.scala @@ -0,0 +1,29 @@ +package scala.collection + +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import org.junit.Assert._ +import org.junit.Test + +@RunWith(classOf[JUnit4]) +class IndexedSeqOptimizedTest { + + @Test + def notThrowsAnExceptionInLastIndexOf() { + assertEquals(0, (Array(2): collection.mutable.WrappedArray[Int]).lastIndexWhere(_ => true, 1)) + assertEquals(2, "abc123".lastIndexWhere(_.isLetter, 6)) + } + + @Test + def hasCorrectDropAndTakeMethods() { + assertEquals("", "abc" take Int.MinValue) + assertEquals("", "abc" takeRight Int.MinValue) + assertEquals("abc", "abc" drop Int.MinValue) + assertEquals("abc", "abc" dropRight Int.MinValue) + + assertArrayEquals(Array.empty[Int], Array(1, 2, 3) take Int.MinValue) + assertArrayEquals(Array.empty[Int], Array(1, 2, 3) takeRight Int.MinValue) + assertArrayEquals(Array(1, 2, 3), Array(1, 2, 3) drop Int.MinValue) + assertArrayEquals(Array(1, 2, 3), Array(1, 2, 3) dropRight Int.MinValue) + } +} diff --git a/test/junit/scala/collection/IterableViewLikeTest.scala b/test/junit/scala/collection/IterableViewLikeTest.scala new file mode 100644 index 000000000000..55da02744b44 --- /dev/null +++ b/test/junit/scala/collection/IterableViewLikeTest.scala @@ -0,0 +1,20 @@ +package scala.collection + +import org.junit.Assert._ +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +@RunWith(classOf[JUnit4]) +class IterableViewLikeTest { + + @Test + def hasCorrectDropAndTakeMethods() { + val iter = Iterable(1, 2, 3) + + assertEquals(Iterable.empty[Int], iter.view take Int.MinValue force) + assertEquals(Iterable.empty[Int], iter.view takeRight Int.MinValue force) + assertEquals(iter, iter.view drop Int.MinValue force) + assertEquals(iter, iter.view dropRight Int.MinValue force) + } +} diff --git a/test/junit/scala/collection/IteratorTest.scala b/test/junit/scala/collection/IteratorTest.scala new file mode 100644 index 000000000000..d5389afd0ce6 --- /dev/null +++ b/test/junit/scala/collection/IteratorTest.scala @@ -0,0 +1,157 @@ + +package scala.collection + +import org.junit.Assert._ +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.tools.testing.AssertUtil._ + +import Seq.empty + +@RunWith(classOf[JUnit4]) +class IteratorTest { + + @Test def groupedIteratorShouldNotAskForUnneededElement(): Unit = { + var counter = 0 + val it = new Iterator[Int] { var i = 0 ; def hasNext = { counter = i; true } ; def next = { i += 1; i } } + val slidingIt = it sliding 2 + slidingIt.next + assertEquals("Counter should be one, that means we didn't look further than needed", 1, counter) + } + + @Test def groupedIteratorIsLazyWhenPadded(): Unit = { + var counter = 0 + def it = new Iterator[Int] { var i = 0 ; def hasNext = { counter = i; 
true } ; def next = { i += 1; i } } + val slidingIt = it sliding 2 withPadding -1 + slidingIt.next + assertEquals("Counter should be one, that means we didn't look further than needed", 1, counter) + } + + @Test def dropDoesNotGrowStack(): Unit = { + def it = new Iterator[Throwable] { def hasNext = true ; def next = new Throwable } + + assertEquals(it.drop(1).next.getStackTrace.length, it.drop(1).drop(1).next.getStackTrace.length) + } + + @Test def dropIsChainable(): Unit = { + assertSameElements(1 to 4, Iterator from 0 take 5 drop 1) + assertSameElements(3 to 4, Iterator from 0 take 5 drop 3) + assertSameElements(empty, Iterator from 0 take 5 drop 5) + assertSameElements(empty, Iterator from 0 take 5 drop 10) + assertSameElements(0 to 4, Iterator from 0 take 5 drop 0) + assertSameElements(0 to 4, Iterator from 0 take 5 drop -1) + assertSameElements(2 to 8 by 2, Iterator from 0 take 5 drop 1 map (2 * _)) + assertSameElements(2 to 8 by 2, Iterator from 0 take 5 map (2 * _) drop 1) + assertSameElements(3 to 4, Iterator from 0 take 5 drop 1 drop 2) + assertSameElements(3 to 4, Iterator from 0 take 5 drop 2 drop 1) + } + + @Test def sliceIsChainable(): Unit = { + assertSameElements(3 to 6, Iterator from 0 slice (3, 7)) + assertSameElements(empty, Iterator from 0 slice (3, 3)) + assertSameElements(0 to 2, Iterator from 0 slice (-1, 3)) + assertSameElements(empty, Iterator from 0 slice (3, -1)) + assertSameElements(6 to 12 by 2, Iterator from 0 slice (3, 7) map (2 * _)) + assertSameElements(6 to 12 by 2, Iterator from 0 map (2 * _) slice (3, 7)) + assertSameElements(4 to 6, Iterator from 0 slice (3, 7) drop 1) + assertSameElements(4 to 7, Iterator from 0 drop 1 slice (3, 7)) + assertSameElements(4 to 5, Iterator from 0 slice (3, 7) slice (1, 3)) + assertSameElements(4 to 6, Iterator from 0 slice (3, 7) slice (1, 10)) + } + + // test/files/run/iterator-concat.scala + @Test def concatIsStackFriendly(): Unit = { + // Create `size` Function0s, each of which evaluates to an Iterator + // which produces 1. Then fold them over ++ to get a single iterator, + // which should sum to "size". 
+ def mk(size: Int): Iterator[Int] = { + //val closures = (1 to size).toList.map(x => (() => Iterator(1))) + //closures.foldLeft(Iterator.empty: Iterator[Int])((res, f) => res ++ f()) + List.fill(size)(() => Iterator(1)).foldLeft(Iterator.empty: Iterator[Int])((res, f) => res ++ f()) + } + assertEquals(100, mk(100).sum) + assertEquals(1000, mk(1000).sum) + assertEquals(10000, mk(10000).sum) + assertEquals(100000, mk(100000).sum) + } + + @Test def from(): Unit = { + val it1 = Iterator.from(-1) + val it2 = Iterator.from(0, -1) + assertEquals(-1, it1.next()) + assertEquals(0, it2.next()) + } + @Test def range(): Unit = { + assertEquals(5, Iterator.range(0, 10, 2).size) + assertEquals(0, Iterator.range(0, 10, -2).size) + assertEquals(5, Iterator.range(10, 0, -2).size) + assertEquals(0, Iterator.range(10, 0, 2).size) + assertEquals(1, Iterator.range(0, 10, 11).size) + assertEquals(10, Iterator.range(0, 10, 1).size) + assertEquals(10, Iterator.range(10, 0, -1).size) + } + @Test def range3(): Unit = { + val r1 = Iterator.range(0, 10) + assertTrue(r1 contains 5) + assertTrue(r1 contains 6) + assertFalse(r1 contains 4) + val r2a = Iterator.range(0, 10, 2) + assertFalse(r2a contains 5) + val r2b = Iterator.range(0, 10, 2) + assertTrue(r2b contains 6) + val r3 = Iterator.range(0, 10, 11) + assertFalse(r3 contains 5) + assertTrue(r3.isEmpty) + } + @Test def take(): Unit = { + assertEquals(10, (Iterator from 0 take 10).size) + } + @Test def foreach(): Unit = { + val it1 = Iterator.from(0) take 20 + var n = 0 + it1 foreach { n += _ } + assertEquals(190, n) + } + // ticket #429 + @Test def fromArray(): Unit = { + val a = List(1, 2, 3, 4).toArray + var xs0 = a.iterator.toList; + var xs1 = a.slice(0, 1).iterator + var xs2 = a.slice(0, 2).iterator + var xs3 = a.slice(0, 3).iterator + var xs4 = a.slice(0, 4).iterator + assertEquals(14, xs0.size + xs1.size + xs2.size + xs3.size + xs4.size) + } + @Test def toSeq(): Unit = { + assertEquals("1x2x3x4x5", List(1, 2, 3, 4, 5).iterator.mkString("x")) + } + @Test def indexOf(): Unit = { + assertEquals(3, List(1, 2, 3, 4, 5).iterator.indexOf(4)) + assertEquals(-1, List(1, 2, 3, 4, 5).iterator.indexOf(16)) + } + @Test def indexWhere(): Unit = { + assertEquals(3, List(1, 2, 3, 4, 5).iterator.indexWhere { x: Int => x >= 4 }) + assertEquals(-1, List(1, 2, 3, 4, 5).iterator.indexWhere { x: Int => x >= 16 }) + } + // iterator-iterate-lazy.scala + // was java.lang.UnsupportedOperationException: tail of empty list + @Test def iterateIsSufficientlyLazy(): Unit = { + //Iterator.iterate((1 to 5).toList)(_.tail).takeWhile(_.nonEmpty).toList // suffices + Iterator.iterate((1 to 5).toList)(_.tail).takeWhile(_.nonEmpty).map(_.head).toList + } + // SI-3516 + @Test def toStreamIsSufficientlyLazy(): Unit = { + val results = collection.mutable.ListBuffer.empty[Int] + def mkIterator = (1 to 5).iterator map (x => { results += x ; x }) + def mkInfinite = Iterator continually { results += 1 ; 1 } + + // Stream is strict in its head so we should see 1 from each of them. + val s1 = mkIterator.toStream + val s2 = mkInfinite.toStream + // back and forth without slipping into nontermination. 
+ results += (Stream from 1).toIterator.drop(10).toStream.drop(10).toIterator.next() + assertSameElements(List(1,1,21), results) + } +} diff --git a/test/junit/scala/collection/PagedSeq.scala b/test/junit/scala/collection/PagedSeq.scala deleted file mode 100644 index 5f83cf6f31a1..000000000000 --- a/test/junit/scala/collection/PagedSeq.scala +++ /dev/null @@ -1,16 +0,0 @@ -package scala.collection.immutable - -import org.junit.runner.RunWith -import org.junit.runners.JUnit4 -import org.junit.Test -import org.junit.Assert._ - -/* Test for SI-6615 */ -@RunWith(classOf[JUnit4]) -class PagedSeqTest { - @Test - def rovingDoesNotNPE(): Unit = { - // should not NPE, and should equal the given Seq - assertEquals(Seq('a'), PagedSeq.fromStrings(List.fill(5000)("a")).slice(4096, 4097)) - } -} diff --git a/test/junit/scala/collection/SeqViewTest.scala b/test/junit/scala/collection/SeqViewTest.scala new file mode 100644 index 000000000000..24474fc4b9a2 --- /dev/null +++ b/test/junit/scala/collection/SeqViewTest.scala @@ -0,0 +1,16 @@ +package scala.collection + +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import org.junit.Assert._ +import org.junit.Test + +@RunWith(classOf[JUnit4]) +class SeqViewTest { + + @Test + def test_SI8691() { + // Really just testing to make sure ++: doesn't throw an exception + assert( Seq(1,2) ++: Seq(3,4).view == Seq(1,2,3,4) ) + } +} diff --git a/test/junit/scala/collection/SetMapConsistencyTest.scala b/test/junit/scala/collection/SetMapConsistencyTest.scala index eed6007eef24..0749e61c0909 100644 --- a/test/junit/scala/collection/SetMapConsistencyTest.scala +++ b/test/junit/scala/collection/SetMapConsistencyTest.scala @@ -514,4 +514,30 @@ class SetMapConsistencyTest { assert( hs.toList.toSet == hs ) assert( hs == hs.toList.toSet ) } + + @Test + def testSI8815() { + val lm = new scala.collection.mutable.LongMap[String] + lm += (Long.MinValue, "min") + lm += (-1, "neg-one") + lm += (0, "zero") + lm += (Long.MaxValue, "max") + var nit = 0 + lm.iterator.foreach(_ => nit += 1) + var nfe = 0 + lm.foreach(_ => nfe += 1) + assert(nit == 4) + assert(nfe == 4) + } + + @Test + def test_SI8727() { + import scala.tools.testing.AssertUtil._ + type NSEE = NoSuchElementException + val map = Map(0 -> "zero", 1 -> "one") + val m = map.filterKeys(i => if (map contains i) true else throw new NSEE) + assert{ (m contains 0) && (m get 0).nonEmpty } + assertThrows[NSEE]{ m contains 2 } + assertThrows[NSEE]{ m get 2 } + } } diff --git a/test/junit/scala/collection/TraversableOnceTest.scala b/test/junit/scala/collection/TraversableOnceTest.scala index 56d8312336bd..196174c1990c 100644 --- a/test/junit/scala/collection/TraversableOnceTest.scala +++ b/test/junit/scala/collection/TraversableOnceTest.scala @@ -43,8 +43,8 @@ class TraversableOnceTest { def testReturnTheFirstMatch() = { val d = List(1, 2, 3, 4, 5, 6, 7, 8) def f(x: Int) = x % 3; - assert(d.maxBy(f) == 2, "If multiple elements evaluted to the largest value, maxBy should return the first one.") - assert(d.minBy(f) == 3, "If multiple elements evaluted to the largest value, minBy should return the first one.") + assert(d.maxBy(f) == 2, "If multiple elements evaluated to the largest value, maxBy should return the first one.") + assert(d.minBy(f) == 3, "If multiple elements evaluated to the largest value, minBy should return the first one.") } // Make sure it evaluates f no more than list.length times. 
@@ -56,7 +56,7 @@ class TraversableOnceTest { evaluatedCountOfMaxBy += 1 x * 10 }) - assert(evaluatedCountOfMaxBy == list.length, s"maxBy: should evaluate f only ${list.length} times, but it evaluted $evaluatedCountOfMaxBy times.") + assert(evaluatedCountOfMaxBy == list.length, s"maxBy: should evaluate f only ${list.length} times, but it evaluated $evaluatedCountOfMaxBy times.") var evaluatedCountOfMinBy = 0 @@ -64,7 +64,7 @@ class TraversableOnceTest { evaluatedCountOfMinBy += 1 x * 10 }) - assert(evaluatedCountOfMinBy == list.length, s"minBy: should evaluate f only ${list.length} times, but it evaluted $evaluatedCountOfMinBy times.") + assert(evaluatedCountOfMinBy == list.length, s"minBy: should evaluate f only ${list.length} times, but it evaluated $evaluatedCountOfMinBy times.") } } diff --git a/test/junit/scala/collection/convert/MapWrapperTest.scala b/test/junit/scala/collection/convert/MapWrapperTest.scala index 060b6b5937e2..22eaf858ea4a 100644 --- a/test/junit/scala/collection/convert/MapWrapperTest.scala +++ b/test/junit/scala/collection/convert/MapWrapperTest.scala @@ -46,4 +46,14 @@ class MapWrapperTest { assertFalse(javaMap.containsKey(null)) // negative test, null key assertEquals(4, scalaMap.containsCounter) } + + // test for SI-8504 + @Test + def testHashCode() { + import scala.collection.JavaConverters._ + val javaMap = Map(1 -> null).asJava + + // Before the fix for SI-8504, this throws a NPE + javaMap.hashCode + } } diff --git a/test/junit/scala/collection/immutable/ListTest.scala b/test/junit/scala/collection/immutable/ListTest.scala new file mode 100644 index 000000000000..1006801029b0 --- /dev/null +++ b/test/junit/scala/collection/immutable/ListTest.scala @@ -0,0 +1,49 @@ +package scala.collection.immutable + +import org.junit.{Assert, Test} +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.ref.WeakReference + +@RunWith(classOf[JUnit4]) +class ListTest { + /** + * Test that empty iterator does not hold reference + * to complete List + */ + @Test + def testIteratorGC(): Unit = { + var num = 0 + var emptyIterators = Seq.empty[(Iterator[Int], WeakReference[List[Int]])] + + do { + val list = List.fill(10000)(num) + val ref = WeakReference(list) + + val i = list.iterator + + while (i.hasNext) i.next() + + emptyIterators = (i, ref) +: emptyIterators + + num+=1 + } while (emptyIterators.forall(_._2.get.isDefined) && num<1000) + + // check something is result to protect from JIT optimizations + for ((i, _) <- emptyIterators) { + Assert.assertTrue(i.isEmpty) + } + + // await gc up to ~5 seconds + var forceLoops = 50 + while (emptyIterators.forall(_._2.get.isDefined) && forceLoops>0) { + System.gc() + Thread.sleep(100) + forceLoops -= 1 + } + + // real assertion + Assert.assertTrue(emptyIterators.exists(_._2.get.isEmpty)) + } +} diff --git a/test/junit/scala/collection/immutable/PagedSeqTest.scala b/test/junit/scala/collection/immutable/PagedSeqTest.scala new file mode 100644 index 000000000000..2b576a365554 --- /dev/null +++ b/test/junit/scala/collection/immutable/PagedSeqTest.scala @@ -0,0 +1,28 @@ +package scala.collection.immutable + +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import org.junit.Test +import org.junit.Assert._ + +@RunWith(classOf[JUnit4]) +class PagedSeqTest { + // should not NPE, and should equal the given Seq + @Test + def test_SI6615(): Unit = { + assertEquals(Seq('a'), PagedSeq.fromStrings(List.fill(5000)("a")).slice(4096, 4097)) + } + + // Slices shouldn't read outside where they belong + @Test + 
def test_SI6519 { + var readAttempt = 0 + val sideEffectingIterator = new Iterator[Int] { + def hasNext = readAttempt < 65536 + def next = { readAttempt += 1; readAttempt } + } + val s = PagedSeq.fromIterator(sideEffectingIterator).slice(0,2).mkString + assertEquals(s, "12") + assert(readAttempt <= 4096) + } +} diff --git a/test/junit/scala/collection/QueueTest.scala b/test/junit/scala/collection/immutable/QueueTest.scala similarity index 100% rename from test/junit/scala/collection/QueueTest.scala rename to test/junit/scala/collection/immutable/QueueTest.scala diff --git a/test/junit/scala/collection/NumericRangeTest.scala b/test/junit/scala/collection/immutable/RangeConsistencyTest.scala similarity index 100% rename from test/junit/scala/collection/NumericRangeTest.scala rename to test/junit/scala/collection/immutable/RangeConsistencyTest.scala diff --git a/test/junit/scala/collection/immutable/StreamTest.scala b/test/junit/scala/collection/immutable/StreamTest.scala new file mode 100644 index 000000000000..6dc1c79a48d3 --- /dev/null +++ b/test/junit/scala/collection/immutable/StreamTest.scala @@ -0,0 +1,18 @@ +package scala.collection.immutable + +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import org.junit.Test +import org.junit.Assert._ + +@RunWith(classOf[JUnit4]) +class StreamTest { + + @Test + def t6727_and_t6440(): Unit = { + assertTrue(Stream.continually(()).filter(_ => true).take(2) == Seq((), ())) + assertTrue(Stream.continually(()).filterNot(_ => false).take(2) == Seq((), ())) + assertTrue(Stream(1,2,3,4,5).filter(_ < 4) == Seq(1,2,3)) + assertTrue(Stream(1,2,3,4,5).filterNot(_ > 4) == Seq(1,2,3,4)) + } +} diff --git a/test/junit/scala/collection/immutable/TreeMapTest.scala b/test/junit/scala/collection/immutable/TreeMapTest.scala new file mode 100644 index 000000000000..4c21b94b24f0 --- /dev/null +++ b/test/junit/scala/collection/immutable/TreeMapTest.scala @@ -0,0 +1,20 @@ +package scala.collection.immutable + +import org.junit.Assert._ +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +@RunWith(classOf[JUnit4]) +class TreeMapTest { + + @Test + def hasCorrectDropAndTakeMethods() { + val tree = TreeMap(1 -> "a", 2 -> "b", 3 -> "c") + + assertEquals(TreeMap.empty[Int, String], tree take Int.MinValue) + assertEquals(TreeMap.empty[Int, String], tree takeRight Int.MinValue) + assertEquals(tree, tree drop Int.MinValue) + assertEquals(tree, tree dropRight Int.MinValue) + } +} diff --git a/test/junit/scala/collection/immutable/TreeSetTest.scala b/test/junit/scala/collection/immutable/TreeSetTest.scala new file mode 100644 index 000000000000..8efe1bfeb8fb --- /dev/null +++ b/test/junit/scala/collection/immutable/TreeSetTest.scala @@ -0,0 +1,20 @@ +package scala.collection.immutable + +import org.junit.Assert._ +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +@RunWith(classOf[JUnit4]) +class TreeSetTest { + + @Test + def hasCorrectDropAndTakeMethods() { + val set = TreeSet(1, 2, 3) + + assertEquals(TreeSet.empty[Int], set take Int.MinValue) + assertEquals(TreeSet.empty[Int], set takeRight Int.MinValue) + assertEquals(set, set drop Int.MinValue) + assertEquals(set, set dropRight Int.MinValue) + } +} diff --git a/test/junit/scala/collection/mutable/ArrayBufferTest.scala b/test/junit/scala/collection/mutable/ArrayBufferTest.scala new file mode 100644 index 000000000000..8c8316402716 --- /dev/null +++ b/test/junit/scala/collection/mutable/ArrayBufferTest.scala @@ -0,0 +1,36 @@ 
+package scala.collection.mutable + +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import org.junit.{Assert, Test} + +import scala.tools.testing.AssertUtil + +/* Test for SI-9043 */ +@RunWith(classOf[JUnit4]) +class ArrayBufferTest { + @Test + def testInsertAll: Unit = { + val traver = ArrayBuffer(2, 4, 5, 7) + val testSeq = List(1, 3, 6, 9) + + def insertAt(x: Int) = { + val clone = traver.clone() + clone.insertAll(x, testSeq) + clone + } + + // Just insert some at position 0 + Assert.assertEquals(ArrayBuffer(1, 3, 6, 9, 2, 4, 5, 7), insertAt(0)) + + // Insert in the middle + Assert.assertEquals(ArrayBuffer(2, 4, 1, 3, 6, 9, 5, 7), insertAt(2)) + + // No strange last position weirdness + Assert.assertEquals(ArrayBuffer(2, 4, 5, 7, 1, 3, 6, 9), insertAt(traver.size)) + + // Overflow is caught + AssertUtil.assertThrows[IndexOutOfBoundsException] { insertAt(-1) } + AssertUtil.assertThrows[IndexOutOfBoundsException] { insertAt(traver.size + 10) } + } +} diff --git a/test/junit/scala/collection/ArraySortingTest.scala b/test/junit/scala/collection/mutable/ArraySortingTest.scala similarity index 100% rename from test/junit/scala/collection/ArraySortingTest.scala rename to test/junit/scala/collection/mutable/ArraySortingTest.scala diff --git a/test/junit/scala/collection/mutable/BitSetTest.scala b/test/junit/scala/collection/mutable/BitSetTest.scala new file mode 100644 index 000000000000..8d164b50d460 --- /dev/null +++ b/test/junit/scala/collection/mutable/BitSetTest.scala @@ -0,0 +1,22 @@ +package scala.collection.mutable + +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import org.junit.{Test, Ignore} + +@RunWith(classOf[JUnit4]) +class BitSetTest { + // Test for SI-8910 + @Test def capacityExpansionTest() { + val bitSet = BitSet.empty + val size = bitSet.toBitMask.length + bitSet ^= bitSet + assert(bitSet.toBitMask.length == size, "Capacity of bitset changed after ^=") + bitSet |= bitSet + assert(bitSet.toBitMask.length == size, "Capacity of bitset changed after |=") + bitSet &= bitSet + assert(bitSet.toBitMask.length == size, "Capacity of bitset changed after &=") + bitSet &~= bitSet + assert(bitSet.toBitMask.length == size, "Capacity of bitset changed after &~=") + } +} diff --git a/test/junit/scala/collection/PriorityQueueTest.scala b/test/junit/scala/collection/mutable/PriorityQueueTest.scala similarity index 100% rename from test/junit/scala/collection/PriorityQueueTest.scala rename to test/junit/scala/collection/mutable/PriorityQueueTest.scala diff --git a/test/junit/scala/collection/VectorTest.scala b/test/junit/scala/collection/mutable/VectorTest.scala similarity index 100% rename from test/junit/scala/collection/VectorTest.scala rename to test/junit/scala/collection/mutable/VectorTest.scala diff --git a/test/junit/scala/io/SourceTest.scala b/test/junit/scala/io/SourceTest.scala new file mode 100644 index 000000000000..3138a4589c43 --- /dev/null +++ b/test/junit/scala/io/SourceTest.scala @@ -0,0 +1,86 @@ + +package scala.io + +import org.junit.Test +import org.junit.Assert._ +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.tools.testing.AssertUtil._ + +import java.io.{ Console => _, _ } + +@RunWith(classOf[JUnit4]) +class SourceTest { + + private implicit val `our codec` = Codec.UTF8 + private val charSet = Codec.UTF8.charSet.name + + private def sampler = """ + |Big-endian and little-endian approaches aren't + |readily interchangeable in general, because the + |laws of arithmetic send signals leftward from + |the 
bits that are "least significant." + |""".stripMargin.trim + + private def in = new ByteArrayInputStream(sampler.getBytes) + + @Test def canIterateLines() = { + assertEquals(sampler.lines.size, (Source fromString sampler).getLines.size) + } + @Test def canCustomizeReporting() = { + class CapitalReporting(is: InputStream) extends BufferedSource(is) { + override def report(pos: Int, msg: String, out: PrintStream): Unit = { + out print f"$pos%04x: ${msg.toUpperCase}" + } + class OffsetPositioner extends Positioner(null) { + override def next(): Char = { + ch = iter.next() + pos = pos + 1 + ch + } + } + withPositioning(new OffsetPositioner) + } + val s = new CapitalReporting(in) + // skip to next line and report an error + do { + val c = s.next() + } while (s.ch != '\n') + s.next() + val out = new ByteArrayOutputStream + val ps = new PrintStream(out, true, charSet) + s.reportError(s.pos, "That doesn't sound right.", ps) + assertEquals("0030: THAT DOESN'T SOUND RIGHT.", out.toString(charSet)) + } + @Test def canAltCustomizeReporting() = { + class CapitalReporting(is: InputStream)(implicit codec: Codec) extends Source { + override val iter = { + val r = new InputStreamReader(is, codec.decoder) + Iterator continually (codec wrap r.read()) takeWhile (_ != -1) map (_.toChar) + } + override def report(pos: Int, msg: String, out: PrintStream): Unit = { + out print f"$pos%04x: ${msg.toUpperCase}" + } + private[this] var _pos: Int = _ + override def pos = _pos + private[this] var _ch: Char = _ + override def ch = _ch + override def next = { + _ch = iter.next() + _pos += 1 + _ch + } + } + val s = new CapitalReporting(in) + // skip to next line and report an error + do { + val c = s.next() + } while (s.ch != '\n') + s.next() + val out = new ByteArrayOutputStream + val ps = new PrintStream(out, true, charSet) + s.reportError(s.pos, "That doesn't sound right.", ps) + assertEquals("0030: THAT DOESN'T SOUND RIGHT.", out.toString(charSet)) + } +} diff --git a/test/junit/scala/issues/BytecodeTests.scala b/test/junit/scala/issues/BytecodeTests.scala new file mode 100644 index 000000000000..d4ed063a0319 --- /dev/null +++ b/test/junit/scala/issues/BytecodeTests.scala @@ -0,0 +1,80 @@ +package scala.issues + +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import org.junit.Test +import scala.tools.asm.Opcodes +import scala.tools.nsc.backend.jvm.AsmUtils +import scala.tools.nsc.backend.jvm.CodeGenTools._ +import org.junit.Assert._ +import scala.collection.JavaConverters._ +import scala.tools.partest.ASMConverters._ + +@RunWith(classOf[JUnit4]) +class BytecodeTests { + val compiler = newCompiler() + + @Test + def t8731(): Unit = { + val code = + """class C { + | def f(x: Int) = (x: @annotation.switch) match { + | case 1 => 0 + | case 2 => 1 + | case 3 => 2 + | } + | final val K = 10 + | def g(x: Int) = (x: @annotation.switch) match { + | case K => 0 + | case 1 => 10 + | case 2 => 20 + | } + |} + """.stripMargin + + val List(c) = compileClasses(compiler)(code) + + assertTrue(getSingleMethod(c, "f").instructions.count(_.isInstanceOf[TableSwitch]) == 1) + assertTrue(getSingleMethod(c, "g").instructions.count(_.isInstanceOf[LookupSwitch]) == 1) + } + + @Test + def t8926(): Unit = { + import scala.reflect.internal.util.BatchSourceFile + + // this test cannot be implemented using partest because of its mixed-mode compilation strategy: + // partest first compiles all files with scalac, then the java files, and then again the scala + // using the output classpath. this shadows the bug SI-8926. 
+ + val annotA = + """import java.lang.annotation.Retention; + |import java.lang.annotation.RetentionPolicy; + |@Retention(RetentionPolicy.RUNTIME) + |public @interface AnnotA { } + """.stripMargin + val annotB = "public @interface AnnotB { }" + + val scalaSrc = + """@AnnotA class A + |@AnnotB class B + """.stripMargin + + val compiler = newCompiler() + val run = new compiler.Run() + run.compileSources(List(new BatchSourceFile("AnnotA.java", annotA), new BatchSourceFile("AnnotB.java", annotB), new BatchSourceFile("Test.scala", scalaSrc))) + val outDir = compiler.settings.outputDirs.getSingleOutput.get + val outfiles = (for (f <- outDir.iterator if !f.isDirectory) yield (f.name, f.toByteArray)).toList + + def check(classfile: String, annotName: String) = { + val f = (outfiles collect { case (`classfile`, bytes) => AsmUtils.readClass(bytes) }).head + val descs = f.visibleAnnotations.asScala.map(_.desc).toList + assertTrue(descs.toString, descs exists (_ contains annotName)) + } + + check("A.class", "AnnotA") + + // known issue SI-8928: the visibility of AnnotB should be CLASS, but annotation classes without + // a @Retention annotation are currently emitted as RUNTIME. + check("B.class", "AnnotB") + } +} diff --git a/test/junit/scala/math/BigDecimalTest.scala b/test/junit/scala/math/BigDecimalTest.scala index d1ba96fcc86e..c7a63da89079 100644 --- a/test/junit/scala/math/BigDecimalTest.scala +++ b/test/junit/scala/math/BigDecimalTest.scala @@ -222,4 +222,10 @@ class BigDecimalTest { for (a <- different; b <- different if (a ne b)) assert(a != b, "BigDecimal representations of Double mistakenly conflated") } + + // Make sure hash code agrees with decimal representation of Double + @Test + def test_SI8970() { + assert((0.1).## == BigDecimal(0.1).##) + } } diff --git a/test/junit/scala/math/NumericTest.scala b/test/junit/scala/math/NumericTest.scala index 4f0657f471ee..9bf7d4f1e45c 100644 --- a/test/junit/scala/math/NumericTest.scala +++ b/test/junit/scala/math/NumericTest.scala @@ -1,4 +1,4 @@ - +package scala.math import org.junit.Assert._ import org.junit.Test diff --git a/test/junit/scala/math/OrderingTest.scala b/test/junit/scala/math/OrderingTest.scala new file mode 100644 index 000000000000..218622b8b4d9 --- /dev/null +++ b/test/junit/scala/math/OrderingTest.scala @@ -0,0 +1,61 @@ +package scala.math + +import org.junit.Assert._ +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +@RunWith(classOf[JUnit4]) +class OrderingTest { + + /* Test for SI-9077 */ + @Test + def testReverseOrdering { + def check[T: Ordering](t1: T, t2: T): Unit = { + val O = Ordering[T] + val R = O.reverse + assertEquals(O.min(t1, t2), R.max(t1, t2)) + assertEquals(O.max(t1, t2), R.min(t1, t2)) + + assertEquals(O.lteq(t1, t2), R.lteq(t2, t1)) + assertEquals(O.lt(t1, t2), R.lt(t2, t1)) + assertEquals(O.gteq(t1, t2), R.gteq(t2, t1)) + assertEquals(O.gt(t1, t2), R.gt(t2, t1)) + assertEquals(O.compare(t1, t2), R.compare(t2, t1)) + + assertEquals(O.equiv(t1, t2), R.equiv(t1, t2)) + + assertEquals(O.on((x: T) => x).min(t1, t2), R.on((x: T) => x).max(t1, t2)) + + assertEquals(O.tryCompare(t1, t2), R.tryCompare(t2, t1)) + + assertEquals(O.mkOrderingOps(t1).<(t2), R.mkOrderingOps(t2).<(t1)) + assertEquals(O.mkOrderingOps(t1).<=(t2), R.mkOrderingOps(t2).<=(t1)) + assertEquals(O.mkOrderingOps(t1).>(t2), R.mkOrderingOps(t2).>(t1)) + assertEquals(O.mkOrderingOps(t1).>=(t2), R.mkOrderingOps(t2).>=(t1)) + + assertEquals(O.mkOrderingOps(t1).min(t2), R.mkOrderingOps(t1).max(t2)) + 
assertEquals(O.mkOrderingOps(t1).max(t2), R.mkOrderingOps(t1).min(t2)) + } + def checkAll[T: Ordering](ts: T*): Unit = { + for (t1 <- ts; t2 <- ts) check(t1, t2) + } + checkAll[Unit](()) + checkAll[Boolean](true, false) + checkAll[Byte](Byte.MinValue, -1.toByte, 0.toByte, 1.toByte, Byte.MaxValue) + checkAll[Char](Char.MinValue, -1.toChar, 0.toChar, 1.toChar, Char.MaxValue) + checkAll[Short](Short.MinValue, -1, 0, 1, Short.MaxValue) + checkAll[Int](Int.MinValue, -1, 0, 1, Int.MaxValue) + checkAll[Double](Double.MinValue, -1, -0, 0, 1, Double.MaxValue) + checkAll[Float](Float.MinValue, -1, -0, 0, 1, Float.MaxValue) + + checkAll[BigInt](Int.MinValue, -1, 0, 1, Int.MaxValue) + checkAll[BigDecimal](Int.MinValue, -1, -0, 1, Int.MaxValue) + checkAll[String]("", "a", "b", "bb") + checkAll[String]("", "a", "b", "bb") + checkAll[Option[Int]](None, Some(1), Some(2)) + checkAll[Iterable[Int]](Nil, List(1), List(1, 2)) + checkAll[(Int, Int)]((1, 2), (1, 3), (4, 5)) + } +} + diff --git a/test/junit/scala/reflect/QTest.scala b/test/junit/scala/reflect/QTest.scala new file mode 100644 index 000000000000..24c35dc4010f --- /dev/null +++ b/test/junit/scala/reflect/QTest.scala @@ -0,0 +1,23 @@ + +package scala.reflect + +import org.junit.Test +import org.junit.Assert._ +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.tools.testing.AssertUtil._ + +@RunWith(classOf[JUnit4]) +class QTest { + + import reflect.runtime._ + import universe._ + @Test def qConstantsNotHomogenized() = { + //Apply(Select(Literal(Constant(1.0)), TermName("$plus")), List(Literal(Constant(1.0)))) + val t = q"${1} + ${1.0}" + val Apply(Select(Literal(Constant(i)), TermName("$plus")), List(Literal(Constant(j)))) = t + assertEquals(1, i) + assertEquals(1.0, j) + } +} diff --git a/test/junit/scala/reflect/internal/MirrorsTest.scala b/test/junit/scala/reflect/internal/MirrorsTest.scala index 8f2a92f27af4..9108af139fac 100644 --- a/test/junit/scala/reflect/internal/MirrorsTest.scala +++ b/test/junit/scala/reflect/internal/MirrorsTest.scala @@ -1,22 +1,18 @@ -// looks like tests are compiled by the old version of compiler -// therefore certain scala-reflect tests give me AMEs after the SI-8063 overhaul -// TODO: fix this in build.xml +package scala.reflect.internal -// package scala.reflect.internal +import org.junit.Assert._ +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 -// import org.junit.Assert._ -// import org.junit.Test -// import org.junit.runner.RunWith -// import org.junit.runners.JUnit4 - -// @RunWith(classOf[JUnit4]) -// class MirrorsTest { -// @Test def rootCompanionsAreConnected(): Unit = { -// val cm = scala.reflect.runtime.currentMirror -// import cm._ -// assertEquals("RootPackage.moduleClass == RootClass", RootClass, RootPackage.moduleClass) -// assertEquals("RootClass.module == RootPackage", RootPackage, RootClass.module) -// assertEquals("EmptyPackage.moduleClass == EmptyPackageClass", EmptyPackageClass, EmptyPackage.moduleClass) -// assertEquals("EmptyPackageClass.module == EmptyPackage", EmptyPackage, EmptyPackageClass.module) -// } -// } \ No newline at end of file +@RunWith(classOf[JUnit4]) +class MirrorsTest { + @Test def rootCompanionsAreConnected(): Unit = { + val cm = scala.reflect.runtime.currentMirror + import cm._ + assertEquals("RootPackage.moduleClass == RootClass", RootClass, RootPackage.moduleClass) + assertEquals("RootClass.module == RootPackage", RootPackage, RootClass.module) + assertEquals("EmptyPackage.moduleClass == 
EmptyPackageClass", EmptyPackageClass, EmptyPackage.moduleClass) + assertEquals("EmptyPackageClass.module == EmptyPackage", EmptyPackage, EmptyPackageClass.module) + } +} \ No newline at end of file diff --git a/test/junit/scala/reflect/internal/NamesTest.scala b/test/junit/scala/reflect/internal/NamesTest.scala new file mode 100644 index 000000000000..549c10abedbc --- /dev/null +++ b/test/junit/scala/reflect/internal/NamesTest.scala @@ -0,0 +1,95 @@ +package scala.reflect.internal + +import scala.tools.testing.AssertUtil._ +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import org.junit.Test +import org.junit.Assert._ +import scala.tools.nsc.symtab.SymbolTableForUnitTesting + +@RunWith(classOf[JUnit4]) +class NamesTest { + object symbolTable extends SymbolTableForUnitTesting + import symbolTable._ + + val h1 = newTermName("hai") + val h2 = newTermName("hai") + val f = newTermName("fisch") + + val h1y = h1.toTypeName + val h2y = newTypeName("hai") + val fy = newTypeName("fisch") + + val uy = newTypeName("uhu") + val u = uy.toTermName // calling toTermName after constructing a typeName. This tests the fact + // that creating a typeName always also first creates a termName. There is + // an assertion for that in toTermName. + + @Test + def termNamesAreHashConsed() { + assertTrue(h1 eq h2) + assertEquals(h1, h2) + assertTrue(h1 ne f) + assertTrue(h1 != f) + } + + @Test + def termNamesNotEqualsTypeNames() { + assert(h1 ne h1y) + assert(h1 != h1y) + assert(h2 ne h2y) + assert(h2 != h2y) + } + + @Test + def termNamesTypeNamesSameRange() { + assert(h1.start == h1y.start && h1.length == h1y.length) + assert(h2.start == h2y.start && h2.length == h2y.length) + assert(u.start == uy.start && u.length == uy.length) + } + + @Test + def testLookupTypeName() { + assert(lookupTypeName("hai".toCharArray) eq h1y) + assert(lookupTypeName("fisch".toCharArray) eq fy) + assert(lookupTypeName("uhu".toCharArray) eq uy) + + assertThrows[AssertionError](lookupTypeName("dog".toCharArray), _ contains "not yet created") + val d = newTermName("dog") + assertThrows[AssertionError](lookupTypeName("dog".toCharArray), _ contains "not yet created") + val dy = d.toTypeName + assert(lookupTypeName("dog".toCharArray) eq dy) + } + + @Test + def emptyName() { + val z = newTermName("") + val zy = z.toTypeName + assertEquals(z.toString, "") + assertEquals(zy.toString, "") + assert(z eq newTermName("")) + assert(zy eq newTypeName("")) + } + + @Test + def subNameTest() { + val i = f.subName(1, f.length) + assert(i.start == (f.start + 1) && i.length == (f.length - 1)) + assert(f.subName(0, f.length) eq f) + + val iy = fy.subName(1, fy.length) + assert(iy.start == (fy.start + 1) && iy.length == (fy.length - 1)) + assert(fy.subName(0, fy.length) eq fy) + + assert(f.subName(1,1) eq newTermName("")) + assert(f.subName(1, 0) eq newTermName("")) + + assertThrows[IllegalArgumentException](f.subName(0 - f.start - 1, 1)) + } + + @Test + def stringEqualsTest() { + assert(h1 string_== h2) + assert(h1 string_== h1y) + } +} diff --git a/test/junit/scala/reflect/internal/PrintersTest.scala b/test/junit/scala/reflect/internal/PrintersTest.scala index 62cb401aa92f..9bfe6eecb8ed 100644 --- a/test/junit/scala/reflect/internal/PrintersTest.scala +++ b/test/junit/scala/reflect/internal/PrintersTest.scala @@ -1,824 +1,1198 @@ -// looks like tests are compiled by the old version of compiler -// therefore certain scala-reflect tests give me AMEs after the SI-8063 overhaul -// TODO: fix this in build.xml +package scala.reflect.internal -// 
package scala.reflect.internal +import org.junit.Test +import org.junit.Assert._ +import scala.tools.reflect._ +import scala.reflect.runtime.universe._ +import scala.reflect.runtime.{currentMirror=>cm} +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 -// import org.junit.Test -// import org.junit.Assert._ -// import scala.tools.reflect._ -// import scala.reflect.runtime.universe._ -// import scala.reflect.runtime.{currentMirror=>cm} -// import org.junit.runner.RunWith -// import org.junit.runners.JUnit4 +@RunWith(classOf[JUnit4]) +class PrintersTest extends BasePrintTests + with ClassPrintTests + with TraitPrintTests + with ValAndDefPrintTests + with QuasiTreesPrintTests + with PackagePrintTests -// @RunWith(classOf[JUnit4]) -// class PrintersTest extends BasePrintTests -// with ClassPrintTests -// with TraitPrintTests -// with ValAndDefPrintTests -// with QuasiTreesPrintTests -// with PackagePrintTests +object PrinterHelper { + val toolbox = cm.mkToolBox() -// object PrinterHelper { -// val toolbox = cm.mkToolBox() -// def assertPrintedCode(code: String, tree: Tree = EmptyTree) = { -// def processEOL(resultCode: String) = { -// import scala.reflect.internal.Chars._ -// resultCode.replaceAll(s"$CR$LF", s"$LF").replace(CR, LF) -// } + import scala.reflect.internal.Chars._ + private def normalizeEOL(resultCode: String) = + resultCode.lines mkString s"$LF" -// val toolboxTree = -// try{ -// toolbox.parse(code) -// } catch { -// case e:scala.tools.reflect.ToolBoxError => throw new Exception(e.getMessage + ": " + code) -// } -// if (tree ne EmptyTree) assertEquals("using quasiquote or given tree"+"\n", code.trim, processEOL(showCode(tree))) -// else assertEquals("using toolbox parser", code.trim, processEOL(showCode(toolboxTree))) -// } + def assertResultCode(code: String)(parsedCode: String = "", typedCode: String = "", wrap: Boolean = false, printRoot: Boolean = false) = { + def toolboxTree(tree: => Tree) = try { + tree + } catch { + case e:scala.tools.reflect.ToolBoxError => throw new Exception(e.getMessage + ": " + code, e) + } -// implicit class StrContextStripMarginOps(val stringContext: StringContext) extends util.StripMarginInterpolator -// } + def wrapCode(source: String) = { + val context = sm""" + |trait PrintersContext { + | class baz extends scala.annotation.Annotation with scala.annotation.StaticAnnotation; + | class foo1[A, B] extends scala.annotation.Annotation with scala.annotation.StaticAnnotation; + | class foo2[A, B](a: scala.Int)(b: scala.Int) extends scala.annotation.Annotation with scala.annotation.StaticAnnotation; + | class foo3[Af, Bf](a: scala.Int)(b: scala.Float, c: PrintersContext.this.foo1[Af, Bf]) extends scala.annotation.Annotation with scala.annotation.StaticAnnotation; + | trait A1; + | trait B1; + |${source.trim.lines map {" " + _} mkString s"$LF"} + |}""" -// import PrinterHelper._ + if (wrap) context.trim() else source.trim + } -// trait BasePrintTests { -// @Test def testIdent = assertPrintedCode("*", Ident("*")) + val parsedTree = toolboxTree(toolbox.parse(wrapCode(code))) + if (!parsedCode.isEmpty()) + assertEquals("using toolbox parser" + LF, wrapCode(parsedCode), normalizeEOL(showCode(parsedTree))) + if (!typedCode.isEmpty()) { + val typedTree = toolboxTree(toolbox.typecheck(parsedTree)) + assertEquals("using toolbox typechecker" + LF, wrapCode(typedCode), normalizeEOL(showCode(typedTree, printRootPkg = printRoot))) + } + } -// @Test def testConstant1 = assertPrintedCode("\"*\"", Literal(Constant("*"))) - -// @Test def testConstant2 = 
assertPrintedCode("42", Literal(Constant(42))) - -// @Test def testConstantFloat = assertPrintedCode("42.0F", Literal(Constant(42f))) - -// @Test def testConstantDouble = assertPrintedCode("42.0", Literal(Constant(42d))) - -// @Test def testConstantLong = assertPrintedCode("42L", Literal(Constant(42l))) - -// @Test def testOpExpr = assertPrintedCode("(5).+(4)") - -// @Test def testName1 = assertPrintedCode("class test") - -// @Test def testName2 = assertPrintedCode("class *") - -// @Test def testName4 = assertPrintedCode("class `a*`") - -// @Test def testName5 = assertPrintedCode("val :::: = 1") - -// @Test def testName6 = assertPrintedCode("val `::::t` = 1") - -// @Test def testName7 = assertPrintedCode("""class \/""") - -// @Test def testName8 = assertPrintedCode("""class \\\\""") - -// @Test def testName9 = assertPrintedCode("""class test_\/""") - -// @Test def testName10 = assertPrintedCode("""class `*_*`""") - -// @Test def testName11 = assertPrintedCode("""class `a_*`""") - -// @Test def testName12 = assertPrintedCode("""class `*_a`""") - -// @Test def testName13 = assertPrintedCode("""class a_a""") - -// @Test def testName14 = assertPrintedCode("val x$11 = 5") - -// @Test def testName15 = assertPrintedCode("class `[]`") - -// @Test def testName16 = assertPrintedCode("class `()`") - -// @Test def testName17 = assertPrintedCode("class `{}`") - -// @Test def testName18 = assertPrintedCode("class <>") - -// @Test def testName19 = assertPrintedCode("""class `class`""") - -// @Test def testName20 = assertPrintedCode("""class `test name`""") - -// @Test def testIfExpr1 = assertPrintedCode(sm""" -// |if (a) -// | ((expr1): Int) -// |else -// | ((expr2): Int)""") - -// @Test def testIfExpr2 = assertPrintedCode(sm""" -// |(if (a) -// | { -// | expr1; -// | () -// | } -// |else -// | { -// | expr2; -// | () -// | }).toString""") - -// @Test def testIfExpr3 = assertPrintedCode(sm""" -// |(if (a) -// | { -// | expr1; -// | () -// | } -// |else -// | { -// | expr2; -// | () -// | }).method1().method2()""") - -// //val x = true && true && false.! 
-// @Test def testBooleanExpr1 = assertPrintedCode("val x = true.&&(true).&&(false.!)") - -// //val x = true && !(true && false) -// @Test def testBooleanExpr2 = assertPrintedCode("val x = true.&&(true.&&(false).`unary_!`)") - -// @Test def testNewExpr1 = assertPrintedCode("new foo()") - -// //new foo { test } -// @Test def testNewExpr2 = assertPrintedCode(sm""" -// |{ -// | final class $$anon extends foo { -// | test -// | }; -// | new $$anon() -// |}""") - -// @Test def testNewExpr3 = assertPrintedCode("new foo[t]()") - -// @Test def testNewExpr4 = assertPrintedCode("new foo(x)") - -// @Test def testNewExpr5 = assertPrintedCode("new foo[t](x)") - -// //new foo[t](x) { () } -// @Test def testNewExpr6 = assertPrintedCode(sm""" -// |{ -// | final class $$anon extends foo[t](x) { -// | () -// | }; -// | new $$anon() -// |}""") - -// //new foo with bar -// @Test def testNewExpr7 = assertPrintedCode(sm""" -// |{ -// | final class $$anon extends foo with bar; -// | new $$anon() -// |}""") - -// //new { anonymous } -// @Test def testNewExpr8 = assertPrintedCode(sm""" -// |{ -// | final class $$anon { -// | anonymous -// | }; -// | new $$anon() -// |}""") - -// //new { val early = 1 } with Parent[Int] { body } -// @Test def testNewExpr9 = assertPrintedCode(sm""" -// |{ -// | final class $$anon extends { -// | val early = 1 -// | } with Parent[Int] { -// | body -// | }; -// | new $$anon() -// |}""") - -// //new Foo { self => } -// @Test def testNewExpr10 = assertPrintedCode(sm""" -// |{ -// | final class $$anon extends Foo { self => -// | -// | }; -// | new $$anon() -// |}""") - -// @Test def testReturn = assertPrintedCode("def test: Int = return 42") - -// @Test def testFunc1 = assertPrintedCode("List(1, 2, 3).map(((i: Int) => i.-(1)))") - -// //val sum: Seq[Int] => Int = _ reduceLeft (_+_) -// @Test def testFunc2 = assertPrintedCode("val sum: _root_.scala.Function1[Seq[Int], Int] = ((x$1) => x$1.reduceLeft(((x$2, x$3) => x$2.+(x$3))))") - -// //List(1, 2, 3) map (_ - 1) -// @Test def testFunc3 = assertPrintedCode("List(1, 2, 3).map(((x$1) => x$1.-(1)))") - -// @Test def testImport1 = assertPrintedCode("import scala.collection.mutable") - -// @Test def testImport2 = assertPrintedCode("import java.lang.{String=>Str}") - -// @Test def testImport3 = assertPrintedCode("import java.lang.{String=>Str, Object=>_, _}") - -// @Test def testImport4 = assertPrintedCode("import scala.collection._") -// } - -// trait ClassPrintTests { -// @Test def testClass = assertPrintedCode("class *") - -// @Test def testClassWithBody = assertPrintedCode(sm""" -// |class X { -// | def y = "test" -// |}""") - -// @Test def testClassWithPublicParams = assertPrintedCode("class X(val x: Int, val s: String)") - -// @Test def testClassWithParams1 = assertPrintedCode("class X(x: Int, s: String)") - -// @Test def testClassWithParams2 = assertPrintedCode("class X(@test x: Int, s: String)") - -// @Test def testClassWithParams3 = assertPrintedCode("class X(implicit x: Int, s: String)") - -// @Test def testClassWithParams4 = assertPrintedCode("class X(implicit @test x: Int, s: String)") - -// @Test def testClassWithParams5 = assertPrintedCode("class X(override private[this] val x: Int, s: String) extends Y") - -// @Test def testClassWithParams6 = assertPrintedCode("class X(@test1 override private[this] val x: Int, @test2(param1 = 7) s: String) extends Y") - -// @Test def testClassWithParams7 = assertPrintedCode("class X protected (val x: Int, val s: String)") - -// @Test def testClassWithParams8 = assertPrintedCode("class X(var x: 
Int)") - -// @Test def testClassWithParams9 = assertPrintedCode("class X(var x: Int*)") - -// @Test def testClassWithByNameParam = assertPrintedCode("class X(x: => Int)") - -// @Test def testClassWithDefault = assertPrintedCode("class X(var x: Int = 5)") - -// @Test def testClassWithParams10 = assertPrintedCode("class X(protected[zzz] var x: Int)") - -// @Test def testClassWithParams11 = assertPrintedCode("class X(override var x: Int) extends F(x) with E(x)") - -// @Test def testClassWithParams12 = assertPrintedCode("class X(val y: Int)()(var z: Double)") - -// @Test def testClassWithImplicitParams = assertPrintedCode("class X(var i: Int)(implicit val d: Double, var f: Float)") - -// @Test def testClassWithEarly = assertPrintedCode(sm""" -// |class X(var i: Int) extends { -// | val a: String = i; -// | type B -// |} with Y""") - -// @Test def testClassWithThrow1 = assertPrintedCode(sm""" -// |class Throw1 { -// | throw new Exception("exception!") -// |}""") - -// @Test def testClassWithThrow2 = assertPrintedCode(sm""" -// |class Throw2 { -// | var msg = " "; -// | val e = new Exception(msg); -// | throw e -// |}""") - -// /* -// class Test { -// val (a, b) = (1, 2) -// } -// */ -// @Test def testClassWithAssignmentWithTuple1 = assertPrintedCode(sm""" -// |class Test { -// | private[this] val x$$1 = (scala.Tuple2(1, 2): @scala.unchecked) match { -// | case scala.Tuple2((a @ _), (b @ _)) => scala.Tuple2(a, b) -// | }; -// | val a = x$$1._1; -// | val b = x$$1._2 -// |}""") - -// /* -// class Test { -// val (a, b) = (1).->(2) -// } -// */ -// @Test def testClassWithAssignmentWithTuple2 = assertPrintedCode(sm""" -// |class Test { -// | private[this] val x$$1 = ((1).->(2): @scala.unchecked) match { -// | case scala.Tuple2((a @ _), (b @ _)) => scala.Tuple2(a, b) -// | }; -// | val a = x$$1._1; -// | val b = x$$1._2 -// |}""") - -// /* -// class Test { -// val List(one, three, five) = List(1,3,5) -// } -// */ -// @Test def testClassWithPatternMatchInAssignment = assertPrintedCode(sm""" -// |class Test { -// | private[this] val x$$1 = (List(1, 3, 5): @scala.unchecked) match { -// | case List((one @ _), (three @ _), (five @ _)) => scala.Tuple3(one, three, five) -// | }; -// | val one = x$$1._1; -// | val three = x$$1._2; -// | val five = x$$1._3 -// |}""") - -// //class A(l: List[_]) -// @Test def testClassWithExistentialParameter1 = assertPrintedCode(sm""" -// |class Test(l: (List[_$$1] forSome { -// | type _$$1 -// |}))""") - -// @Test def testClassWithExistentialParameter2 = assertPrintedCode(sm""" -// |class B(l: (List[T] forSome { -// | type T -// |}))""") - -// @Test def testClassWithCompoundTypeTree = assertPrintedCode(sm""" -// |{ -// | trait A; -// | trait B; -// | abstract class C(val a: A with B) { -// | def method(x: A with B with C { -// | val x: Float -// | }): A with B -// | }; -// | () -// |}""") - -// @Test def testClassWithSelectFromTypeTree = assertPrintedCode(sm""" -// |{ -// | trait A { -// | type T -// | }; -// | class B(t: (A)#T); -// | () -// |}""") - -// @Test def testImplicitClass = assertPrintedCode("implicit class X(protected[zzz] var x: Int)") - -// @Test def testAbstractClass = assertPrintedCode("abstract class X(protected[zzz] var x: Int)") - -// @Test def testCaseClassWithParams1 = assertPrintedCode("case class X(x: Int, s: String)") - -// @Test def testCaseClassWithParams2 = assertPrintedCode("case class X(protected val x: Int, s: String)") - -// @Test def testCaseClassWithParams3 = assertPrintedCode("case class X(implicit x: Int, s: String)") - -// @Test def 
testCaseClassWithParams4 = assertPrintedCode("case class X(override val x: Int, s: String) extends Y") - -// @Test def testCaseClassWithBody = assertPrintedCode(sm""" -// |case class X() { -// | def y = "test" -// |}""") - -// @Test def testLocalClass = assertPrintedCode(sm""" -// |def test = { -// | class X(var a: Int) { -// | def y = "test" -// | }; -// | new X(5) -// |}""") - -// @Test def testLocalCaseClass = assertPrintedCode(sm""" -// |def test = { -// | case class X(var a: Int) { -// | def y = "test" -// | }; -// | new X(5) -// |}""") - -// @Test def testSuperInClass = assertPrintedCode(sm""" -// |{ -// | trait Root { -// | def r = "Root" -// | }; -// | class X extends Root { -// | def superX = super.r -// | }; -// | class Y extends X with Root { -// | class Inner { -// | val myY = Y.super.r -// | }; -// | def fromX = super[X].r; -// | def fromRoot = super[Root].r -// | }; -// | () -// |}""") - -// @Test def testThisInClass = assertPrintedCode(sm""" -// |class Outer { -// | class Inner { -// | val outer = Root.this -// | }; -// | val self = this -// |}""") - -// @Test def testCaseClassWithParamsAndBody = assertPrintedCode(sm""" -// |case class X(x: Int, s: String) { -// | def y = "test" -// |}""") - -// @Test def testObject = assertPrintedCode("object *") - -// @Test def testObjectWithBody = assertPrintedCode(sm""" -// |object X { -// | def y = "test" -// |}""") - -// @Test def testObjectWithEarly1 = assertPrintedCode(sm""" -// |object X extends { -// | val early: T = v -// |} with Bar""") - -// @Test def testObjectWithEarly2 = assertPrintedCode(sm""" -// |object X extends { -// | val early: T = v; -// | type EarlyT = String -// |} with Bar""") - -// @Test def testObjectWithSelf = assertPrintedCode(sm""" -// |object Foo extends Foo { self => -// | body -// |}""") - -// @Test def testObjectInh = assertPrintedCode("private[Y] object X extends Bar with Baz") - -// @Test def testObjectWithPatternMatch1 = assertPrintedCode(sm""" -// |object PM1 { -// | List(1, 2) match { -// | case (i @ _) => i -// | } -// |}""") - -// @Test def testObjectWithPatternMatch2 = assertPrintedCode(sm""" -// |object PM2 { -// | List(1, 2).map({ -// | case (i @ _) if i.>(5) => i -// | }) -// |}""") - -// //case i: Int => i -// @Test def testObjectWithPatternMatch3 = assertPrintedCode(sm""" -// |object PM3 { -// | List(1, 2).map({ -// | case (i @ ((_): Int)) => i -// | }) -// |}""") - -// //case a @ (i: Int) => i -// @Test def testObjectWithPatternMatch4 = assertPrintedCode(sm""" -// |object PM4 { -// | List(1, 2).map({ -// | case (a @ (i @ ((_): Int))) => i -// | }) -// |}""") - -// @Test def testObjectWithPatternMatch5 = assertPrintedCode(sm""" -// |object PM5 { -// | List(1, 2).map({ -// | case _ => 42 -// | }) -// |}""") - -// @Test def testObjectWithPatternMatch6 = assertPrintedCode(sm""" -// |object PM6 { -// | List(1, 2) match { -// | case ::((x @ _), (xs @ _)) => x -// | } -// |}""") - -// @Test def testObjectWithPatternMatch7 = assertPrintedCode(sm""" -// |object PM7 { -// | List(1, 2).map({ -// | case (0| 1) => true -// | case _ => false -// | }) -// |}""") - -// @Test def testObjectWithPatternMatch8 = assertPrintedCode(sm""" -// |object PM8 { -// | "abcde".toList match { -// | case Seq((car @ _), _*) => car -// | } -// |}""") - -// @Test def testObjectWithPatternMatch9 = assertPrintedCode(sm""" -// |{ -// | object Extractor { -// | def unapply(i: Int) = Some(i) -// | }; -// | object PM9 { -// | 42 match { -// | case (a @ Extractor((i @ _))) => i -// | } -// | }; -// | () -// |}""") - -// @Test def 
testObjectWithPartialFunc = assertPrintedCode(sm""" -// |object Test { -// | def partFuncTest[A, B](e: Either[A, B]): scala.Unit = e match { -// | case Right(_) => () -// | } -// |}""") - -// @Test def testObjectWithTry = assertPrintedCode(sm""" -// |object Test { -// | import java.io; -// | var file: PrintStream = null; -// | try { -// | val out = new FileOutputStream("myfile.txt"); -// | file = new PrintStream(out) -// | } catch { -// | case (ioe @ ((_): IOException)) => println("ioe") -// | case (e @ ((_): Exception)) => println("e") -// | } finally println("finally") -// |}""") -// } - -// trait TraitPrintTests { -// @Test def testTrait = assertPrintedCode("trait *") - -// @Test def testTraitWithBody = assertPrintedCode(sm""" -// |trait X { -// | def y = "test" -// |}""") - -// @Test def testTraitWithSelfTypeAndBody = assertPrintedCode(sm""" -// |trait X { self: Order => -// | def y = "test" -// |}""") - -// @Test def testTraitWithSelf1 = assertPrintedCode(sm""" -// |trait X { self => -// | def y = "test" -// |}""") - -// @Test def testTraitWithSelf2 = assertPrintedCode(sm""" -// |trait X { self: Foo with Bar => -// | val x: Int = 1 -// |}""") - -// @Test def testTraitTypeParams = assertPrintedCode("trait X[A, B]") - -// @Test def testTraitWithBody2 = assertPrintedCode(sm""" -// |trait X { -// | def foo: scala.Unit; -// | val bar: Baz -// |}""") - -// @Test def testTraitWithInh = assertPrintedCode("trait X extends A with B") - -// @Test def testTraitWithEarly1 = assertPrintedCode(sm""" -// |trait X extends { -// | val x: Int = 1 -// |} with Any""") - -// @Test def testTraitWithEarly2 = assertPrintedCode(sm""" -// |trait X extends { -// | val x: Int = 0; -// | type Foo = Bar -// |} with Y""") - -// @Test def testTraitWithEarly3 = assertPrintedCode(sm""" -// |trait X extends { -// | val x: Int = 5; -// | val y: Double = 4.0; -// | type Foo; -// | type XString = String -// |} with Y""") - -// @Test def testTraitWithEarly4 = assertPrintedCode(sm""" -// |trait X extends { -// | val x: Int = 5; -// | val y: Double = 4.0; -// | type Foo; -// | type XString = String -// |} with Y { -// | val z = 7 -// |}""") - -// @Test def testTraitWithEarly5 = assertPrintedCode(sm""" -// |trait X extends { -// | override protected[this] val x: Int = 5; -// | val y: Double = 4.0; -// | private type Foo; -// | private[ee] type XString = String -// |} with Y { -// | val z = 7 -// |}""") - -// @Test def testTraitWithSingletonTypeTree = assertPrintedCode(sm""" -// |trait Test { -// | def testReturnSingleton(): this.type -// |}""") - -// @Test def testTraitWithThis = assertPrintedCode(sm""" -// |trait Test { _ : X with Y => -// | -// |}""", q"trait Test { this: X with Y => }") - -// @Test def testTraitWithWhile1 = assertPrintedCode(sm""" -// |trait Test { -// | while (true.!=(false)) -// | println("testing...") -// | -// |}""") - -// @Test def testTraitWithWhile2 = assertPrintedCode(sm""" -// |trait Test { -// | while (true) -// | { -// | println("testing..."); -// | println("testing...") -// | } -// | -// |}""") - -// @Test def testTraitWithDoWhile1 = assertPrintedCode(sm""" -// |trait Test { -// | do -// | println("testing...") -// | while (true) -// |}""") - -// @Test def testTraitWithTypes = assertPrintedCode(sm""" -// |trait Test { -// | type A = Int; -// | type B >: Nothing <: AnyRef; -// | protected type C >: Nothing; -// | type D <: AnyRef -// |}""") -// } - -// trait ValAndDefPrintTests { -// @Test def testVal1 = assertPrintedCode("val a: Unit = null") - -// @Test def testVal2 = assertPrintedCode("val * : 
Unit = null") - -// @Test def testVal3 = assertPrintedCode("val a_ : Unit = null") - -// @Test def testDef1 = assertPrintedCode("def a: Unit = null") - -// @Test def testDef2 = assertPrintedCode("def * : Unit = null") - -// @Test def testDef3 = assertPrintedCode("def a_(x: Int): Unit = null") - -// @Test def testDef4 = assertPrintedCode("def a_ : Unit = null") - -// @Test def testDef5 = assertPrintedCode("def a_(* : Int): Unit = null") - -// @Test def testDef6 = assertPrintedCode("def a_(b_ : Int): Unit = null") - -// @Test def testDef7 = assertPrintedCode(sm""" -// |{ -// | def test1 = (); -// | def test2() = () -// |}""", -// Block( -// DefDef(NoMods, newTermName("test1"), Nil, Nil, EmptyTree, Literal(Constant(()))), -// DefDef(NoMods, newTermName("test2"), Nil, Nil :: Nil, EmptyTree, Literal(Constant(()))) -// ) -// ) - -// @Test def testDef8 = { -// val arg = ValDef(Modifiers(Flag.IMPLICIT) , newTermName("a"), -// AppliedTypeTree(Ident(newTypeName("R")), List(Ident(newTypeName("X")))), EmptyTree) - -// //def m[X](implicit a: R[X]) = () -// val tree = DefDef(NoMods, newTermName("test"), TypeDef(NoMods, newTypeName("X"), Nil, EmptyTree) :: Nil, -// List(List(arg)), EmptyTree, Literal(Constant(()))) - -// assertPrintedCode("def test[X](implicit a: R[X]) = ()", tree) -// } - -// @Test def testDefWithParams1 = assertPrintedCode("def foo(x: Int*) = null") - -// @Test def testDefWithParams2 = assertPrintedCode("def foo(x: Int)(y: Int = 1) = null") - -// @Test def testDefWithTypeParams1 = assertPrintedCode("def foo[A, B, C](x: A)(y: Int = 1): C = null") - -// @Test def testDefWithTypeParams2 = assertPrintedCode("def foo[A, B <: Bar] = null") - -// @Test def testDefWithAnn1 = assertPrintedCode("@annot def foo = null") - -// @Test def testDefWithAnn2 = assertPrintedCode("@a(x) def foo = null") - -// @Test def testDefWithAnn3 = assertPrintedCode("@Foo[A, B] def foo = null") - -// @Test def testDefWithAnn4 = assertPrintedCode("@Foo(a)(b)(x, y) def foo = null") - -// @Test def testDefWithAnn5 = assertPrintedCode("@Foo[A, B](a)(b) @Bar def foo(x: Int) = null") - -// @Test def testDefWithAnn6 = assertPrintedCode("@test1(new test2()) def foo = 42") - -// @Test def testDefWithAnn7 = assertPrintedCode("@`t*` def foo = 42") - -// @Test def testDefWithAnn8 = assertPrintedCode("@throws(classOf[Exception]) def foo = throw new Exception()") - -// @Test def testAnnotated1 = assertPrintedCode("def foo = 42: @test1") - -// @Test def testAnnotated2 = assertPrintedCode("""def foo = 42: @test1(42, z = "5")""") - -// @Test def testAnnotated3 = assertPrintedCode("def foo = (42: @test1): @test2(new test1())") - -// @Test def testAnnotated4 = assertPrintedCode("""def foo = 42: @test1(4, "testing")(4.2)""") - -// @Test def testAnnotated5 = assertPrintedCode("""def foo = (42: @test1(4, "testing")(4.2)): @test2(1, "bar")(3.14)""") - -// @Test def testAnnotated6 = assertPrintedCode("def foo = ((42: @test1): @test2(new test1())): @test3(1)(2, 3)(4)") - -// @Test def testAnnotated7 = assertPrintedCode(sm""" -// |(x: @unchecked) match { -// | case ((_): Int) => true -// | case _ => false -// |}""") - -// @Test def testAnnotated8 = assertPrintedCode(sm""" -// |((x: @unchecked): @test1(1, "testing")(3.14)) match { -// | case _ => true -// |}""") -// } - -// trait PackagePrintTests { -// @Test def testPackage1 = assertPrintedCode(sm""" -// |package foo.bar { -// | -// |}""") - -// @Test def testPackage2 = assertPrintedCode(sm""" -// |package foo { -// | class C -// | -// | object D -// |}""") - -// //package object foo extends 
a with b -// @Test def testPackage3 = assertPrintedCode(sm""" -// |package foo { -// | object `package` extends a with b -// |}""") - -// //package object foo { def foo; val x = 1 } -// @Test def testPackage4 = assertPrintedCode(sm""" -// |package foo { -// | object `package` { -// | def foo: scala.Unit; -// | val x = 1 -// | } -// |}""") - -// //package object foo extends { val x = 1; type I = Int } with Any -// @Test def testPackage5 = assertPrintedCode(sm""" -// |package foo { -// | object `package` extends { -// | val x = 1; -// | type I = Int -// | } with Any -// |}""") -// } - -// trait QuasiTreesPrintTests { -// @Test def testQuasiIdent = assertPrintedCode("*", q"*") - -// @Test def testQuasiVal = assertPrintedCode("val * : Unit = null", q"val * : Unit = null") - -// @Test def testQuasiDef = assertPrintedCode("def * : Unit = null", q"def * : Unit = null") - -// @Test def testQuasiTrait = assertPrintedCode("trait *", q"trait *") - -// @Test def testQuasiClass = assertPrintedCode("class *", q"class *") - -// @Test def testQuasiClassWithPublicParams = assertPrintedCode( "class X(val x: Int, val s: String)", q"class X(val x: Int, val s:String)" ) - -// @Test def testQuasiClassWithParams = assertPrintedCode("class X(x: Int, s: String)", q"class X(x: Int, s:String)") - -// @Test def testQuasiObject = assertPrintedCode("object *", q"object *") - -// @Test def testQuasiObjectWithBody = assertPrintedCode(sm""" -// |object X { -// | def y = "test" -// |}""", q"""object X{ def y = "test" }""") - -// @Test def testQuasiClassWithBody = assertPrintedCode(sm""" -// |class X { -// | def y = "test" -// |}""", q"""class X{ def y = "test" }""") - -// @Test def testQuasiTraitWithBody = assertPrintedCode(sm""" -// |trait X { -// | def y = "test" -// |}""", q"""trait X{ def y = "test" }""") - -// @Test def testQuasiTraitWithSelfTypeAndBody = assertPrintedCode(sm""" -// |trait X { self: Order => -// | def y = "test" -// |}""", q"""trait X{ self: Order => def y = "test" }""") - -// @Test def testQuasiTraitWithSelf = assertPrintedCode(sm""" -// |trait X { self => -// | def y = "test" -// |}""", q"""trait X{ self => def y = "test" }""") - -// @Test def testQuasiCaseClassWithBody = assertPrintedCode(sm""" -// |case class X() { -// | def y = "test" -// |}""", q"""case class X() { def y = "test" }""") - -// @Test def testQuasiCaseClassWithParamsAndBody = assertPrintedCode(sm""" -// |case class X(x: Int, s: String) { -// | def y = "test" -// |}""", q"""case class X(x: Int, s: String){ def y = "test" }""") -// } \ No newline at end of file + def assertTreeCode(tree: Tree, typecheck: Boolean = false)(code: String) = { + if (typecheck) { + assertEquals("using quasiquote or given tree (typechecked)"+LF, code.trim, normalizeEOL(showCode(toolbox.typecheck(tree)))) + } else { + assertEquals("using quasiquote or given tree"+LF, code.trim, normalizeEOL(showCode(tree))) + } + } + + def assertPrintedCode(source: String, checkTypedTree: Boolean = true, wrapCode: Boolean = false) = { + if (checkTypedTree) + assertResultCode(source)(source, source, wrapCode) + else assertResultCode(source)(parsedCode = source, wrap = wrapCode) + } + + implicit class StrContextStripMarginOps(val stringContext: StringContext) extends util.StripMarginInterpolator +} + +import PrinterHelper._ + +trait BasePrintTests { + @Test def testIdent = assertTreeCode(Ident("*"))("*") + + @Test def testConstant1 = assertTreeCode(Literal(Constant("*")))("\"*\"") + + @Test def testConstant2 = assertTreeCode(Literal(Constant(42)))("42") + + @Test def 
testConstantFloat = assertTreeCode(Literal(Constant(42f)))("42.0F") + + @Test def testConstantDouble = assertTreeCode(Literal(Constant(42d)))("42.0") + + @Test def testConstantLong = assertTreeCode(Literal(Constant(42l)))("42L") + + @Test def testOpExpr = assertPrintedCode("(5).+(4)", checkTypedTree = false) + + @Test def testName1 = assertPrintedCode("class test") + + @Test def testName2 = assertPrintedCode("class *") + + @Test def testName4 = assertPrintedCode("class `a*`") + + @Test def testName5 = assertPrintedCode("val :::: = 1") + + @Test def testName6 = assertPrintedCode("val `::::t` = 1") + + @Test def testName7 = assertPrintedCode("""class \/""") + + @Test def testName8 = assertPrintedCode("""class \\\\""") + + @Test def testName9 = assertPrintedCode("""class test_\/""") + + @Test def testName10 = assertPrintedCode("""class `*_*`""") + + @Test def testName11 = assertPrintedCode("""class `a_*`""") + + @Test def testName12 = assertPrintedCode("""class `*_a`""") + + @Test def testName13 = assertPrintedCode("""class a_a""") + + @Test def testName14 = assertPrintedCode("val x$11 = 5") + + @Test def testName15 = assertPrintedCode("class `[]`") + + @Test def testName16 = assertPrintedCode("class `()`") + + @Test def testName17 = assertPrintedCode("class `{}`") + + @Test def testName18 = assertPrintedCode("class <>") + + @Test def testName19 = assertPrintedCode("""class `class`""") + + @Test def testName20 = assertPrintedCode("""class `test name`""") + + @Test def testName21 = assertPrintedCode("""class `test.name`""") + + @Test def testIfExpr1 = assertResultCode(code = sm""" + |val a = 1 + |if (a > 1) + | a: Int + |else + | (a.toString): String + """)( + parsedCode = sm""" + |val a = 1; + |if (a.>(1)) + | ((a): Int) + |else + | ((a.toString): String)""", + typedCode=sm""" + |val a = 1; + |if (PrintersContext.this.a.>(1)) + | ((PrintersContext.this.a): scala.Int) + |else + | ((PrintersContext.this.a.toString()): scala.Predef.String) + """, wrap = true) + + @Test def testIfExpr2 = assertPrintedCode(sm""" + |class A { + | (if (true) + | { + | false; + | () + | } + |else + | { + | true; + | () + | }).toString() + |}""") + + @Test def testIfExpr3 = assertPrintedCode(sm""" + |class A { + | (if (true) + | { + | false; + | () + | } + |else + | { + | true; + | () + | }).toString().hashCode() + |}""") + + //val x = true && true && false.! 
+ @Test def testBooleanExpr1 = assertPrintedCode("val x = true.&&(true).&&(false.`unary_!`)", checkTypedTree = false) + + //val x = true && !(true && false) + @Test def testBooleanExpr2 = assertPrintedCode("val x = true.&&(true.&&(false).`unary_!`)", checkTypedTree = false) + + @Test def testNewExpr1 = assertResultCode( + code = sm""" + |class foo + |new foo() + |""")( + parsedCode = sm""" + |class foo; + |new foo()""", + typedCode = sm""" + |class foo; + |new PrintersContext.this.foo() + |""", + wrap = true) + + @Test def testNewExpr2 = assertResultCode( + code = sm""" + |class foo + |new foo { "test" } + |""")( + parsedCode = sm""" + |class foo; + |{ + | final class $$anon extends foo { + | "test" + | }; + | new $$anon() + |}""", + typedCode = sm""" + |class foo; + |{ + | final class $$anon extends PrintersContext.this.foo { + | "test" + | }; + | new $$anon() + |}""", + wrap = true) + + @Test def testNewExpr3 = assertPrintedCode(sm""" + |{ + | class foo[t]; + | new foo[scala.Int]() + |}""") + + @Test def testNewExpr4 = assertPrintedCode(sm""" + |{ + | class foo(x: scala.Int); + | val x = 5; + | new foo(x) + |}""") + + @Test def testNewExpr5 = assertPrintedCode(sm""" + |{ + | class foo[t](x: scala.Int); + | val x = 5; + | new foo[scala.Predef.String](x) + |}""") + + //new foo[t](x) { () } + @Test def testNewExpr6 = assertResultCode( + code = sm""" + |class foo[t](x: Int) + |new foo[String](3) { () } + |""")( + parsedCode = sm""" + |{ + | class foo[t](x: Int); + | { + | final class $$anon extends foo[String](3) { + | () + | }; + | new $$anon() + | } + |}""", + typedCode = sm""" + |{ + | class foo[t](x: scala.Int); + | { + | final class $$anon extends foo[scala.Predef.String](3) { + | () + | }; + | new $$anon() + | } + |}""") + + //new foo with bar + @Test def testNewExpr7 = assertPrintedCode(sm""" + |{ + | trait foo; + | trait bar; + | { + | final class $$anon extends foo with bar; + | new $$anon() + | } + |}""") + + //new { anonymous } + @Test def testNewExpr8 = assertPrintedCode(sm""" + |{ + | final class $$anon { + | 5 + | }; + | new $$anon() + |}""") + + //new { val early = 1 } with Parent[Int] { body } + @Test def testNewExpr9 = assertPrintedCode(sm""" + |{ + | class Parent[t]; + | { + | final class $$anon extends { + | val early = 1 + | } with Parent[scala.Int] { + | "testNewExpr" + | }; + | new $$anon() + | } + |}""") + + //new Foo { self => } + @Test def testNewExpr10 = assertPrintedCode(sm""" + |{ + | class Foo; + | { + | final class $$anon extends Foo { self => + | + | }; + | new $$anon() + | } + |}""") + + @Test def testReturn = assertPrintedCode("def test: scala.Int = return 42") + + @Test def testFunc1 = assertResultCode( + code = "List(1, 2, 3).map((i: Int) => i - 1)")( + parsedCode = "List(1, 2, 3).map(((i: Int) => i.-(1)))", + typedCode = sm"scala.collection.immutable.List.apply[Int](1, 2, 3).map[Int, List[Int]](((i: scala.Int) => i.-(1)))(scala.collection.immutable.List.canBuildFrom[Int])") + + @Test def testFunc2 = assertResultCode( + code = "val sum: Seq[Int] => Int = _ reduceLeft (_+_)")( + parsedCode = "val sum: _root_.scala.Function1[Seq[Int], Int] = ((x$1) => x$1.reduceLeft(((x$2, x$3) => x$2.+(x$3))))", + typedCode = "val sum: _root_.scala.Function1[scala.`package`.Seq[scala.Int], scala.Int] = ((x$1: Seq[Int]) => x$1.reduceLeft[Int](((x$2: Int, x$3: Int) => x$2.+(x$3))))") + + @Test def testFunc3 = assertResultCode( + code = "List(1, 2, 3) map (_ - 1)")( + parsedCode = "List(1, 2, 3).map(((x$1) => x$1.-(1))) ", + typedCode = 
"scala.collection.immutable.List.apply[Int](1, 2, 3).map[Int, List[Int]](((x$1: Int) => x$1.-(1)))(scala.collection.immutable.List.canBuildFrom[Int])") + + @Test def testFunc4 = assertResultCode( + code = "val x: String => Int = ((str: String) => 1)")( + parsedCode = "val x: _root_.scala.Function1[String, Int] = ((str: String) => 1)", + typedCode = " val x: _root_.scala.Function1[_root_.scala.Predef.String, _root_.scala.Int] = ((str: _root_.scala.Predef.String) => 1)", printRoot = true) + + @Test def testAssign1 = assertPrintedCode("(f.v = 5).toString", checkTypedTree = false) + + @Test def testAssign2 = assertPrintedCode("(f.v = 5)(2)", checkTypedTree = false) + + @Test def testImport1 = assertPrintedCode("import scala.collection.mutable") + + @Test def testImport2 = assertPrintedCode("import java.lang.{String=>Str}") + + @Test def testImport3 = assertPrintedCode("import java.lang.{String=>Str, Object=>_, _}") + + @Test def testImport4 = assertPrintedCode("import scala.collection._") +} + +trait ClassPrintTests { + @Test def testClass = assertPrintedCode("class *") + + @Test def testClassWithBody = assertPrintedCode(sm""" + |class X { + | def y = "test" + |}""") + + @Test def testClassConstructorModifiers = assertPrintedCode("class X private (x: scala.Int)") + + @Test def testClassConstructorModifierVisibility = assertPrintedCode(sm""" + |object A { + | class X protected[A] (x: scala.Int) + |}""") + + @Test def testClassWithPublicParams = assertPrintedCode("class X(val x: scala.Int, val s: scala.Predef.String)") + + @Test def testClassWithParams1 = assertPrintedCode("class X(x: scala.Int, s: scala.Predef.String)") + + @Test def testClassWithParams2 = assertPrintedCode("class X(@test x: Int, s: String)", checkTypedTree = false) + + @Test def testClassWithParams3 = assertPrintedCode("class X(implicit x: Int, s: String)", checkTypedTree = false) + + @Test def testClassWithParams4 = assertPrintedCode("class X(implicit @unchecked x: Int, s: String)", checkTypedTree = false) + + @Test def testClassWithParams5 = assertPrintedCode(sm""" + |{ + | class Y { + | val x = 5 + | }; + | class X(override private[this] val x: scala.Int, s: scala.Predef.String) extends Y; + | () + |}""") + + @Test def testClassWithParams6 = assertPrintedCode("class X(@test1 override private[this] val x: Int, @test2(param1 = 7) s: String) extends Y", checkTypedTree = false) + + @Test def testClassWithParams7 = assertPrintedCode("class X protected (val x: scala.Int, val s: scala.Predef.String)") + + @Test def testClassWithParams8 = assertPrintedCode("class X(var x: scala.Int)") + + @Test def testClassWithParams9 = assertPrintedCode("def test(x: scala.Int*) = 5") + + @Test def testClassWithByNameParam = assertPrintedCode("class X(x: => scala.Int)") + + @Test def testClassWithDefault = assertPrintedCode(sm""" + |{ + | class X(var x: scala.Int = 5); + | () + |}""") + + @Test def testClassWithParams10 = assertPrintedCode("class X(protected[zzz] var x: Int)", checkTypedTree = false) + + @Test def testClassWithParams11 = assertPrintedCode(sm""" + |{ + | class F(x: scala.Int); + | trait E { + | var x: scala.Int + | }; + | class X(override var x: scala.Int = 5) extends F(x) with E; + | () + |}""") + + @Test def testClassWithParams12 = assertPrintedCode("class X(val y: scala.Int)()(var z: scala.Double)") + + @Test def testClassWithImplicitParams = assertPrintedCode("class X(var i: scala.Int)(implicit val d: scala.Double, var f: scala.Float)") + + @Test def testClassWithEarly = + assertPrintedCode(sm""" + |class X(var i: scala.Int) 
extends { + | val a = i; + | type B + |} with scala.Serializable""") + + @Test def testClassWithThrow1 = assertPrintedCode(sm""" + |class Throw1 { + | throw new scala.`package`.Exception("exception!") + |}""") + + @Test def testClassWithThrow2 = assertPrintedCode(sm""" + |class Throw2 { + | var msg = " "; + | val e = new scala.`package`.Exception(Throw2.this.msg); + | throw Throw2.this.e + |}""") + + @Test def testClassWithAssignmentWithTuple1 = assertResultCode(sm""" + |class Test { + | val (a, b) = (1, 2) + |}""")( + parsedCode = sm""" + |class Test { + | private[this] val x$$1 = (scala.Tuple2(1, 2): @scala.unchecked) match { + | case scala.Tuple2((a @ _), (b @ _)) => scala.Tuple2(a, b) + | }; + | val a = x$$1._1; + | val b = x$$1._2 + |}""", + typedCode = sm""" + |class Test { + | private[this] val x$$1 = (scala.Tuple2.apply[Int, Int](1, 2): @scala.unchecked) match { + | case scala.Tuple2((a @ _), (b @ _)) => scala.Tuple2.apply[Int, Int](a, b) + | }; + | val a = Test.this.x$$1._1; + | val b = Test.this.x$$1._2 + |}""") + + @Test def testClassWithAssignmentWithTuple2 = assertResultCode( + code = sm""" + |class Test { + | val (a, b) = (1).->(2) + |}""")( + parsedCode = sm""" + |class Test { + | private[this] val x$$1 = ((1).->(2): @scala.unchecked) match { + | case scala.Tuple2((a @ _), (b @ _)) => scala.Tuple2(a, b) + | }; + | val a = x$$1._1; + | val b = x$$1._2 + |}""", + typedCode = sm""" + |class Test { + | private[this] val x$$1 = (scala.Predef.ArrowAssoc[Int](1).->[Int](2): @scala.unchecked) match { + | case scala.Tuple2((a @ _), (b @ _)) => scala.Tuple2.apply[Int, Int](a, b) + | }; + | val a = Test.this.x$$1._1; + | val b = Test.this.x$$1._2 + |}""") + + /* + class Test { + val List(one, three, five) = List(1,3,5) + } + */ + @Test def testClassWithPatternMatchInAssignment = assertPrintedCode(sm""" + |class Test { + | private[this] val x$$1 = (scala.collection.immutable.List.apply[scala.Int](1, 3, 5): @scala.unchecked) match { + | case scala.collection.immutable.List((one @ _), (three @ _), (five @ _)) => scala.Tuple3.apply[scala.Int, scala.Int, scala.Int](one, three, five) + | }; + | val one = Test.this.x$$1._1; + | val three = Test.this.x$$1._2; + | val five = Test.this.x$$1._3 + |}""") + + //class A(l: List[_]) + @Test def testClassWithExistentialParameter1 = assertPrintedCode(sm""" + |class Test(l: (scala.`package`.List[_$$1] forSome { + | type _$$1 + |}))""") + + @Test def testClassWithExistentialParameter2 = assertPrintedCode(sm""" + |class B(l: (scala.`package`.List[T] forSome { + | type T + |}))""") + + @Test def testClassWithCompoundTypeTree = assertPrintedCode(sm""" + |{ + | trait A; + | trait B; + | abstract class C(val a: A with B) { + | def method(x: A with B with C { + | val x: scala.Float + | }): A with B + | }; + | () + |}""") + + @Test def testClassWithSelectFromTypeTree = assertPrintedCode(sm""" + |{ + | trait A { + | type T + | }; + | class B(t: (A)#T); + | () + |}""") + + @Test def testImplicitClass = assertPrintedCode(sm""" + |{ + | implicit class X(protected[this] var x: scala.Int); + | () + |}""", + checkTypedTree = true) + + @Test def testAbstractClass = assertPrintedCode("abstract class X(protected[this] var x: scala.Int)") + + @Test def testCaseClassWithParams1 = assertPrintedCode(sm""" + |{ + | case class X(x: scala.Int, s: scala.Predef.String); + | () + |}""") + + @Test def testCaseClassWithParams2 = assertPrintedCode(sm""" + |{ + | case class X(protected val x: scala.Int, s: scala.Predef.String); + | () + |}""") + + @Test def testCaseClassWithParams3 = 
assertPrintedCode(sm""" + |{ + | case class X(implicit x: scala.Int, s: scala.Predef.String); + | () + |}""") + + @Test def testCaseClassWithParams4 = assertPrintedCode(sm""" + |{ + | trait V { + | val x: scala.Int + | }; + | case class X(override val x: scala.Int, s: scala.Predef.String) extends scala.Cloneable; + | () + |}""") + + @Test def testCaseClassWithBody = assertPrintedCode(sm""" + |{ + | case class X() { + | def y = "test" + | }; + | () + |}""") + + @Test def testLocalClass = assertPrintedCode(sm""" + |def test = { + | class X(var a: scala.Int) { + | def y = "test" + | }; + | new X(5) + |}""") + + @Test def testLocalCaseClass = assertPrintedCode(sm""" + |def test = { + | case class X(var a: scala.Int) { + | def y = "test" + | }; + | new X(5) + |}""") + + @Test def testSuperInClass = assertPrintedCode(sm""" + |{ + | trait Root { + | def r = "Root" + | }; + | class X extends Root { + | def superX = super.r + | }; + | class Y extends X with Root { + | class Inner { + | val myY = Y.super.r + | }; + | def fromX = super[X].r; + | def fromRoot = super[Root].r + | }; + | () + |}""") + + @Test def testThisInClass = assertPrintedCode(sm""" + |class Outer { + | class Inner { + | val outer = Outer.this + | }; + | val self = this + |}""") + + @Test def testCaseClassWithParamsAndBody = assertPrintedCode(sm""" + |{ + | case class X(var x: scala.Int, var s: scala.Predef.String) { + | def y = "test" + | }; + | () + |}""") + + @Test def testObject = assertPrintedCode("object *") + + @Test def testObjectWithBody = assertPrintedCode(sm""" + |object X { + | def y = "test" + |}""") + + @Test def testObjectWithEarly1 = assertPrintedCode(sm""" + |object X extends { + | val early: scala.Int = 42 + |} with scala.Serializable""") + + @Test def testObjectWithEarly2 = assertPrintedCode(sm""" + |object X extends { + | val early: scala.Int = 42; + | type EarlyT = scala.Predef.String + |} with scala.Serializable""") + + @Test def testObjectWithSelf = assertPrintedCode(sm""" + |object Foo extends scala.Serializable { self => + | 42 + |}""") + + @Test def testObjectInh = assertPrintedCode(sm""" + |trait Y { + | private[Y] object X extends scala.Serializable with scala.Cloneable + |}""") + + @Test def testObjectWithPatternMatch1 = assertPrintedCode(sm""" + |object PM1 { + | scala.collection.immutable.List.apply[scala.Int](1, 2) match { + | case (i @ _) => i + | } + |}""") + + @Test def testObjectWithPatternMatch2 = assertResultCode( + code = sm""" + |object PM2 { + | List(1, 2).map { + | case i if i > 5 => i + | } + |}""")( + parsedCode = sm""" + |object PM2 { + | List(1, 2).map({ + | case (i @ _) if i.>(5) => i + | }) + |}""") + /* + typedCode = sm""" + |object PM2 { + | scala.collection.immutable.List.apply(1, 2).map(((x0$$1) => x0$$1 match { + | case (i @ _) if i.>(5) => i + | }))(scala.collection.immutable.List.canBuildFrom) + |}""") + * + */ + + @Test def testObjectWithPatternMatch3 = assertResultCode( + code = sm""" + |object PM3 { + | List(1, 2).map { + | case i: Int => i + | } + |}""")( + parsedCode = sm""" + |object PM3 { + | List(1, 2).map({ + | case (i @ ((_): Int)) => i + | }) + |}""") + /* + typedCode = sm""" + |object PM3 { + | scala.collection.immutable.List.apply(1, 2).map(((x0$$2) => x0$$2 match { + | case (i @ ((_): scala.Int)) => i + | }))(scala.collection.immutable.List.canBuildFrom) + |}""") + * + */ + + @Test def testObjectWithPatternMatch4 = assertResultCode( + code = sm""" + |object PM4 { + | List(1, 2).map { + | case _ => 42 + | } + |}""")( + parsedCode = sm""" + |object PM4 { + | List(1, 
2).map({ + | case _ => 42 + | }) + |}""") + /* + typedCode = sm""" + |object PM4 { + | scala.collection.immutable.List.apply(1, 2).map(((x0$$3) => x0$$3 match { + | case _ => 42 + | }))(scala.collection.immutable.List.canBuildFrom) + |}""") + * + */ + + @Test def testObjectWithPatternMatch5 = assertResultCode( + code = sm""" + |object PM5 { + | List(1, 2) match { + | case x :: xs => x + | } + |}""")( + parsedCode = sm""" + |object PM5 { + | List(1, 2) match { + | case ::((x @ _), (xs @ _)) => x + | } + |}""", + typedCode = sm""" + |object PM5 { + | scala.collection.immutable.List.apply[Int](1, 2) match { + | case scala.`package`.::((x @ _), (xs @ _)) => x + | } + |}""") + + @Test def testObjectWithPatternMatch6 = assertResultCode( + code = sm""" + |object PM6 { + | List(1, 2).map { + | case (0 | 1) => true + | case _ => false + | } + |}""")( + parsedCode = sm""" + |object PM6 { + | List(1, 2).map({ + | case (0| 1) => true + | case _ => false + | }) + |}""") + /* + typedCode = sm""" + |object PM6 { + | scala.collection.immutable.List.apply(1, 2).map(((x0$$4) => x0$$4 match { + | case (0| 1) => true + | case _ => false + | }))(scala.collection.immutable.List.canBuildFrom) + |}""" + * + */ + + @Test def testObjectWithPatternMatch7 = assertPrintedCode(sm""" + |object PM7 { + | scala.Predef.augmentString("abcde").toList match { + | case scala.collection.Seq((car @ _), _*) => car + | } + |}""") + + @Test def testObjectWithPatternMatch8 = assertPrintedCode(sm""" + |{ + | object Extractor { + | def unapply(i: scala.Int) = scala.Some.apply[scala.Int](i) + | }; + | object PM9 { + | 42 match { + | case (a @ Extractor((i @ _))) => i + | } + | }; + | () + |}""") + + @Test def testObjectWithPartialFunc = assertPrintedCode(sm""" + |object Test { + | def partFuncTest[A, B](e: scala.`package`.Either[A, B]): scala.Unit = e match { + | case scala.`package`.Right(_) => () + | } + |}""") + + @Test def testObjectWithTry = assertResultCode( + code = sm""" + |object Test { + | import java.io._; + | var file: PrintStream = null; + | try { + | val out = new FileOutputStream("myfile.txt"); + | file = new PrintStream(out) + | } catch { + | case ioe: IOException => println("ioe") + | case e: Exception => println("e") + | } finally println("finally") + |}""")( + parsedCode = sm""" + |object Test { + | import java.io._; + | var file: PrintStream = null; + | try { + | val out = new FileOutputStream("myfile.txt"); + | file = new PrintStream(out) + | } catch { + | case (ioe @ ((_): IOException)) => println("ioe") + | case (e @ ((_): Exception)) => println("e") + | } finally println("finally") + |}""", + typedCode = sm""" + |object Test { + | import java.io._; + | var file: java.io.PrintStream = null; + | try { + | val out = new java.io.FileOutputStream("myfile.txt"); + | Test.this.`file_=`(new java.io.PrintStream(out)) + | } catch { + | case (ioe @ ((_): java.io.IOException)) => scala.Predef.println("ioe") + | case (e @ ((_): scala.`package`.Exception)) => scala.Predef.println("e") + | } finally scala.Predef.println("finally") + |}""") +} + +trait TraitPrintTests { + @Test def testTrait = assertPrintedCode("trait *") + + @Test def testTraitWithBody = assertPrintedCode(sm""" + |trait X { + | def y = "test" + |}""") + + @Test def testTraitWithSelfTypeAndBody = assertPrintedCode(sm""" + |trait X { self: scala.Cloneable => + | def y = "test" + |}""") + + @Test def testTraitWithSelf1 = assertPrintedCode(sm""" + |trait X { self => + | def y = "test" + |}""") + + @Test def testTraitWithSelf2 = assertPrintedCode(sm""" + |trait X { 
self: scala.Cloneable with scala.Serializable => + | val x: scala.Int = 1 + |}""") + + @Test def testTraitTypeParams = assertPrintedCode("trait X[A, B]") + + @Test def testTraitWithBody2 = assertPrintedCode(sm""" + |trait X { + | def foo: scala.Unit; + | val bar: scala.Predef.String + |}""") + + @Test def testTraitWithInh = assertPrintedCode("trait X extends scala.Cloneable with scala.Serializable") + + @Test def testTraitWithEarly1 = assertPrintedCode(sm""" + |trait X extends { + | val x: Int = 1 + |} with AnyRef""", checkTypedTree = false) + + @Test def testTraitWithEarly2 = assertPrintedCode(sm""" + |trait X extends { + | val x: scala.Int = 0; + | type Foo = scala.Unit + |} with scala.Cloneable""") + + @Test def testTraitWithEarly3 = assertPrintedCode(sm""" + |trait X extends { + | val x: scala.Int = 5; + | val y: scala.Double = 4.0; + | type Foo; + | type XString = scala.Predef.String + |} with scala.Serializable""") + + @Test def testTraitWithEarly4 = assertPrintedCode(sm""" + |trait X extends { + | val x: scala.Int = 5; + | val y: scala.Double = 4.0; + | type Foo; + | type XString = scala.Predef.String + |} with scala.Serializable { + | val z = 7 + |}""") + + @Test def testTraitWithSingletonTypeTree = assertPrintedCode(sm""" + |trait Test { + | def testReturnSingleton(): Test.this.type + |}""") + + @Test def testTraitWithThis = assertTreeCode(q"trait Test { this: X with Y => }")(sm""" + |trait Test { _ : X with Y => + | + |}""") + + @Test def testTraitWithWhile1 = assertPrintedCode(sm""" + |trait Test { + | while (false) + | scala.Predef.println("testing...") + | + |}""") + + @Test def testTraitWithWhile2 = assertPrintedCode(sm""" + |trait Test { + | while (true) + | { + | scala.Predef.println("testing..."); + | scala.Predef.println("testing...") + | } + | + |}""") + + @Test def testTraitWithDoWhile1 = assertPrintedCode(sm""" + |trait Test { + | do + | scala.Predef.println("testing...") + | while (true) + |}""") + + @Test def testTraitWithTypes = assertResultCode( + code = sm""" + |trait Test { + | type A = Int; + | type B >: Nothing <: AnyRef; + | protected type C >: Nothing; + | type D <: AnyRef + |}""")( + parsedCode = sm""" + |trait Test { + | type A = Int; + | type B >: Nothing <: AnyRef; + | protected type C >: Nothing; + | type D <: AnyRef + |}""", + typedCode = sm""" + |trait Test { + | type A = scala.Int; + | type B <: scala.AnyRef; + | protected type C; + | type D <: scala.AnyRef + |}""") +} + +trait ValAndDefPrintTests { + @Test def testVal1 = assertPrintedCode("val a: scala.Unit = ()") + + @Test def testVal2 = assertPrintedCode("val * : scala.Unit = ()") + + @Test def testVal3 = assertPrintedCode("val a_ : scala.Unit = ()") + + @Test def testDef1 = assertPrintedCode("def a = ()") + + @Test def testDef2 = assertPrintedCode("def * : scala.Unit = ()") + + @Test def testDef3 = assertPrintedCode("def a_(x: scala.Int): scala.Unit = ()") + + @Test def testDef4 = assertPrintedCode("def a_ : scala.Unit = ()") + + @Test def testDef5 = assertPrintedCode("def a_(* : scala.Int): scala.Unit = ()") + + @Test def testDef6 = assertPrintedCode("def a_(b_ : scala.Int) = ()") + + @Test def testDef7 = assertTreeCode{ + Block( + DefDef(NoMods, newTermName("test1"), Nil, Nil, EmptyTree, Literal(Constant(()))), + DefDef(NoMods, newTermName("test2"), Nil, Nil :: Nil, EmptyTree, Literal(Constant(()))) + ) + }(sm""" + |{ + | def test1 = (); + | def test2() = () + |}""") + + @Test def testDef8 = { + val arg = ValDef(Modifiers(Flag.IMPLICIT) , newTermName("a"), + 
AppliedTypeTree(Ident(newTypeName("R")), List(Ident(newTypeName("X")))), EmptyTree) + + //def m[X](implicit a: R[X]) = () + val tree = DefDef(NoMods, newTermName("test"), TypeDef(NoMods, newTypeName("X"), Nil, EmptyTree) :: Nil, + List(List(arg)), EmptyTree, Literal(Constant(()))) + + assertTreeCode(tree)("def test[X](implicit a: R[X]) = ()") + } + + @Test def testDef9 = assertPrintedCode("def a(x: scala.Int)(implicit z: scala.Double, y: scala.Float): scala.Unit = ()") + + @Test def testDefWithLazyVal1 = assertResultCode( + code = "def a = { lazy val test: Int = 42 }")( + parsedCode = sm""" + |def a = { + | lazy val test: Int = 42; + | () + |} + """, + typedCode = sm""" + |def a = { + | lazy val test: scala.Int = 42; + | () + |}""") + + @Test def testDefWithLazyVal2 = assertPrintedCode(sm""" + |def a = { + | lazy val test: Unit = { + | scala.Predef.println(); + | scala.Predef.println() + | }; + | () + |}""") + + @Test def testDefWithParams1 = assertPrintedCode("def foo(x: scala.Int*) = ()") + + @Test def testDefWithParams2 = assertPrintedCode(sm""" + |{ + | def foo(x: scala.Int)(y: scala.Int = 1) = (); + | () + |}""") + + @Test def testDefWithTypeParams1 = assertPrintedCode(sm""" + |{ + | def foo[A, B, C](x: A)(y: scala.Int = 1): C = ().asInstanceOf[C]; + | () + |}""") + + @Test def testDefWithTypeParams2 = assertPrintedCode("def foo[A, B <: scala.AnyVal] = ()") + + @Test def testDefWithAnn1 = assertPrintedCode("@annot def foo = null", checkTypedTree = false) + + @Test def testDefWithAnn2 = assertPrintedCode("@a(x) def foo = null", checkTypedTree = false) + + @Test def testDefWithAnn3 = assertPrintedCode("@Foo[A, B] def foo = null", checkTypedTree = false) + + @Test def testDefWithAnn4 = assertPrintedCode("@Foo(a)(b)(x, y) def foo = null", checkTypedTree = false) + + @Test def testDefWithAnn5 = assertPrintedCode("@Foo[A, B](a)(b) @Bar def foo(x: Int) = null", checkTypedTree = false) + + @Test def testDefWithAnn6 = assertPrintedCode("@test1(new test2()) def foo = 42", checkTypedTree = false) + + @Test def testDefWithAnn7 = assertPrintedCode("@`t*` def foo = 42", checkTypedTree = false) + + @Test def testDefWithAnn8 = assertPrintedCode("@throws(classOf[Exception]) def foo = throw new Exception()", checkTypedTree = false) + + @Test def testAnnotated1 = assertResultCode( + code = "def foo = 42: @baz")( + parsedCode = "def foo = 42: @baz", + typedCode = "def foo = (42: @baz)", + wrap = true) + + @Test def testAnnotated2 = assertResultCode( + code = "def foo = 42: @foo2[A1, B1](4)(2)")( + parsedCode = "def foo = 42: @foo2[A1, B1](4)(2)", + typedCode = "def foo = (42: @foo2[A1, B1](4)(2))", + wrap = true) + + @Test def testAnnotated3 = assertResultCode( + code = "def foo = (42: @foo1[A1, B1]): @foo2[A1, B1](4)(2)")( + parsedCode = "def foo = (42: @foo1[A1, B1]): @foo2[A1, B1](4)(2)", + typedCode = "def foo = ((42: @foo1[A1, B1]): @foo2[A1, B1](4)(2))", + wrap = true) + + @Test def testAnnotated4 = assertResultCode( + code = "def foo = 42: @foo3[A1, B1](4)(2.0F, new foo1[A1, B1]())")( + parsedCode = "def foo = 42: @foo3[A1, B1](4)(2.0F, new foo1[A1, B1]())", + typedCode = "def foo = (42: @foo3[A1, B1](4)(2.0F, new foo1[A1, B1]()))", + wrap = true) + + @Test def testAnnotated5 = assertPrintedCode(sm""" + |{ + | val x = 5; + | (x: @unchecked) match { + | case ((_): scala.Int) => true + | case _ => false + | } + |}""") + + @Test def testAnnotated8 = assertPrintedCode(sm""" + |{ + | val x = 5; + | ((x: @unchecked): @foo3(4)(2.0F, new foo1[A1, B1]())) match { + | case _ => true + | } + |}""", wrapCode 
= true) +} + +trait PackagePrintTests { + @Test def testPackage1 = assertPrintedCode(sm""" + |package foo.bar { + | + |}""", checkTypedTree = false) + + @Test def testPackage2 = assertPrintedCode(sm""" + |package foo { + | class C + | + | object D + |}""", checkTypedTree = false) + + //package object foo extends a with b + @Test def testPackage3 = assertPrintedCode(sm""" + |package foo { + | object `package` extends a with b + |}""", checkTypedTree = false) + + //package object foo { def foo; val x = 1 } + @Test def testPackage4 = assertPrintedCode(sm""" + |package foo { + | object `package` { + | def foo: scala.Unit = (); + | val x = 1 + | } + |}""", checkTypedTree = false) + + //package object foo extends { val x = 1; type I = Int } with Any + @Test def testPackage5 = assertPrintedCode(sm""" + |package foo { + | object `package` extends { + | val x = 1; + | type I = Int + | } with AnyRef + |}""", checkTypedTree = false) +} + +trait QuasiTreesPrintTests { + @Test def testQuasiIdent = assertTreeCode(q"*")("*") + + @Test def testQuasiVal = assertTreeCode(q"val * : Unit = null")("val * : Unit = null") + + @Test def testQuasiDef = assertTreeCode(q"def * : Unit = null")("def * : Unit = null") + + @Test def testQuasiTrait = assertTreeCode(q"trait *")("trait *") + + @Test def testQuasiClass = assertTreeCode(q"class *")("class *") + + @Test def testQuasiClassWithPublicParams = assertTreeCode(q"class X(val x: Int, val s:String)")("class X(val x: Int, val s: String)") + + @Test def testQuasiClassWithParams = assertTreeCode(q"class X(x: Int, s:String)")("class X(x: Int, s: String)") + + @Test def testQuasiObject = assertTreeCode(q"object *")("object *") + + @Test def testQuasiObjectWithBody = assertTreeCode(q"""object X{ def y = "test" }""")(sm""" + |object X { + | def y = "test" + |}""") + + @Test def testQuasiClassWithBody = assertTreeCode(q"""class X{ def y = "test" }""")(sm""" + |class X { + | def y = "test" + |}""") + + @Test def testQuasiTraitWithBody = assertTreeCode(q"""trait X{ def y = "test" }""")(sm""" + |trait X { + | def y = "test" + |}""") + + @Test def testQuasiTraitWithSelfTypeAndBody = assertTreeCode(q"""trait X{ self: Order => def y = "test" }""")(sm""" + |trait X { self: Order => + | def y = "test" + |}""") + + @Test def testQuasiTraitWithSelf = assertTreeCode(q"""trait X{ self => def y = "test" }""")(sm""" + |trait X { self => + | def y = "test" + |}""") + + @Test def testQuasiCaseClassWithBody = assertTreeCode(q"""case class X() { def y = "test" }""")(sm""" + |case class X() { + | def y = "test" + |}""") + + @Test def testQuasiCaseClassWithParamsAndBody = assertTreeCode(q"""case class X(x: Int, s: String){ def y = "test" }""")(sm""" + |case class X(x: Int, s: String) { + | def y = "test" + |}""") + + @Test def testQuasiCaseClassWithTypes1 = assertTreeCode(q"""case class X(x: ${typeOf[Int]}, s: ${typeOf[String]}){ def y = "test" }""")(sm""" + |case class X(x: Int, s: String) { + | def y = "test" + |}""") + + @Test def testQuasiCaseClassWithTypes2 = assertTreeCode(q"""case class X(x: ${typeOf[Int]}, s: ${typeOf[String]}){ def y = "test" }""", typecheck = true)(sm""" + |{ + | case class X(x: Int, s: String) { + | def y = "test" + | }; + | () + |}""") +} diff --git a/test/junit/scala/reflect/internal/ScopeTest.scala b/test/junit/scala/reflect/internal/ScopeTest.scala new file mode 100644 index 000000000000..1ab24facac98 --- /dev/null +++ b/test/junit/scala/reflect/internal/ScopeTest.scala @@ -0,0 +1,54 @@ +package scala.reflect.internal + +import scala.tools.nsc.symtab + +import 
org.junit.Assert._ +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.tools.testing.AssertUtil.assertThrows +import scala.tools.nsc.symtab.SymbolTableForUnitTesting + +@RunWith(classOf[JUnit4]) +class ScopeTest { + object symbolTable extends SymbolTableForUnitTesting + + import symbolTable._ + + @Test + def testNestedScopeSmall(): Unit = testNestedScope(0) + @Test + def testNestedScopeLarge(): Unit = testNestedScope(64) // exceeding MIN_HASH + + private def testNestedScope(initSize: Int) { + def sym(termName: String): Symbol = NoSymbol.newValue(TermName(termName)) + val foo = sym("foo") + val bar = sym("bar") + + val outerElems = List.tabulate(initSize)(i => sym(i.toString)) + val outer = newScopeWith(outerElems ++ List(foo, bar) : _*) + assertTrue(outer.containsName(foo.name)) + assertTrue(outer.containsName(bar.name)) + + val baz = sym("baz") + val nested = newNestedScope(outer) + + // Entries from the outer scope are entered in the nested. + assertTrue(outer.containsName(foo.name)) + assertTrue(outer.containsName(bar.name)) + + // Nested scopes structurally share ScopeEntry-s with the outer. + assertSame(outer.lookupEntry(foo.name), nested.lookupEntry(foo.name)) + nested.enter(baz) + + // Symbols entered in the nested scope aren't visible in the outer. + assertTrue(nested.containsName(baz.name)) + assertTrue(!outer.containsName(baz.name)) + + // Unlinking a symbol in the inner scope doesn't modify the outer + nested.unlink(bar) + assert(!nested.containsName(bar.name)) + assert(outer.containsName(bar.name)) + } +} diff --git a/test/junit/scala/reflect/internal/TypesTest.scala b/test/junit/scala/reflect/internal/TypesTest.scala new file mode 100644 index 000000000000..95194ef0a453 --- /dev/null +++ b/test/junit/scala/reflect/internal/TypesTest.scala @@ -0,0 +1,35 @@ +package scala.reflect.internal + +import org.junit.Assert._ +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import scala.tools.nsc.symtab.SymbolTableForUnitTesting + +@RunWith(classOf[JUnit4]) +class TypesTest { + + object symbolTable extends SymbolTableForUnitTesting + import symbolTable._, definitions._ + + @Test + def testRefinedTypeSI8611(): Unit = { + def stringNarrowed = StringTpe.narrow + assert(stringNarrowed != stringNarrowed) + assert(!(stringNarrowed =:= stringNarrowed)) + + def boolWithString = refinedType(BooleanTpe :: StringTpe :: Nil, NoSymbol) + assert(boolWithString != boolWithString) + assert(boolWithString =:= boolWithString) + + val boolWithString1 = boolWithString + val boolWithString1narrow1 = boolWithString1.narrow + val boolWithString1narrow2 = boolWithString1.narrow + // Two narrowings of the same refinement end up =:=. This was the root + // cause of SI-8611. See `narrowUniquely` in `Logic` for the workaround. 
+ assert(boolWithString1narrow1 =:= boolWithString1narrow2) + val uniquelyNarrowed1 = refinedType(boolWithString1narrow1 :: Nil, NoSymbol) + val uniquelyNarrowed2 = refinedType(boolWithString1narrow2 :: Nil, NoSymbol) + assert(uniquelyNarrowed1 =:= uniquelyNarrowed2) + } +} diff --git a/test/junit/scala/reflect/internal/util/AbstractFileClassLoaderTest.scala b/test/junit/scala/reflect/internal/util/AbstractFileClassLoaderTest.scala new file mode 100644 index 000000000000..a2537ddab7e7 --- /dev/null +++ b/test/junit/scala/reflect/internal/util/AbstractFileClassLoaderTest.scala @@ -0,0 +1,138 @@ +package scala.reflect.internal.util + +import org.junit.Assert._ +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +@RunWith(classOf[JUnit4]) +class AbstractFileClassLoaderTest { + + import scala.reflect.io._ + import scala.io.Source + import scala.io.Codec.UTF8 + import scala.reflect.io.Streamable + import java.net.{ URLClassLoader, URL } + + implicit def `we love utf8` = UTF8 + implicit class `abs file ops`(f: AbstractFile) { + def writeContent(s: String): Unit = Streamable.closing(f.bufferedOutput)(os => os write s.getBytes(UTF8.charSet)) + } + implicit class `url slurp`(url: URL) { + def slurp(): String = Streamable.slurp(url) + } + + val NoClassLoader: ClassLoader = null + + def fuzzBuzzBooz: (AbstractFile, AbstractFile) = { + val fuzz = new VirtualDirectory("fuzz", None) + val buzz = fuzz subdirectoryNamed "buzz" + val booz = buzz fileNamed "booz.class" + (fuzz, booz) + } + + @Test + def afclGetsParent(): Unit = { + val p = new URLClassLoader(Array.empty[URL]) + val d = new VirtualDirectory("vd", None) + val x = new AbstractFileClassLoader(d, p) + assertSame(p, x.getParent) + } + + @Test + def afclGetsResource(): Unit = { + val (fuzz, booz) = fuzzBuzzBooz + booz writeContent "hello, world" + val x = new AbstractFileClassLoader(fuzz, NoClassLoader) + val r = x.getResource("buzz/booz.class") + assertNotNull(r) + assertEquals("hello, world", r.slurp()) + } + + @Test + def afclGetsResourceFromParent(): Unit = { + val (fuzz, booz) = fuzzBuzzBooz + val (fuzz_, booz_) = fuzzBuzzBooz + booz writeContent "hello, world" + booz_ writeContent "hello, world_" + val p = new AbstractFileClassLoader(fuzz, NoClassLoader) + val x = new AbstractFileClassLoader(fuzz_, p) + val r = x.getResource("buzz/booz.class") + assertNotNull(r) + assertEquals("hello, world", r.slurp()) + } + + @Test + def afclGetsResourceInDefaultPackage(): Unit = { + val fuzz = new VirtualDirectory("fuzz", None) + val booz = fuzz fileNamed "booz.class" + val bass = fuzz fileNamed "bass" + booz writeContent "hello, world" + bass writeContent "lo tone" + val x = new AbstractFileClassLoader(fuzz, NoClassLoader) + val r = x.getResource("booz.class") + assertNotNull(r) + assertEquals("hello, world", r.slurp()) + assertEquals("lo tone", (x getResource "bass").slurp()) + } + + // SI-8843 + @Test + def afclGetsResources(): Unit = { + val (fuzz, booz) = fuzzBuzzBooz + booz writeContent "hello, world" + val x = new AbstractFileClassLoader(fuzz, NoClassLoader) + val e = x.getResources("buzz/booz.class") + assertTrue(e.hasMoreElements) + assertEquals("hello, world", e.nextElement.slurp()) + assertFalse(e.hasMoreElements) + } + + @Test + def afclGetsResourcesFromParent(): Unit = { + val (fuzz, booz) = fuzzBuzzBooz + val (fuzz_, booz_) = fuzzBuzzBooz + booz writeContent "hello, world" + booz_ writeContent "hello, world_" + val p = new AbstractFileClassLoader(fuzz, NoClassLoader) + val x = new 
AbstractFileClassLoader(fuzz_, p) + val e = x.getResources("buzz/booz.class") + assertTrue(e.hasMoreElements) + assertEquals("hello, world", e.nextElement.slurp()) + assertTrue(e.hasMoreElements) + assertEquals("hello, world_", e.nextElement.slurp()) + assertFalse(e.hasMoreElements) + } + + @Test + def afclGetsResourceAsStream(): Unit = { + val (fuzz, booz) = fuzzBuzzBooz + booz writeContent "hello, world" + val x = new AbstractFileClassLoader(fuzz, NoClassLoader) + val r = x.getResourceAsStream("buzz/booz.class") + assertNotNull(r) + assertEquals("hello, world", Streamable.closing(r)(is => Source.fromInputStream(is).mkString)) + } + + @Test + def afclGetsClassBytes(): Unit = { + val (fuzz, booz) = fuzzBuzzBooz + booz writeContent "hello, world" + val x = new AbstractFileClassLoader(fuzz, NoClassLoader) + val b = x.classBytes("buzz/booz.class") + assertEquals("hello, world", new String(b, UTF8.charSet)) + } + + @Test + def afclGetsClassBytesFromParent(): Unit = { + val (fuzz, booz) = fuzzBuzzBooz + val (fuzz_, booz_) = fuzzBuzzBooz + booz writeContent "hello, world" + booz_ writeContent "hello, world_" + + val p = new AbstractFileClassLoader(fuzz, NoClassLoader) + val x = new AbstractFileClassLoader(fuzz_, p) + val b = x.classBytes("buzz/booz.class") + assertEquals("hello, world", new String(b, UTF8.charSet)) + } +} diff --git a/test/junit/scala/reflect/internal/util/SourceFileTest.scala b/test/junit/scala/reflect/internal/util/SourceFileTest.scala index 903e705ba271..cad23eba1454 100644 --- a/test/junit/scala/reflect/internal/util/SourceFileTest.scala +++ b/test/junit/scala/reflect/internal/util/SourceFileTest.scala @@ -17,6 +17,11 @@ class SourceFileTest { assertFalse(file.isEndOfLine(Int.MaxValue)) } + @Test def si8630_lineToString(): Unit = { + val code = "abc " + assertEquals(code, new BatchSourceFile("", code).lineToString(0)) + } + @Test def si8205_lineToString(): Unit = { assertEquals("", lineContentOf("", 0)) diff --git a/test/junit/scala/sys/process/t7350.scala b/test/junit/scala/sys/process/t7350.scala new file mode 100644 index 000000000000..7f3e8897f2f2 --- /dev/null +++ b/test/junit/scala/sys/process/t7350.scala @@ -0,0 +1,298 @@ + +package scala.sys.process + +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import org.junit.Test +import java.io.{InputStream, OutputStream, PipedInputStream, PipedOutputStream, ByteArrayInputStream, + ByteArrayOutputStream, IOException, Closeable} +import java.lang.reflect.InvocationTargetException +import scala.concurrent.{Await, Future} +import scala.concurrent.duration.{Duration, SECONDS} +import scala.concurrent.ExecutionContext.Implicits.global +import scala.util.control.Exception.ignoring + +// Each test normally ends in a moment, but for failure cases, waits until one second. 
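+// The failure-path tests below wrap the call under test in
+// scala.util.control.Exception.ignoring, so that only the expected exception
+// type is swallowed while anything unexpected still fails the enclosing Future.
+// A minimal sketch of how that combinator behaves (illustrative only, not part
+// of the tests themselves):
+//
+//   import scala.util.control.Exception.ignoring
+//   ignoring(classOf[IOException]) {
+//     throw new IOException("expected")   // swallowed, the block evaluates to ()
+//   }
+//   // any other exception type would propagate out of the block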
+ +@RunWith(classOf[JUnit4]) +class PipedProcessTest { + class ProcessMock(error: Boolean) extends Process { + var destroyCount = 0 + def exitValue(): Int = { + if (error) { + throw new InterruptedException() + } + 0 + } + def destroy(): Unit = { destroyCount += 1 } + } + + class ProcessBuilderMock(process: Process, error: Boolean) extends ProcessBuilder.AbstractBuilder { + override def run(io: ProcessIO): Process = { + if (error) { + throw new IOException() + } + process + } + } + + class PipeSinkMock extends Process.PipeSink("PipeSinkMock") { + var releaseCount = 0 + override val pipe = null + override val sink = null + override def run(): Unit = {} + override def connectOut(out: OutputStream): Unit = {} + override def connectIn(pipeOut: PipedOutputStream): Unit = {} + override def release(): Unit = { releaseCount += 1 } + } + + class PipeSourceMock extends Process.PipeSource("PipeSourceMock") { + var releaseCount = 0 + override val pipe = null + override val source = null + override def run(): Unit = {} + override def connectIn(in: InputStream): Unit = {} + override def connectOut(sink: Process.PipeSink): Unit = {} + override def release(): Unit = { releaseCount += 1 } + } + + class PipedProcesses(a: ProcessBuilder, b: ProcessBuilder, defaultIO: ProcessIO, toError: Boolean) + extends Process.PipedProcesses(a, b, defaultIO, toError) { + def callRunAndExitValue(source: Process.PipeSource, sink: Process.PipeSink) = { + val m = classOf[Process.PipedProcesses].getDeclaredMethod("runAndExitValue", classOf[Process.PipeSource], classOf[Process.PipeSink]) + m.setAccessible(true) + try m.invoke(this, source, sink).asInstanceOf[Option[Int]] + catch { + case err: InvocationTargetException => throw err.getTargetException + } + } + } + + // PipedProcesses need not to release resources when it normally end + @Test + def normallyEnd() { + val io = BasicIO(false, ProcessLogger(_ => ())) + val source = new PipeSourceMock + val sink = new PipeSinkMock + val a = new ProcessMock(error = false) + val b = new ProcessMock(error = false) + val p = new PipedProcesses(new ProcessBuilderMock(a, error = false), new ProcessBuilderMock(b, error = false), io, false) + val f = Future { + p.callRunAndExitValue(source, sink) + } + Await.result(f, Duration(1, SECONDS)) + assert(source.releaseCount == 0) + assert(sink.releaseCount == 0) + assert(a.destroyCount == 0) + assert(b.destroyCount == 0) + } + + // PipedProcesses must release resources when b.run() failed + @Test + def bFailed() { + val io = BasicIO(false, ProcessLogger(_ => ())) + val source = new PipeSourceMock + val sink = new PipeSinkMock + val a = new ProcessMock(error = false) + val b = new ProcessMock(error = false) + val p = new PipedProcesses(new ProcessBuilderMock(a, error = false), new ProcessBuilderMock(b, error = true), io, false) + val f = Future { + ignoring(classOf[IOException]) { + p.callRunAndExitValue(source, sink) + } + } + Await.result(f, Duration(1, SECONDS)) + assert(source.releaseCount == 1) + assert(sink.releaseCount == 1) + assert(a.destroyCount == 0) + assert(b.destroyCount == 0) + } + + // PipedProcesses must release resources when a.run() failed + @Test + def aFailed() { + val io = BasicIO(false, ProcessLogger(_ => ())) + val source = new PipeSourceMock + val sink = new PipeSinkMock + val a = new ProcessMock(error = false) + val b = new ProcessMock(error = false) + val p = new PipedProcesses(new ProcessBuilderMock(a, error = true), new ProcessBuilderMock(b, error = false), io, false) + val f = Future { + ignoring(classOf[IOException]) { 
+ p.callRunAndExitValue(source, sink) + } + } + Await.result(f, Duration(1, SECONDS)) + assert(source.releaseCount == 1) + assert(sink.releaseCount == 1) + assert(a.destroyCount == 0) + assert(b.destroyCount == 1) + } + + // PipedProcesses must release resources when interrupted during waiting for first.exitValue() + @Test + def firstInterrupted() { + val io = BasicIO(false, ProcessLogger(_ => ())) + val source = new PipeSourceMock + val sink = new PipeSinkMock + val a = new ProcessMock(error = true) + val b = new ProcessMock(error = false) + val p = new PipedProcesses(new ProcessBuilderMock(a, error = false), new ProcessBuilderMock(b, error = false), io, false) + val f = Future { + p.callRunAndExitValue(source, sink) + } + Await.result(f, Duration(1, SECONDS)) + assert(source.releaseCount == 1) + assert(sink.releaseCount == 1) + assert(a.destroyCount == 1) + assert(b.destroyCount == 1) + } + + // PipedProcesses must release resources when interrupted during waiting for second.exitValue() + @Test + def secondInterrupted() { + val io = BasicIO(false, ProcessLogger(_ => ())) + val source = new PipeSourceMock + val sink = new PipeSinkMock + val a = new ProcessMock(error = false) + val b = new ProcessMock(error = true) + val p = new PipedProcesses(new ProcessBuilderMock(a, error = false), new ProcessBuilderMock(b, error = false), io, false) + val f = Future { + p.callRunAndExitValue(source, sink) + } + Await.result(f, Duration(1, SECONDS)) + assert(source.releaseCount == 1) + assert(sink.releaseCount == 1) + assert(a.destroyCount == 1) + assert(b.destroyCount == 1) + } +} + +@RunWith(classOf[JUnit4]) +class PipeSourceSinkTest { + def throwsIOException(f: => Unit) = { + try { f; false } + catch { case _: IOException => true } + } + + class PipeSink extends Process.PipeSink("TestPipeSink") { + def ensureRunloopStarted() = { + while (sink.size() > 0) { + Thread.sleep(1) + } + } + def isReleased = { + val field = classOf[Process.PipeSink].getDeclaredField("pipe") + field.setAccessible(true) + val pipe = field.get(this).asInstanceOf[PipedInputStream] + !this.isAlive && throwsIOException { pipe.read() } + } + } + + class PipeSource extends Process.PipeSource("TestPipeSource") { + def ensureRunloopStarted() = { + while (source.size() > 0) { + Thread.sleep(1) + } + } + def isReleased = { + val field = classOf[Process.PipeSource].getDeclaredField("pipe") + field.setAccessible(true) + val pipe = field.get(this).asInstanceOf[PipedOutputStream] + !this.isAlive && throwsIOException { pipe.write(1) } + } + } + + trait CloseChecking extends Closeable { + var closed = false + override def close() = closed = true + } + class DebugOutputStream extends ByteArrayOutputStream with CloseChecking + class DebugInputStream(s: String) extends ByteArrayInputStream(s.getBytes()) with CloseChecking + class DebugInfinityInputStream extends InputStream with CloseChecking { + def read() = 1 + } + + def sourceSink() = { + val source = new PipeSource + val sink = new PipeSink + source connectOut sink + source.start() + sink.start() + (source, sink) + } + + // PipeSource and PipeSink must release resources when it normally end + @Test + def normallyEnd() { + val in = new DebugInputStream("aaa") + val (source, sink) = sourceSink() + val out = new DebugOutputStream + source connectIn in + sink connectOut out + val f = Future { + source.join() + sink.join() + } + Await.result(f, Duration(1, SECONDS)) + assert(in.closed == true) + assert(out.closed == true) + assert(source.isReleased == true) + assert(sink.isReleased == true) + } + 
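+  // The isReleased checks above rely on java.io piped streams rejecting further
+  // use once closed: after close(), read()/write() throw IOException. A minimal
+  // sketch of that behaviour (illustrative only, independent of the classes under test):
+  //
+  //   val pipe = new PipedInputStream()
+  //   pipe.close()
+  //   val rejects = try { pipe.read(); false } catch { case _: IOException => true }
+  //   // rejects == true: the closed pipe can no longer be read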
+ // PipeSource and PipeSink must release resources when interrupted during waiting for source.take() + @Test + def sourceInterrupted() { + val (source, sink) = sourceSink() + val out = new DebugOutputStream + sink connectOut out + val f = Future { + sink.ensureRunloopStarted() + source.release() + sink.release() + } + Await.result(f, Duration(1, SECONDS)) + assert(out.closed == true) + assert(source.isReleased == true) + assert(sink.isReleased == true) + } + + // PipeSource and PipeSink must release resources when interrupted during waiting for sink.take() + @Test + def sinkInterrupted() { + val in = new DebugInputStream("aaa") + val (source, sink) = sourceSink() + source connectIn in + val f = Future { + source.ensureRunloopStarted() + source.release() + sink.release() + } + Await.result(f, Duration(1, SECONDS)) + assert(in.closed == true) + assert(source.isReleased == true) + assert(sink.isReleased == true) + } + + // PipeSource and PipeSink must release resources when interrupted during copy streams" + @Test + def runloopInterrupted() { + val in = new DebugInfinityInputStream + val (source, sink) = sourceSink() + val out = new DebugOutputStream + source connectIn in + sink connectOut out + val f = Future { + source.ensureRunloopStarted() + sink.ensureRunloopStarted() + source.release() + sink.release() + } + Await.result(f, Duration(1, SECONDS)) + assert(in.closed == true) + assert(out.closed == true) + assert(source.isReleased == true) + assert(sink.isReleased == true) + } +} diff --git a/test/junit/scala/tools/nsc/SampleTest.scala b/test/junit/scala/tools/nsc/SampleTest.scala index 8e026da1eabb..60bb09e98f02 100644 --- a/test/junit/scala/tools/nsc/SampleTest.scala +++ b/test/junit/scala/tools/nsc/SampleTest.scala @@ -1,5 +1,4 @@ package scala.tools.nsc -package test import org.junit.Assert._ import org.junit.Test @@ -12,6 +11,6 @@ import org.junit.runners.JUnit4 class SampleTest { @Test def testMath: Unit = { - assert(2+2 == 4, "you didn't get the math right fellow") + assertTrue("you didn't get the math right fellow", 2 + 2 == 4) } } diff --git a/test/junit/scala/tools/nsc/ScriptRunnerTest.scala b/test/junit/scala/tools/nsc/ScriptRunnerTest.scala new file mode 100644 index 000000000000..9bae7a0487f2 --- /dev/null +++ b/test/junit/scala/tools/nsc/ScriptRunnerTest.scala @@ -0,0 +1,23 @@ +package scala.tools.nsc + +import org.junit.Assert._ +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +@RunWith(classOf[JUnit4]) +class ScriptRunnerTest { + @Test + def testEmptyScriptSucceeds: Unit = { + val s = new GenericRunnerSettings(s => ()) + s.nc.value = true + s.usejavacp.value = true + + // scala -nc -e '' + assertTrue(ScriptRunner.runCommand(s, "", Nil)) + + // scala -nc -save -e '' + s.save.value = true + assertTrue(ScriptRunner.runCommand(s, "", Nil)) + } +} diff --git a/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala new file mode 100644 index 000000000000..2347e8288ea8 --- /dev/null +++ b/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala @@ -0,0 +1,90 @@ +package scala.tools.nsc +package backend.jvm + +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import org.junit.Test +import scala.tools.asm.Opcodes +import org.junit.Assert._ + +@RunWith(classOf[JUnit4]) +class BTypesTest { + val settings = new Settings() + settings.processArgumentString("-usejavacp") + val g: Global = new Global(settings) + val run = new g.Run() // initializes some compiler internals + 
import g.{definitions => d, Symbol} + + def duringBackend[T](f: => T) = g.exitingDelambdafy(f) + + val btypes = new BTypesFromSymbols[g.type](g) + import btypes._ + duringBackend(btypes.initializeCoreBTypes()) + + def classBTypeFromSymbol(sym: Symbol) = duringBackend(btypes.classBTypeFromSymbol(sym)) + + val jlo = d.ObjectClass + val jls = d.StringClass + + val o = classBTypeFromSymbol(jlo) + val s = classBTypeFromSymbol(jls) + val oArr = ArrayBType(o) + val method = MethodBType(List(oArr, INT, DOUBLE, s), UNIT) + + @Test + def classBTypesEquality() { + val s1 = classBTypeFromSymbol(jls) + val s2 = classBTypeFromSymbol(jls) + val o = classBTypeFromSymbol(jlo) + assertEquals(s1, s2) + assertEquals(s1.hashCode, s2.hashCode) + assert(s1 != o) + assert(s2 != o) + } + + @Test + def typedOpcodes() { + assert(UNIT.typedOpcode(Opcodes.IALOAD) == Opcodes.IALOAD) + assert(INT.typedOpcode(Opcodes.IALOAD) == Opcodes.IALOAD) + assert(BOOL.typedOpcode(Opcodes.IALOAD) == Opcodes.BALOAD) + assert(BYTE.typedOpcode(Opcodes.IALOAD) == Opcodes.BALOAD) + assert(CHAR.typedOpcode(Opcodes.IALOAD) == Opcodes.CALOAD) + assert(SHORT.typedOpcode(Opcodes.IALOAD) == Opcodes.SALOAD) + assert(FLOAT.typedOpcode(Opcodes.IALOAD) == Opcodes.FALOAD) + assert(LONG.typedOpcode(Opcodes.IALOAD) == Opcodes.LALOAD) + assert(DOUBLE.typedOpcode(Opcodes.IALOAD) == Opcodes.DALOAD) + assert(classBTypeFromSymbol(jls).typedOpcode(Opcodes.IALOAD) == Opcodes.AALOAD) + + assert(UNIT.typedOpcode(Opcodes.IRETURN) == Opcodes.RETURN) + assert(BOOL.typedOpcode(Opcodes.IRETURN) == Opcodes.IRETURN) + assert(CHAR.typedOpcode(Opcodes.IRETURN) == Opcodes.IRETURN) + assert(BYTE.typedOpcode(Opcodes.IRETURN) == Opcodes.IRETURN) + assert(SHORT.typedOpcode(Opcodes.IRETURN) == Opcodes.IRETURN) + assert(INT.typedOpcode(Opcodes.IRETURN) == Opcodes.IRETURN) + assert(FLOAT.typedOpcode(Opcodes.IRETURN) == Opcodes.FRETURN) + assert(LONG.typedOpcode(Opcodes.IRETURN) == Opcodes.LRETURN) + assert(DOUBLE.typedOpcode(Opcodes.IRETURN) == Opcodes.DRETURN) + assert(classBTypeFromSymbol(jls).typedOpcode(Opcodes.IRETURN) == Opcodes.ARETURN) + } + + @Test + def descriptors() { + assert(o.descriptor == "Ljava/lang/Object;") + assert(s.descriptor == "Ljava/lang/String;") + assert(oArr.descriptor == "[Ljava/lang/Object;") + assert(method.descriptor == "([Ljava/lang/Object;IDLjava/lang/String;)V") + } + + @Test + def toAsmTypeTest() { + for (t <- List(o, s, oArr, method, INT, UNIT, DOUBLE)) { + assertEquals(t.descriptor, t.toASMType.getDescriptor) + } + } + + // TODO @lry do more tests + @Test + def maxTypeTest() { + + } +} diff --git a/test/junit/scala/tools/nsc/backend/jvm/CodeGenTools.scala b/test/junit/scala/tools/nsc/backend/jvm/CodeGenTools.scala new file mode 100644 index 000000000000..c1c5a71b836e --- /dev/null +++ b/test/junit/scala/tools/nsc/backend/jvm/CodeGenTools.scala @@ -0,0 +1,103 @@ +package scala.tools.nsc.backend.jvm + +import org.junit.Assert._ + +import scala.reflect.internal.util.BatchSourceFile +import scala.reflect.io.VirtualDirectory +import scala.tools.asm.Opcodes +import scala.tools.asm.tree.{AbstractInsnNode, LabelNode, ClassNode, MethodNode} +import scala.tools.cmd.CommandLineParser +import scala.tools.nsc.backend.jvm.opt.LocalOpt +import scala.tools.nsc.settings.{MutableSettings, ScalaSettings} +import scala.tools.nsc.{Settings, Global} +import scala.tools.partest.ASMConverters +import scala.collection.JavaConverters._ + +object CodeGenTools { + import ASMConverters._ + + def genMethod( flags: Int = Opcodes.ACC_PUBLIC, + name: String = "m", +
descriptor: String = "()V", + genericSignature: String = null, + throwsExceptions: Array[String] = null, + handlers: List[ExceptionHandler] = Nil, + localVars: List[LocalVariable] = Nil)(body: Instruction*): MethodNode = { + val node = new MethodNode(flags, name, descriptor, genericSignature, throwsExceptions) + applyToMethod(node, Method(body.toList, handlers, localVars)) + node + } + + def wrapInClass(method: MethodNode): ClassNode = { + val cls = new ClassNode() + cls.visit(Opcodes.V1_6, Opcodes.ACC_PUBLIC, "C", null, "java/lang/Object", null) + cls.methods.add(method) + cls + } + + private def resetOutput(compiler: Global): Unit = { + compiler.settings.outputDirs.setSingleOutput(new VirtualDirectory("(memory)", None)) + } + + def newCompiler(defaultArgs: String = "-usejavacp", extraArgs: String = ""): Global = { + val settings = new Settings() + val args = (CommandLineParser tokenize defaultArgs) ++ (CommandLineParser tokenize extraArgs) + settings.processArguments(args, processAll = true) + val compiler = new Global(settings) + resetOutput(compiler) + compiler + } + + def compile(compiler: Global)(code: String): List[(String, Array[Byte])] = { + compiler.reporter.reset() + resetOutput(compiler) + val run = new compiler.Run() + run.compileSources(List(new BatchSourceFile("unitTestSource.scala", code))) + val outDir = compiler.settings.outputDirs.getSingleOutput.get + (for (f <- outDir.iterator if !f.isDirectory) yield (f.name, f.toByteArray)).toList + } + + def compileClasses(compiler: Global)(code: String): List[ClassNode] = { + compile(compiler)(code).map(p => AsmUtils.readClass(p._2)).sortBy(_.name) + } + + def compileMethods(compiler: Global)(code: String): List[MethodNode] = { + compileClasses(compiler)(s"class C { $code }").head.methods.asScala.toList.filterNot(_.name == "") + } + + def singleMethodInstructions(compiler: Global)(code: String): List[Instruction] = { + val List(m) = compileMethods(compiler)(code) + instructionsFromMethod(m) + } + + def singleMethod(compiler: Global)(code: String): Method = { + val List(m) = compileMethods(compiler)(code) + convertMethod(m) + } + + def assertSameCode(actual: List[Instruction], expected: List[Instruction]): Unit = { + assertTrue(s"\nExpected: $expected\nActual : $actual", actual === expected) + } + + def getSingleMethod(classNode: ClassNode, name: String): Method = + convertMethod(classNode.methods.asScala.toList.find(_.name == name).get) + + def assertHandlerLabelPostions(h: ExceptionHandler, instructions: List[Instruction], startIndex: Int, endIndex: Int, handlerIndex: Int): Unit = { + val insVec = instructions.toVector + assertTrue(h.start == insVec(startIndex) && h.end == insVec(endIndex) && h.handler == insVec(handlerIndex)) + } + + val localOpt = { + val settings = new MutableSettings(msg => throw new IllegalArgumentException(msg)) + settings.processArguments(List("-Yopt:l:method"), processAll = true) + new LocalOpt(settings) + } + + import scala.language.implicitConversions + + implicit def aliveInstruction(ins: Instruction): (Instruction, Boolean) = (ins, true) + + implicit class MortalInstruction(val ins: Instruction) extends AnyVal { + def dead: (Instruction, Boolean) = (ins, false) + } +} diff --git a/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala b/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala new file mode 100644 index 000000000000..89900291cacd --- /dev/null +++ b/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala @@ -0,0 +1,73 @@ +package scala.tools.nsc.backend.jvm + 
+import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import org.junit.Assert._ +import CodeGenTools._ +import scala.tools.asm.Opcodes._ +import scala.tools.partest.ASMConverters._ + +@RunWith(classOf[JUnit4]) +class DirectCompileTest { + val compiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:l:method") + + @Test + def testCompile(): Unit = { + val List(("C.class", bytes)) = compile(compiler)( + """class C { + | def f = 1 + |} + """.stripMargin) + def s(i: Int, n: Int) = (bytes(i) & 0xff) << n + assertTrue((s(0, 24) | s(1, 16) | s(2, 8) | s(3, 0)) == 0xcafebabe) // mocha java latte machiatto surpreme dark roasted espresso + } + + @Test + def testCompileClasses(): Unit = { + val List(cClass, cModuleClass) = compileClasses(compiler)("class C; object C") + + assertTrue(cClass.name == "C") + assertTrue(cModuleClass.name == "C$") + + val List(dMirror, dModuleClass) = compileClasses(compiler)("object D") + + assertTrue(dMirror.name == "D") + assertTrue(dModuleClass.name == "D$") + } + + @Test + def testCompileMethods(): Unit = { + val List(f, g) = compileMethods(compiler)( + """def f = 10 + |def g = f + """.stripMargin) + assertTrue(f.name == "f") + assertTrue(g.name == "g") + + assertSameCode(instructionsFromMethod(f).dropNonOp, + List(IntOp(BIPUSH, 10), Op(IRETURN))) + + assertSameCode(instructionsFromMethod(g).dropNonOp, + List(VarOp(ALOAD, 0), Invoke(INVOKEVIRTUAL, "C", "f", "()I", itf = false), Op(IRETURN))) + } + + @Test + def testDropNonOpAliveLabels(): Unit = { + // makes sure that dropNoOp doesn't drop labels that are being used + val List(f) = compileMethods(compiler)("""def f(x: Int) = if (x == 0) "a" else "b"""") + assertSameCode(instructionsFromMethod(f).dropLinesFrames, List( + Label(0), + VarOp(ILOAD, 1), + Op(ICONST_0), + Jump(IF_ICMPNE, + Label(7)), + Ldc(LDC, "a"), + Op(ARETURN), + Label(7), + Ldc(LDC, "b"), + Op(ARETURN), + Label(11) + )) + } +} diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala new file mode 100644 index 000000000000..2975bd060dba --- /dev/null +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala @@ -0,0 +1,95 @@ +package scala.tools.nsc +package backend.jvm +package opt + +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import org.junit.Test +import scala.tools.asm.Opcodes._ +import org.junit.Assert._ + +import scala.tools.nsc.backend.jvm.BTypes.InternalName +import scala.tools.testing.AssertUtil._ + +import CodeGenTools._ +import scala.tools.partest.ASMConverters +import ASMConverters._ + +import scala.collection.convert.decorateAsScala._ + +@RunWith(classOf[JUnit4]) +class BTypesFromClassfileTest { + val compiler = newCompiler(extraArgs = "-Ybackend:GenBCode") + + import compiler._ + import definitions._ + import genBCode.bTypes + import bTypes._ + + def duringBackend[T](f: => T) = compiler.exitingDelambdafy(f) + + val run = new compiler.Run() // initializes some of the compiler + duringBackend(bTypes.initializeCoreBTypes()) + + def clearCache() = bTypes.classBTypeFromInternalName.clear() + + def sameBType(fromSym: ClassBType, fromClassfile: ClassBType, checked: Set[InternalName] = Set.empty): Set[InternalName] = { + if (checked(fromSym.internalName)) checked + else { + assert(fromSym == fromClassfile, s"$fromSym != $fromClassfile") + sameInfo(fromSym.info, fromClassfile.info, checked + fromSym.internalName) + } + } + + def sameBTypes(fromSyms: 
Iterable[ClassBType], fromClassfiles: Iterable[ClassBType], checked: Set[InternalName]): Set[InternalName] = { + assert(fromSyms.size == fromClassfiles.size, s"\n$fromSyms\n$fromClassfiles") + (fromSyms, fromClassfiles).zipped.foldLeft(checked) { + case (chk, (fromSym, fromClassfile)) => sameBType(fromSym, fromClassfile, chk) + } + } + + def sameInfo(fromSym: ClassInfo, fromClassfile: ClassInfo, checked: Set[InternalName]): Set[InternalName] = { + assert({ + // Nested class symbols can undergo makeNotPrivate (ExplicitOuter). But this is only applied + // for symbols of class symbols that are being compiled, not those read from a pickle. + // So a class may be public in bytecode, but the symbol still says private. + if (fromSym.nestedInfo.isEmpty) fromSym.flags == fromClassfile.flags + else (fromSym.flags | ACC_PRIVATE | ACC_PUBLIC) == (fromClassfile.flags | ACC_PRIVATE | ACC_PUBLIC) + }, s"class flags differ\n$fromSym\n$fromClassfile") + + val chk1 = sameBTypes(fromSym.superClass, fromClassfile.superClass, checked) + + val chk2 = sameBTypes(fromSym.interfaces, fromClassfile.interfaces, chk1) + + // The fromSym info has only member classes, no local or anonymous. The symbol is read from the + // Scala pickle data and only member classes are created / entered. + // (This is different for symbols that are being compiled, there flatten will enter all local + // and anonymous classes as members of the outer class. But not for unpickled symbols). + // The fromClassfile info has all nested classes, including anonymous and local. So we filter + // them out: member classes are identified by having the `outerName` defined. + val memberClassesFromClassfile = fromClassfile.nestedClasses.filter(_.info.nestedInfo.get.outerName.isDefined) + // Sorting is required: the backend sorts all InnerClass entries by internalName before writing + // them to the classfile (to make it deterministic: the entries are collected in a Set during + // code generation). + val chk3 = sameBTypes(fromSym.nestedClasses.sortBy(_.internalName), memberClassesFromClassfile.sortBy(_.internalName), chk2) + sameBTypes(fromSym.nestedInfo.map(_.enclosingClass), fromClassfile.nestedInfo.map(_.enclosingClass), chk3) + } + + def check(classSym: Symbol): Unit = duringBackend { + clearCache() + val fromSymbol = classBTypeFromSymbol(classSym) + clearCache() + val fromClassfile = bTypes.classBTypeFromParsedClassfile(fromSymbol.internalName) + sameBType(fromSymbol, fromClassfile) + } + + @Test + def compareClassBTypes(): Unit = { + // Note that not only these classes are tested, but also all their parents and all nested + // classes in their InnerClass attributes. 
+ check(ObjectClass) + check(JavaNumberClass) + check(ConsClass) + check(ListModule.moduleClass) + } +} diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala new file mode 100644 index 000000000000..fc748196d074 --- /dev/null +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala @@ -0,0 +1,80 @@ +package scala.tools.nsc +package backend.jvm +package opt + +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import org.junit.Test +import scala.tools.asm.Opcodes._ +import org.junit.Assert._ + +import CodeGenTools._ +import scala.tools.partest.ASMConverters +import ASMConverters._ + +@RunWith(classOf[JUnit4]) +class CompactLocalVariablesTest { + + // recurse-unreachable-jumps is required for eliminating catch blocks: in the first dce round they + // are still live. Only after eliminating the empty handler do the catch blocks become unreachable. + val methodOptCompiler = newCompiler(extraArgs = "-target:jvm-1.6 -Ybackend:GenBCode -Yopt:unreachable-code,recurse-unreachable-jumps,compact-locals") + val noCompactVarsCompiler = newCompiler(extraArgs = "-target:jvm-1.6 -Ybackend:GenBCode -Yopt:unreachable-code,recurse-unreachable-jumps") + + @Test + def compactUnused(): Unit = { + val code = + """def f: Double = { + | try { } + | catch { + | case _: Throwable => + | // eliminated by dce + | val i = 1 + | val d = 1d + | val f = 1f + | val l = 1l + | } + | + | val i = 1 // variable index 1 (it's an instance method, so at index 0 we have `this`) + | val d = 1d // 2,3 + | val f = 1f // 4 + | val l = 1l // 5,6 + | + | try { } + | catch { + | case _: Throwable => + | // eliminated by dce + | val i = 1 + | val d = 1d + | val f = 1f + | val l = 1l + | } + | + | val ii = 1 // 7 + | val dd = 1d // 8,9 + | val ff = 1f // 10 + | val ll = 1l // 11,12 + | + | i + ii + d + dd + f + ff + l + ll + |} + |""".stripMargin + + val List(noCompact) = compileMethods(noCompactVarsCompiler)(code) + val List(withCompact) = compileMethods(methodOptCompiler)(code) + + // code is the same, except for local var indices + assertTrue(noCompact.instructions.size == withCompact.instructions.size) + + val varOpSlots = convertMethod(withCompact).instructions collect { + case VarOp(_, v) => v + } + assertTrue(varOpSlots.toString, varOpSlots == List(1, 2, 4, 5, 7, 8, 10, 11, // stores + 1, 7, 2, 8, 4, 10, 5, 11)) // loads + + // the local variables descriptor table is cleaned up to remove stale entries after dce, + // even when the slots are not compacted + assertTrue(noCompact.localVariables.size == withCompact.localVariables.size) + + assertTrue(noCompact.maxLocals == 25) + assertTrue(withCompact.maxLocals == 13) + } +} diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala new file mode 100644 index 000000000000..7d83c54b5b2f --- /dev/null +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala @@ -0,0 +1,88 @@ +package scala.tools.nsc +package backend.jvm +package opt + +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import org.junit.Test +import scala.tools.asm.Opcodes._ +import org.junit.Assert._ + +import CodeGenTools._ +import scala.tools.partest.ASMConverters +import ASMConverters._ + +@RunWith(classOf[JUnit4]) +class EmptyExceptionHandlersTest { + + val exceptionDescriptor = "java/lang/Exception" + + @Test + def 
eliminateEmpty(): Unit = { + val handlers = List(ExceptionHandler(Label(1), Label(2), Label(2), Some(exceptionDescriptor))) + val asmMethod = genMethod(handlers = handlers)( + Label(1), + Label(2), + Op(RETURN) + ) + assertTrue(convertMethod(asmMethod).handlers.length == 1) + localOpt.removeEmptyExceptionHandlers(asmMethod) + assertTrue(convertMethod(asmMethod).handlers.isEmpty) + } + + @Test + def eliminateHandlersGuardingNops(): Unit = { + val handlers = List(ExceptionHandler(Label(1), Label(2), Label(2), Some(exceptionDescriptor))) + val asmMethod = genMethod(handlers = handlers)( + Label(1), // nops only + Jump(GOTO, Label(3)), + Label(3), + Jump(GOTO, Label(4)), + + Label(2), // handler + Op(ACONST_NULL), + Op(ATHROW), + + Label(4), // return + Op(RETURN) + ) + assertTrue(convertMethod(asmMethod).handlers.length == 1) + localOpt.removeEmptyExceptionHandlers(asmMethod) + assertTrue(convertMethod(asmMethod).handlers.isEmpty) + } + + val noOptCompiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:l:none") + val dceCompiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:unreachable-code") + + @Test + def eliminateUnreachableHandler(): Unit = { + val code = "def f: Unit = try { } catch { case _: Exception => println(0) }; println(1)" + + assertTrue(singleMethod(noOptCompiler)(code).handlers.length == 1) + val optMethod = singleMethod(dceCompiler)(code) + assertTrue(optMethod.handlers.isEmpty) + + val code2 = + """def f: Unit = { + | println(0) + | return + | try { throw new Exception("") } // removed by dce, so handler will be removed as well + | catch { case _: Exception => println(1) } + | println(2) + |}""".stripMargin + + assertTrue(singleMethod(dceCompiler)(code2).handlers.isEmpty) + } + + @Test + def keepAliveHandlers(): Unit = { + val code = + """def f: Int = { + | println(0) + | try { 1 } + | catch { case _: Exception => 2 } + |}""".stripMargin + + assertTrue(singleMethod(dceCompiler)(code).handlers.length == 1) + } +} diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyLabelsAndLineNumbersTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyLabelsAndLineNumbersTest.scala new file mode 100644 index 000000000000..8c0168826ece --- /dev/null +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyLabelsAndLineNumbersTest.scala @@ -0,0 +1,99 @@ +package scala.tools.nsc +package backend.jvm +package opt + +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import org.junit.Test +import scala.tools.asm.Opcodes._ +import org.junit.Assert._ +import scala.tools.testing.AssertUtil._ + +import CodeGenTools._ +import scala.tools.partest.ASMConverters +import ASMConverters._ + +@RunWith(classOf[JUnit4]) +class EmptyLabelsAndLineNumbersTest { + @Test + def removeEmptyLineNumbers(): Unit = { + val ops = List[(Instruction, Boolean)]( + Label(1), + LineNumber(1, Label(1)), + Label(2), + Label(3), + Op(RETURN), + + Label(4), + LineNumber(4, Label(4)).dead, + LineNumber(5, Label(4)), + Op(RETURN), + + Label(5), + LineNumber(6, Label(5)).dead, + Label(6), + Label(7), + LineNumber(7, Label(7)), + Op(RETURN), + + Label(9), + LineNumber(8, Label(9)).dead, + Label(10) + ) + + val method = genMethod()(ops.map(_._1): _*) + assertTrue(localOpt.removeEmptyLineNumbers(method)) + assertSameCode(instructionsFromMethod(method), ops.filter(_._2).map(_._1)) + } + + @Test + def badlyLocatedLineNumbers(): Unit = { + def t(ops: Instruction*) = + assertThrows[AssertionError](localOpt.removeEmptyLineNumbers(genMethod()(ops: _*))) + + // line numbers have to be right after their 
referenced label node + t(LineNumber(0, Label(1)), Label(1)) + t(Label(0), Label(1), LineNumber(0, Label(0))) + } + + @Test + def removeEmptyLabels(): Unit = { + val handler = List(ExceptionHandler(Label(4), Label(5), Label(6), Some("java/lang/Throwable"))) + def ops(target1: Int, target2: Int, target3: Int, target4: Int, target5: Int, target6: Int) = List[(Instruction, Boolean)]( + Label(1), + Label(2).dead, + Label(3).dead, + LineNumber(3, Label(target1)), + VarOp(ILOAD, 1), + Jump(IFGE, Label(target2)), + + Label(4), + Label(5).dead, + Label(6).dead, + VarOp(ILOAD, 2), + Jump(IFGE, Label(target3)), + + Label(7), + Label(8).dead, + Label(9).dead, + Op(RETURN), + + LookupSwitch(LOOKUPSWITCH, Label(target4), List(1,2), List(Label(target4), Label(target5))), + TableSwitch(TABLESWITCH, 1, 2, Label(target4), List(Label(target4), Label(target5))), + + Label(10), + LineNumber(10, Label(10)), + Label(11).dead, + LineNumber(12, Label(target6)) + ) + + val method = genMethod(handlers = handler)(ops(2, 3, 8, 8, 9, 11).map(_._1): _*) + assertTrue(localOpt.removeEmptyLabelNodes(method)) + val m = convertMethod(method) + assertSameCode(m.instructions, ops(1, 1, 7, 7, 7, 10).filter(_._2).map(_._1)) + assertTrue(m.handlers match { + case List(ExceptionHandler(Label(4), Label(4), Label(4), _)) => true + case _ => false + }) + } +} diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOpts.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOpts.scala new file mode 100644 index 000000000000..5430e33d6cd1 --- /dev/null +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOpts.scala @@ -0,0 +1,83 @@ +package scala.tools.nsc +package backend.jvm +package opt + +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import org.junit.Test +import scala.tools.asm.Opcodes._ +import org.junit.Assert._ + +import scala.tools.testing.AssertUtil._ + +import CodeGenTools._ +import scala.tools.partest.ASMConverters +import ASMConverters._ + +@RunWith(classOf[JUnit4]) +class MethodLevelOpts { + val methodOptCompiler = newCompiler(extraArgs = "-target:jvm-1.6 -Ybackend:GenBCode -Yopt:l:method") + + def wrapInDefault(code: Instruction*) = List(Label(0), LineNumber(1, Label(0))) ::: code.toList ::: List(Label(1)) + + @Test + def eliminateEmptyTry(): Unit = { + val code = "def f = { try {} catch { case _: Throwable => 0; () }; 1 }" + assertSameCode(singleMethodInstructions(methodOptCompiler)(code), wrapInDefault(Op(ICONST_1), Op(IRETURN))) + } + + @Test + def cannotEliminateLoadBoxedUnit(): Unit = { + // the compiler inserts a boxed unit into the try block. It's therefore non-empty (and live) and not eliminated. 
+ val code = "def f = { try {} catch { case _: Throwable => 0 }; 1 }" + val m = singleMethod(methodOptCompiler)(code) + assertTrue(m.handlers.length == 1) + assertSameCode(m.instructions.take(3), List(Label(0), LineNumber(1, Label(0)), Field(GETSTATIC, "scala/runtime/BoxedUnit", "UNIT", "Lscala/runtime/BoxedUnit;"))) + } + + @Test + def inlineThrowInCatchNotTry(): Unit = { + // the try block does not contain the `ATHROW` instruction, but in the catch block, `ATHROW` is inlined + val code = "def f(e: Exception) = throw { try e catch { case _: Throwable => e } }" + val m = singleMethod(methodOptCompiler)(code) + assertHandlerLabelPostions(m.handlers.head, m.instructions, 0, 3, 5) + assertSameCode(m.instructions, + wrapInDefault(VarOp(ALOAD, 1), Label(3), Op(ATHROW), Label(5), FrameEntry(4, List(), List("java/lang/Throwable")), Op(POP), VarOp(ALOAD, 1), Op(ATHROW)) + ) + } + + @Test + def inlineReturnInCachtNotTry(): Unit = { + val code = "def f: Int = return { try 1 catch { case _: Throwable => 2 } }" + // cannot inline the IRETURN into the try block (because RETURN may throw IllegalMonitorState) + val m = singleMethod(methodOptCompiler)(code) + assertHandlerLabelPostions(m.handlers.head, m.instructions, 0, 3, 5) + assertSameCode(m.instructions, + wrapInDefault(Op(ICONST_1), Label(3), Op(IRETURN), Label(5), FrameEntry(4, List(), List("java/lang/Throwable")), Op(POP), Op(ICONST_2), Op(IRETURN))) + } + + @Test + def simplifyJumpsInTryCatchFinally(): Unit = { + val code = + """def f: Int = + | try { + | return 1 + | } catch { + | case _: Throwable => + | return 2 + | } finally { + | return 2 + | // dead + | val x = try 10 catch { case _: Throwable => 11 } + | println(x) + | } + """.stripMargin + val m = singleMethod(methodOptCompiler)(code) + assertTrue(m.handlers.length == 2) + assertSameCode(m.instructions.dropNonOp, // drop line numbers and labels that are only used by line numbers + + // one single label left :-) + List(Op(ICONST_1), VarOp(ISTORE, 2), Jump(GOTO, Label(20)), Op(POP), Op(ICONST_2), VarOp(ISTORE, 2), Jump(GOTO, Label(20)), VarOp(ASTORE, 3), Op(ICONST_2), Op(IRETURN), Label(20), Op(ICONST_2), Op(IRETURN)) + ) + } +} diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/SimplifyJumpsTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/SimplifyJumpsTest.scala new file mode 100644 index 000000000000..360fa1d23daf --- /dev/null +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/SimplifyJumpsTest.scala @@ -0,0 +1,221 @@ +package scala.tools.nsc +package backend.jvm +package opt + +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import org.junit.Test +import scala.tools.asm.Opcodes._ +import org.junit.Assert._ + +import CodeGenTools._ +import scala.tools.partest.ASMConverters +import ASMConverters._ + +@RunWith(classOf[JUnit4]) +class SimplifyJumpsTest { + @Test + def simpleGotoReturn(): Unit = { + val ops = List( + Jump(GOTO, Label(2)), // replaced by RETURN + Op(ICONST_1), // need some code, otherwise removeJumpToSuccessor kicks in + Op(POP), + Label(1), // multiple labels OK + Label(2), + Label(3), + Op(RETURN) + ) + val method = genMethod()(ops: _*) + assertTrue(localOpt.simplifyJumps(method)) + assertSameCode(instructionsFromMethod(method), Op(RETURN) :: ops.tail) + } + + @Test + def simpleGotoThrow(): Unit = { + val rest = List( + Op(ICONST_1), // need some code, otherwise removeJumpToSuccessor kicks in + Op(POP), + Label(1), + Label(2), + Label(3), + Op(ATHROW) + ) + val method = genMethod()( + Op(ACONST_NULL) :: + Jump(GOTO, Label(2)) :: // replaced by 
ATHROW + rest: _* + ) + assertTrue(localOpt.simplifyJumps(method)) + assertSameCode(instructionsFromMethod(method), Op(ACONST_NULL) :: Op(ATHROW) :: rest) + } + + @Test + def gotoThrowInTry(): Unit = { + val handler = List(ExceptionHandler(Label(1), Label(2), Label(4), Some("java/lang/Throwable"))) + val initialInstrs = List( + Label(1), + Op(ACONST_NULL), + Jump(GOTO, Label(3)), // not replaced by ATHROW (that would move the ATHROW into a try block) + Label(2), + Op(ICONST_1), // need some code, otherwise removeJumpToSuccessor kicks in + Op(POP), + Label(3), + Op(ATHROW), + Label(4), + Op(POP), + Op(RETURN) + ) + val method = genMethod(handlers = handler)(initialInstrs: _*) + assertFalse(localOpt.simplifyJumps(method)) + assertSameCode(instructionsFromMethod(method), initialInstrs) + + val optMethod = genMethod()(initialInstrs: _*) // no handler + assertTrue(localOpt.simplifyJumps(optMethod)) + assertSameCode(instructionsFromMethod(optMethod).take(3), List(Label(1), Op(ACONST_NULL), Op(ATHROW))) + } + + @Test + def simplifyBranchOverGoto(): Unit = { + val begin = List( + VarOp(ILOAD, 1), + Jump(IFGE, Label(2)) + ) + val rest = List( + Jump(GOTO, Label(3)), + Label(11), // other labels here are allowed + Label(2), + VarOp(ILOAD, 1), + Op(RETURN), + Label(3), + VarOp(ILOAD, 1), + Op(IRETURN) + ) + val method = genMethod()(begin ::: rest: _*) + assertTrue(localOpt.simplifyJumps(method)) + assertSameCode( + instructionsFromMethod(method), + List(VarOp(ILOAD, 1), Jump(IFLT, Label(3))) ::: rest.tail ) + + // no label allowed between begin and rest. If there's another label, then there could be a + // branch to that label. Eliminating the GOTO would change the behavior. + val nonOptMethod = genMethod()(begin ::: Label(22) :: rest: _*) + assertFalse(localOpt.simplifyJumps(nonOptMethod)) + } + + @Test + def ensureGotoRemoved(): Unit = { + def code(jumps: Instruction*) = List( + VarOp(ILOAD, 1)) ::: jumps.toList ::: List( + Label(2), + + Op(RETURN), + Label(3), + Op(RETURN) + ) + + // ensures that the goto is safely removed. ASM supports removing the current element while + // iterating, but not the one that follows it. Here, the current is the IFGE, the next is the GOTO. 
+ val method = genMethod()(code(Jump(IFGE, Label(2)), Jump(GOTO, Label(3))): _*) + assertTrue(localOpt.simplifyJumps(method)) + assertSameCode(instructionsFromMethod(method), code(Jump(IFLT, Label(3)))) + } + + @Test + def removeJumpToSuccessor(): Unit = { + val ops = List( + Jump(GOTO, Label(1)), + Label(11), + Label(1), + Label(2), + VarOp(ILOAD, 1), + Op(IRETURN) + ) + val method = genMethod()(ops: _*) + assertTrue(localOpt.simplifyJumps(method)) + assertSameCode(instructionsFromMethod(method), ops.tail) + } + + @Test + def collapseJumpChains(): Unit = { + def ops(target1: Int, target2: Int, target3: Int) = List( + VarOp(ILOAD, 1), + Jump(IFGE, Label(target1)), // initially 1, then 3 + VarOp(ILOAD, 1), + Op(IRETURN), + + Label(2), + Jump(GOTO, Label(target3)), + + Label(1), + Jump(GOTO, Label(target2)), // initially 2, then 3 + + VarOp(ILOAD, 1), // some code to prevent jumpToSuccessor optimization (once target2 is replaced by 3) + Op(RETURN), + + Label(3), + VarOp(ILOAD, 1), + Op(IRETURN) + ) + val method = genMethod()(ops(1, 2, 3): _*) + assertTrue(localOpt.simplifyJumps(method)) + assertSameCode(instructionsFromMethod(method), ops(3, 3, 3)) + } + + @Test + def collapseJumpChainLoop(): Unit = { + def ops(target: Int) = List( + VarOp(ILOAD, 1), + Jump(IFGE, Label(target)), + + Label(4), + Jump(GOTO, Label(3)), + + VarOp(ILOAD, 1), // some code to prevent jumpToSuccessor (label 3) + Op(IRETURN), + + Label(3), + Jump(GOTO, Label(4)), + + Label(2), + Jump(GOTO, Label(3)) + ) + + val method = genMethod()(ops(2): _*) + assertTrue(localOpt.simplifyJumps(method)) + assertSameCode(instructionsFromMethod(method), ops(3)) + } + + @Test + def simplifyThenElseSameTarget(): Unit = { + def ops(jumpOp: Instruction) = List( + VarOp(ILOAD, 1), + jumpOp, + Label(2), + Jump(GOTO, Label(1)), + + VarOp(ILOAD, 1), // some code to prevent jumpToSuccessor (label 1) + Op(IRETURN), + + Label(1), + VarOp(ILOAD, 1), + Op(IRETURN) + ) + + val method = genMethod()(ops(Jump(IFGE, Label(1))): _*) + assertTrue(localOpt.simplifyJumps(method)) + assertSameCode(instructionsFromMethod(method), ops(Op(POP))) + } + + @Test + def thenElseSameTargetLoop(): Unit = { + def ops(br: List[Instruction]) = List( + VarOp(ILOAD, 1), + VarOp(ILOAD, 2)) ::: br ::: List( + Label(1), + Jump(GOTO, Label(1)) + ) + val method = genMethod()(ops(List(Jump(IF_ICMPGE, Label(1)))): _*) + assertTrue(localOpt.simplifyJumps(method)) + assertSameCode(instructionsFromMethod(method), ops(List(Op(POP), Op(POP)))) + } +} diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala new file mode 100644 index 000000000000..4a45dd9138fc --- /dev/null +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala @@ -0,0 +1,211 @@ +package scala.tools.nsc +package backend.jvm +package opt + +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import org.junit.Test +import scala.tools.asm.Opcodes._ +import org.junit.Assert._ + +import scala.tools.testing.AssertUtil._ + +import CodeGenTools._ +import scala.tools.partest.ASMConverters +import ASMConverters._ + +@RunWith(classOf[JUnit4]) +class UnreachableCodeTest { + + def assertEliminateDead(code: (Instruction, Boolean)*): Unit = { + val method = genMethod()(code.map(_._1): _*) + localOpt.removeUnreachableCodeImpl(method, "C") + val nonEliminated = instructionsFromMethod(method) + val expectedLive = code.filter(_._2).map(_._1).toList + assertSameCode(nonEliminated, expectedLive) + } + + // 
jvm-1.6 enables emitting stack map frames, which impacts the code generation wrt dead basic blocks, + // see comment in BCodeBodyBuilder + val methodOptCompiler = newCompiler(extraArgs = "-target:jvm-1.6 -Ybackend:GenBCode -Yopt:l:method") + val dceCompiler = newCompiler(extraArgs = "-target:jvm-1.6 -Ybackend:GenBCode -Yopt:unreachable-code") + val noOptCompiler = newCompiler(extraArgs = "-target:jvm-1.6 -Ybackend:GenBCode -Yopt:l:none") + + // jvm-1.5 disables computing stack map frames, and it emits dead code as-is. + val noOptNoFramesCompiler = newCompiler(extraArgs = "-target:jvm-1.5 -Ybackend:GenBCode -Yopt:l:none") + + @Test + def basicElimination(): Unit = { + assertEliminateDead( + Op(ACONST_NULL), + Op(ATHROW), + Op(RETURN).dead + ) + + assertEliminateDead( + Op(RETURN) + ) + + assertEliminateDead( + Op(RETURN), + Op(ACONST_NULL).dead, + Op(ATHROW).dead + ) + } + + @Test + def eliminateNop(): Unit = { + assertEliminateDead( + // reachable, but removed anyway. + Op(NOP).dead, + Op(RETURN), + Op(NOP).dead + ) + } + + @Test + def eliminateBranchOver(): Unit = { + assertEliminateDead( + Jump(GOTO, Label(1)), + Op(ACONST_NULL).dead, + Op(ATHROW).dead, + Label(1), + Op(RETURN) + ) + + assertEliminateDead( + Jump(GOTO, Label(1)), + Label(1), + Op(RETURN) + ) + } + + @Test + def deadLabelsRemain(): Unit = { + assertEliminateDead( + Op(RETURN), + Jump(GOTO, Label(1)).dead, + // not dead - labels may be referenced from other places in a classfile (eg exceptions table). + // will need a different opt to get rid of them + Label(1) + ) + } + + @Test + def pushPopNotEliminated(): Unit = { + assertEliminateDead( + // not dead, visited by data flow analysis. + Op(ACONST_NULL), + Op(POP), + Op(RETURN) + ) + } + + @Test + def nullnessNotConsidered(): Unit = { + assertEliminateDead( + Op(ACONST_NULL), + Jump(IFNULL, Label(1)), + Op(RETURN), // not dead + Label(1), + Op(RETURN) + ) + } + + @Test + def basicEliminationCompiler(): Unit = { + val code = "def f: Int = { return 1; 2 }" + val withDce = singleMethodInstructions(dceCompiler)(code) + assertSameCode(withDce.dropNonOp, List(Op(ICONST_1), Op(IRETURN))) + + val noDce = singleMethodInstructions(noOptCompiler)(code) + + // The emitted code is ICONST_1, IRETURN, ICONST_2, IRETURN. The latter two are dead. + // + // GenBCode puts the last IRETURN into a new basic block: it emits a label before the second + // IRETURN. This is an implementation detail, it may change; it affects the outcome of this test. + // + // During classfile writing with COMPUTE_FAMES (-target:jvm-1.6 or larger), the ClassfileWriter + // puts the ICONST_2 into a new basic block, because the preceding operation (IRETURN) ends + // the current block. We get something like + // + // L1: ICONST_1; IRETURN + // L2: ICONST_2 << dead + // L3: IRETURN << dead + // + // Finally, instructions in the dead basic blocks are replaced by ATHROW, as explained in + // a comment in BCodeBodyBuilder. + assertSameCode(noDce.dropNonOp, List(Op(ICONST_1), Op(IRETURN), Op(ATHROW), Op(ATHROW))) + + // when NOT computing stack map frames, ASM's ClassWriter does not replace dead code by NOP/ATHROW + val noDceNoFrames = singleMethodInstructions(noOptNoFramesCompiler)(code) + assertSameCode(noDceNoFrames.dropNonOp, List(Op(ICONST_1), Op(IRETURN), Op(ICONST_2), Op(IRETURN))) + } + + @Test + def eliminateDeadCatchBlocks(): Unit = { + // the Label(1) is live: it's used in the local variable descriptor table (local variable "this" has a range from 0 to 1). 
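+ // wrapInDefault brackets the expected instructions with the start label, its line number entry
+ // and the end label that surround every compiled method body compared in this test.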
+ def wrapInDefault(code: Instruction*) = List(Label(0), LineNumber(1, Label(0))) ::: code.toList ::: List(Label(1)) + + val code = "def f: Int = { return 0; try { 1 } catch { case _: Exception => 2 } }" + val m = singleMethod(dceCompiler)(code) + assertTrue(m.handlers.isEmpty) // redundant (if code is gone, handler is gone), but done once here for extra safety + assertSameCode(m.instructions, + wrapInDefault(Op(ICONST_0), Op(IRETURN))) + + val code2 = "def f: Unit = { try { } catch { case _: Exception => () }; () }" + // requires fixpoint optimization of methodOptCompiler (dce alone is not enough): first the handler is eliminated, then it's dead catch block. + assertSameCode(singleMethodInstructions(methodOptCompiler)(code2), wrapInDefault(Op(RETURN))) + + val code3 = "def f: Unit = { try { } catch { case _: Exception => try { } catch { case _: Exception => () } }; () }" + assertSameCode(singleMethodInstructions(methodOptCompiler)(code3), wrapInDefault(Op(RETURN))) + + // this example requires two iterations to get rid of the outer handler. + // the first iteration of DCE cannot remove the inner handler. then the inner (empty) handler is removed. + // then the second iteration of DCE removes the inner catch block, and then the outer handler is removed. + val code4 = "def f: Unit = { try { try { } catch { case _: Exception => () } } catch { case _: Exception => () }; () }" + assertSameCode(singleMethodInstructions(methodOptCompiler)(code4), wrapInDefault(Op(RETURN))) + } + + @Test // test the dce-testing tools + def metaTest(): Unit = { + assertThrows[AssertionError]( + assertEliminateDead(Op(RETURN).dead), + _.contains("Expected: List()\nActual : List(Op(RETURN))") + ) + + assertThrows[AssertionError]( + assertEliminateDead(Op(RETURN), Op(RETURN)), + _.contains("Expected: List(Op(RETURN), Op(RETURN))\nActual : List(Op(RETURN))") + ) + } + + @Test + def bytecodeEquivalence: Unit = { + assertTrue(List(VarOp(ILOAD, 1)) === + List(VarOp(ILOAD, 2))) + assertTrue(List(VarOp(ILOAD, 1), VarOp(ISTORE, 1)) === + List(VarOp(ILOAD, 2), VarOp(ISTORE, 2))) + + // the first Op will associate 1->2, then the 2->2 will fail + assertFalse(List(VarOp(ILOAD, 1), VarOp(ISTORE, 2)) === + List(VarOp(ILOAD, 2), VarOp(ISTORE, 2))) + + // will associate 1->2 and 2->1, which is OK + assertTrue(List(VarOp(ILOAD, 1), VarOp(ISTORE, 2)) === + List(VarOp(ILOAD, 2), VarOp(ISTORE, 1))) + + assertTrue(List(Label(1), Label(2), Label(1)) === + List(Label(2), Label(4), Label(2))) + assertTrue(List(LineNumber(1, Label(1)), Label(1)) === + List(LineNumber(1, Label(3)), Label(3))) + assertFalse(List(LineNumber(1, Label(1)), Label(1)) === + List(LineNumber(1, Label(3)), Label(1))) + + assertTrue(List(TableSwitch(TABLESWITCH, 1, 3, Label(4), List(Label(5), Label(6))), Label(4), Label(5), Label(6)) === + List(TableSwitch(TABLESWITCH, 1, 3, Label(9), List(Label(3), Label(4))), Label(9), Label(3), Label(4))) + + assertTrue(List(FrameEntry(F_FULL, List(INTEGER, DOUBLE, Label(3)), List("java/lang/Object", Label(4))), Label(3), Label(4)) === + List(FrameEntry(F_FULL, List(INTEGER, DOUBLE, Label(1)), List("java/lang/Object", Label(3))), Label(1), Label(3))) + } +} diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala new file mode 100644 index 000000000000..24a1f9d1c1fb --- /dev/null +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala @@ -0,0 +1,87 @@ +package scala.tools.nsc +package backend.jvm 
+package opt + +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import org.junit.Test +import scala.tools.asm.Opcodes._ +import org.junit.Assert._ +import scala.collection.JavaConverters._ + +import CodeGenTools._ +import scala.tools.partest.ASMConverters +import ASMConverters._ + +@RunWith(classOf[JUnit4]) +class UnusedLocalVariablesTest { + val dceCompiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:unreachable-code") + + @Test + def removeUnusedVar(): Unit = { + val code = """def f(a: Long, b: String, c: Double): Unit = { return; var x = a; var y = x + 10 }""" + assertLocalVarCount(code, 4) // `this, a, b, c` + + val code2 = """def f(): Unit = { var x = if (true) return else () }""" + assertLocalVarCount(code2, 1) // x is eliminated, constant folding in scalac removes the if + + val code3 = """def f: Unit = return""" // paramless method + assertLocalVarCount(code3, 1) // this + } + + @Test + def keepUsedVar(): Unit = { + val code = """def f(a: Long, b: String, c: Double): Unit = { val x = 10 + a; val y = x + 10 }""" + assertLocalVarCount(code, 6) + + val code2 = """def f(a: Long): Unit = { var x = if (a == 0l) return else () }""" + assertLocalVarCount(code2, 3) // x remains: the condition is not a constant + } + + @Test + def constructorLocals(): Unit = { + val code = """class C { + | def this(a: Int) = { + | this() + | throw new Exception("") + | val y = 0 + | } + |} + |""".stripMargin + val cls = compileClasses(dceCompiler)(code).head + val m = convertMethod(cls.methods.asScala.toList.find(_.desc == "(I)V").get) + assertTrue(m.localVars.length == 2) // this, a, but not y + + + val code2 = + """class C { + | { + | throw new Exception("") + | val a = 0 + | } + |} + | + |object C { + | { + | throw new Exception("") + | val b = 1 + | } + |} + """.stripMargin + + val clss2 = compileClasses(dceCompiler)(code2) + val cls2 = clss2.find(_.name == "C").get + val companion2 = clss2.find(_.name == "C$").get + + val clsConstr = convertMethod(cls2.methods.asScala.toList.find(_.name == "<init>").get) + val companionConstr = convertMethod(companion2.methods.asScala.toList.find(_.name == "<init>").get) + + assertTrue(clsConstr.localVars.length == 1) // this + assertTrue(companionConstr.localVars.length == 1) // this + } + + def assertLocalVarCount(code: String, numVars: Int): Unit = { + assertTrue(singleMethod(dceCompiler)(code).localVars.length == numVars) + } + +} diff --git a/test/junit/scala/tools/nsc/classpath/AggregateFlatClassPathTest.scala b/test/junit/scala/tools/nsc/classpath/AggregateFlatClassPathTest.scala new file mode 100644 index 000000000000..9a004d5e0eae --- /dev/null +++ b/test/junit/scala/tools/nsc/classpath/AggregateFlatClassPathTest.scala @@ -0,0 +1,208 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package scala.tools.nsc.classpath + +import java.net.URL +import org.junit.Assert._ +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import scala.reflect.io.VirtualFile +import scala.tools.nsc.io.AbstractFile + +/** + * Tests whether AggregateFlatClassPath returns correct entries taken from + * the cp instances used to create it and whether it preserves the ordering + * (in the case of a repeated entry for a class or a source it returns the first one).
+ */ +@RunWith(classOf[JUnit4]) +class AggregateFlatClassPathTest { + + private class TestFlatClassPath extends FlatClassPath { + override def packages(inPackage: String): Seq[PackageEntry] = unsupported + override def sources(inPackage: String): Seq[SourceFileEntry] = unsupported + override def classes(inPackage: String): Seq[ClassFileEntry] = unsupported + + override def list(inPackage: String): FlatClassPathEntries = unsupported + override def findClassFile(name: String): Option[AbstractFile] = unsupported + + override def asClassPathStrings: Seq[String] = unsupported + override def asSourcePathString: String = unsupported + override def asURLs: Seq[URL] = unsupported + } + + private case class TestClassPath(virtualPath: String, classesInPackage: EntryNamesInPackage*) extends TestFlatClassPath { + + override def classes(inPackage: String): Seq[ClassFileEntry] = + for { + entriesWrapper <- classesInPackage if entriesWrapper.inPackage == inPackage + name <- entriesWrapper.names + } yield classFileEntry(virtualPath, inPackage, name) + + override def sources(inPackage: String): Seq[SourceFileEntry] = Nil + + // we'll ignore packages + override def list(inPackage: String): FlatClassPathEntries = FlatClassPathEntries(Nil, classes(inPackage)) + } + + private case class TestSourcePath(virtualPath: String, sourcesInPackage: EntryNamesInPackage*) extends TestFlatClassPath { + + override def sources(inPackage: String): Seq[SourceFileEntry] = + for { + entriesWrapper <- sourcesInPackage if entriesWrapper.inPackage == inPackage + name <- entriesWrapper.names + } yield sourceFileEntry(virtualPath, inPackage, name) + + override def classes(inPackage: String): Seq[ClassFileEntry] = Nil + + // we'll ignore packages + override def list(inPackage: String): FlatClassPathEntries = FlatClassPathEntries(Nil, sources(inPackage)) + } + + private case class EntryNamesInPackage(inPackage: String)(val names: String*) + + private val dir1 = "./dir1" + private val dir2 = "./dir2" + private val dir3 = "./dir3" + private val dir4 = "" + + private val pkg1 = "pkg1" + private val pkg2 = "pkg2" + private val pkg3 = "pkg1.nested" + private val nonexistingPkg = "nonexisting" + + private def unsupported = throw new UnsupportedOperationException + + private def classFileEntry(pathPrefix: String, inPackage: String, fileName: String) = + ClassFileEntryImpl(classFile(pathPrefix, inPackage, fileName)) + + private def sourceFileEntry(pathPrefix: String, inPackage: String, fileName: String) = + SourceFileEntryImpl(sourceFile(pathPrefix, inPackage, fileName)) + + private def classFile(pathPrefix: String, inPackage: String, fileName: String) = + virtualFile(pathPrefix, inPackage, fileName, ".class") + + private def sourceFile(pathPrefix: String, inPackage: String, fileName: String) = + virtualFile(pathPrefix, inPackage, fileName, ".scala") + + private def virtualFile(pathPrefix: String, inPackage: String, fileName: String, extension: String) = { + val packageDirs = + if (inPackage == FlatClassPath.RootPackage) "" + else inPackage.split('.').mkString("/", "/", "") + new VirtualFile(fileName + extension, s"$pathPrefix$packageDirs/$fileName$extension") + } + + private def createDefaultTestClasspath() = { + val partialClassPaths = Seq(TestSourcePath(dir1, EntryNamesInPackage(pkg1)("F", "A", "G")), + TestClassPath(dir2, EntryNamesInPackage(pkg1)("C", "B", "A"), EntryNamesInPackage(pkg2)("D", "A", "E")), + TestClassPath(dir3, EntryNamesInPackage(pkg1)("A", "D", "F")), + TestSourcePath(dir4, EntryNamesInPackage(pkg2)("A", "H", "I"), 
EntryNamesInPackage(pkg1)("A")), + TestSourcePath(dir2, EntryNamesInPackage(pkg3)("J", "K", "L")) + ) + + AggregateFlatClassPath(partialClassPaths) + } + + @Test + def testGettingPackages: Unit = { + case class ClassPathWithPackages(packagesInPackage: EntryNamesInPackage*) extends TestFlatClassPath { + override def packages(inPackage: String): Seq[PackageEntry] = + packagesInPackage.find(_.inPackage == inPackage).map(_.names).getOrElse(Nil) map PackageEntryImpl + } + + val partialClassPaths = Seq(ClassPathWithPackages(EntryNamesInPackage(pkg1)("pkg1.a", "pkg1.d", "pkg1.f")), + ClassPathWithPackages(EntryNamesInPackage(pkg1)("pkg1.c", "pkg1.b", "pkg1.a"), + EntryNamesInPackage(pkg2)("pkg2.d", "pkg2.a", "pkg2.e")) + ) + val cp = AggregateFlatClassPath(partialClassPaths) + + val packagesInPkg1 = Seq("pkg1.a", "pkg1.d", "pkg1.f", "pkg1.c", "pkg1.b") + assertEquals(packagesInPkg1, cp.packages(pkg1).map(_.name)) + + val packagesInPkg2 = Seq("pkg2.d", "pkg2.a", "pkg2.e") + assertEquals(packagesInPkg2, cp.packages(pkg2).map(_.name)) + + assertEquals(Seq.empty, cp.packages(nonexistingPkg)) + } + + @Test + def testGettingClasses: Unit = { + val cp = createDefaultTestClasspath() + + val classesInPkg1 = Seq(classFileEntry(dir2, pkg1, "C"), + classFileEntry(dir2, pkg1, "B"), + classFileEntry(dir2, pkg1, "A"), + classFileEntry(dir3, pkg1, "D"), + classFileEntry(dir3, pkg1, "F") + ) + assertEquals(classesInPkg1, cp.classes(pkg1)) + + val classesInPkg2 = Seq(classFileEntry(dir2, pkg2, "D"), + classFileEntry(dir2, pkg2, "A"), + classFileEntry(dir2, pkg2, "E") + ) + assertEquals(classesInPkg2, cp.classes(pkg2)) + + assertEquals(Seq.empty, cp.classes(pkg3)) + assertEquals(Seq.empty, cp.classes(nonexistingPkg)) + } + + @Test + def testGettingSources: Unit = { + val partialClassPaths = Seq(TestClassPath(dir1, EntryNamesInPackage(pkg1)("F", "A", "G")), + TestSourcePath(dir2, EntryNamesInPackage(pkg1)("C", "B", "A"), EntryNamesInPackage(pkg2)("D", "A", "E")), + TestSourcePath(dir3, EntryNamesInPackage(pkg1)("A", "D", "F")), + TestClassPath(dir4, EntryNamesInPackage(pkg2)("A", "H", "I")), + TestClassPath(dir2, EntryNamesInPackage(pkg3)("J", "K", "L")) + ) + val cp = AggregateFlatClassPath(partialClassPaths) + + val sourcesInPkg1 = Seq(sourceFileEntry(dir2, pkg1, "C"), + sourceFileEntry(dir2, pkg1, "B"), + sourceFileEntry(dir2, pkg1, "A"), + sourceFileEntry(dir3, pkg1, "D"), + sourceFileEntry(dir3, pkg1, "F") + ) + assertEquals(sourcesInPkg1, cp.sources(pkg1)) + + val sourcesInPkg2 = Seq(sourceFileEntry(dir2, pkg2, "D"), + sourceFileEntry(dir2, pkg2, "A"), + sourceFileEntry(dir2, pkg2, "E") + ) + assertEquals(sourcesInPkg2, cp.sources(pkg2)) + + assertEquals(Seq.empty, cp.sources(pkg3)) + assertEquals(Seq.empty, cp.sources(nonexistingPkg)) + } + + @Test + def testList: Unit = { + val cp = createDefaultTestClasspath() + + val classesAndSourcesInPkg1 = Seq( + ClassAndSourceFilesEntry(classFile(dir3, pkg1, "F"), sourceFile(dir1, pkg1, "F")), + ClassAndSourceFilesEntry(classFile(dir2, pkg1, "A"), sourceFile(dir1, pkg1, "A")), + sourceFileEntry(dir1, pkg1, "G"), + classFileEntry(dir2, pkg1, "C"), + classFileEntry(dir2, pkg1, "B"), + classFileEntry(dir3, pkg1, "D") + ) + assertEquals(classesAndSourcesInPkg1, cp.list(pkg1).classesAndSources) + + assertEquals(FlatClassPathEntries(Nil, Nil), cp.list(nonexistingPkg)) + } + + @Test + def testFindClass: Unit = { + val cp = createDefaultTestClasspath() + + assertEquals( + Some(ClassAndSourceFilesEntry(classFile(dir2, pkg1, "A"), sourceFile(dir1, pkg1, "A"))), + 
cp.findClass(s"$pkg1.A") + ) + assertEquals(Some(classFileEntry(dir3, pkg1, "D")), cp.findClass(s"$pkg1.D")) + assertEquals(Some(sourceFileEntry(dir2, pkg3, "L")), cp.findClass(s"$pkg3.L")) + assertEquals(None, cp.findClass("Nonexisting")) + } +} diff --git a/test/junit/scala/tools/nsc/classpath/FlatClassPathResolverTest.scala b/test/junit/scala/tools/nsc/classpath/FlatClassPathResolverTest.scala new file mode 100644 index 000000000000..a37ba31b31ac --- /dev/null +++ b/test/junit/scala/tools/nsc/classpath/FlatClassPathResolverTest.scala @@ -0,0 +1,159 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package scala.tools.nsc.classpath + +import java.io.File +import org.junit.Assert._ +import org.junit._ +import org.junit.rules.TemporaryFolder +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import scala.annotation.tailrec +import scala.tools.nsc.io.AbstractFile +import scala.tools.nsc.util.ClassPath +import scala.tools.nsc.Settings +import scala.tools.util.FlatClassPathResolver +import scala.tools.util.PathResolver + +@RunWith(classOf[JUnit4]) +class FlatClassPathResolverTest { + + val tempDir = new TemporaryFolder() + + private val packagesToTest = List(FlatClassPath.RootPackage, "scala", "scala.reflect", "scala.reflect.io") + private val classFilesToFind = List("scala.tools.util.FlatClassPathResolver", + "scala.reflect.io.AbstractFile", + "scala.collection.immutable.List", + "scala.Option", + "scala.collection.immutable.Vector", + "scala.util.hashing.MurmurHash3", + "java.lang.Object", + "java.util.Date") + + private val classesToFind = classFilesToFind ++ List("TestSourceInRootPackage", + "scala.reflect.io.TestScalaSource", + "scala.reflect.io.TestJavaSource") + + private val settings = new Settings + + @Before + def initTempDirAndSourcePath: Unit = { + // In Java, JUnit's TemporaryFolder is managed automatically using @Rule. + // It would also work in Scala after adding and extending a class like + // TestWithTempFolder.java containing it. But in this case it doesn't work when running tests + // from the command line - the Java class is not compiled, for some mysterious reason. + // That's why such dirs are created and deleted manually here.
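+ // Lay out a minimal fake source tree: these files back the source-only entries added to
+ // classesToFind above and are resolved through the sourcepath set at the end of this method.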
+ tempDir.create() + tempDir.newFile("TestSourceInRootPackage.scala") + val ioDir = tempDir.newFolder("scala", "reflect", "io") + new File(ioDir, "AbstractFile.scala").createNewFile() + new File(ioDir, "ZipArchive.java").createNewFile() + new File(ioDir, "TestScalaSource.scala").createNewFile() + new File(ioDir, "TestJavaSource.java").createNewFile() + + settings.usejavacp.value = true + settings.sourcepath.value = tempDir.getRoot.getAbsolutePath + } + + @After + def deleteTempDir: Unit = tempDir.delete() + + private def createFlatClassPath(settings: Settings) = + new FlatClassPathResolver(settings).result + + @Test + def testEntriesFromListOperationAgainstSeparateMethods: Unit = { + val classPath = createFlatClassPath(settings) + + def compareEntriesInPackage(inPackage: String): Unit = { + val packages = classPath.packages(inPackage) + val classes = classPath.classes(inPackage) + val sources = classPath.sources(inPackage) + val FlatClassPathEntries(packagesFromList, classesAndSourcesFromList) = classPath.list(inPackage) + + val packageNames = packages.map(_.name).sorted + val packageNamesFromList = packagesFromList.map(_.name).sorted + assertEquals(s"Methods list and packages for package '$inPackage' should return the same packages", + packageNames, packageNamesFromList) + + val classFileNames = classes.map(_.name).sorted + val classFileNamesFromList = classesAndSourcesFromList.filter(_.binary.isDefined).map(_.name).sorted + assertEquals(s"Methods list and classes for package '$inPackage' should return entries for the same class files", + classFileNames, classFileNamesFromList) + + val sourceFileNames = sources.map(_.name).sorted + val sourceFileNamesFromList = classesAndSourcesFromList.filter(_.source.isDefined).map(_.name).sorted + assertEquals(s"Methods list and sources for package '$inPackage' should return entries for the same source files", + sourceFileNames, sourceFileNamesFromList) + + val uniqueNamesOfClassAndSourceFiles = (classFileNames ++ sourceFileNames).toSet + assertEquals(s"Class and source entries with the same name obtained via list for package '$inPackage' should be merged into one containing both files", + uniqueNamesOfClassAndSourceFiles.size, classesAndSourcesFromList.length) + } + + packagesToTest foreach compareEntriesInPackage + } + + @Test + def testCreatedEntriesAgainstRecursiveClassPath: Unit = { + val flatClassPath = createFlatClassPath(settings) + val recursiveClassPath = new PathResolver(settings).result + + def compareEntriesInPackage(inPackage: String): Unit = { + + @tailrec + def traverseToPackage(packageNameParts: Seq[String], cp: ClassPath[AbstractFile]): ClassPath[AbstractFile] = { + packageNameParts match { + case Nil => cp + case h :: t => + cp.packages.find(_.name == h) match { + case Some(nestedCp) => traverseToPackage(t, nestedCp) + case _ => throw new Exception(s"There's no package $inPackage in recursive classpath - error when searching for '$h'") + } + } + } + + val packageNameParts = if (inPackage == FlatClassPath.RootPackage) Nil else inPackage.split('.').toList + val recursiveClassPathInPackage = traverseToPackage(packageNameParts, recursiveClassPath) + + val flatCpPackages = flatClassPath.packages(inPackage).map(_.name) + val pkgPrefix = PackageNameUtils.packagePrefix(inPackage) + val recursiveCpPackages = recursiveClassPathInPackage.packages.map(pkgPrefix + _.name) + assertEquals(s"Packages in package '$inPackage' on flat cp should be the same as on the recursive cp", + recursiveCpPackages, flatCpPackages) + + val flatCpSources = 
flatClassPath.sources(inPackage).map(_.name).sorted + val recursiveCpSources = recursiveClassPathInPackage.classes + .filter(_.source.nonEmpty) + .map(_.name).sorted + assertEquals(s"Source entries in package '$inPackage' on flat cp should be the same as on the recursive cp", + recursiveCpSources, flatCpSources) + + val flatCpClasses = flatClassPath.classes(inPackage).map(_.name).sorted + val recursiveCpClasses = recursiveClassPathInPackage.classes + .filter(_.binary.nonEmpty) + .map(_.name).sorted + assertEquals(s"Class entries in package '$inPackage' on flat cp should be the same as on the recursive cp", + recursiveCpClasses, flatCpClasses) + } + + packagesToTest foreach compareEntriesInPackage + } + + @Test + def testFindClassFile: Unit = { + val classPath = createFlatClassPath(settings) + classFilesToFind foreach { className => + assertTrue(s"File for $className should be found", classPath.findClassFile(className).isDefined) + } + } + + @Test + def testFindClass: Unit = { + val classPath = createFlatClassPath(settings) + classesToFind foreach { className => + assertTrue(s"File for $className should be found", classPath.findClass(className).isDefined) + } + } +} diff --git a/test/junit/scala/tools/nsc/settings/ScalaVersionTest.scala b/test/junit/scala/tools/nsc/settings/ScalaVersionTest.scala new file mode 100644 index 000000000000..77a2da828ee5 --- /dev/null +++ b/test/junit/scala/tools/nsc/settings/ScalaVersionTest.scala @@ -0,0 +1,18 @@ +package scala.tools.nsc +package settings + +import org.junit.Assert._ +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import scala.tools.testing.AssertUtil.assertThrows + +@RunWith(classOf[JUnit4]) +class ScalaVersionTest { + // SI-8711 + @Test def versionUnparse() { + val v = "2.11.3" + + assertEquals(ScalaVersion(v).unparse, v) + } +} diff --git a/test/junit/scala/tools/nsc/settings/SettingsTest.scala b/test/junit/scala/tools/nsc/settings/SettingsTest.scala index e4b5ecc7c32e..96f83c4c2f8b 100644 --- a/test/junit/scala/tools/nsc/settings/SettingsTest.scala +++ b/test/junit/scala/tools/nsc/settings/SettingsTest.scala @@ -26,7 +26,7 @@ class SettingsTest { assertThrows[IllegalArgumentException](check("-Ytest-setting:rubbish")) } - @Test def userSettingsHavePredecenceOverOptimize() { + @Test def userSettingsHavePrecedenceOverOptimize() { def check(args: String*): MutableSettings#BooleanSetting = { val s = new MutableSettings(msg => throw new IllegalArgumentException(msg)) val (ok, residual) = s.processArguments(args.toList, processAll = true) @@ -38,15 +38,146 @@ class SettingsTest { assertFalse(check("-Yinline:false", "-optimise").value) } - @Test def userSettingsHavePredecenceOverLint() { - def check(args: String*): MutableSettings#BooleanSetting = { + // for the given args, select the desired setting + private def check(args: String*)(b: MutableSettings => Boolean): Boolean = { + val s = new MutableSettings(msg => throw new IllegalArgumentException(msg)) + val (ok, residual) = s.processArguments(args.toList, processAll = true) + assert(residual.isEmpty) + b(s) + } + @Test def userSettingsHavePrecedenceOverLint() { + assertTrue(check("-Xlint")(_.warnAdaptedArgs)) + assertFalse(check("-Xlint", "-Ywarn-adapted-args:false")(_.warnAdaptedArgs)) + assertFalse(check("-Ywarn-adapted-args:false", "-Xlint")(_.warnAdaptedArgs)) + } + + @Test def anonymousLintersCanBeNamed() { + assertTrue(check("-Xlint")(_.warnMissingInterpolator)) // among Xlint + 
assertFalse(check("-Xlint:-missing-interpolator")(_.warnMissingInterpolator)) + + // positive overrides negative, but not the other way around + assertTrue(check("-Xlint:-missing-interpolator,missing-interpolator")(_.warnMissingInterpolator)) + assertTrue(check("-Xlint:-missing-interpolator", "-Xlint:missing-interpolator")(_.warnMissingInterpolator)) + + assertTrue(check("-Xlint:missing-interpolator,-missing-interpolator")(_.warnMissingInterpolator)) + assertTrue(check("-Xlint:missing-interpolator", "-Xlint:-missing-interpolator")(_.warnMissingInterpolator)) + + // -Xlint:_ adds all possible choices, but explicit negative settings will override + assertFalse(check("-Xlint:-missing-interpolator,_")(_.warnMissingInterpolator)) + assertFalse(check("-Xlint:-missing-interpolator", "-Xlint:_")(_.warnMissingInterpolator)) + assertFalse(check("-Xlint:_", "-Xlint:-missing-interpolator")(_.warnMissingInterpolator)) + assertFalse(check("-Xlint:_,-missing-interpolator")(_.warnMissingInterpolator)) + + // -Xlint is the same as -Xlint:_ + assertFalse(check("-Xlint:-missing-interpolator", "-Xlint")(_.warnMissingInterpolator)) + assertFalse(check("-Xlint", "-Xlint:-missing-interpolator")(_.warnMissingInterpolator)) + + // combination of positive, negative and _ + assertTrue(check("-Xlint:_,-missing-interpolator,missing-interpolator")(_.warnMissingInterpolator)) + assertTrue(check("-Xlint:-missing-interpolator,_,missing-interpolator")(_.warnMissingInterpolator)) + assertTrue(check("-Xlint:-missing-interpolator,missing-interpolator,_")(_.warnMissingInterpolator)) + assertTrue(check("-Xlint:missing-interpolator,-missing-interpolator,_")(_.warnMissingInterpolator)) + assertTrue(check("-Xlint:missing-interpolator,_,-missing-interpolator")(_.warnMissingInterpolator)) + } + + @Test def xLintInvalidChoices(): Unit = { + assertThrows[IllegalArgumentException](check("-Xlint:-_")(_.warnAdaptedArgs)) + assertThrows[IllegalArgumentException](check("-Xlint:-warn-adapted-args")(_.warnAdaptedArgs)) // "warn-" should not be there + } + + @Test def xLintNonColonated(): Unit = { + assertTrue(check("-Xlint", "adapted-args", "-deprecation")(_.warnAdaptedArgs)) + assertFalse(check("-Xlint", "adapted-args", "-deprecation")(_.warnMissingInterpolator)) + assertTrue(check("-Xlint", "adapted-args", "missing-interpolator", "-deprecation")(s => s.warnMissingInterpolator && s.warnAdaptedArgs)) + assertThrows[IllegalArgumentException](check("-Xlint", "adapted-args", "-missing-interpolator")(_.warnAdaptedArgs)) // non-colonated: cannot provide negative args + } + + @Test def xLintContainsValues(): Unit = { + // make sure that lint.contains and lint.value.contains are consistent + def t(s: MutableSettings, v: String) = { + val r = s.lint.contains(v) + assertSame(r, s.lint.value.contains((s.LintWarnings withName v).asInstanceOf[s.lint.domain.Value])) + r + } + + assertTrue(check("-Xlint")(t(_, "adapted-args"))) + assertTrue(check("-Xlint:_")(t(_, "adapted-args"))) + assertFalse(check("-Xlint:_,-adapted-args")(t(_, "adapted-args"))) + assertFalse(check("-Xlint:-adapted-args,_")(t(_, "adapted-args"))) + assertTrue(check("-Xlint:-adapted-args,_,adapted-args")(t(_, "adapted-args"))) + } + + @Test def xLintDeprecatedAlias(): Unit = { + assertTrue(check("-Ywarn-adapted-args")(_.warnAdaptedArgs)) + assertTrue(check("-Xlint:_,-adapted-args", "-Ywarn-adapted-args")(_.warnAdaptedArgs)) + assertTrue(check("-Xlint:-adapted-args", "-Ywarn-adapted-args")(_.warnAdaptedArgs)) + assertTrue(check("-Ywarn-adapted-args", 
"-Xlint:-adapted-args,_")(_.warnAdaptedArgs)) + + assertFalse(check("-Ywarn-adapted-args:false")(_.warnAdaptedArgs)) + assertFalse(check("-Ywarn-adapted-args:false", "-Xlint:_")(_.warnAdaptedArgs)) + assertFalse(check("-Ywarn-adapted-args:false", "-Xlint:_,-adapted-args")(_.warnAdaptedArgs)) + assertTrue(check("-Ywarn-adapted-args:false", "-Xlint:_,adapted-args")(_.warnAdaptedArgs)) + } + + @Test def expandingMultichoice(): Unit = { + val s = new MutableSettings(msg => throw new IllegalArgumentException(msg)) + object mChoices extends s.MultiChoiceEnumeration { + val a = Choice("a") + val b = Choice("b") + val c = Choice("c") + val d = Choice("d") + + val ab = Choice("ab", expandsTo = List(a, b)) + val ac = Choice("ac", expandsTo = List(a, c)) + val uber = Choice("uber", expandsTo = List(ab, d)) + } + val m = s.MultiChoiceSetting("-m", "args", "magic sauce", mChoices, Some(List("ac"))) + + def check(args: String*)(t: s.MultiChoiceSetting[mChoices.type] => Boolean): Boolean = { + m.clear() + val (ok, rest) = s.processArguments(args.toList, processAll = true) + assert(rest.isEmpty) + t(m) + } + + import mChoices._ + + assertTrue(check("-m")(_.value == Set(a,c))) + assertTrue(check("-m:a,-b,c")(_.value == Set(a,c))) + + // expanding options don't end up in the value set, only the terminal ones + assertTrue(check("-m:ab,ac")(_.value == Set(a,b,c))) + assertTrue(check("-m:_")(_.value == Set(a,b,c,d))) + assertTrue(check("-m:uber,ac")(_.value == Set(a,b,c,d))) // recursive expansion of uber + + // explicit nays + assertTrue(check("-m:_,-b")(_.value == Set(a,c,d))) + assertTrue(check("-m:b,_,-b")(_.value == Set(a,b,c,d))) + assertTrue(check("-m:ac,-c")(_.value == Set(a))) + assertTrue(check("-m:ac,-a,-c")(_.value == Set())) + assertTrue(check("-m:-d,ac")(_.value == Set(a,c))) + assertTrue(check("-m:-b,ac,uber")(_.value == Set(a,c,d))) + + assertFalse(check("-m:uber")(_.contains("i-m-not-an-option"))) + + assertThrows[IllegalArgumentException](check("-m:-_")(_ => true), _ contains "'-_' is not a valid choice") + assertThrows[IllegalArgumentException](check("-m:a,b,-ab")(_ => true), _ contains "'ab' cannot be negated") + assertThrows[IllegalArgumentException](check("-m:a,ac,-uber,uber")(_ => true), _ contains "'uber' cannot be negated") + } + + @Test def xSourceTest(): Unit = { + def check(expected: String, args: String*): Unit = { val s = new MutableSettings(msg => throw new IllegalArgumentException(msg)) - val (ok, residual) = s.processArguments(args.toList, processAll = true) + val (_, residual) = s.processArguments(args.toList, processAll = true) assert(residual.isEmpty) - s.warnAdaptedArgs // among Xlint + assertTrue(s.source.value == ScalaVersion(expected)) } - assertTrue(check("-Xlint").value) - assertFalse(check("-Xlint", "-Ywarn-adapted-args:false").value) - assertFalse(check("-Ywarn-adapted-args:false", "-Xlint").value) + check(expected = "2.11.0") // default + check(expected = "2.11.0", "-Xsource:2.11") + check(expected = "2.10", "-Xsource:2.10.0") + check(expected = "2.12", "-Xsource:2.12") + assertThrows[IllegalArgumentException](check(expected = "2.11", "-Xsource"), _ == "-Xsource requires an argument, the syntax is -Xsource:") + assertThrows[IllegalArgumentException](check(expected = "2.11", "-Xsource", "2.11"), _ == "-Xsource requires an argument, the syntax is -Xsource:") + assertThrows[IllegalArgumentException](check(expected = "2.11", "-Xsource:2.invalid"), _ contains "There was a problem parsing 2.invalid") } } diff --git 
a/test/junit/scala/tools/nsc/symtab/CannotHaveAttrsTest.scala b/test/junit/scala/tools/nsc/symtab/CannotHaveAttrsTest.scala index 355771bf0410..69931c9e2403 100644 --- a/test/junit/scala/tools/nsc/symtab/CannotHaveAttrsTest.scala +++ b/test/junit/scala/tools/nsc/symtab/CannotHaveAttrsTest.scala @@ -47,7 +47,7 @@ class CannotHaveAttrsTest { assertEquals(t.tpe, NoType) } - @Test + @Test @org.junit.Ignore // SI-8816 def nonDefaultPosAssignmentFails = { val pos = new OffsetPosition(null, 0) attrlessTrees.foreach { t => @@ -56,7 +56,7 @@ class CannotHaveAttrsTest { } } - @Test + @Test @org.junit.Ignore // SI-8816 def nonDefaultTpeAssignmentFails = { val tpe = typeOf[Int] attrlessTrees.foreach { t => @@ -64,4 +64,16 @@ class CannotHaveAttrsTest { assertThrows[IllegalArgumentException] { t.setType(tpe) } } } + + class Attach + @Test + def attachmentsAreIgnored = { + attrlessTrees.foreach { t => + t.setAttachments(NoPosition.update(new Attach)) + assert(t.attachments == NoPosition) + t.updateAttachment(new Attach) + assert(t.attachments == NoPosition) + t.removeAttachment[Attach] // no exception + } + } } diff --git a/test/junit/scala/tools/nsc/symtab/FreshNameExtractorTest.scala b/test/junit/scala/tools/nsc/symtab/FreshNameExtractorTest.scala index cf09abdfffdb..effbfb2f7c0b 100644 --- a/test/junit/scala/tools/nsc/symtab/FreshNameExtractorTest.scala +++ b/test/junit/scala/tools/nsc/symtab/FreshNameExtractorTest.scala @@ -36,7 +36,7 @@ class FreshNameExtractorTest { } } - @Test + @Test @org.junit.Ignore // SI-8818 def extractionsFailsIfNameDoesntEndWithNumber = { val Creator = new FreshNameCreator(prefixes.head) val Extractor = new FreshNameExtractor(prefixes.head) @@ -44,4 +44,4 @@ class FreshNameExtractorTest { val Extractor(_) = TermName(Creator.newName("foo") + "bar") } } -} \ No newline at end of file +} diff --git a/test/junit/scala/tools/nsc/symtab/StdNamesTest.scala b/test/junit/scala/tools/nsc/symtab/StdNamesTest.scala index 4a39cf9d485c..91f94e09b622 100644 --- a/test/junit/scala/tools/nsc/symtab/StdNamesTest.scala +++ b/test/junit/scala/tools/nsc/symtab/StdNamesTest.scala @@ -15,11 +15,15 @@ class StdNamesTest { @Test def testNewTermNameInvalid(): Unit = { - assertThrows[IllegalArgumentException](newTermName("foo".toCharArray, 0, -1)) - assertThrows[IllegalArgumentException](newTermName("foo".toCharArray, 0, 0)) assertThrows[IllegalArgumentException](newTermName("foo".toCharArray, -1, 1)) } + @Test + def testNewTermNameNegativeLength(): Unit = { + assertEquals(nme.EMPTY, newTermName("foo".toCharArray, 0, -1)) + assertEquals(nme.EMPTY, newTermName("foo".toCharArray, 0, 0)) + } + @Test def testUnspecializedName(): Unit = { def test(expected: Name, nme: Name) { diff --git a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala index 25d8c4667f51..f0f20acf07ea 100644 --- a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala +++ b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala @@ -3,6 +3,9 @@ package symtab import scala.reflect.ClassTag import scala.reflect.internal.{Phase, NoPhase, SomePhase} +import scala.tools.nsc.classpath.FlatClassPath +import scala.tools.nsc.settings.ClassPathRepresentationType +import scala.tools.util.FlatClassPathResolver import scala.tools.util.PathResolver import util.ClassPath import io.AbstractFile @@ -26,14 +29,28 @@ class SymbolTableForUnitTesting extends SymbolTable { class LazyTreeCopier extends super.LazyTreeCopier with TreeCopier override def 
isCompilerUniverse: Boolean = true - def classPath = new PathResolver(settings).result + + def classPath = platform.classPath + def flatClassPath: FlatClassPath = platform.flatClassPath object platform extends backend.Platform { val symbolTable: SymbolTableForUnitTesting.this.type = SymbolTableForUnitTesting.this lazy val loaders: SymbolTableForUnitTesting.this.loaders.type = SymbolTableForUnitTesting.this.loaders + def platformPhases: List[SubComponent] = Nil - val classPath: ClassPath[AbstractFile] = new PathResolver(settings).result - def doLoad(cls: ClassPath[AbstractFile]#ClassRep): Boolean = true + + lazy val classPath: ClassPath[AbstractFile] = { + assert(settings.YclasspathImpl.value == ClassPathRepresentationType.Recursive, + "It's not possible to use the recursive classpath representation, when it's not the chosen classpath scanning method") + new PathResolver(settings).result + } + + private[nsc] lazy val flatClassPath: FlatClassPath = { + assert(settings.YclasspathImpl.value == ClassPathRepresentationType.Flat, + "It's not possible to use the flat classpath representation, when it's not the chosen classpath scanning method") + new FlatClassPathResolver(settings).result + } + def isMaybeBoxed(sym: Symbol): Boolean = ??? def needCompile(bin: AbstractFile, src: AbstractFile): Boolean = ??? def externalEquals: Symbol = ??? @@ -51,7 +68,12 @@ class SymbolTableForUnitTesting extends SymbolTable { class GlobalMirror extends Roots(NoSymbol) { val universe: SymbolTableForUnitTesting.this.type = SymbolTableForUnitTesting.this - def rootLoader: LazyType = new loaders.PackageLoader(classPath) + + def rootLoader: LazyType = settings.YclasspathImpl.value match { + case ClassPathRepresentationType.Flat => new loaders.PackageLoaderUsingFlatClassPath(FlatClassPath.RootPackage, flatClassPath) + case ClassPathRepresentationType.Recursive => new loaders.PackageLoader(classPath) + } + override def toString = "compiler mirror" } @@ -61,7 +83,7 @@ class SymbolTableForUnitTesting extends SymbolTable { rm.asInstanceOf[Mirror] } - def settings: Settings = { + lazy val settings: Settings = { val s = new Settings // initialize classpath using java classpath s.usejavacp.value = true @@ -72,6 +94,18 @@ class SymbolTableForUnitTesting extends SymbolTable { def picklerPhase: scala.reflect.internal.Phase = SomePhase def erasurePhase: scala.reflect.internal.Phase = SomePhase + // Members declared in scala.reflect.internal.Reporting + def reporter = new scala.reflect.internal.ReporterImpl { + protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = println(msg) + } + + // minimal Run to get Reporting wired + def currentRun = new RunReporting {} + class PerRunReporting extends PerRunReportingBase { + def deprecationWarning(pos: Position, msg: String): Unit = reporter.warning(pos, msg) + } + protected def PerRunReporting = new PerRunReporting + // Members declared in scala.reflect.internal.SymbolTable def currentRunId: Int = 1 def log(msg: => AnyRef): Unit = println(msg) diff --git a/test/junit/scala/tools/nsc/transform/patmat/SolvingTest.scala b/test/junit/scala/tools/nsc/transform/patmat/SolvingTest.scala new file mode 100644 index 000000000000..1fff9c9a3203 --- /dev/null +++ b/test/junit/scala/tools/nsc/transform/patmat/SolvingTest.scala @@ -0,0 +1,555 @@ +package scala.tools.nsc.transform.patmat + +import org.junit.Assert._ +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.collection.mutable +import 
scala.tools.nsc.{Global, Settings} + +object TestSolver extends Logic with Solving { + + val global: Global = new Global(new Settings()) + + // disable max recursion depth in order to get all solutions + global.settings.YpatmatExhaustdepth.tryToSet("off" :: Nil) + + object TestSolver extends Solver { + + class Const { + override def toString: String = "Const" + } + + val NullConst = new Const + type Type = Int + + case class TypeConst(i: Int) extends Const + + object TypeConst extends TypeConstExtractor + + case class ValueConst(i: Int) extends Const + + object ValueConst extends ValueConstExtractor { + def apply(t: Tree): Const = ??? + } + + case class Tree(name: String) + + class Var(val x: Tree) extends AbsVar { + + override def equals(other: scala.Any): Boolean = other match { + case that: Var => this.x == that.x + case _ => false + } + + override def hashCode(): Int = x.hashCode() + + override def toString: String = { + s"Var($x)" + } + + def domainSyms = None + + def implications = Nil + + def mayBeNull = false + + def propForEqualsTo(c: Const): Prop = ??? + + def registerEquality(c: Const) = () + + def registerNull() = () + + def symForStaticTp = None + } + + object Var extends VarExtractor { + def apply(x: Tree): Var = new Var(x) + + def unapply(v: Var): Some[Tree] = Some(v.x) + } + + def prepareNewAnalysis() = {} + + def reportWarning(msg: String) = sys.error(msg) + + /** + * The DPLL procedure only returns a minimal mapping from literal to value + * such that the CNF formula is satisfied. + * E.g. for: + * `(a \/ b)` + * The DPLL procedure will find either {a = true} or {b = true} + * as solution. + * + * The expansion step will amend both solutions with the unassigned variable + * i.e., {a = true} will be expanded to {a = true, b = true} and + * {a = true, b = false}. 
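+ * (With k unassigned variables, each reported solution therefore expands to 2^k full models.)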
+ */ + def expandUnassigned(solution: Solution): List[Model] = { + import solution._ + + // the number of solutions is doubled for every unassigned variable + val expandedModels = 1 << unassigned.size + var current = mutable.ArrayBuffer[Model]() + var next = mutable.ArrayBuffer[Model]() + current.sizeHint(expandedModels) + next.sizeHint(expandedModels) + + current += model + + // we use double buffering: + // read from `current` and create a two models for each model in `next` + for { + s <- unassigned + } { + for { + model <- current + } { + def force(s: Sym, pol: Boolean) = model + (s -> pol) + + next += force(s, pol = true) + next += force(s, pol = false) + } + + val tmp = current + current = next + next = tmp + + next.clear() + } + + current.toList + } + + /** + * Old CNF conversion code, used for reference: + * - convert formula into NNF + * (i.e., no negated terms, only negated variables) + * - use distributive laws to convert into CNF + */ + def eqFreePropToSolvableViaDistribution(p: Prop) = { + val symbolMapping = new SymbolMapping(gatherSymbols(p)) + + type Formula = Array[TestSolver.Clause] + + def formula(c: Clause*): Formula = c.toArray + + def merge(a: Clause, b: Clause) = a ++ b + + def negationNormalFormNot(p: Prop): Prop = p match { + case And(ps) => Or(ps map negationNormalFormNot) + case Or(ps) => And(ps map negationNormalFormNot) + case Not(p) => negationNormalForm(p) + case True => False + case False => True + case s: Sym => Not(s) + } + + def negationNormalForm(p: Prop): Prop = p match { + case Or(ps) => Or(ps map negationNormalForm) + case And(ps) => And(ps map negationNormalForm) + case Not(negated) => negationNormalFormNot(negated) + case True + | False + | (_: Sym) => p + } + + val TrueF: Formula = Array() + val FalseF = Array(clause()) + def lit(sym: Sym) = Array(clause(symbolMapping.lit(sym))) + def negLit(sym: Sym) = Array(clause(-symbolMapping.lit(sym))) + + def conjunctiveNormalForm(p: Prop): Formula = { + def distribute(a: Formula, b: Formula): Formula = + (a, b) match { + // true \/ _ = true + // _ \/ true = true + case (trueA, trueB) if trueA.size == 0 || trueB.size == 0 => TrueF + // lit \/ lit + case (a, b) if a.size == 1 && b.size == 1 => formula(merge(a(0), b(0))) + // (c1 /\ ... /\ cn) \/ d = ((c1 \/ d) /\ ... /\ (cn \/ d)) + // d \/ (c1 /\ ... /\ cn) = ((d \/ c1) /\ ... /\ (d \/ cn)) + case (cs, ds) => + val (big, small) = if (cs.size > ds.size) (cs, ds) else (ds, cs) + big flatMap (c => distribute(formula(c), small)) + } + + p match { + case True => TrueF + case False => FalseF + case s: Sym => lit(s) + case Not(s: Sym) => negLit(s) + case And(ps) => + ps.toArray flatMap conjunctiveNormalForm + case Or(ps) => + ps map conjunctiveNormalForm reduceLeft { (a, b) => + distribute(a, b) + } + } + } + val cnf = conjunctiveNormalForm(negationNormalForm(p)) + Solvable(cnf, symbolMapping) + } + + } + +} + +/** + * Testing CNF conversion via Tseitin vs NNF & expansion. 
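+ * Each formula below is converted both ways, all models are enumerated for each CNF (ALL-SAT),
+ * unassigned variables are expanded, and the two sorted model sets are required to be equal.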
+ */ +@RunWith(classOf[JUnit4]) +class SolvingTest { + + import scala.tools.nsc.transform.patmat.TestSolver.TestSolver._ + + implicit val Ord: Ordering[TestSolver.TestSolver.Model] = Ordering.by { + _.toSeq.sortBy(_.toString()).toIterable + } + + private def sym(name: String) = Sym(Var(Tree(name)), NullConst) + + @Test + def testSymCreation() { + val s1 = sym("hello") + val s2 = sym("hello") + assertEquals(s1, s2) + } + + /** + * Simplest possible test: solve a formula and check the solution(s) + */ + @Test + def testUnassigned() { + val pSym = sym("p") + val solvable = propToSolvable(Or(pSym, Not(pSym))) + val solutions = TestSolver.TestSolver.findAllModelsFor(solvable) + val expected = List(Solution(Map(), List(pSym))) + assertEquals(expected, solutions) + } + + /** + * Unassigned variables must be expanded + * for stable results + */ + @Test + def testNoUnassigned() { + val pSym = sym("p") + val qSym = sym("q") + val solvable = propToSolvable(Or(pSym, Not(qSym))) + val solutions = findAllModelsFor(solvable) + val expanded = solutions.flatMap(expandUnassigned).sorted + val expected = Seq( + Map(pSym -> false, qSym -> false), + Map(pSym -> true, qSym -> false), + Map(pSym -> true, qSym -> true) + ).sorted + + assertEquals(expected, expanded) + } + + @Test + def testTseitinVsExpansionFrom_t7020() { + val formulas = Seq( + And(And(And(Not(sym("V1=null")), + sym("V1=scala.collection.immutable.::[?]")), And(Not(sym("V1=null")), + And(Or(sym("V2=4"), Or(sym("V2=5"), sym("V2=6"))), sym("V3=Nil")))), + And(And(Or(Not(sym("V1=scala.collection.immutable.::[?]")), + Not(sym("V1=null"))), And(Or(sym("V3=scala.collection.immutable.::[?]"), + Or(sym("V3=Nil"), sym("V3=null"))), And(Or(Not(sym("V3=Nil")), + Not(sym("V3=null"))), + And(Or(Not(sym("V3=scala.collection.immutable.::[?]")), + Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))), + Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"), + sym("V1=null")))))))), And(Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))))), + + And(And(And(Not(sym("V1=null")), + sym("V1=scala.collection.immutable.::[?]")), And(Not(sym("V1=null")), + And(sym("V2=7"), sym("V3=Nil")))), + And(And(Or(Not(sym("V1=scala.collection.immutable.::[?]")), + Not(sym("V1=null"))), And(Or(sym("V3=scala.collection.immutable.::[?]"), + Or(sym("V3=Nil"), sym("V3=null"))), And(Or(Not(sym("V3=Nil")), + Not(sym("V3=null"))), + And(Or(Not(sym("V3=scala.collection.immutable.::[?]")), + Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))), + Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"), + sym("V1=null")))))))), And(And(Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))), Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + Not(sym("V3=Nil")))))))), + + And(And(Not(sym("V1=null")), + sym("V1=scala.collection.immutable.::[?]")), And(Not(sym("V1=null")), + And(Or(sym("V2=4"), Or(sym("V2=5"), sym("V2=6"))), sym("V3=Nil")))), + + And(And(Not(sym("V1=null")), 
sym("V1=scala.collection.immutable.::[?]")), + And(Not(sym("V1=null")), And(sym("V2=7"), sym("V3=Nil")))), + + And(And(Or(Not(sym("V1=scala.collection.immutable.::[?]")), + Not(sym("V1=null"))), And(Or(sym("V3=scala.collection.immutable.::[?]"), + Or(sym("V3=Nil"), sym("V3=null"))), And(Or(Not(sym("V3=Nil")), + Not(sym("V3=null"))), + And(Or(Not(sym("V3=scala.collection.immutable.::[?]")), + Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))), + Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"), + sym("V1=null")))))))), And(And(Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))), Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + Not(sym("V3=Nil"))))))), + + And(And(Or(Not(sym("V1=scala.collection.immutable.::[?]")), + Not(sym("V1=null"))), And(Or(sym("V3=scala.collection.immutable.::[?]"), + Or(sym("V3=Nil"), sym("V3=null"))), And(Or(Not(sym("V3=Nil")), + Not(sym("V3=null"))), + And(Or(Not(sym("V3=scala.collection.immutable.::[?]")), + Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))), + Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"), + sym("V1=null")))))))), And(Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=2")), Not(sym("V3=Nil"))))))), + + And(And(Or(Not(sym("V1=scala.collection.immutable.::[?]")), + Not(sym("V1=null"))), And(Or(sym("V3=scala.collection.immutable.::[?]"), + Or(sym("V3=Nil"), sym("V3=null"))), And(Or(Not(sym("V3=Nil")), + Not(sym("V3=null"))), + And(Or(Not(sym("V3=scala.collection.immutable.::[?]")), + Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))), + Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"), + sym("V1=null")))))))), And(sym("V1=Nil"), And(Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + Not(sym("V3=Nil"))))), And(Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=2")), Not(sym("V3=Nil"))))))))), + + And(And(Or(Or(False, + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(False, + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))), And(Or(Or(False, + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + Not(sym("V3=Nil"))))), And(Or(Or(False, + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + Or(Not(sym("V2=7")), Not(sym("V3=Nil"))))), Not(sym("V1=Nil"))))), + + And(And(Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + 
Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))), Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + Not(sym("V3=Nil")))))), + + And(And(Or(sym("V3=scala.collection.immutable.::[?]"), sym("V3=Nil")), + Or(sym("V1=scala.collection.immutable.::[?]"), sym("V1=Nil"))), + And(And(Or(Or(False, Not(sym("V1=scala.collection.immutable.::[?]"))), + Or(False, Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(False, + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))), And(Or(Or(False, + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + Not(sym("V3=Nil"))))), And(Or(Or(False, + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + Or(Not(sym("V2=7")), Not(sym("V3=Nil"))))), Not(sym("V1=Nil")))))), + + And(Not(sym("V1=null")), And(Or(sym("V2=4"), Or(sym("V2=5"), sym("V2=6"))), + sym("V3=Nil"))), + + And(Not(sym("V1=null")), And(sym("V2=7"), sym("V3=Nil"))), + + And(Not(sym("V1=null")), sym("V1=scala.collection.immutable.::[?]")), + + And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + + And(Not(sym("V2=5")), Not(sym("V2=6"))), + + And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))), + Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"), + sym("V1=null")))), + + And(Or(Not(sym("V1=scala.collection.immutable.::[?]")), + Not(sym("V1=null"))), And(Or(sym("V3=scala.collection.immutable.::[?]"), + Or(sym("V3=Nil"), sym("V3=null"))), And(Or(Not(sym("V3=Nil")), + Not(sym("V3=null"))), + And(Or(Not(sym("V3=scala.collection.immutable.::[?]")), + Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))), + Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"), + sym("V1=null")))))))), + + And(Or(Not(sym("V3=Nil")), Not(sym("V3=null"))), + And(Or(Not(sym("V3=scala.collection.immutable.::[?]")), + Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))), + Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"), + sym("V1=null")))))), + + And(Or(Not(sym("V3=scala.collection.immutable.::[?]")), + Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))), + Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"), + sym("V1=null"))))), + + And(Or(Or(False, + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + Not(sym("V3=Nil"))))), And(Or(Or(False, + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + Or(Not(sym("V2=7")), Not(sym("V3=Nil"))))), Not(sym("V1=Nil")))), + + And(Or(Or(False, Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(False, + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))), + + And(Or(Or(False, + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + Or(Not(sym("V2=7")), Not(sym("V3=Nil"))))), Not(sym("V1=Nil"))), + + And(Or(Or(sym("V1=null"), Not(sym("V1=scala.collection.immutable.::[?]"))), + Or(sym("V1=null"), Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), + Not(sym("V2=6")))), Not(sym("V3=Nil"))))), And(Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=2")), 
Not(sym("V3=Nil"))))))), + + And(Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))), + + And(Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=7")), Not(sym("V3=Nil"))))), + And(And(Or(Not(sym("V1=scala.collection.immutable.::[?]")), + Not(sym("V1=null"))), And(Or(sym("V3=scala.collection.immutable.::[?]"), + Or(sym("V3=Nil"), sym("V3=null"))), And(Or(Not(sym("V3=Nil")), + Not(sym("V3=null"))), + And(Or(Not(sym("V3=scala.collection.immutable.::[?]")), + Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))), + Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"), + sym("V1=null")))))))), And(sym("V1=Nil"), And(Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + Not(sym("V3=Nil"))))), And(Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))))))), + + And(Or(sym("V2=4"), Or(sym("V2=5"), sym("V2=6"))), sym("V3=Nil")), + + And(Or(sym("V3=scala.collection.immutable.::[?]"), Or(sym("V3=Nil"), + sym("V3=null"))), And(Or(Not(sym("V3=Nil")), Not(sym("V3=null"))), + And(Or(Not(sym("V3=scala.collection.immutable.::[?]")), + Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))), + Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"), + sym("V1=null"))))))), + + And(Or(sym("V3=scala.collection.immutable.::[?]"), + sym("V3=Nil")), Or(sym("V1=scala.collection.immutable.::[?]"), + sym("V1=Nil"))), + + And(sym("V1=Nil"), And(Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + Not(sym("V3=Nil"))))), And(Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))))), + + And(sym("V2=7"), sym("V3=Nil")), + + False, + + Not(sym("V1=Nil")), + + Or(And(Not(sym("V2=4")), + And(Not(sym("V2=5")), Not(sym("V2=6")))), Not(sym("V3=Nil"))), + + Or(False, Not(sym("V1=scala.collection.immutable.::[?]"))), + + Or(False, + Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + Not(sym("V3=Nil")))), + + Or(False, Or(Not(sym("V2=1")), Not(sym("V3=Nil")))), + + Or(Not(sym("V1=Nil")), Not(sym("V1=null"))), + + Or(Not(sym("V3=scala.collection.immutable.::[?]")), Not(sym("V3=null"))), + + Or(Or(False, Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + Not(sym("V3=Nil"))))), + + Or(Or(False, + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False, + Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), + + Or(Or(sym("V1=null"), + Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + Not(sym("V3=Nil"))))), + + Or(Or(sym("V1=null"), + 
Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"), + Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), + + Or(sym("V1=null"), Not(sym("V1=scala.collection.immutable.::[?]"))), + + Or(sym("V1=null"), + Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))), + Not(sym("V3=Nil")))), + + Or(sym("V1=null"), Or(Not(sym("V2=1")), Not(sym("V3=Nil")))), + + Or(sym("V1=scala.collection.immutable.::[?]"), + Or(sym("V1=Nil"), sym("V1=null"))), + + Or(sym("V1=scala.collection.immutable.::[?]"), sym("V1=Nil")), + + Or(sym("V2=4"), Or(sym("V2=5"), sym("V2=6"))), + + sym("V3=scala.collection.immutable.::[?]") + ) + + formulas foreach { + f => + // build CNF + val tseitinCnf = propToSolvable(f) + val expansionCnf = eqFreePropToSolvableViaDistribution(f) + + // ALL-SAT + val tseitinSolutions = findAllModelsFor(tseitinCnf) + val expansionSolutins = findAllModelsFor(expansionCnf) + + // expand unassigned variables + // (otherwise solutions can not be compared) + val tseitinNoUnassigned = tseitinSolutions.flatMap(expandUnassigned).sorted + val expansionNoUnassigned = expansionSolutins.flatMap(expandUnassigned).sorted + assertEquals(tseitinNoUnassigned, expansionNoUnassigned) + } + } +} + + diff --git a/test/junit/scala/tools/nsc/util/ClassPathImplComparator.scala b/test/junit/scala/tools/nsc/util/ClassPathImplComparator.scala new file mode 100644 index 000000000000..f2926e3e176d --- /dev/null +++ b/test/junit/scala/tools/nsc/util/ClassPathImplComparator.scala @@ -0,0 +1,143 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package scala.tools.nsc.util + +import scala.reflect.io.AbstractFile +import scala.tools.nsc.Settings +import scala.tools.nsc.settings.ClassPathRepresentationType +import scala.tools.util.PathResolverFactory + +/** + * Simple application to compare efficiency of the recursive and the flat classpath representations + */ +object ClassPathImplComparator { + + private class TestSettings extends Settings { + val checkClasses = PathSetting("-checkClasses", "Specify names of classes which should be found separated with ;", "") + val requiredIterations = IntSetting("-requiredIterations", + "Repeat tests specified number of times (to check e.g. impact of caches)", 1, Some((1, Int.MaxValue)), (_: String) => None) + val cpCreationRepetitions = IntSetting("-cpCreationRepetitions", + "Repeat tests specified number of times (to check e.g. impact of caches)", 1, Some((1, Int.MaxValue)), (_: String) => None) + val cpLookupRepetitions = IntSetting("-cpLookupRepetitions", + "Repeat tests specified number of times (to check e.g. 
impact of caches)", 1, Some((1, Int.MaxValue)), (_: String) => None) + } + + private class DurationStats(name: String) { + private var sum = 0L + private var iterations = 0 + + def noteMeasuredTime(millis: Long): Unit = { + sum += millis + iterations += 1 + } + + def printResults(): Unit = { + val avg = if (iterations == 0) 0 else sum.toDouble / iterations + println(s"$name - total duration: $sum ms; iterations: $iterations; avg: $avg ms") + } + } + + private lazy val defaultClassesToFind = List( + "scala.collection.immutable.List", + "scala.Option", + "scala.Int", + "scala.collection.immutable.Vector", + "scala.util.hashing.MurmurHash3" + ) + + private val oldCpCreationStats = new DurationStats("Old classpath - create") + private val oldCpSearchingStats = new DurationStats("Old classpath - search") + + private val flatCpCreationStats = new DurationStats("Flat classpath - create") + private val flatCpSearchingStats = new DurationStats("Flat classpath - search") + + def main(args: Array[String]): Unit = { + + if (args contains "-help") + usage() + else { + val oldCpSettings = loadSettings(args.toList, ClassPathRepresentationType.Recursive) + val flatCpSettings = loadSettings(args.toList, ClassPathRepresentationType.Flat) + + val classesToCheck = oldCpSettings.checkClasses.value + val classesToFind = + if (classesToCheck.isEmpty) defaultClassesToFind + else classesToCheck.split(";").toList + + def doTest(classPath: => ClassFileLookup[AbstractFile], cpCreationStats: DurationStats, cpSearchingStats: DurationStats, + cpCreationRepetitions: Int, cpLookupRepetitions: Int)= { + + def createClassPaths() = (1 to cpCreationRepetitions).map(_ => classPath).last + def testClassLookup(cp: ClassFileLookup[AbstractFile]): Boolean = (1 to cpCreationRepetitions).foldLeft(true) { + case (a, _) => a && checkExistenceOfClasses(classesToFind)(cp) + } + + val cp = withMeasuredTime("Creating classpath", createClassPaths(), cpCreationStats) + val result = withMeasuredTime("Searching for specified classes", testClassLookup(cp), cpSearchingStats) + println(s"The end of the test case. All expected classes found = $result \n") + } + + (1 to oldCpSettings.requiredIterations.value) foreach { iteration => + if (oldCpSettings.requiredIterations.value > 1) + println(s"Iteration no $iteration") + + println("Recursive (old) classpath representation:") + doTest(PathResolverFactory.create(oldCpSettings).result, oldCpCreationStats, oldCpSearchingStats, + oldCpSettings.cpCreationRepetitions.value, oldCpSettings.cpLookupRepetitions.value) + + println("Flat classpath representation:") + doTest(PathResolverFactory.create(flatCpSettings).result, flatCpCreationStats, flatCpSearchingStats, + flatCpSettings.cpCreationRepetitions.value, flatCpSettings.cpLookupRepetitions.value) + } + + if (oldCpSettings.requiredIterations.value > 1) { + println("\nOld classpath - summary") + oldCpCreationStats.printResults() + oldCpSearchingStats.printResults() + + println("\nFlat classpath - summary") + flatCpCreationStats.printResults() + flatCpSearchingStats.printResults() + } + } + } + + /** + * Prints usage information + */ + private def usage(): Unit = + println("""Use classpath and sourcepath options like in the case of e.g. 'scala' command. + | There are also two additional options: + | -checkClasses Specify names of classes which should be found + | -requiredIterations Repeat tests specified count of times (to check e.g. impact of caches) + | Note: Option -YclasspathImpl will be set automatically for each case. 
+ """.stripMargin.trim) + + private def loadSettings(args: List[String], implType: String) = { + val settings = new TestSettings() + settings.processArguments(args, processAll = true) + settings.YclasspathImpl.value = implType + if (settings.classpath.isDefault) + settings.classpath.value = sys.props("java.class.path") + settings + } + + private def withMeasuredTime[T](operationName: String, f: => T, durationStats: DurationStats): T = { + val startTime = System.currentTimeMillis() + val res = f + val elapsed = System.currentTimeMillis() - startTime + durationStats.noteMeasuredTime(elapsed) + println(s"$operationName - elapsed $elapsed ms") + res + } + + private def checkExistenceOfClasses(classesToCheck: Seq[String])(classPath: ClassFileLookup[AbstractFile]): Boolean = + classesToCheck.foldLeft(true) { + case (res, classToCheck) => + val found = classPath.findClass(classToCheck).isDefined + if (!found) + println(s"Class $classToCheck not found") // of course in this case the measured time will be affected by IO operation + found + } +} diff --git a/test/junit/scala/tools/testing/AssertThrowsTest.scala b/test/junit/scala/tools/testing/AssertThrowsTest.scala index a70519e63c68..d91e450bac99 100644 --- a/test/junit/scala/tools/testing/AssertThrowsTest.scala +++ b/test/junit/scala/tools/testing/AssertThrowsTest.scala @@ -31,4 +31,13 @@ class AssertThrowsTest { } }) -} \ No newline at end of file + @Test + def errorIfNoThrow: Unit = { + try { + assertThrows[Foo] { () } + } catch { + case e: AssertionError => return + } + assert(false, "assertThrows should error if the tested expression does not throw anything") + } +} diff --git a/test/junit/scala/tools/testing/AssertUtil.scala b/test/junit/scala/tools/testing/AssertUtil.scala index 9efac64a9791..83a637783f65 100644 --- a/test/junit/scala/tools/testing/AssertUtil.scala +++ b/test/junit/scala/tools/testing/AssertUtil.scala @@ -1,19 +1,44 @@ package scala.tools package testing +import org.junit.Assert +import Assert.fail +import scala.runtime.ScalaRunTime.stringOf +import scala.collection.{ GenIterable, IterableLike } + /** This module contains additional higher-level assert statements * that are ultimately based on junit.Assert primitives. */ object AssertUtil { - /** Check if exception T (or a subclass) was thrown during evaluation of f. - * If any other exception or throwable is found instead it will be re-thrown. + /** + * Check if throwable T (or a subclass) was thrown during evaluation of f, and that its message + * satisfies the `checkMessage` predicate. + * If any other exception will be re-thrown. */ - def assertThrows[T <: Exception](f: => Any)(implicit manifest: Manifest[T]): Unit = + def assertThrows[T <: Throwable](f: => Any, + checkMessage: String => Boolean = s => true) + (implicit manifest: Manifest[T]): Unit = { try f catch { - case e: Exception => - val clazz = manifest.erasure.asInstanceOf[Class[T]] + case e: Throwable if checkMessage(e.getMessage) => + val clazz = manifest.runtimeClass if (!clazz.isAssignableFrom(e.getClass)) throw e + else return } -} \ No newline at end of file + fail("Expression did not throw!") + } + + /** JUnit-style assertion for `IterableLike.sameElements`. + */ + def assertSameElements[A, B >: A](expected: IterableLike[A, _], actual: GenIterable[B], message: String = ""): Unit = + if (!(expected sameElements actual)) + fail( + f"${ if (message.nonEmpty) s"$message " else "" }expected:<${ stringOf(expected) }> but was:<${ stringOf(actual) }>" + ) + + /** Convenient for testing iterators. 
+ */ + def assertSameElements[A, B >: A](expected: IterableLike[A, _], actual: Iterator[B]): Unit = + assertSameElements(expected, actual.toList, "") +} diff --git a/test/junit/scala/util/t7265.scala b/test/junit/scala/util/SpecVersionTest.scala similarity index 96% rename from test/junit/scala/util/t7265.scala rename to test/junit/scala/util/SpecVersionTest.scala index 71f085d21dab..e3e7a978f27d 100644 --- a/test/junit/scala/util/t7265.scala +++ b/test/junit/scala/util/SpecVersionTest.scala @@ -1,14 +1,11 @@ package scala.util -package test import org.junit.Assert._ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import scala.util.PropertiesTrait - /** The java version property uses the spec version * and must work for all "major.minor" and fail otherwise. */ @@ -24,6 +21,7 @@ class SpecVersionTest { override lazy val scalaProps = new java.util.Properties } + // SI-7265 @Test def comparesCorrectly(): Unit = { assert(sut isJavaAtLeast "1.5") diff --git a/test/junit/scala/util/matching/regextract-char.scala b/test/junit/scala/util/matching/CharRegexTest.scala similarity index 100% rename from test/junit/scala/util/matching/regextract-char.scala rename to test/junit/scala/util/matching/CharRegexTest.scala diff --git a/test/junit/scala/util/matching/RegexTest.scala b/test/junit/scala/util/matching/RegexTest.scala index d25842cc577b..5b13397d6a69 100644 --- a/test/junit/scala/util/matching/RegexTest.scala +++ b/test/junit/scala/util/matching/RegexTest.scala @@ -27,4 +27,21 @@ class RegexTest { assertEquals("1", x) assertEquals("1", y) } + + @Test def t8787nullMatch() = { + val r = """\d+""".r + val s: String = null + val x = s match { case r() => 1 ; case _ => 2 } + assertEquals(2, x) + } + + @Test def t8787nullMatcher() = { + val r = """(\d+):(\d+)""".r + val s = "1:2 3:4 5:6" + val z = ((r findAllMatchIn s).toList :+ null) flatMap { + case r(x, y) => Some((x.toInt, y.toInt)) + case _ => None + } + assertEquals(List((1,2),(3,4),(5,6)), z) + } } diff --git a/test/osgi/src/BasicLibrary.scala b/test/osgi/src/BasicLibrary.scala index 6618f021026e..ee8b7634ff7c 100644 --- a/test/osgi/src/BasicLibrary.scala +++ b/test/osgi/src/BasicLibrary.scala @@ -7,19 +7,16 @@ import org.ops4j.pax.exam.CoreOptions._ import org.junit.Test import org.junit.runner.RunWith import org.ops4j.pax.exam -import org.ops4j.pax.exam.junit.{ - Configuration, - ExamReactorStrategy, - JUnit4TestRunner -} -import org.ops4j.pax.exam.spi.reactors.AllConfinedStagedReactorFactory -import org.ops4j.pax.swissbox.framework.ServiceLookup +import org.ops4j.pax.exam.Configuration +import org.ops4j.pax.exam.junit.PaxExam +import org.ops4j.pax.exam.spi.reactors.{ ExamReactorStrategy, PerMethod } +import org.ops4j.pax.swissbox.tracker.ServiceLookup import org.osgi.framework.BundleContext -@RunWith(classOf[JUnit4TestRunner]) -@ExamReactorStrategy(Array(classOf[AllConfinedStagedReactorFactory])) +@RunWith(classOf[PaxExam]) +@ExamReactorStrategy(Array(classOf[PerMethod])) class BasicLibraryTest extends ScalaOsgiHelper { @Configuration def config(): Array[exam.Option] = diff --git a/test/osgi/src/BasicReflection.scala b/test/osgi/src/BasicReflection.scala index d601f04f89d2..53ab7e5345f5 100644 --- a/test/osgi/src/BasicReflection.scala +++ b/test/osgi/src/BasicReflection.scala @@ -10,13 +10,10 @@ import org.ops4j.pax.exam.CoreOptions._ import org.junit.Test import org.junit.runner.RunWith import org.ops4j.pax.exam -import org.ops4j.pax.exam.junit.{ - Configuration, - ExamReactorStrategy, - JUnit4TestRunner 
-} -import org.ops4j.pax.exam.spi.reactors.AllConfinedStagedReactorFactory -import org.ops4j.pax.swissbox.framework.ServiceLookup +import org.ops4j.pax.exam.Configuration +import org.ops4j.pax.exam.junit.PaxExam +import org.ops4j.pax.exam.spi.reactors.{ ExamReactorStrategy, PerMethod } +import org.ops4j.pax.swissbox.tracker.ServiceLookup import org.osgi.framework.BundleContext @@ -38,8 +35,8 @@ class C { object M -@RunWith(classOf[JUnit4TestRunner]) -@ExamReactorStrategy(Array(classOf[AllConfinedStagedReactorFactory])) +@RunWith(classOf[PaxExam]) +@ExamReactorStrategy(Array(classOf[PerMethod])) class BasicReflectionTest extends ScalaOsgiHelper { @Configuration diff --git a/test/osgi/src/BasicTest.scala b/test/osgi/src/BasicTest.scala index 109b7b911a06..5adf87ecc1fb 100644 --- a/test/osgi/src/BasicTest.scala +++ b/test/osgi/src/BasicTest.scala @@ -6,21 +6,18 @@ import org.ops4j.pax.exam.CoreOptions._ import org.junit.Test import org.junit.runner.RunWith import org.ops4j.pax.exam -import org.ops4j.pax.exam.junit.{ - Configuration, - ExamReactorStrategy, - JUnit4TestRunner -} -import org.ops4j.pax.exam.spi.reactors.AllConfinedStagedReactorFactory -import org.ops4j.pax.swissbox.framework.ServiceLookup +import org.ops4j.pax.exam.Configuration +import org.ops4j.pax.exam.junit.PaxExam +import org.ops4j.pax.exam.spi.reactors.{ ExamReactorStrategy, PerMethod } +import org.ops4j.pax.swissbox.tracker.ServiceLookup import org.osgi.framework.BundleContext -@RunWith(classOf[JUnit4TestRunner]) -@ExamReactorStrategy(Array(classOf[AllConfinedStagedReactorFactory])) +@RunWith(classOf[PaxExam]) +@ExamReactorStrategy(Array(classOf[PerMethod])) class BasicTest extends ScalaOsgiHelper { @Configuration def config(): Array[exam.Option] = { diff --git a/test/osgi/src/ReflectionToolboxTest.scala b/test/osgi/src/ReflectionToolboxTest.scala index bb48078e95c1..a23de18d07b4 100644 --- a/test/osgi/src/ReflectionToolboxTest.scala +++ b/test/osgi/src/ReflectionToolboxTest.scala @@ -8,13 +8,10 @@ import org.ops4j.pax.exam.CoreOptions._ import org.junit.Test import org.junit.runner.RunWith import org.ops4j.pax.exam -import org.ops4j.pax.exam.junit.{ - Configuration, - ExamReactorStrategy, - JUnit4TestRunner -} -import org.ops4j.pax.exam.spi.reactors.AllConfinedStagedReactorFactory -import org.ops4j.pax.swissbox.framework.ServiceLookup +import org.ops4j.pax.exam.Configuration +import org.ops4j.pax.exam.junit.PaxExam +import org.ops4j.pax.exam.spi.reactors.{ ExamReactorStrategy, PerMethod } +import org.ops4j.pax.swissbox.tracker.ServiceLookup import org.osgi.framework.BundleContext @@ -22,8 +19,8 @@ class C { val f1 = 2 } -@RunWith(classOf[JUnit4TestRunner]) -@ExamReactorStrategy(Array(classOf[AllConfinedStagedReactorFactory])) +@RunWith(classOf[PaxExam]) +@ExamReactorStrategy(Array(classOf[PerMethod])) class ReflectionToolBoxTest extends ScalaOsgiHelper { @Configuration diff --git a/test/osgi/src/ScalaOsgiHelper.scala b/test/osgi/src/ScalaOsgiHelper.scala index 084afe864340..7ba8883bb8cf 100644 --- a/test/osgi/src/ScalaOsgiHelper.scala +++ b/test/osgi/src/ScalaOsgiHelper.scala @@ -20,19 +20,19 @@ trait ScalaOsgiHelper { def standardOptions: Array[exam.Option] = { val bundles = (allBundleFiles map makeBundle) - bundles ++ Array[exam.Option](felix(), equinox(), junitBundles()) + bundles ++ Array[exam.Option](junitBundles()) // to change the local repo used (for some operations, but not all -- which is why I didn't bother): // systemProperty("org.ops4j.pax.url.mvn.localRepository").value(sys.props("maven.repo.local"))) } def 
justReflectionOptions: Array[exam.Option] = { val bundles = filteredBundleFiles("scala-library", "scala-reflect") - bundles ++ Array[exam.Option](felix(), equinox(), junitBundles()) + bundles ++ Array[exam.Option](junitBundles()) } def justCoreLibraryOptions: Array[exam.Option] = { val bundles = filteredBundleFiles("scala-library") - bundles ++ Array[exam.Option](felix(), equinox(), junitBundles()) + bundles ++ Array[exam.Option](junitBundles()) } } diff --git a/test/pending/jvm/cf-attributes.scala b/test/pending/jvm/cf-attributes.scala index f4964b63b103..2d08f22d8b04 100644 --- a/test/pending/jvm/cf-attributes.scala +++ b/test/pending/jvm/cf-attributes.scala @@ -62,7 +62,7 @@ object anonymousClasses { //InnerClass: // public final #_; //class anonymousClasses$$anon$1 of class anonymousClasses$ val x = new Foo() { - override def foo() { println("foo (overriden)"); } + override def foo() { println("foo (overridden)"); } def dummy = 0 } } diff --git a/test/pending/jvm/timeout.scala b/test/pending/jvm/timeout.scala index 22b3647dce70..8f29f8ddbe26 100644 --- a/test/pending/jvm/timeout.scala +++ b/test/pending/jvm/timeout.scala @@ -1,4 +1,4 @@ -// Test is in pending because although it suceeds locally, +// Test is in pending because although it succeeds locally, // it takes too long on the machine which runs nightly tests. // // [partest] EXPECTED: 100 < x < 900 diff --git a/test/pending/pos/t3439.scala b/test/pending/pos/t3439.scala deleted file mode 100644 index 425f1aeeb530..000000000000 --- a/test/pending/pos/t3439.scala +++ /dev/null @@ -1,2 +0,0 @@ -abstract class ParametricMessage[M: Manifest](msg: M) { def message = msg } -case class ParametricMessage1[M: Manifest](msg: M, p1: Class[_]) extends ParametricMessage(msg) diff --git a/test/pending/pos/t8363b.scala b/test/pending/pos/t8363b.scala new file mode 100644 index 000000000000..393e2a0237d2 --- /dev/null +++ b/test/pending/pos/t8363b.scala @@ -0,0 +1,7 @@ +class C(a: Any) +class Test { + def foo: Any = { + def form = 0 + class C1 extends C({def x = form; ()}) + } +} diff --git a/test/scaladoc/resources/SI-3314-diagrams.scala b/test/scaladoc/resources/SI-3314-diagrams.scala index b80a97b522a6..7d2cc9447cd6 100644 --- a/test/scaladoc/resources/SI-3314-diagrams.scala +++ b/test/scaladoc/resources/SI-3314-diagrams.scala @@ -7,7 +7,7 @@ package scala.test.scaladoc { * / / / | \ \ \ * Mon Tue Wed Thu Fri Sat Sun * - * - each member should receive an inhertiance diagram: + * - each member should receive an inheritance diagram: * Value * | * | diff --git a/test/scaladoc/resources/SI-8144.scala b/test/scaladoc/resources/SI-8144.scala new file mode 100644 index 000000000000..7b225acb3266 --- /dev/null +++ b/test/scaladoc/resources/SI-8144.scala @@ -0,0 +1,17 @@ +package some.pack + +class SomeType(arg: String) { + + type TypeAlias = String + + def >@<(): TypeAlias = "Tricky method name" + + def >#<(): Int = 1 + +} + +object SomeType { + + val someVal = "Some arbitrary companion object value" + +} diff --git a/test/scaladoc/resources/SI-8514.scala b/test/scaladoc/resources/SI-8514.scala new file mode 100644 index 000000000000..4c5476604bc9 --- /dev/null +++ b/test/scaladoc/resources/SI-8514.scala @@ -0,0 +1,10 @@ +package a { + class DeveloperApi extends scala.annotation.StaticAnnotation + + /** Some doc here */ + @DeveloperApi + class A + + @DeveloperApi + class B +} diff --git a/test/scaladoc/resources/code-indent.scala b/test/scaladoc/resources/code-indent.scala index 88946ffc7f9c..2eee3352b451 100644 --- 
a/test/scaladoc/resources/code-indent.scala +++ b/test/scaladoc/resources/code-indent.scala @@ -20,6 +20,12 @@ * an alternative * the e l s e branch * }}} + * {{{ + * Trait example { + * Val x = a + * Val y = b + * } + * }}} * NB: Trailing spaces are necessary for this test! * {{{ * l1 diff --git a/test/scaladoc/resources/implicit-inheritance-override.scala b/test/scaladoc/resources/implicit-inheritance-override.scala index 5d692f59adae..b59d2f410d8e 100644 --- a/test/scaladoc/resources/implicit-inheritance-override.scala +++ b/test/scaladoc/resources/implicit-inheritance-override.scala @@ -35,7 +35,7 @@ class DerivedC extends Base { class DerivedD extends Base { /** - * @tparam T The overriden type parameter comment + * @tparam T The overridden type parameter comment */ override def function[T](arg1: T, arg2: String): Double = 3.0d } \ No newline at end of file diff --git a/test/scaladoc/resources/implicits-ambiguating-res.scala b/test/scaladoc/resources/implicits-ambiguating-res.scala index 6ed51366cb2c..90e43ac2ed2a 100644 --- a/test/scaladoc/resources/implicits-ambiguating-res.scala +++ b/test/scaladoc/resources/implicits-ambiguating-res.scala @@ -1,5 +1,5 @@ /** - * Test scaladoc implicits distinguishing -- supress all members by implicit conversion that are shadowed by the + * Test scaladoc implicits distinguishing -- suppress all members by implicit conversion that are shadowed by the * class' own members * * {{{ diff --git a/test/scaladoc/resources/implicits-shadowing-res.scala b/test/scaladoc/resources/implicits-shadowing-res.scala index c5e9493bf3ff..b7f3ceb895c3 100644 --- a/test/scaladoc/resources/implicits-shadowing-res.scala +++ b/test/scaladoc/resources/implicits-shadowing-res.scala @@ -1,5 +1,5 @@ /** - * Test scaladoc implicits distinguishing -- supress all members by implicit conversion that are shadowed by the + * Test scaladoc implicits distinguishing -- suppress all members by implicit conversion that are shadowed by the * class' own members * * {{{ diff --git a/test/scaladoc/run/SI-8479.check b/test/scaladoc/run/SI-8479.check new file mode 100644 index 000000000000..619c56180bb9 --- /dev/null +++ b/test/scaladoc/run/SI-8479.check @@ -0,0 +1 @@ +Done. diff --git a/test/scaladoc/run/SI-8479.scala b/test/scaladoc/run/SI-8479.scala new file mode 100755 index 000000000000..3c913950257c --- /dev/null +++ b/test/scaladoc/run/SI-8479.scala @@ -0,0 +1,32 @@ +import scala.tools.nsc.doc.model._ +import scala.tools.nsc.doc.base._ +import scala.tools.nsc.doc.base.comment._ +import scala.tools.partest.ScaladocModelTest +import java.net.{URI, URL} +import java.io.File + +object Test extends ScaladocModelTest { + + override def code = + """ + |object Test { + | val x = new SparkContext(master = "") + |} + | + |class SparkContext(config: Any) { + | + | /** Scaladoc comment */ + | def this( + | master: String, + | appName: String = "") = this(null) + |} + | + | + """.stripMargin + + override def scaladocSettings = "" + + def testModel(rootPackage: Package) { + // it didn't crash + } +} diff --git a/test/scaladoc/run/t5730.check b/test/scaladoc/run/t5730.check new file mode 100644 index 000000000000..619c56180bb9 --- /dev/null +++ b/test/scaladoc/run/t5730.check @@ -0,0 +1 @@ +Done. 
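Aside, a minimal sketch: the ScaladocModelTest-based files in this patch (SI-8479.scala above and the t5730/t6626/t7905/t8113/t8314 tests that follow) share one partest shape, where code supplies the sources to document, scaladocSettings supplies extra scaladoc flags, and testModel asserts against the generated documentation model through the access._ helpers. Only hooks already used by those tests are assumed here; the package and class names (sample, Example) are hypothetical.

    import scala.tools.nsc.doc.model._
    import scala.tools.partest.ScaladocModelTest

    object Test extends ScaladocModelTest {
      // Sources that scaladoc is run over; "sample" / "Example" are made-up names.
      override def code = """
        package sample

        /** An example doc comment. */
        class Example
      """

      // Extra scaladoc flags, empty here.
      def scaladocSettings = ""

      // Assertions against the resulting documentation model.
      def testModel(rootPackage: Package) = {
        import access._   // _package / _class / _method lookup helpers
        val cls = rootPackage._package("sample")._class("Example")
        assert(cls.comment.isDefined, "expected a doc comment on Example")
      }
    }

As the tests above rely on, partest compiles the code string with scaladoc and hands the resulting root package to testModel.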
diff --git a/test/scaladoc/run/t5730.scala b/test/scaladoc/run/t5730.scala new file mode 100644 index 000000000000..cc4c2444b16e --- /dev/null +++ b/test/scaladoc/run/t5730.scala @@ -0,0 +1,36 @@ +import scala.tools.nsc.doc.base._ +import scala.tools.nsc.doc.model._ +import scala.tools.partest.ScaladocModelTest + +object Test extends ScaladocModelTest { + + override def code = """ + package scala.test.scaladoc.T5730 + + /** + * A link: + * + * [[scala.Option$ object Option]]. + */ + sealed abstract class A + + case object B extends A + + abstract final class C + """ + + def scaladocSettings = "" + + def testModel(rootPackage: Package) = { + // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s)) + import access._ + + val p = rootPackage._package("scala")._package("test")._package("scaladoc")._package("T5730") + + val a = p._class("A") + val c = p._class("C") + + assert(a.constructors.isEmpty, s"there should be no constructors, found: ${a.constructors}") + assert(c.constructors.isEmpty, s"there should be no constructors, found: ${c.constructors}") + } +} diff --git a/test/scaladoc/run/t6626.check b/test/scaladoc/run/t6626.check new file mode 100644 index 000000000000..de3a6c5c0bf9 --- /dev/null +++ b/test/scaladoc/run/t6626.check @@ -0,0 +1,7 @@ +newSource:10: warning: Could not find any member to link for "SomeUnknownException". + /** + ^ +newSource:10: warning: Could not find any member to link for "IOException". + /** + ^ +Done. diff --git a/test/scaladoc/run/t6626.scala b/test/scaladoc/run/t6626.scala new file mode 100644 index 000000000000..6c61c605d625 --- /dev/null +++ b/test/scaladoc/run/t6626.scala @@ -0,0 +1,42 @@ +import scala.tools.nsc.doc.base._ +import scala.tools.nsc.doc.base.comment._ +import scala.tools.nsc.doc.model._ +import scala.tools.partest.ScaladocModelTest + +object Test extends ScaladocModelTest { + + override def code = """ + +package org.foo + +class MyException extends Exception + +class MyOtherException extends Exception + +object Foo { + /** + * Test exception linking + * + * @throws org.foo.MyException linked with a fully-qualified name + * @throws MyOtherException linked with a relative name + * @throws SomeUnknownException not linked at all (but with some text) + * @throws IOException + */ + def test(): Unit = ??? +} + """ + + def scaladocSettings = "" + + def testModel(rootPackage: Package) = { + // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s)) + import access._ + + val a = rootPackage._package("org")._package("foo")._object("Foo")._method("test") + val throws = a.comment.get.throws + val allbodies = Body(throws.values.flatMap(_.blocks).toSeq) + + val links = countLinksInBody(allbodies, _.link.isInstanceOf[LinkToTpl[_]]) + assert(links == 2, links + " == 2 (links to MyException and MyOtherException)") + } +} diff --git a/test/scaladoc/run/t7905.check b/test/scaladoc/run/t7905.check new file mode 100644 index 000000000000..619c56180bb9 --- /dev/null +++ b/test/scaladoc/run/t7905.check @@ -0,0 +1 @@ +Done. 
diff --git a/test/scaladoc/run/t7905.scala b/test/scaladoc/run/t7905.scala new file mode 100644 index 000000000000..857072447036 --- /dev/null +++ b/test/scaladoc/run/t7905.scala @@ -0,0 +1,36 @@ +import scala.tools.nsc.doc.model._ +import scala.tools.partest.ScaladocModelTest + +object Test extends ScaladocModelTest { + override def code = """ + object A { + val foo = new B { + val bar = new C { + val baz: A.this.type = A.this + } + } + } + + trait B { + type E = bar.D + + val bar: C + } + + trait C { + trait D + } + + trait G { + type F = A.foo.E + + def m(f: F) = f match { + case _: A.foo.bar.D => // error here + } + } + """ + + def scaladocSettings = "" + + def testModel(root: Package) = () +} diff --git a/test/scaladoc/run/t8113.check b/test/scaladoc/run/t8113.check new file mode 100644 index 000000000000..619c56180bb9 --- /dev/null +++ b/test/scaladoc/run/t8113.check @@ -0,0 +1 @@ +Done. diff --git a/test/scaladoc/run/t8113.scala b/test/scaladoc/run/t8113.scala new file mode 100644 index 000000000000..f006213ef299 --- /dev/null +++ b/test/scaladoc/run/t8113.scala @@ -0,0 +1,36 @@ +import scala.tools.nsc.doc.base._ +import scala.tools.nsc.doc.base.comment._ +import scala.tools.nsc.doc.model._ +import scala.tools.partest.ScaladocModelTest + +object Test extends ScaladocModelTest { + + override def code = """ + /** + * Check out [[http://www.scala-lang.org + * this great website]]! + */ + class Test + """ + + def scaladocSettings = "" + + def testModel(rootPackage: Package) = { + import access._ + + val test = rootPackage._class("Test") + + // find Link + def find(body: Any): Option[Link] = body match { + case l: Link => Some(l) + case s: Seq[_] => s.toList.map(find(_)).flatten.headOption + case p: Product => p.productIterator.toList.map(find(_)).flatten.headOption + case _ => None + } + + val link = find(test.comment.get.body).collect { case Link(ta, Text(ti)) => (ta, ti) } + assert(link.isDefined) + val expected = ("http://www.scala-lang.org", "this great website") + link.foreach {l => assert(l == expected, s"$l != $expected")} + } +} diff --git a/test/scaladoc/run/t8314.check b/test/scaladoc/run/t8314.check new file mode 100644 index 000000000000..aa04c12c8ffd --- /dev/null +++ b/test/scaladoc/run/t8314.check @@ -0,0 +1,3 @@ +Body(List(Paragraph(Chain(List(Summary(Chain(List(Text(This should be ), Monospace(Text(monospaced)))))))))) + +Done. diff --git a/test/scaladoc/run/t8314.scala b/test/scaladoc/run/t8314.scala new file mode 100644 index 000000000000..7f6d6fdb0024 --- /dev/null +++ b/test/scaladoc/run/t8314.scala @@ -0,0 +1,16 @@ +import scala.tools.nsc.doc.model._ +import scala.tools.partest.ScaladocModelTest + +object Test extends ScaladocModelTest { + override def code = """ + /** This should be `monospaced` */ + class A + """ + + def scaladocSettings = "" + + def testModel(root: Package) = { + import access._ + root._class("A").comment foreach println + } +} diff --git a/test/scaladoc/run/t8407.check b/test/scaladoc/run/t8407.check new file mode 100644 index 000000000000..06c860b3ebeb --- /dev/null +++ b/test/scaladoc/run/t8407.check @@ -0,0 +1,4 @@ +newSource:4: warning: not found: type $NotFound + * @usecase def zipWithIndex: $NotFound + ^ +Done. 
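Aside, a hedged sketch: the t8314.check output above spells out the parsed comment AST, a Body wrapping Paragraph, Chain, Summary, Text and Monospace nodes. One way such an AST could be traversed to collect monospaced text is shown below; the object name is hypothetical, and only node types visible in these tests are assumed.

    import scala.tools.nsc.doc.base.comment._

    object MonospaceCollector {
      // Collect the text of every Monospace node reachable from an inline tree.
      def collect(inline: Inline): List[String] = inline match {
        case Monospace(Text(t)) => List(t)
        case Chain(items)       => items.toList.flatMap(collect)
        case Summary(in)        => collect(in)
        case _                  => Nil
      }

      // Walk the top-level blocks of a parsed comment Body (paragraphs only, for brevity).
      def fromBody(body: Body): List[String] =
        body.blocks.toList.flatMap {
          case Paragraph(in) => collect(in)
          case _             => Nil
        }
    }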
diff --git a/test/scaladoc/run/t8407.scala b/test/scaladoc/run/t8407.scala new file mode 100644 index 000000000000..2df9f4f1aca8 --- /dev/null +++ b/test/scaladoc/run/t8407.scala @@ -0,0 +1,20 @@ +import scala.tools.nsc.doc.model._ +import scala.tools.partest.ScaladocModelTest + +object Test extends ScaladocModelTest { + override def code = """ +class C { + /** + * @usecase def zipWithIndex: $NotFound + * + */ + def zipWithIndex: Int = ??? +} + """ + + def scaladocSettings = "" + + def testModel(root: Package) = { + // just testing that it doesn't error out. + } +} diff --git a/test/scaladoc/run/t8557.check b/test/scaladoc/run/t8557.check new file mode 100644 index 000000000000..619c56180bb9 --- /dev/null +++ b/test/scaladoc/run/t8557.check @@ -0,0 +1 @@ +Done. diff --git a/test/scaladoc/run/t8557.scala b/test/scaladoc/run/t8557.scala new file mode 100644 index 000000000000..451f004d7d43 --- /dev/null +++ b/test/scaladoc/run/t8557.scala @@ -0,0 +1,32 @@ +import scala.tools.nsc.doc.base._ +import scala.tools.nsc.doc.model._ +import scala.tools.partest.ScaladocModelTest + +object Test extends ScaladocModelTest { + + override def code = """ + package scala.test.scaladoc.T8857 + + /** + * A link: + * + * [[scala.Option$ object Option]]. + */ + class A + """ + + // a non-canonical path to scala-library.jar should still work + // this is a bit fragile (depends on the current directory being the root of the repo ; + // ant & partest seem to do that properly) + def scaladocSettings = "-doc-external-doc build/pack/bin/../lib/scala-library.jar#http://www.scala-lang.org/api/current/" + + def testModel(rootPackage: Package) = { + // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s)) + import access._ + + val a = rootPackage._package("scala")._package("test")._package("scaladoc")._package("T8857")._class("A") + + val links = countLinks(a.comment.get, _.link.isInstanceOf[LinkToExternal]) + assert(links == 1, links + " == 1 (the links to external in class A)") + } +} diff --git a/test/scaladoc/run/t8672.check b/test/scaladoc/run/t8672.check new file mode 100644 index 000000000000..d7194c73bf1a --- /dev/null +++ b/test/scaladoc/run/t8672.check @@ -0,0 +1,4 @@ +Some(Chain(List(Text(New in release 1.2.3.4, it works), Text(.)))) +Some(Text(Sentence no period)) +Some(Chain(List(Text(Sentence period at end), Text(.)))) +Done. diff --git a/test/scaladoc/run/t8672.scala b/test/scaladoc/run/t8672.scala new file mode 100644 index 000000000000..8a9b5086bdd6 --- /dev/null +++ b/test/scaladoc/run/t8672.scala @@ -0,0 +1,32 @@ +import scala.tools.nsc.doc.model._ +import scala.tools.partest.ScaladocModelTest + +object Test extends ScaladocModelTest { + override def code = """ + class C { + + /** + * New in release 1.2.3.4, it works. Next sentence. + * Next Line. 
+ */ + def method1 = 0 + + /** Sentence no period */ + def method2 = 0 + + /** Sentence period at end.*/ + def method3 = 0 + } + """ + + def scaladocSettings = "" + + def testModel(root: Package) = { + import access._ + val ms = List("method1", "method2", "method3") + for (m <- ms) { + val method = root._class("C")._method(m) + println(method.comment.get.body.summary) + } + } +} diff --git a/test/scaladoc/scalacheck/HtmlFactoryTest.scala b/test/scaladoc/scalacheck/HtmlFactoryTest.scala index 56328ea87535..6b593559910d 100644 --- a/test/scaladoc/scalacheck/HtmlFactoryTest.scala +++ b/test/scaladoc/scalacheck/HtmlFactoryTest.scala @@ -149,7 +149,6 @@ object Test extends Properties("HtmlFactory") { result } - def shortComments(root: scala.xml.Node) = XMLUtil.stripGroup(root).descendant.flatMap { case e: scala.xml.Elem => { @@ -417,7 +416,7 @@ object Test extends Properties("HtmlFactory") { checkText("SI_5054_q1.scala")( (None,"""def test(): Int""", true) //Disabled because the full signature is now displayed - //(None,"""def test(implicit lost: Int): Int""", false) + //(None, """def test(implicit lost: Int): Int""", false) ) property("SI-5054: Use cases should keep their flags - final should not be lost") = @@ -486,7 +485,7 @@ object Test extends Properties("HtmlFactory") { """, true), (Some("DerivedD"), """def function[T](arg1: T, arg2: String): Double - T The overriden type parameter comment + T The overridden type parameter comment arg1 The T term comment arg2 The string comment returns The return comment @@ -564,7 +563,7 @@ object Test extends Properties("HtmlFactory") { property("Comment inheritance: Correct explicit inheritance for override") = checkText("explicit-inheritance-override.scala")( (Some("InheritDocDerived"), - """def function[T](arg1: T, arg2: String): Double + """def function[T](arg1: T, arg2: String): Double Starting line Starting line The base comment. And another sentence... @@ -591,7 +590,7 @@ object Test extends Properties("HtmlFactory") { property("Comment inheritance: Correct explicit inheritance for usecase") = checkText("explicit-inheritance-usecase.scala")( (Some("UseCaseInheritDoc"), - """def function[T](arg1: T, arg2: String): Double + """def function[T](arg1: T, arg2: String): Double [use case] Starting line [use case] Starting line The base comment. And another sentence... @@ -660,6 +659,7 @@ object Test extends Properties("HtmlFactory") { s.contains("
<pre>two lines, one useful</pre>") && s.contains("<pre>line1\nline2\nline3\nline4</pre>") &&
s.contains("<pre>a ragged example\na (condition)\n  the t h e n branch\nan alternative\n  the e l s e branch</pre>") &&
+ s.contains("<pre>Trait example {\n  Val x = a\n  Val y = b\n}</pre>") &&
s.contains("<pre>l1\n\nl2\n\nl3\n\nl4\n\nl5</pre>") } case _ => false
@@ -684,7 +684,7 @@ object Test extends Properties("HtmlFactory") { oneAuthor match { case node: scala.xml.Node => { val s = node.toString
- s.contains("<h6>Author:</h6>")
+ s.contains("<h6>Author:</h6>") &&
s.contains("<p>The Only Author\n</p>") } case _ => false
@@ -697,8 +697,8 @@ object Test extends Properties("HtmlFactory") { twoAuthors match { case node: scala.xml.Node => { val s = node.toString
- s.contains("<h6>Authors:</h6>")
- s.contains("<p>The First Author\n</p>")
+ s.contains("<h6>Authors:</h6>") &&
+ s.contains("<p>The First Author</p>") &&
s.contains("<p>The Second Author\n</p>
        ") } case _ => false @@ -740,5 +740,83 @@ object Test extends Properties("HtmlFactory") { case node: scala.xml.Node => true case _ => false } + + property("SI-8514: No inconsistencies") = + checkText("SI-8514.scala")( + (Some("a/package"), + """class A extends AnyRef + Some doc here + Some doc here + Annotations @DeveloperApi() + """, true), + (Some("a/package"), + """class B extends AnyRef + Annotations @DeveloperApi() + """, true) + ) + } + + // SI-8144 + { + implicit class AttributesAwareNode(val node: NodeSeq) { + + def \@(attrName: String): String = + node \ ("@" + attrName) text + + def \@(attrName: String, attrValue: String): NodeSeq = + node filter { _ \ ("@" + attrName) exists (_.text == attrValue) } + } + + implicit class AssertionAwareNode(node: scala.xml.NodeSeq) { + + def assertTypeLink(expectedUrl: String): Boolean = { + val linkElement: NodeSeq = node \\ "div" \@ ("id", "definition") \\ "span" \@ ("class", "permalink") \ "a" + linkElement \@ "href" == expectedUrl && linkElement \@ "target" == "_top" + } + + def assertMemberLink(group: String)(memberName: String, expectedUrl: String): Boolean = { + val linkElement: NodeSeq = node \\ "div" \@ ("id", group) \\ "li" \@ ("name", memberName) \\ "span" \@ ("class", "permalink") \ "a" + linkElement \@ "href" == expectedUrl && linkElement \@ "target" == "_top" + } + + def assertValuesLink(memberName: String, expectedUrl: String): Boolean = { + val linkElement: NodeSeq = node \\ "div" \@ ("class", "values members") \\ "li" \@ ("name", memberName) \\ "span" \@ ("class", "permalink") \ "a" + linkElement \@ "href" == expectedUrl && linkElement \@ "target" == "_top" + } + + } + + val files = createTemplates("SI-8144.scala") + + def check(pagePath: String)(f: NodeSeq => org.scalacheck.Prop): org.scalacheck.Prop = + files(pagePath) match { + case node: scala.xml.Node => f(XMLUtil.stripGroup(node)) + case _ => false + } + + property("SI-8144: Members' permalink - package") = check("some/package.html") { node => + ("type link" |: node.assertTypeLink("../index.html#some.package")) && + ("member: some.pack" |: node.assertValuesLink("some.pack", "../index.html#some.package@pack")) + } + + property("SI-8144: Members' permalink - inner package") = check("some/pack/package.html") { node => + ("type link" |: node.assertTypeLink("../../index.html#some.pack.package")) && + ("member: SomeType (object)" |: node.assertValuesLink("some.pack.SomeType", "../../index.html#some.pack.package@SomeType")) && + ("member: SomeType (class)" |: node.assertMemberLink("types")("some.pack.SomeType", "../../index.html#some.pack.package@SomeTypeextendsAnyRef")) + } + + property("SI-8144: Members' permalink - companion object") = check("some/pack/SomeType$.html") { node => + ("type link" |: node.assertTypeLink("../../index.html#some.pack.SomeType$")) && + ("member: someVal" |: node.assertMemberLink("allMembers")("some.pack.SomeType#someVal", "../../index.html#some.pack.SomeType$@someVal:String")) + } + + property("SI-8144: Members' permalink - class") = check("some/pack/SomeType.html") { node => + ("type link" |: node.assertTypeLink("../../index.html#some.pack.SomeType")) && + ("constructor " |: node.assertMemberLink("constructors")("some.pack.SomeType#", "../../index.html#some.pack.SomeType@(arg:String):some.pack.SomeType")) && + ( "member: type TypeAlias" |: node.assertMemberLink("types")("some.pack.SomeType.TypeAlias", "../../index.html#some.pack.SomeType@TypeAlias=String")) && + ( "member: def >#<():Int " |: node.assertValuesLink("some.pack.SomeType#>#<", 
"../../index.html#some.pack.SomeType@>#<():Int")) && + ( "member: def >@<():TypeAlias " |: node.assertValuesLink("some.pack.SomeType#>@<", "../../index.html#some.pack.SomeType@>@<():SomeType.this.TypeAlias")) + } + } } diff --git a/test/script-tests/README b/test/script-tests/README index 3f5c2ce19c2e..7b3291c40757 100755 --- a/test/script-tests/README +++ b/test/script-tests/README @@ -5,4 +5,9 @@ putting self-contained script tests in here to run some way that doesn't depend on all the platform stars aligning all the time. Feel free to join me. --- extempore, Nov 21 2011 \ No newline at end of file +-- extempore, Nov 21 2011 + +But there's a problem that probably nobody would run such tests so they would become outdated quite quickly. +And therefore they wouldn't work (and even compile) after some time - like this one existing currently. + +-- mpociecha, Oct 9 2014 \ No newline at end of file diff --git a/tools/binary-repo-lib.sh b/tools/binary-repo-lib.sh index 654ba2154721..437c0a0c082c 100755 --- a/tools/binary-repo-lib.sh +++ b/tools/binary-repo-lib.sh @@ -2,15 +2,16 @@ # # Library to push and pull binary artifacts from a remote repository using CURL. - remote_urlget="http://repo.typesafe.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap" remote_urlpush="http://private-repo.typesafe.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap" libraryJar="$(pwd)/lib/scala-library.jar" desired_ext=".desired.sha1" push_jar="$(pwd)/tools/push.jar" + if [[ "$OSTYPE" == *Cygwin* || "$OSTYPE" == *cygwin* ]]; then push_jar="$(cygpath -m "$push_jar")"; fi # Cache dir has .sbt in it to line up with SBT build. -cache_dir="${HOME}/.sbt/cache/scala" +SCALA_BUILD_REPOS_HOME=${SCALA_BUILD_REPOS_HOME:=$HOME} +cache_dir="${SCALA_BUILD_REPOS_HOME}/.sbt/cache/scala" # Checks whether or not curl is installed and issues a warning on failure. checkCurl() { diff --git a/versions.properties b/versions.properties index 9042c4198b9f..cc2097cfbe5e 100644 --- a/versions.properties +++ b/versions.properties @@ -1,25 +1,34 @@ -#Fri, 28 Feb 2014 07:00:05 +0100 -starr.version=2.11.0-RC1 +#Tue, 20 May 2014 10:01:37 +0200 +# NOTE: this file determines the content of the scala-distribution +# via scala-dist-pom.xml and scala-library-all-pom.xml +# when adding new properties that influence a release, +# also add them to the update.versions mechanism in build.xml, +# which is used by scala-release-2.11.x in scala/jenkins-scripts +starr.version=2.11.5 starr.use.released=1 # These are the versions of the modules that go with this release. # These properties are used during PR validation and in dbuild builds. -scala.binary.version=2.11.0-RC1 -# external modules shipped with distribution: -scala-xml.version.number=1.0.0 -scala-parser-combinators.version.number=1.0.0 -scala-continuations-plugin.version.number=1.0.0 -scala-continuations-library.version.number=1.0.0 -scala-swing.version.number=1.0.0 +# e.g. 2.11.0-RC1, 2.11 +scala.binary.version=2.11 +# e.g. 
2.11.0-RC1, 2.11.0, 2.11.1-RC1, 2.11.1 +# this defines the dependency on scala-continuations-plugin in scala-dist's pom +scala.full.version=2.11.5 -# these ship with distribution (and scala-library-all depends on them) -akka-actor.version.number=2.3.0-RC4 +# external modules shipped with distribution, as specified by scala-library-all's pom +scala-xml.version.number=1.0.3 +scala-parser-combinators.version.number=1.0.3 +scala-continuations-plugin.version.number=1.0.2 +scala-continuations-library.version.number=1.0.2 +scala-swing.version.number=1.0.1 +akka-actor.version.number=2.3.4 actors-migration.version.number=1.1.0 +jline.version=2.12 # external modules, used internally (not shipped) -partest.version.number=1.0.0 -scalacheck.version.number=1.11.3 +partest.version.number=1.0.1 +scalacheck.version.number=1.11.4 # TODO: modularize the compiler #scala-compiler-doc.version.number=1.0.0-RC1
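Aside, a hedged sketch: versions.properties is a plain key=value file, so any build-side tooling can read the values above with java.util.Properties. The relative path and the printed keys below are illustrative choices, not something this patch prescribes.

    import java.io.FileInputStream
    import java.util.Properties

    object ReadVersions {
      def main(args: Array[String]): Unit = {
        // Load versions.properties from the current working directory (illustrative path).
        val props = new Properties()
        val in = new FileInputStream("versions.properties")
        try props.load(in) finally in.close()

        // Look up a few of the keys defined in this patch.
        println("starr.version = " + props.getProperty("starr.version"))
        println("scala.binary.version = " + props.getProperty("scala.binary.version"))
        println("scala-xml.version.number = " + props.getProperty("scala-xml.version.number"))
      }
    }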